author      Erwin Jansen <jansene@google.com>   2021-06-23 05:52:25 -0700
committer   Erwin Jansen <jansene@google.com>   2021-06-23 06:45:54 -0700
commit      16be34ae72cdb525c88c2b31b21b976f35fe36d8 (patch)
tree        6eacaffe4bebf8e00c290c1e1839e084b0c52e88
parent      97e54a7e73c7b24e464ef06ef3c3b3716f21bb15 (diff)
parent      49cb4599560d6005d5df0dadfca2db04b288f216 (diff)
download    webrtc-16be34ae72cdb525c88c2b31b21b976f35fe36d8.tar.gz
Merge upstream-master and enable ARM64
We bring in the latest WebRTC changes and turn on ARM. This adds a new third-party lib, crc32c, and includes a workaround for handling a dependency issue for ARM.

Bug: 191745658
Change-Id: Ic5be99911990ef14a5f733f19394032b20f85024
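Judging by the file list below (net/dcsctp/packet/crc32c.cc and the third_party/crc32c entries), the new crc32c dependency appears to back the SCTP packet checksums in the dcSCTP stack. As a rough illustration only, and not code from this commit, the sketch below shows upstream google/crc32c's public C++ interface, with the include path assumed to resolve against the vendored header third_party/crc32c/src/include/crc32c/crc32c.h:

    // Minimal usage sketch (assumption: the vendored library matches
    // upstream google/crc32c's public API, i.e. crc32c::Crc32c and
    // crc32c::Extend declared in "crc32c/crc32c.h").
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    #include "crc32c/crc32c.h"

    int main() {
      const std::vector<uint8_t> packet = {0x01, 0x02, 0x03, 0x04};

      // One-shot checksum over the whole buffer.
      uint32_t crc = crc32c::Crc32c(packet.data(), packet.size());

      // Equivalent incremental form: start from 0 and extend chunk by chunk.
      uint32_t incremental = crc32c::Extend(0, packet.data(), 2);
      incremental = crc32c::Extend(incremental, packet.data() + 2, 2);

      std::printf("crc32c=0x%08x incremental=0x%08x\n", crc, incremental);
      return crc == incremental ? 0 : 1;
    }

On ARM64 the library can use the CRC32 instructions (see crc32c_arm64.cc in the list below), which is presumably why it is pulled in together with the ARM enablement here.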
-rw-r--r--.gn27
-rw-r--r--.vpython2
-rw-r--r--AUTHORS1
-rw-r--r--BUILD.gn11
-rw-r--r--CMakeLists.txt32
-rw-r--r--DEPS159
-rw-r--r--README.md2
-rw-r--r--api/BUILD.gn23
-rw-r--r--api/DEPS11
-rw-r--r--api/OWNERS.webrtc13
-rw-r--r--api/README.md4
-rw-r--r--api/audio/echo_canceller3_config.cc8
-rw-r--r--api/audio/echo_canceller3_config.h7
-rw-r--r--api/audio/echo_canceller3_config_json.cc23
-rw-r--r--api/audio/echo_detector_creator.cc2
-rw-r--r--api/audio_codecs/audio_decoder_factory_template.h5
-rw-r--r--api/audio_codecs/audio_encoder_factory_template.h5
-rw-r--r--api/audio_codecs/test/audio_decoder_factory_template_unittest.cc2
-rw-r--r--api/audio_codecs/test/audio_encoder_factory_template_unittest.cc2
-rw-r--r--api/candidate.cc19
-rw-r--r--api/data_channel_interface.h2
-rw-r--r--api/ice_transport_factory.cc4
-rw-r--r--api/neteq/neteq.h6
-rw-r--r--api/peer_connection_interface.cc20
-rw-r--r--api/peer_connection_interface.h38
-rw-r--r--api/proxy.cc12
-rw-r--r--api/ref_counted_base.h56
-rw-r--r--api/rtc_event_log/rtc_event.cc2
-rw-r--r--api/rtp_packet_info.cc31
-rw-r--r--api/rtp_packet_info.h39
-rw-r--r--api/rtp_packet_info_unittest.cc51
-rw-r--r--api/rtp_packet_infos_unittest.cc18
-rw-r--r--api/rtp_parameters.cc134
-rw-r--r--api/rtp_parameters.h42
-rw-r--r--api/rtp_parameters_unittest.cc229
-rw-r--r--api/rtp_receiver_interface.h29
-rw-r--r--api/rtp_sender_interface.h28
-rw-r--r--api/sequence_checker_unittest.cc23
-rw-r--r--api/stats/rtc_stats.h17
-rw-r--r--api/stats/rtc_stats_collector_callback.h2
-rw-r--r--api/stats/rtc_stats_report.h13
-rw-r--r--api/stats/rtcstats_objects.h5
-rw-r--r--api/test/create_time_controller.cc9
-rw-r--r--api/test/dummy_peer_connection.h4
-rw-r--r--api/test/mock_peerconnectioninterface.h4
-rw-r--r--api/transport/data_channel_transport_interface.h8
-rw-r--r--api/transport/network_types.cc6
-rw-r--r--api/transport/network_types.h2
-rw-r--r--api/uma_metrics.h11
-rw-r--r--api/video/encoded_image.cc4
-rw-r--r--api/video/i010_buffer.cc4
-rw-r--r--api/video/i420_buffer.cc6
-rw-r--r--api/video/nv12_buffer.cc8
-rw-r--r--api/video/video_timing.h2
-rw-r--r--api/video_codecs/BUILD.gn2
-rw-r--r--api/video_codecs/builtin_video_encoder_factory.cc20
-rw-r--r--api/video_codecs/sdp_video_format.cc64
-rw-r--r--api/video_codecs/sdp_video_format.h8
-rw-r--r--api/video_codecs/test/BUILD.gn1
-rw-r--r--api/video_codecs/test/sdp_video_format_unittest.cc74
-rw-r--r--api/video_codecs/video_decoder_factory.cc23
-rw-r--r--api/video_codecs/video_decoder_factory.h34
-rw-r--r--api/video_codecs/video_encoder.cc11
-rw-r--r--api/video_codecs/video_encoder_factory.h26
-rw-r--r--api/video_track_source_proxy_factory.h28
-rw-r--r--api/voip/test/voip_engine_factory_unittest.cc10
-rw-r--r--audio/BUILD.gn2
-rw-r--r--audio/audio_receive_stream.cc178
-rw-r--r--audio/audio_receive_stream.h66
-rw-r--r--audio/audio_receive_stream_unittest.cc68
-rw-r--r--audio/audio_send_stream.cc23
-rw-r--r--audio/audio_send_stream.h8
-rw-r--r--audio/audio_send_stream_unittest.cc11
-rw-r--r--audio/audio_state.cc2
-rw-r--r--audio/audio_state_unittest.cc15
-rw-r--r--audio/channel_receive.cc246
-rw-r--r--audio/channel_receive.h6
-rw-r--r--audio/channel_receive_frame_transformer_delegate.cc2
-rw-r--r--audio/channel_receive_frame_transformer_delegate.h4
-rw-r--r--audio/channel_receive_frame_transformer_delegate_unittest.cc16
-rw-r--r--audio/channel_send.cc2
-rw-r--r--audio/channel_send_frame_transformer_delegate_unittest.cc16
-rw-r--r--audio/mock_voe_channel_proxy.h8
-rw-r--r--audio/voip/test/audio_channel_unittest.cc2
-rw-r--r--audio/voip/test/voip_core_unittest.cc2
-rw-r--r--audio/voip/voip_core.cc2
-rw-r--r--build_overrides/build.gni6
-rw-r--r--call/BUILD.gn37
-rw-r--r--call/adaptation/broadcast_resource_listener.cc4
-rw-r--r--call/adaptation/resource_adaptation_processor.cc2
-rw-r--r--call/adaptation/test/fake_resource.cc2
-rw-r--r--call/adaptation/video_stream_adapter.cc28
-rw-r--r--call/audio_receive_stream.h48
-rw-r--r--call/call.cc854
-rw-r--r--call/call.h15
-rw-r--r--call/call_config.cc13
-rw-r--r--call/call_config.h6
-rw-r--r--call/call_factory.cc22
-rw-r--r--call/call_perf_tests.cc56
-rw-r--r--call/call_unittest.cc26
-rw-r--r--call/degraded_call.cc10
-rw-r--r--call/degraded_call.h5
-rw-r--r--call/flexfec_receive_stream.h19
-rw-r--r--call/flexfec_receive_stream_impl.cc67
-rw-r--r--call/flexfec_receive_stream_impl.h27
-rw-r--r--call/flexfec_receive_stream_unittest.cc25
-rw-r--r--call/packet_receiver.h33
-rw-r--r--call/rampup_tests.cc19
-rw-r--r--call/receive_stream.h89
-rw-r--r--call/rtp_payload_params.cc154
-rw-r--r--call/rtp_payload_params.h21
-rw-r--r--call/rtp_payload_params_unittest.cc406
-rw-r--r--call/rtp_transport_config.h51
-rw-r--r--call/rtp_transport_controller_send.cc5
-rw-r--r--call/rtp_transport_controller_send.h6
-rw-r--r--call/rtp_transport_controller_send_factory.h37
-rw-r--r--call/rtp_transport_controller_send_factory_interface.h32
-rw-r--r--call/rtp_transport_controller_send_interface.h1
-rw-r--r--call/rtp_video_sender.cc74
-rw-r--r--call/rtp_video_sender.h1
-rw-r--r--call/rtp_video_sender_unittest.cc70
-rw-r--r--call/rtx_receive_stream.cc2
-rw-r--r--call/rtx_receive_stream_unittest.cc6
-rw-r--r--call/version.cc2
-rw-r--r--call/video_receive_stream.cc6
-rw-r--r--call/video_receive_stream.h55
-rw-r--r--call/video_send_stream.cc9
-rw-r--r--call/video_send_stream.h1
-rw-r--r--common_audio/signal_processing/division_operations.c7
-rw-r--r--common_video/h264/h264_bitstream_parser.cc91
-rw-r--r--common_video/h264/pps_parser.cc65
-rw-r--r--common_video/h264/pps_parser.h1
-rw-r--r--common_video/h264/sps_parser.cc56
-rw-r--r--common_video/h264/sps_parser.h1
-rw-r--r--common_video/h264/sps_vui_rewriter.cc38
-rw-r--r--common_video/video_frame_buffer.cc8
-rw-r--r--common_video/video_frame_buffer_pool.cc4
-rw-r--r--darwin_aarch64.cmake547
-rw-r--r--darwin_x86_64.cmake545
-rw-r--r--examples/BUILD.gn2
-rw-r--r--examples/androidapp/res/values/arrays.xml1
-rw-r--r--examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java7
-rw-r--r--examples/androidnativeapi/jni/android_call_client.cc6
-rw-r--r--examples/objc/AppRTCMobile/ARDAppClient.h2
-rw-r--r--examples/objc/AppRTCMobile/ARDAppClient.m5
-rw-r--r--examples/objc/AppRTCMobile/ARDBitrateTracker.h30
-rw-r--r--examples/objc/AppRTCMobile/ARDBitrateTracker.m45
-rw-r--r--examples/objc/AppRTCMobile/ARDStatsBuilder.h9
-rw-r--r--examples/objc/AppRTCMobile/ARDStatsBuilder.m324
-rw-r--r--examples/objc/AppRTCMobile/ios/ARDStatsView.h6
-rw-r--r--examples/objc/AppRTCMobile/ios/ARDStatsView.m6
-rw-r--r--examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m3
-rw-r--r--examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m2
-rw-r--r--examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m2
-rw-r--r--examples/objcnativeapi/objc/objc_call_client.mm2
-rw-r--r--examples/unityplugin/simple_peer_connection.cc17
-rw-r--r--g3doc/abseil-in-webrtc.md (renamed from abseil-in-webrtc.md)4
-rw-r--r--g3doc/implementation_basics.md92
-rw-r--r--g3doc/sitemap.md10
-rw-r--r--g3doc/style-guide.md279
-rw-r--r--g3doc/style-guide/h-cc-pairs.md (renamed from style-guide/h-cc-pairs.md)3
-rw-r--r--import-webrtc.py49
-rwxr-xr-ximport_all.sh25
-rw-r--r--linux_aarch64.cmake937
-rw-r--r--linux_x86_64.cmake543
-rw-r--r--logging/BUILD.gn7
-rw-r--r--logging/g3doc/rtc_event_log.md85
-rw-r--r--logging/rtc_event_log/encoder/delta_encoding.cc18
-rw-r--r--logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc7
-rw-r--r--logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h3
-rw-r--r--logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc68
-rw-r--r--logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc287
-rw-r--r--logging/rtc_event_log/encoder/var_int.cc2
-rw-r--r--logging/rtc_event_log/events/rtc_event_alr_state.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_audio_playout.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h10
-rw-r--r--logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_dtls_transport_state.h7
-rw-r--r--logging/rtc_event_log/events/rtc_event_dtls_writable_state.h7
-rw-r--r--logging/rtc_event_log/events/rtc_event_frame_decoded.h7
-rw-r--r--logging/rtc_event_log/events/rtc_event_generic_ack_received.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_generic_packet_received.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_generic_packet_sent.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h7
-rw-r--r--logging/rtc_event_log/events/rtc_event_probe_cluster_created.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_probe_result_failure.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_probe_result_success.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_remote_estimate.h7
-rw-r--r--logging/rtc_event_log/events/rtc_event_route_change.h13
-rw-r--r--logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc15
-rw-r--r--logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h35
-rw-r--r--logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc17
-rw-r--r--logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h35
-rw-r--r--logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h11
-rw-r--r--logging/rtc_event_log/events/rtc_event_video_send_stream_config.h11
-rw-r--r--logging/rtc_event_log/logged_events.cc8
-rw-r--r--logging/rtc_event_log/logged_events.h173
-rw-r--r--logging/rtc_event_log/rtc_event_log_impl.cc6
-rw-r--r--logging/rtc_event_log/rtc_event_log_parser.cc159
-rw-r--r--logging/rtc_event_log/rtc_event_log_unittest.cc4
-rw-r--r--logging/rtc_event_log/rtc_event_log_unittest_helper.cc83
-rw-r--r--logging/rtc_event_log/rtc_event_log_unittest_helper.h20
-rw-r--r--logging/rtc_event_log/rtc_event_processor_unittest.cc8
-rw-r--r--media/BUILD.gn50
-rw-r--r--media/DEPS1
-rw-r--r--media/base/codec.cc35
-rw-r--r--media/base/codec.h17
-rw-r--r--media/base/codec_unittest.cc25
-rw-r--r--media/base/fake_media_engine.cc21
-rw-r--r--media/base/fake_media_engine.h20
-rw-r--r--media/base/fake_network_interface.h8
-rw-r--r--media/base/media_channel.cc130
-rw-r--r--media/base/media_channel.h101
-rw-r--r--media/base/media_constants.cc5
-rw-r--r--media/base/media_constants.h8
-rw-r--r--media/base/turn_utils.h2
-rw-r--r--media/base/video_source_base.cc49
-rw-r--r--media/base/video_source_base.h36
-rw-r--r--media/engine/fake_webrtc_call.cc40
-rw-r--r--media/engine/fake_webrtc_call.h36
-rw-r--r--media/engine/internal_decoder_factory.cc19
-rw-r--r--media/engine/payload_type_mapper.cc3
-rw-r--r--media/engine/payload_type_mapper_unittest.cc8
-rw-r--r--media/engine/simulcast_encoder_adapter.cc24
-rw-r--r--media/engine/simulcast_encoder_adapter_unittest.cc8
-rw-r--r--media/engine/webrtc_video_engine.cc217
-rw-r--r--media/engine/webrtc_video_engine.h14
-rw-r--r--media/engine/webrtc_video_engine_unittest.cc346
-rw-r--r--media/engine/webrtc_voice_engine.cc276
-rw-r--r--media/engine/webrtc_voice_engine.h35
-rw-r--r--media/engine/webrtc_voice_engine_unittest.cc19
-rw-r--r--media/sctp/OWNERS.webrtc2
-rw-r--r--media/sctp/dcsctp_transport.cc532
-rw-r--r--media/sctp/dcsctp_transport.h109
-rw-r--r--media/sctp/sctp_transport_factory.cc18
-rw-r--r--media/sctp/sctp_transport_factory.h2
-rw-r--r--media/sctp/sctp_transport_internal.h4
-rw-r--r--media/sctp/usrsctp_transport.cc139
-rw-r--r--media/sctp/usrsctp_transport.h14
-rw-r--r--media/sctp/usrsctp_transport_reliability_unittest.cc60
-rw-r--r--media/sctp/usrsctp_transport_unittest.cc18
-rw-r--r--modules/audio_coding/BUILD.gn6
-rw-r--r--modules/audio_coding/OWNERS.webrtc1
-rw-r--r--modules/audio_coding/acm2/acm_receiver.cc14
-rw-r--r--modules/audio_coding/acm2/acm_send_test.cc9
-rw-r--r--modules/audio_coding/acm2/audio_coding_module_unittest.cc110
-rw-r--r--modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc11
-rw-r--r--modules/audio_coding/codecs/opus/opus_interface.cc12
-rw-r--r--modules/audio_coding/codecs/red/audio_encoder_copy_red.cc162
-rw-r--r--modules/audio_coding/codecs/red/audio_encoder_copy_red.h19
-rw-r--r--modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc32
-rw-r--r--modules/audio_coding/g3doc/index.md4
-rw-r--r--modules/audio_coding/neteq/decoder_database_unittest.cc17
-rw-r--r--modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc2
-rw-r--r--modules/audio_coding/neteq/neteq_impl.cc12
-rw-r--r--modules/audio_coding/neteq/neteq_impl.h1
-rw-r--r--modules/audio_coding/neteq/neteq_impl_unittest.cc46
-rw-r--r--modules/audio_coding/neteq/neteq_network_stats_unittest.cc2
-rw-r--r--modules/audio_coding/neteq/neteq_unittest.cc2
-rw-r--r--modules/audio_coding/neteq/red_payload_splitter.cc2
-rw-r--r--modules/audio_coding/neteq/red_payload_splitter_unittest.cc4
-rw-r--r--modules/audio_coding/neteq/test/result_sink.cc22
-rw-r--r--modules/audio_coding/neteq/test/result_sink.h2
-rw-r--r--modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc7
-rw-r--r--modules/audio_coding/neteq/tools/neteq_test.cc2
-rw-r--r--modules/audio_coding/neteq/tools/neteq_test_factory.cc2
-rw-r--r--modules/audio_coding/neteq/tools/packet.cc98
-rw-r--r--modules/audio_coding/neteq/tools/packet.h66
-rw-r--r--modules/audio_coding/neteq/tools/packet_unittest.cc51
-rw-r--r--modules/audio_coding/neteq/tools/rtp_file_source.cc7
-rw-r--r--modules/audio_device/audio_device_data_observer.cc20
-rw-r--r--modules/audio_device/audio_device_impl.cc13
-rw-r--r--modules/audio_device/dummy/file_audio_device.cc47
-rw-r--r--modules/audio_device/dummy/file_audio_device.h10
-rw-r--r--modules/audio_device/g3doc/audio_device_module.md3
-rw-r--r--modules/audio_device/include/mock_audio_device.h5
-rw-r--r--modules/audio_device/include/test_audio_device.cc2
-rw-r--r--modules/audio_device/linux/audio_device_alsa_linux.cc66
-rw-r--r--modules/audio_device/linux/audio_device_alsa_linux.h6
-rw-r--r--modules/audio_device/linux/audio_device_pulse_linux.cc55
-rw-r--r--modules/audio_device/linux/audio_device_pulse_linux.h5
-rw-r--r--modules/audio_device/mac/audio_device_mac.cc51
-rw-r--r--modules/audio_device/mac/audio_device_mac.h11
-rw-r--r--modules/audio_device/win/audio_device_module_win.cc2
-rw-r--r--modules/audio_device/win/core_audio_base_win.cc46
-rw-r--r--modules/audio_device/win/core_audio_base_win.h2
-rw-r--r--modules/audio_device/win/core_audio_utility_win.cc2
-rw-r--r--modules/audio_mixer/BUILD.gn6
-rw-r--r--modules/audio_mixer/OWNERS.webrtc2
-rw-r--r--modules/audio_mixer/audio_mixer_impl.cc27
-rw-r--r--modules/audio_mixer/audio_mixer_impl.h14
-rw-r--r--modules/audio_mixer/audio_mixer_impl_unittest.cc164
-rw-r--r--modules/audio_mixer/frame_combiner.cc29
-rw-r--r--modules/audio_mixer/frame_combiner_unittest.cc69
-rw-r--r--modules/audio_mixer/g3doc/index.md54
-rw-r--r--modules/audio_processing/aec3/aec_state.h6
-rw-r--r--modules/audio_processing/aec3/echo_canceller3.cc22
-rw-r--r--modules/audio_processing/aec3/echo_path_delay_estimator.cc4
-rw-r--r--modules/audio_processing/aec3/echo_remover.cc14
-rw-r--r--modules/audio_processing/aec3/erle_estimator.h12
-rw-r--r--modules/audio_processing/aec3/erle_estimator_unittest.cc32
-rw-r--r--modules/audio_processing/aec3/matched_filter.cc32
-rw-r--r--modules/audio_processing/aec3/matched_filter.h9
-rw-r--r--modules/audio_processing/aec3/matched_filter_lag_aggregator.h3
-rw-r--r--modules/audio_processing/aec3/matched_filter_unittest.cc15
-rw-r--r--modules/audio_processing/aec3/residual_echo_estimator.cc33
-rw-r--r--modules/audio_processing/aec3/residual_echo_estimator.h14
-rw-r--r--modules/audio_processing/aec3/residual_echo_estimator_unittest.cc5
-rw-r--r--modules/audio_processing/aec3/reverb_model_estimator.cc1
-rw-r--r--modules/audio_processing/aec3/reverb_model_estimator.h1
-rw-r--r--modules/audio_processing/aec3/subband_erle_estimator.cc11
-rw-r--r--modules/audio_processing/aec3/subband_erle_estimator.h7
-rw-r--r--modules/audio_processing/aec3/subtractor.cc24
-rw-r--r--modules/audio_processing/aec3/subtractor.h10
-rw-r--r--modules/audio_processing/aec3/suppression_gain.cc64
-rw-r--r--modules/audio_processing/aec3/suppression_gain.h7
-rw-r--r--modules/audio_processing/aec3/suppression_gain_unittest.cc56
-rw-r--r--modules/audio_processing/aec3/transparent_mode.cc4
-rw-r--r--modules/audio_processing/agc/BUILD.gn56
-rw-r--r--modules/audio_processing/agc/agc_manager_direct.cc182
-rw-r--r--modules/audio_processing/agc/agc_manager_direct.h67
-rw-r--r--modules/audio_processing/agc/agc_manager_direct_unittest.cc256
-rw-r--r--modules/audio_processing/agc/clipping_predictor.cc383
-rw-r--r--modules/audio_processing/agc/clipping_predictor.h63
-rw-r--r--modules/audio_processing/agc/clipping_predictor_evaluator.cc175
-rw-r--r--modules/audio_processing/agc/clipping_predictor_evaluator.h102
-rw-r--r--modules/audio_processing/agc/clipping_predictor_evaluator_unittest.cc568
-rw-r--r--modules/audio_processing/agc/clipping_predictor_level_buffer.cc77
-rw-r--r--modules/audio_processing/agc/clipping_predictor_level_buffer.h71
-rw-r--r--modules/audio_processing/agc/clipping_predictor_level_buffer_unittest.cc131
-rw-r--r--modules/audio_processing/agc/clipping_predictor_unittest.cc491
-rw-r--r--modules/audio_processing/agc2/adaptive_agc.cc32
-rw-r--r--modules/audio_processing/agc2/adaptive_agc.h10
-rw-r--r--modules/audio_processing/agc2/adaptive_digital_gain_applier.cc63
-rw-r--r--modules/audio_processing/agc2/adaptive_digital_gain_applier.h13
-rw-r--r--modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc75
-rw-r--r--modules/audio_processing/agc2/agc2_common.h2
-rw-r--r--modules/audio_processing/agc2/vad_with_level.cc4
-rw-r--r--modules/audio_processing/agc2/vad_with_level.h2
-rw-r--r--modules/audio_processing/agc2/vad_with_level_unittest.cc14
-rw-r--r--modules/audio_processing/audio_processing_impl.cc18
-rw-r--r--modules/audio_processing/audio_processing_impl.h7
-rw-r--r--modules/audio_processing/audio_processing_impl_locking_unittest.cc75
-rw-r--r--modules/audio_processing/audio_processing_impl_unittest.cc12
-rw-r--r--modules/audio_processing/audio_processing_performance_unittest.cc51
-rw-r--r--modules/audio_processing/audio_processing_unittest.cc121
-rw-r--r--modules/audio_processing/gain_controller2.cc10
-rw-r--r--modules/audio_processing/gain_controller2.h2
-rw-r--r--modules/audio_processing/gain_controller2_unittest.cc7
-rw-r--r--modules/audio_processing/include/audio_processing.cc92
-rw-r--r--modules/audio_processing/include/audio_processing.h54
-rw-r--r--modules/audio_processing/logging/apm_data_dumper.h9
-rw-r--r--modules/audio_processing/residual_echo_detector_unittest.cc12
-rw-r--r--modules/congestion_controller/BUILD.gn13
-rw-r--r--modules/congestion_controller/goog_cc/goog_cc_network_control.cc2
-rw-r--r--modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc63
-rw-r--r--modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc2
-rw-r--r--modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc3
-rw-r--r--modules/congestion_controller/include/receive_side_congestion_controller.h12
-rw-r--r--modules/congestion_controller/pcc/monitor_interval.cc2
-rw-r--r--modules/congestion_controller/pcc/rtt_tracker.cc2
-rw-r--r--modules/congestion_controller/receive_side_congestion_controller.cc19
-rw-r--r--modules/congestion_controller/receive_side_congestion_controller_unittest.cc43
-rw-r--r--modules/congestion_controller/remb_throttler.cc63
-rw-r--r--modules/congestion_controller/remb_throttler.h54
-rw-r--r--modules/congestion_controller/remb_throttler_unittest.cc100
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc20
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_demuxer.cc19
-rw-r--r--modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc60
-rw-r--r--modules/desktop_capture/BUILD.gn10
-rw-r--r--modules/desktop_capture/cropping_window_capturer_win.cc11
-rw-r--r--modules/desktop_capture/desktop_capture_options.h36
-rw-r--r--modules/desktop_capture/desktop_capturer.cc12
-rw-r--r--modules/desktop_capture/full_screen_window_detector.h3
-rw-r--r--modules/desktop_capture/linux/base_capturer_pipewire.cc67
-rw-r--r--modules/desktop_capture/linux/base_capturer_pipewire.h6
-rw-r--r--modules/desktop_capture/linux/shared_x_display.h11
-rw-r--r--modules/desktop_capture/linux/x_error_trap.cc16
-rw-r--r--modules/desktop_capture/mac/desktop_configuration_monitor.h8
-rw-r--r--modules/desktop_capture/screen_drawer_unittest.cc14
-rw-r--r--modules/desktop_capture/shared_desktop_frame.h4
-rw-r--r--modules/desktop_capture/win/screen_capture_utils.cc12
-rw-r--r--modules/desktop_capture/win/screen_capture_utils.h4
-rw-r--r--modules/desktop_capture/win/test_support/test_window.cc10
-rw-r--r--modules/desktop_capture/win/test_support/test_window.h2
-rw-r--r--modules/desktop_capture/win/wgc_capture_source.cc63
-rw-r--r--modules/desktop_capture/win/wgc_capture_source.h6
-rw-r--r--modules/desktop_capture/win/wgc_capture_source_unittest.cc138
-rw-r--r--modules/desktop_capture/win/wgc_capturer_win.cc11
-rw-r--r--modules/desktop_capture/win/wgc_capturer_win.h9
-rw-r--r--modules/desktop_capture/win/wgc_capturer_win_unittest.cc29
-rw-r--r--modules/desktop_capture/win/window_capture_utils.cc51
-rw-r--r--modules/desktop_capture/win/window_capture_utils.h5
-rw-r--r--modules/desktop_capture/win/window_capture_utils_unittest.cc14
-rw-r--r--modules/desktop_capture/win/window_capturer_win_gdi.cc13
-rw-r--r--modules/desktop_capture/win/window_capturer_win_gdi.h4
-rw-r--r--modules/desktop_capture/window_capturer_unittest.cc5
-rw-r--r--modules/include/module_common_types_public.h2
-rw-r--r--modules/pacing/packet_router.cc80
-rw-r--r--modules/pacing/packet_router.h32
-rw-r--r--modules/pacing/packet_router_unittest.cc325
-rw-r--r--modules/pacing/round_robin_packet_queue.h3
-rw-r--r--modules/remote_bitrate_estimator/BUILD.gn10
-rw-r--r--modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h8
-rw-r--r--modules/remote_bitrate_estimator/packet_arrival_map.cc123
-rw-r--r--modules/remote_bitrate_estimator/packet_arrival_map.h88
-rw-r--r--modules/remote_bitrate_estimator/packet_arrival_map_test.cc252
-rw-r--r--modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc329
-rw-r--r--modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h100
-rw-r--r--modules/remote_bitrate_estimator/remote_estimator_proxy.cc184
-rw-r--r--modules/remote_bitrate_estimator/remote_estimator_proxy.h55
-rw-r--r--modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc76
-rw-r--r--modules/remote_bitrate_estimator/tools/bwe_rtp.cc48
-rw-r--r--modules/remote_bitrate_estimator/tools/bwe_rtp.h21
-rw-r--r--modules/remote_bitrate_estimator/tools/rtp_to_text.cc30
-rw-r--r--modules/rtp_rtcp/BUILD.gn16
-rw-r--r--modules/rtp_rtcp/include/rtcp_statistics.h16
-rw-r--r--modules/rtp_rtcp/include/rtp_rtcp_defines.h14
-rw-r--r--modules/rtp_rtcp/mocks/mock_rtp_rtcp.h1
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc125
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_interpolator.h86
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_interpolator_unittest.cc (renamed from modules/rtp_rtcp/source/absolute_capture_time_receiver_unittest.cc)215
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_receiver.cc125
-rw-r--r--modules/rtp_rtcp/source/absolute_capture_time_receiver.h77
-rw-r--r--modules/rtp_rtcp/source/capture_clock_offset_updater.cc33
-rw-r--r--modules/rtp_rtcp/source/capture_clock_offset_updater.h51
-rw-r--r--modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc58
-rw-r--r--modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc2
-rw-r--r--modules/rtp_rtcp/source/flexfec_header_reader_writer.cc7
-rw-r--r--modules/rtp_rtcp/source/flexfec_receiver_unittest.cc63
-rw-r--r--modules/rtp_rtcp/source/forward_error_correction.cc31
-rw-r--r--modules/rtp_rtcp/source/forward_error_correction.h5
-rw-r--r--modules/rtp_rtcp/source/receive_statistics_impl.cc107
-rw-r--r--modules/rtp_rtcp/source/receive_statistics_impl.h23
-rw-r--r--modules/rtp_rtcp/source/rtcp_receiver.cc193
-rw-r--r--modules/rtp_rtcp/source/rtcp_receiver.h120
-rw-r--r--modules/rtp_rtcp/source/rtcp_receiver_unittest.cc81
-rw-r--r--modules/rtp_rtcp/source/rtcp_sender.cc159
-rw-r--r--modules/rtp_rtcp/source/rtcp_sender.h72
-rw-r--r--modules/rtp_rtcp/source/rtcp_sender_unittest.cc86
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver.cc10
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver.h2
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver_impl.cc51
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver_impl.h13
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc214
-rw-r--r--modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc24
-rw-r--r--modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc4
-rw-r--r--modules/rtp_rtcp/source/rtp_header_extension_map.cc1
-rw-r--r--modules/rtp_rtcp/source/rtp_header_extensions.cc55
-rw-r--r--modules/rtp_rtcp/source/rtp_header_extensions.h15
-rw-r--r--modules/rtp_rtcp/source/rtp_packet.cc11
-rw-r--r--modules/rtp_rtcp/source/rtp_packet.h12
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_received.cc6
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_received.h21
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_to_send.h4
-rw-r--r--modules/rtp_rtcp/source/rtp_packet_unittest.cc29
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_config.h10
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl.cc25
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl.h1
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl2.cc121
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl2.h35
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc497
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc11
-rw-r--r--modules/rtp_rtcp/source/rtp_rtcp_interface.h7
-rw-r--r--modules/rtp_rtcp/source/rtp_sender.cc1
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_audio.cc38
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_audio.h2
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc13
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_egress.cc29
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc982
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_unittest.cc2629
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video.cc15
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video.h1
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc7
-rw-r--r--modules/rtp_rtcp/source/rtp_sender_video_unittest.cc62
-rw-r--r--modules/rtp_rtcp/source/rtp_utility.cc4
-rw-r--r--modules/rtp_rtcp/source/source_tracker_unittest.cc166
-rw-r--r--modules/rtp_rtcp/source/time_util.cc42
-rw-r--r--modules/rtp_rtcp/source/time_util.h14
-rw-r--r--modules/rtp_rtcp/source/time_util_unittest.cc24
-rw-r--r--modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc7
-rw-r--r--modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc4
-rw-r--r--modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h2
-rw-r--r--modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc42
-rw-r--r--modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h2
-rw-r--r--modules/utility/source/process_thread_impl.cc34
-rw-r--r--modules/utility/source/process_thread_impl.h4
-rw-r--r--modules/video_capture/linux/device_info_linux.cc6
-rw-r--r--modules/video_capture/linux/video_capture_linux.cc28
-rw-r--r--modules/video_capture/linux/video_capture_linux.h3
-rw-r--r--modules/video_capture/windows/device_info_ds.cc6
-rw-r--r--modules/video_capture/windows/video_capture_ds.cc2
-rw-r--r--modules/video_capture/windows/video_capture_factory_windows.cc3
-rw-r--r--modules/video_coding/BUILD.gn20
-rw-r--r--modules/video_coding/codecs/av1/av1_svc_config.cc3
-rw-r--r--modules/video_coding/codecs/av1/av1_svc_config_unittest.cc8
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_encoder.cc25
-rw-r--r--modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc32
-rw-r--r--modules/video_coding/codecs/h264/h264.cc2
-rw-r--r--modules/video_coding/codecs/h264/h264_decoder_impl.cc26
-rw-r--r--modules/video_coding/codecs/h264/h264_encoder_impl.cc2
-rw-r--r--modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc5
-rw-r--r--modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc6
-rw-r--r--modules/video_coding/codecs/test/videocodec_test_libvpx.cc2
-rw-r--r--modules/video_coding/codecs/test/videoprocessor.cc2
-rw-r--r--modules/video_coding/codecs/vp8/default_temporal_layers.cc3
-rw-r--r--modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc19
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc10
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h9
-rw-r--r--modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc28
-rw-r--r--modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc17
-rw-r--r--modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc3
-rw-r--r--modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h7
-rw-r--r--modules/video_coding/deprecated/nack_module.h2
-rw-r--r--modules/video_coding/generic_decoder.cc3
-rw-r--r--modules/video_coding/jitter_buffer_unittest.cc5
-rw-r--r--modules/video_coding/packet.cc4
-rw-r--r--modules/video_coding/packet.h3
-rw-r--r--modules/video_coding/packet_buffer.cc11
-rw-r--r--modules/video_coding/packet_buffer.h6
-rw-r--r--modules/video_coding/rtp_frame_reference_finder.cc27
-rw-r--r--modules/video_coding/rtp_frame_reference_finder.h33
-rw-r--r--modules/video_coding/rtp_frame_reference_finder_unittest.cc53
-rw-r--r--modules/video_coding/svc/scalability_structure_simulcast.cc11
-rw-r--r--modules/video_coding/utility/vp9_uncompressed_header_parser.cc14
-rw-r--r--modules/video_coding/video_codec_initializer.cc6
-rw-r--r--modules/video_coding/video_codec_initializer_unittest.cc69
-rw-r--r--modules/video_coding/video_receiver.cc2
-rw-r--r--net/dcsctp/BUILD.gn3
-rw-r--r--net/dcsctp/common/BUILD.gn5
-rw-r--r--net/dcsctp/common/internal_types.h12
-rw-r--r--net/dcsctp/fuzzers/BUILD.gn50
-rw-r--r--net/dcsctp/fuzzers/dcsctp_fuzzers.cc460
-rw-r--r--net/dcsctp/fuzzers/dcsctp_fuzzers.h116
-rw-r--r--net/dcsctp/fuzzers/dcsctp_fuzzers_test.cc40
-rw-r--r--net/dcsctp/packet/BUILD.gn30
-rw-r--r--net/dcsctp/packet/bounded_byte_reader.h4
-rw-r--r--net/dcsctp/packet/bounded_byte_writer.h4
-rw-r--r--net/dcsctp/packet/chunk/sack_chunk.cc9
-rw-r--r--net/dcsctp/packet/chunk/sack_chunk.h7
-rw-r--r--net/dcsctp/packet/chunk_validators.cc7
-rw-r--r--net/dcsctp/packet/crc32c.cc64
-rw-r--r--net/dcsctp/packet/data.h2
-rw-r--r--net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc4
-rw-r--r--net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause_test.cc18
-rw-r--r--net/dcsctp/packet/tlv_trait.h13
-rw-r--r--net/dcsctp/public/BUILD.gn18
-rw-r--r--net/dcsctp/public/dcsctp_options.h45
-rw-r--r--net/dcsctp/public/dcsctp_socket.h148
-rw-r--r--net/dcsctp/public/dcsctp_socket_factory.cc31
-rw-r--r--net/dcsctp/public/dcsctp_socket_factory.h31
-rw-r--r--net/dcsctp/public/types.h5
-rw-r--r--net/dcsctp/rx/BUILD.gn46
-rw-r--r--net/dcsctp/rx/data_tracker.cc199
-rw-r--r--net/dcsctp/rx/data_tracker.h55
-rw-r--r--net/dcsctp/rx/data_tracker_test.cc406
-rw-r--r--net/dcsctp/rx/reassembly_queue.h4
-rw-r--r--net/dcsctp/socket/BUILD.gn234
-rw-r--r--net/dcsctp/socket/callback_deferrer.h184
-rw-r--r--net/dcsctp/socket/capabilities.h26
-rw-r--r--net/dcsctp/socket/context.h66
-rw-r--r--net/dcsctp/socket/dcsctp_socket.cc1550
-rw-r--r--net/dcsctp/socket/dcsctp_socket.h272
-rw-r--r--net/dcsctp/socket/dcsctp_socket_test.cc1394
-rw-r--r--net/dcsctp/socket/heartbeat_handler.cc194
-rw-r--r--net/dcsctp/socket/heartbeat_handler.h69
-rw-r--r--net/dcsctp/socket/heartbeat_handler_test.cc158
-rw-r--r--net/dcsctp/socket/mock_context.h72
-rw-r--r--net/dcsctp/socket/mock_dcsctp_socket_callbacks.h161
-rw-r--r--net/dcsctp/socket/state_cookie.cc78
-rw-r--r--net/dcsctp/socket/state_cookie.h65
-rw-r--r--net/dcsctp/socket/state_cookie_test.cc40
-rw-r--r--net/dcsctp/socket/stream_reset_handler.cc347
-rw-r--r--net/dcsctp/socket/stream_reset_handler.h222
-rw-r--r--net/dcsctp/socket/stream_reset_handler_test.cc550
-rw-r--r--net/dcsctp/socket/transmission_control_block.cc168
-rw-r--r--net/dcsctp/socket/transmission_control_block.h220
-rw-r--r--net/dcsctp/testing/BUILD.gn7
-rw-r--r--net/dcsctp/timer/BUILD.gn34
-rw-r--r--net/dcsctp/timer/fake_timeout.h27
-rw-r--r--net/dcsctp/timer/task_queue_timeout.cc96
-rw-r--r--net/dcsctp/timer/task_queue_timeout.h88
-rw-r--r--net/dcsctp/timer/task_queue_timeout_test.cc115
-rw-r--r--net/dcsctp/timer/timer.cc72
-rw-r--r--net/dcsctp/timer/timer.h34
-rw-r--r--net/dcsctp/timer/timer_test.cc80
-rw-r--r--net/dcsctp/tx/BUILD.gn141
-rw-r--r--net/dcsctp/tx/mock_send_queue.h60
-rw-r--r--net/dcsctp/tx/retransmission_error_counter.cc37
-rw-r--r--net/dcsctp/tx/retransmission_error_counter.h51
-rw-r--r--net/dcsctp/tx/retransmission_error_counter_test.cc76
-rw-r--r--net/dcsctp/tx/retransmission_queue.cc889
-rw-r--r--net/dcsctp/tx/retransmission_queue.h371
-rw-r--r--net/dcsctp/tx/retransmission_queue_test.cc1007
-rw-r--r--net/dcsctp/tx/retransmission_timeout.cc69
-rw-r--r--net/dcsctp/tx/retransmission_timeout.h58
-rw-r--r--net/dcsctp/tx/retransmission_timeout_test.cc151
-rw-r--r--net/dcsctp/tx/rr_send_queue.cc432
-rw-r--r--net/dcsctp/tx/rr_send_queue.h238
-rw-r--r--net/dcsctp/tx/rr_send_queue_test.cc742
-rw-r--r--net/dcsctp/tx/send_queue.h128
-rw-r--r--p2p/base/default_ice_transport_factory.cc2
-rw-r--r--p2p/base/dtls_transport.cc102
-rw-r--r--p2p/base/dtls_transport.h7
-rw-r--r--p2p/base/dtls_transport_internal.cc18
-rw-r--r--p2p/base/dtls_transport_internal.h41
-rw-r--r--p2p/base/dtls_transport_unittest.cc10
-rw-r--r--p2p/base/fake_dtls_transport.h9
-rw-r--r--p2p/base/fake_port_allocator.h9
-rw-r--r--p2p/base/p2p_transport_channel.cc3
-rw-r--r--p2p/base/port.cc37
-rw-r--r--p2p/base/port.h40
-rw-r--r--p2p/base/port_allocator.cc3
-rw-r--r--p2p/base/port_allocator_unittest.cc53
-rw-r--r--p2p/base/stun_port.cc10
-rw-r--r--p2p/base/turn_port.cc2
-rw-r--r--p2p/base/turn_port.h5
-rw-r--r--p2p/client/basic_port_allocator.cc160
-rw-r--r--p2p/client/basic_port_allocator.h63
-rw-r--r--p2p/g3doc/ice.md3
-rw-r--r--pc/BUILD.gn142
-rw-r--r--pc/audio_rtp_receiver.cc250
-rw-r--r--pc/audio_rtp_receiver.h71
-rw-r--r--pc/audio_track.cc12
-rw-r--r--pc/audio_track.h2
-rw-r--r--pc/channel.cc264
-rw-r--r--pc/channel.h74
-rw-r--r--pc/channel_interface.h10
-rw-r--r--pc/channel_manager.h1
-rw-r--r--pc/channel_unittest.cc432
-rw-r--r--pc/connection_context.cc9
-rw-r--r--pc/connection_context.h8
-rw-r--r--pc/data_channel_controller.cc26
-rw-r--r--pc/data_channel_controller.h6
-rw-r--r--pc/data_channel_integrationtest.cc297
-rw-r--r--pc/data_channel_unittest.cc29
-rw-r--r--pc/dtls_srtp_transport.cc21
-rw-r--r--pc/dtls_srtp_transport.h3
-rw-r--r--pc/dtls_transport.cc42
-rw-r--r--pc/dtls_transport.h2
-rw-r--r--pc/dtls_transport_unittest.cc6
-rw-r--r--pc/dtmf_sender.cc4
-rw-r--r--pc/dtmf_sender.h5
-rw-r--r--pc/dtmf_sender_unittest.cc4
-rw-r--r--pc/g3doc/dtls_transport.md53
-rw-r--r--pc/g3doc/peer_connection.md59
-rw-r--r--pc/g3doc/rtp.md56
-rw-r--r--pc/g3doc/srtp.md72
-rw-r--r--pc/ice_transport_unittest.cc5
-rw-r--r--pc/jitter_buffer_delay.cc45
-rw-r--r--pc/jitter_buffer_delay.h31
-rw-r--r--pc/jitter_buffer_delay_interface.h39
-rw-r--r--pc/jitter_buffer_delay_proxy.h31
-rw-r--r--pc/jitter_buffer_delay_unittest.cc64
-rw-r--r--pc/jsep_transport.cc28
-rw-r--r--pc/jsep_transport_collection.cc255
-rw-r--r--pc/jsep_transport_collection.h145
-rw-r--r--pc/jsep_transport_controller.cc447
-rw-r--r--pc/jsep_transport_controller.h48
-rw-r--r--pc/jsep_transport_controller_unittest.cc522
-rw-r--r--pc/jsep_transport_unittest.cc3
-rw-r--r--pc/local_audio_source.cc3
-rw-r--r--pc/media_session.cc367
-rw-r--r--pc/media_session_unittest.cc88
-rw-r--r--pc/media_stream.cc4
-rw-r--r--pc/media_stream_proxy.h (renamed from api/media_stream_proxy.h)14
-rw-r--r--pc/media_stream_track_proxy.h (renamed from api/media_stream_track_proxy.h)31
-rw-r--r--pc/peer_connection.cc72
-rw-r--r--pc/peer_connection.h7
-rw-r--r--pc/peer_connection_adaptation_integrationtest.cc4
-rw-r--r--pc/peer_connection_bundle_unittest.cc54
-rw-r--r--pc/peer_connection_crypto_unittest.cc2
-rw-r--r--pc/peer_connection_data_channel_unittest.cc2
-rw-r--r--pc/peer_connection_factory.cc52
-rw-r--r--pc/peer_connection_factory.h13
-rw-r--r--pc/peer_connection_factory_proxy.h (renamed from api/peer_connection_factory_proxy.h)24
-rw-r--r--pc/peer_connection_histogram_unittest.cc2
-rw-r--r--pc/peer_connection_ice_unittest.cc10
-rw-r--r--pc/peer_connection_integrationtest.cc28
-rw-r--r--pc/peer_connection_interface_unittest.cc20
-rw-r--r--pc/peer_connection_media_unittest.cc69
-rw-r--r--pc/peer_connection_proxy.h (renamed from api/peer_connection_proxy.h)18
-rw-r--r--pc/peer_connection_rampup_tests.cc10
-rw-r--r--pc/peer_connection_rtp_unittest.cc2
-rw-r--r--pc/peer_connection_signaling_unittest.cc156
-rw-r--r--pc/peer_connection_simulcast_unittest.cc24
-rw-r--r--pc/peer_connection_wrapper.cc18
-rw-r--r--pc/proxy.cc25
-rw-r--r--pc/proxy.h (renamed from api/proxy.h)110
-rw-r--r--pc/proxy_unittest.cc10
-rw-r--r--pc/remote_audio_source.cc36
-rw-r--r--pc/rtc_stats_collector.cc33
-rw-r--r--pc/rtc_stats_collector.h2
-rw-r--r--pc/rtc_stats_collector_unittest.cc31
-rw-r--r--pc/rtc_stats_integrationtest.cc8
-rw-r--r--pc/rtcp_mux_filter.cc3
-rw-r--r--pc/rtp_receiver.cc18
-rw-r--r--pc/rtp_receiver.h7
-rw-r--r--pc/rtp_receiver_proxy.h54
-rw-r--r--pc/rtp_sender.cc10
-rw-r--r--pc/rtp_sender_proxy.h51
-rw-r--r--pc/rtp_sender_receiver_unittest.cc38
-rw-r--r--pc/rtp_transceiver.cc89
-rw-r--r--pc/rtp_transceiver.h36
-rw-r--r--pc/rtp_transceiver_unittest.cc146
-rw-r--r--pc/rtp_transmission_manager.cc21
-rw-r--r--pc/rtp_transport.cc8
-rw-r--r--pc/scenario_tests/goog_cc_test.cc4
-rw-r--r--pc/sctp_data_channel.cc78
-rw-r--r--pc/sctp_data_channel.h3
-rw-r--r--pc/sctp_data_channel_transport.cc15
-rw-r--r--pc/sctp_transport.cc11
-rw-r--r--pc/sctp_transport.h2
-rw-r--r--pc/sctp_transport_unittest.cc14
-rw-r--r--pc/sctp_utils.cc29
-rw-r--r--pc/sctp_utils.h4
-rw-r--r--pc/sdp_offer_answer.cc356
-rw-r--r--pc/sdp_offer_answer.h58
-rw-r--r--pc/session_description.cc23
-rw-r--r--pc/session_description.h10
-rw-r--r--pc/srtp_transport.cc4
-rw-r--r--pc/stats_collector.cc56
-rw-r--r--pc/stats_collector_unittest.cc35
-rw-r--r--pc/stream_collection.h8
-rw-r--r--pc/test/fake_audio_capture_module.cc3
-rw-r--r--pc/test/fake_data_channel_provider.h12
-rw-r--r--pc/test/fake_peer_connection_base.h5
-rw-r--r--pc/test/fake_peer_connection_for_stats.h27
-rw-r--r--pc/test/fake_video_track_source.h2
-rw-r--r--pc/test/integration_test_helpers.h50
-rw-r--r--pc/test/mock_channel_interface.h13
-rw-r--r--pc/test/mock_delayable.h36
-rw-r--r--pc/test/mock_peer_connection_observers.h37
-rw-r--r--pc/test/peer_connection_test_wrapper.cc35
-rw-r--r--pc/test/rtc_stats_obtainer.h3
-rw-r--r--pc/track_media_info_map_unittest.cc79
-rw-r--r--pc/transport_stats.h3
-rw-r--r--pc/used_ids.h23
-rw-r--r--pc/video_rtp_receiver.cc287
-rw-r--r--pc/video_rtp_receiver.h94
-rw-r--r--pc/video_rtp_receiver_unittest.cc33
-rw-r--r--pc/video_rtp_track_source_unittest.cc4
-rw-r--r--pc/video_track.cc69
-rw-r--r--pc/video_track.h14
-rw-r--r--pc/video_track_source.cc2
-rw-r--r--pc/video_track_source_proxy.cc25
-rw-r--r--pc/video_track_source_proxy.h (renamed from api/video_track_source_proxy.h)14
-rw-r--r--pc/video_track_unittest.cc2
-rw-r--r--pc/webrtc_sdp.cc23
-rw-r--r--pc/webrtc_sdp_unittest.cc24
-rw-r--r--pc/webrtc_session_description_factory.cc3
-rw-r--r--rtc_base/BUILD.gn21
-rw-r--r--rtc_base/async_invoker.cc34
-rw-r--r--rtc_base/async_invoker.h14
-rw-r--r--rtc_base/async_invoker_inl.h11
-rw-r--r--rtc_base/async_resolver.cc61
-rw-r--r--rtc_base/async_resolver.h9
-rw-r--r--rtc_base/bit_buffer.cc99
-rw-r--r--rtc_base/bit_buffer.h45
-rw-r--r--rtc_base/bit_buffer_unittest.cc124
-rw-r--r--rtc_base/cpu_time_unittest.cc17
-rw-r--r--rtc_base/deprecated/recursive_critical_section_unittest.cc19
-rw-r--r--rtc_base/event_tracer.cc16
-rw-r--r--rtc_base/event_unittest.cc19
-rw-r--r--rtc_base/hash.h32
-rw-r--r--rtc_base/hash_unittest.cc49
-rw-r--r--rtc_base/ip_address.cc5
-rw-r--r--rtc_base/ip_address_unittest.cc6
-rw-r--r--rtc_base/logging_unittest.cc13
-rw-r--r--rtc_base/nat_socket_factory.cc13
-rw-r--r--rtc_base/nat_socket_factory.h4
-rw-r--r--rtc_base/network.cc2
-rw-r--r--rtc_base/network_unittest.cc3
-rw-r--r--rtc_base/openssl_adapter.cc3
-rw-r--r--rtc_base/openssl_stream_adapter.cc7
-rw-r--r--rtc_base/platform_thread.cc267
-rw-r--r--rtc_base/platform_thread.h143
-rw-r--r--rtc_base/platform_thread_unittest.cc104
-rw-r--r--rtc_base/rate_limiter_unittest.cc20
-rw-r--r--rtc_base/ref_counted_object.h120
-rw-r--r--rtc_base/ref_counted_object_unittest.cc68
-rw-r--r--rtc_base/rtc_certificate.cc7
-rw-r--r--rtc_base/rtc_certificate.h9
-rw-r--r--rtc_base/socket_address.cc10
-rw-r--r--rtc_base/socket_address.h4
-rw-r--r--rtc_base/socket_address_unittest.cc10
-rw-r--r--rtc_base/socket_server.h7
-rw-r--r--rtc_base/ssl_fingerprint.cc3
-rw-r--r--rtc_base/ssl_fingerprint.h3
-rw-r--r--rtc_base/string_utils.h37
-rw-r--r--rtc_base/string_utils_unittest.cc25
-rw-r--r--rtc_base/synchronization/mutex_race_check.h3
-rw-r--r--rtc_base/task_queue_libevent.cc43
-rw-r--r--rtc_base/task_queue_stdlib.cc45
-rw-r--r--rtc_base/task_queue_win.cc54
-rw-r--r--rtc_base/task_utils/BUILD.gn3
-rw-r--r--rtc_base/task_utils/pending_task_safety_flag.cc8
-rw-r--r--rtc_base/task_utils/pending_task_safety_flag.h9
-rw-r--r--rtc_base/task_utils/repeating_task.cc40
-rw-r--r--rtc_base/task_utils/repeating_task.h69
-rw-r--r--rtc_base/task_utils/repeating_task_unittest.cc18
-rw-r--r--rtc_base/thread.cc7
-rw-r--r--rtc_base/thread.h4
-rw-r--r--rtc_base/thread_unittest.cc46
-rw-r--r--rtc_base/virtual_socket_server.cc111
-rw-r--r--rtc_base/virtual_socket_server.h26
-rw-r--r--rtc_tools/BUILD.gn3
-rw-r--r--rtc_tools/frame_analyzer/video_geometry_aligner.cc2
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyze_audio.cc2
-rw-r--r--rtc_tools/rtc_event_log_visualizer/analyzer.cc21
-rw-r--r--rtc_tools/rtc_event_log_visualizer/log_simulation.cc7
-rw-r--r--rtc_tools/rtp_generator/rtp_generator.cc14
-rw-r--r--rtc_tools/video_file_reader.cc7
-rw-r--r--rtc_tools/video_replay.cc48
-rw-r--r--sdk/BUILD.gn110
-rw-r--r--sdk/android/BUILD.gn42
-rw-r--r--sdk/android/api/org/webrtc/Camera2Enumerator.java10
-rw-r--r--sdk/android/api/org/webrtc/DataChannel.java1
-rw-r--r--sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java13
-rw-r--r--sdk/android/api/org/webrtc/LibaomAv1Decoder.java22
-rw-r--r--sdk/android/api/org/webrtc/LibaomAv1Encoder.java27
-rw-r--r--sdk/android/api/org/webrtc/PeerConnection.java9
-rw-r--r--sdk/android/api/org/webrtc/RTCStats.java1
-rw-r--r--sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java25
-rw-r--r--sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java20
-rw-r--r--sdk/android/api/org/webrtc/VideoCodecInfo.java5
-rw-r--r--sdk/android/api/org/webrtc/VideoDecoderFactory.java13
-rw-r--r--sdk/android/api/org/webrtc/VideoEncoder.java2
-rw-r--r--sdk/android/api/org/webrtc/YuvConverter.java16
-rw-r--r--sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java25
-rw-r--r--sdk/android/native_api/video/video_source.cc4
-rw-r--r--sdk/android/native_unittests/android_network_monitor_unittest.cc1
-rw-r--r--sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc42
-rw-r--r--sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java2
-rw-r--r--sdk/android/src/java/org/webrtc/MediaCodecUtils.java1
-rw-r--r--sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java8
-rw-r--r--sdk/android/src/java/org/webrtc/VideoCodecMimeType.java11
-rw-r--r--sdk/android/src/jni/android_network_monitor.cc2
-rw-r--r--sdk/android/src/jni/audio_device/audio_device_module.cc2
-rw-r--r--sdk/android/src/jni/audio_device/opensles_common.cc2
-rw-r--r--sdk/android/src/jni/audio_device/opensles_common.h5
-rw-r--r--sdk/android/src/jni/av1_codec.cc39
-rw-r--r--sdk/android/src/jni/encoded_image.cc2
-rw-r--r--sdk/android/src/jni/pc/add_ice_candidate_observer.h5
-rw-r--r--sdk/android/src/jni/pc/peer_connection.cc50
-rw-r--r--sdk/android/src/jni/pc/peer_connection.h2
-rw-r--r--sdk/android/src/jni/pc/peer_connection_factory.cc10
-rw-r--r--sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc17
-rw-r--r--sdk/android/src/jni/pc/video.cc5
-rw-r--r--sdk/android/src/jni/video_frame.cc11
-rw-r--r--sdk/media_constraints.cc1
-rw-r--r--sdk/media_constraints.h2
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm8
-rw-r--r--sdk/objc/api/peerconnection/RTCPeerConnection.mm5
-rw-r--r--sdk/objc/api/peerconnection/RTCRtpCodecParameters.h2
-rw-r--r--sdk/objc/api/peerconnection/RTCStatisticsReport.h4
-rw-r--r--sdk/objc/api/peerconnection/RTCStatisticsReport.mm22
-rw-r--r--sdk/objc/api/peerconnection/RTCVideoSource.mm2
-rw-r--r--sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm10
-rw-r--r--sdk/objc/base/RTCVideoEncoder.h9
-rw-r--r--sdk/objc/components/audio/RTCAudioSession.h2
-rw-r--r--sdk/objc/components/renderer/metal/RTCMTLVideoView.h2
-rw-r--r--sdk/objc/components/renderer/metal/RTCMTLVideoView.m4
-rw-r--r--sdk/objc/components/video_codec/RTCVideoEncoderH264.mm14
-rw-r--r--sdk/objc/native/api/video_capturer.mm6
-rw-r--r--sdk/objc/native/src/objc_video_encoder_factory.mm35
-rw-r--r--stats/rtc_stats.cc40
-rw-r--r--stats/rtc_stats_report.cc5
-rw-r--r--stats/rtc_stats_unittest.cc51
-rw-r--r--stats/rtcstats_objects.cc3
-rw-r--r--stats/test/rtc_test_stats.cc12
-rw-r--r--stats/test/rtc_test_stats.h3
-rw-r--r--style-guide.md272
-rw-r--r--style-guide/OWNERS.webrtc1
-rw-r--r--system_wrappers/include/clock.h29
-rw-r--r--system_wrappers/source/clock.cc16
-rw-r--r--test/BUILD.gn1
-rw-r--r--test/call_test.cc6
-rw-r--r--test/drifting_clock.cc12
-rw-r--r--test/drifting_clock.h10
-rw-r--r--test/encoder_settings.cc2
-rw-r--r--test/fake_texture_frame.cc2
-rw-r--r--test/fuzzers/BUILD.gn36
-rw-r--r--test/fuzzers/congestion_controller_feedback_fuzzer.cc6
-rw-r--r--test/fuzzers/dcsctp_socket_fuzzer.cc28
-rw-r--r--test/fuzzers/rtp_frame_reference_finder_fuzzer.cc9
-rw-r--r--test/fuzzers/rtp_header_parser_fuzzer.cc21
-rw-r--r--test/fuzzers/rtp_packet_fuzzer.cc9
-rw-r--r--test/fuzzers/sdp_integration_fuzzer.cc8
-rw-r--r--test/fuzzers/utils/BUILD.gn1
-rw-r--r--test/fuzzers/utils/rtp_replayer.cc37
-rw-r--r--test/fuzzers/vp9_encoder_references_fuzzer.cc498
-rw-r--r--test/mappable_native_buffer.cc31
-rw-r--r--test/mock_audio_decoder_factory.h4
-rw-r--r--test/network/BUILD.gn1
-rw-r--r--test/network/fake_network_socket_server.cc8
-rw-r--r--test/network/fake_network_socket_server.h6
-rw-r--r--test/network/network_emulation_pc_unittest.cc7
-rw-r--r--test/pc/e2e/BUILD.gn3
-rw-r--r--test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc2
-rw-r--r--test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc34
-rw-r--r--test/pc/e2e/analyzer/video/default_video_quality_analyzer.h2
-rw-r--r--test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc14
-rw-r--r--test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc10
-rw-r--r--test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h3
-rw-r--r--test/pc/e2e/media/media_helper.cc2
-rw-r--r--test/pc/e2e/stats_based_network_quality_metrics_reporter.cc2
-rw-r--r--test/pc/e2e/stats_poller.cc2
-rw-r--r--test/pc/e2e/test_peer_factory.cc6
-rw-r--r--test/pc/sctp/fake_sctp_transport.h3
-rw-r--r--test/peer_scenario/BUILD.gn1
-rw-r--r--test/peer_scenario/peer_scenario_client.cc4
-rw-r--r--test/peer_scenario/tests/BUILD.gn1
-rw-r--r--test/peer_scenario/tests/unsignaled_stream_test.cc148
-rw-r--r--test/rtp_header_parser.cc70
-rw-r--r--test/rtp_header_parser.h32
-rw-r--r--test/scenario/audio_stream.cc1
-rw-r--r--test/scenario/call_client.cc7
-rw-r--r--test/scenario/call_client.h3
-rw-r--r--test/scenario/scenario_unittest.cc6
-rw-r--r--test/scenario/stats_collection_unittest.cc10
-rw-r--r--test/scenario/video_frame_matcher.h2
-rw-r--r--test/scenario/video_stream.cc22
-rw-r--r--test/scenario/video_stream_unittest.cc10
-rw-r--r--test/time_controller/simulated_time_controller.cc10
-rw-r--r--test/time_controller/simulated_time_controller.h11
-rw-r--r--third_party/abseil-cpp/CMake/AbseilDll.cmake28
-rw-r--r--third_party/abseil-cpp/CMake/AbseilHelpers.cmake31
-rw-r--r--third_party/abseil-cpp/CMake/AbseilInstallDirs.cmake20
-rw-r--r--third_party/abseil-cpp/CMake/install_test_project/CMakeLists.txt2
-rw-r--r--third_party/abseil-cpp/CMakeLists.txt59
-rw-r--r--third_party/abseil-cpp/absl/CMakeLists.txt1
-rw-r--r--third_party/abseil-cpp/absl/algorithm/CMakeLists.txt4
-rw-r--r--third_party/abseil-cpp/absl/algorithm/container.h180
-rw-r--r--third_party/abseil-cpp/absl/base/CMakeLists.txt58
-rw-r--r--third_party/abseil-cpp/absl/base/attributes.h67
-rw-r--r--third_party/abseil-cpp/absl/base/config.h58
-rw-r--r--third_party/abseil-cpp/absl/base/dynamic_annotations.h2
-rw-r--r--third_party/abseil-cpp/absl/base/internal/exception_safety_testing.h26
-rw-r--r--third_party/abseil-cpp/absl/base/internal/sysinfo.cc67
-rw-r--r--third_party/abseil-cpp/absl/base/internal/thread_identity.cc8
-rw-r--r--third_party/abseil-cpp/absl/base/internal/thread_identity.h17
-rw-r--r--third_party/abseil-cpp/absl/base/optimization.h5
-rw-r--r--third_party/abseil-cpp/absl/cleanup/CMakeLists.txt2
-rw-r--r--third_party/abseil-cpp/absl/container/CMakeLists.txt60
-rw-r--r--third_party/abseil-cpp/absl/container/btree_test.cc75
-rw-r--r--third_party/abseil-cpp/absl/container/flat_hash_map_test.cc26
-rw-r--r--third_party/abseil-cpp/absl/container/internal/btree.h59
-rw-r--r--third_party/abseil-cpp/absl/container/internal/btree_container.h13
-rw-r--r--third_party/abseil-cpp/absl/container/internal/hash_generator_testing.h21
-rw-r--r--third_party/abseil-cpp/absl/container/internal/inlined_vector.h3
-rw-r--r--third_party/abseil-cpp/absl/container/internal/layout.h8
-rw-r--r--third_party/abseil-cpp/absl/container/internal/raw_hash_map.h5
-rw-r--r--third_party/abseil-cpp/absl/container/internal/raw_hash_set.h77
-rw-r--r--third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc68
-rw-r--r--third_party/abseil-cpp/absl/container/internal/unordered_map_constructor_test.h38
-rw-r--r--third_party/abseil-cpp/absl/container/internal/unordered_map_modifiers_test.h39
-rw-r--r--third_party/abseil-cpp/absl/container/internal/unordered_set_modifiers_test.h35
-rw-r--r--third_party/abseil-cpp/absl/copts/AbseilConfigureCopts.cmake3
-rw-r--r--third_party/abseil-cpp/absl/copts/GENERATED_AbseilCopts.cmake3
-rw-r--r--third_party/abseil-cpp/absl/debugging/CMakeLists.txt14
-rw-r--r--third_party/abseil-cpp/absl/debugging/failure_signal_handler.cc4
-rw-r--r--third_party/abseil-cpp/absl/debugging/failure_signal_handler.h2
-rw-r--r--third_party/abseil-cpp/absl/debugging/internal/demangle.cc28
-rw-r--r--third_party/abseil-cpp/absl/debugging/internal/demangle_test.cc30
-rw-r--r--third_party/abseil-cpp/absl/debugging/internal/stacktrace_x86-inl.inc7
-rw-r--r--third_party/abseil-cpp/absl/debugging/leak_check.cc16
-rw-r--r--third_party/abseil-cpp/absl/debugging/leak_check.h19
-rw-r--r--third_party/abseil-cpp/absl/debugging/leak_check_test.cc2
-rw-r--r--third_party/abseil-cpp/absl/debugging/symbolize_elf.inc10
-rw-r--r--third_party/abseil-cpp/absl/debugging/symbolize_test.cc43
-rw-r--r--third_party/abseil-cpp/absl/flags/CMakeLists.txt23
-rw-r--r--third_party/abseil-cpp/absl/flags/flag.h3
-rw-r--r--third_party/abseil-cpp/absl/flags/internal/usage.cc12
-rw-r--r--third_party/abseil-cpp/absl/flags/reflection.cc8
-rw-r--r--third_party/abseil-cpp/absl/functional/CMakeLists.txt4
-rw-r--r--third_party/abseil-cpp/absl/functional/function_ref.h1
-rw-r--r--third_party/abseil-cpp/absl/hash/CMakeLists.txt8
-rw-r--r--third_party/abseil-cpp/absl/hash/hash.h22
-rw-r--r--third_party/abseil-cpp/absl/hash/hash_test.cc35
-rw-r--r--third_party/abseil-cpp/absl/hash/internal/hash.cc14
-rw-r--r--third_party/abseil-cpp/absl/hash/internal/hash.h46
-rw-r--r--third_party/abseil-cpp/absl/hash/internal/wyhash.h2
-rw-r--r--third_party/abseil-cpp/absl/memory/CMakeLists.txt4
-rw-r--r--third_party/abseil-cpp/absl/memory/memory.h2
-rw-r--r--third_party/abseil-cpp/absl/meta/CMakeLists.txt2
-rw-r--r--third_party/abseil-cpp/absl/meta/type_traits.h23
-rw-r--r--third_party/abseil-cpp/absl/meta/type_traits_test.cc28
-rw-r--r--third_party/abseil-cpp/absl/numeric/CMakeLists.txt4
-rw-r--r--third_party/abseil-cpp/absl/numeric/int128.h18
-rw-r--r--third_party/abseil-cpp/absl/numeric/int128_test.cc18
-rw-r--r--third_party/abseil-cpp/absl/random/CMakeLists.txt120
-rw-r--r--third_party/abseil-cpp/absl/random/beta_distribution_test.cc9
-rw-r--r--third_party/abseil-cpp/absl/random/discrete_distribution_test.cc7
-rw-r--r--third_party/abseil-cpp/absl/random/distributions_test.cc10
-rw-r--r--third_party/abseil-cpp/absl/random/exponential_distribution_test.cc10
-rw-r--r--third_party/abseil-cpp/absl/random/internal/pool_urbg.cc7
-rw-r--r--third_party/abseil-cpp/absl/random/internal/seed_material.cc44
-rw-r--r--third_party/abseil-cpp/absl/random/uniform_real_distribution_test.cc9
-rw-r--r--third_party/abseil-cpp/absl/status/CMakeLists.txt4
-rw-r--r--third_party/abseil-cpp/absl/status/internal/status_internal.h21
-rw-r--r--third_party/abseil-cpp/absl/status/status.cc22
-rw-r--r--third_party/abseil-cpp/absl/status/status.h13
-rw-r--r--third_party/abseil-cpp/absl/status/status_test.cc12
-rw-r--r--third_party/abseil-cpp/absl/status/statusor.h2
-rw-r--r--third_party/abseil-cpp/absl/strings/CMakeLists.txt404
-rw-r--r--third_party/abseil-cpp/absl/strings/charconv.cc6
-rw-r--r--third_party/abseil-cpp/absl/strings/cord.cc551
-rw-r--r--third_party/abseil-cpp/absl/strings/cord.h186
-rw-r--r--third_party/abseil-cpp/absl/strings/cord_ring_reader_test.cc13
-rw-r--r--third_party/abseil-cpp/absl/strings/cord_ring_test.cc301
-rw-r--r--third_party/abseil-cpp/absl/strings/cord_test.cc73
-rw-r--r--third_party/abseil-cpp/absl/strings/cord_test_helpers.h62
-rw-r--r--third_party/abseil-cpp/absl/strings/cordz_test.cc466
-rw-r--r--third_party/abseil-cpp/absl/strings/cordz_test_helpers.h151
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc2
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cord_internal.h26
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.cc90
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.h58
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cord_rep_ring_reader.h4
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_functions.cc110
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_functions.h85
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_functions_test.cc149
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_handle.cc139
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_handle.h131
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_handle_test.cc265
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_info.cc436
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_info.h298
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_info_statistics_test.cc508
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_info_test.cc341
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.cc64
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.h97
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_sample_token_test.cc208
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_statistics.h84
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_update_scope.h71
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_update_scope_test.cc49
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker.h119
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker_test.cc143
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/resize_uninitialized.h23
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/resize_uninitialized_test.cc23
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/arg.h8
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc7
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/convert_test.cc3
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc12
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/extension.h54
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc135
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/parser.h40
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_format/parser_test.cc23
-rw-r--r--third_party/abseil-cpp/absl/strings/internal/str_split_internal.h68
-rw-r--r--third_party/abseil-cpp/absl/strings/numbers.h1
-rw-r--r--third_party/abseil-cpp/absl/strings/str_cat.cc8
-rw-r--r--third_party/abseil-cpp/absl/strings/str_split_test.cc22
-rw-r--r--third_party/abseil-cpp/absl/strings/string_view.cc37
-rw-r--r--third_party/abseil-cpp/absl/strings/string_view.h145
-rw-r--r--third_party/abseil-cpp/absl/strings/string_view_test.cc46
-rw-r--r--third_party/abseil-cpp/absl/synchronization/CMakeLists.txt14
-rw-r--r--third_party/abseil-cpp/absl/synchronization/blocking_counter.cc40
-rw-r--r--third_party/abseil-cpp/absl/synchronization/blocking_counter.h8
-rw-r--r--third_party/abseil-cpp/absl/synchronization/blocking_counter_benchmark.cc83
-rw-r--r--third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc12
-rw-r--r--third_party/abseil-cpp/absl/synchronization/internal/per_thread_sem_test.cc2
-rw-r--r--third_party/abseil-cpp/absl/synchronization/internal/waiter.cc2
-rw-r--r--third_party/abseil-cpp/absl/synchronization/mutex_test.cc4
-rw-r--r--third_party/abseil-cpp/absl/time/CMakeLists.txt4
-rw-r--r--third_party/abseil-cpp/absl/time/civil_time.cc4
-rw-r--r--third_party/abseil-cpp/absl/time/duration_test.cc13
-rw-r--r--third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc2
-rw-r--r--third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_format_test.cc4
-rw-r--r--third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup_test.cc12
-rw-r--r--third_party/abseil-cpp/absl/time/time.h16
-rw-r--r--third_party/abseil-cpp/absl/types/CMakeLists.txt26
-rw-r--r--third_party/abseil-cpp/absl/types/span.h4
-rw-r--r--third_party/abseil-cpp/absl/utility/CMakeLists.txt2
-rw-r--r--third_party/crc32c/CMakeLists.txt66
-rw-r--r--third_party/crc32c/config/crc32c/crc32c_config.h6
-rw-r--r--third_party/crc32c/src/include/crc32c/crc32c.h89
-rw-r--r--third_party/crc32c/src/src/crc32c.cc39
-rw-r--r--third_party/crc32c/src/src/crc32c_arm64.cc124
-rw-r--r--third_party/crc32c/src/src/crc32c_arm64.h25
-rw-r--r--third_party/crc32c/src/src/crc32c_arm64_linux_check.h48
-rw-r--r--third_party/crc32c/src/src/crc32c_arm64_unittest.cc24
-rw-r--r--third_party/crc32c/src/src/crc32c_benchmark.cc104
-rw-r--r--third_party/crc32c/src/src/crc32c_capi_unittest.c66
-rw-r--r--third_party/crc32c/src/src/crc32c_extend_unittests.h112
-rw-r--r--third_party/crc32c/src/src/crc32c_internal.h23
-rw-r--r--third_party/crc32c/src/src/crc32c_portable.cc351
-rw-r--r--third_party/crc32c/src/src/crc32c_portable_unittest.cc20
-rw-r--r--third_party/crc32c/src/src/crc32c_prefetch.h44
-rw-r--r--third_party/crc32c/src/src/crc32c_prefetch_unittest.cc9
-rw-r--r--third_party/crc32c/src/src/crc32c_read_le.h51
-rw-r--r--third_party/crc32c/src/src/crc32c_read_le_unittest.cc32
-rw-r--r--third_party/crc32c/src/src/crc32c_round_up.h34
-rw-r--r--third_party/crc32c/src/src/crc32c_round_up_unittest.cc84
-rw-r--r--third_party/crc32c/src/src/crc32c_sse42.cc256
-rw-r--r--third_party/crc32c/src/src/crc32c_sse42.h31
-rw-r--r--third_party/crc32c/src/src/crc32c_sse42_check.h48
-rw-r--r--third_party/crc32c/src/src/crc32c_sse42_unittest.cc24
-rw-r--r--third_party/crc32c/src/src/crc32c_test_main.cc20
-rw-r--r--third_party/crc32c/src/src/crc32c_unittest.cc129
-rw-r--r--third_party/libaom/CMakeLists.txt9
-rw-r--r--third_party/libaom/libaom_src.cmake5
-rw-r--r--third_party/libaom/source/config/config/aom_version.h12
-rw-r--r--third_party/libaom/source/config/ios/arm-neon/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/ios/arm-neon/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/ios/arm-neon/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/ios/arm64/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/ios/arm64/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/ios/arm64/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/linux/arm-neon/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/linux/arm-neon/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/linux/arm-neon/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/linux/arm/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/linux/arm/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/linux/arm/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/linux/arm64/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/linux/arm64/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/linux/arm64/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/linux/generic/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/linux/generic/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/linux/generic/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/linux/ia32/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/linux/ia32/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/linux/ia32/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/linux/x64/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/linux/x64/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/linux/x64/config/aom_dsp_rtcd.h20
-rw-r--r--third_party/libaom/source/config/win/arm64/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/win/arm64/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/win/arm64/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/win/ia32/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/win/ia32/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/win/ia32/config/aom_dsp_rtcd.h11
-rw-r--r--third_party/libaom/source/config/win/x64/config/aom_config.asm4
-rw-r--r--third_party/libaom/source/config/win/x64/config/aom_config.h4
-rw-r--r--third_party/libaom/source/config/win/x64/config/aom_dsp_rtcd.h20
-rw-r--r--third_party/libaom/source/libaom/aom/aom.h50
-rw-r--r--third_party/libaom/source/libaom/aom/aom_codec.h2
-rw-r--r--third_party/libaom/source/libaom/aom/aom_encoder.h22
-rw-r--r--third_party/libaom/source/libaom/aom/aom_external_partition.h331
-rw-r--r--third_party/libaom/source/libaom/aom/aomcx.h79
-rw-r--r--third_party/libaom/source/libaom/aom/aomdx.h3
-rw-r--r--third_party/libaom/source/libaom/aom/internal/aom_codec_internal.h2
-rw-r--r--third_party/libaom/source/libaom/aom/src/aom_image.c4
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/arm/intrapred_neon.c2
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/butteraugli.c78
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/butteraugli.h4
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/fastssim.c1
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/grain_table.c10
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/noise_model.c25
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/psnrhvs.c7
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/ssim.c140
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/ssim.h26
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/vmaf.c247
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/vmaf.h45
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/x86/highbd_sad_sse2.asm45
-rw-r--r--third_party/libaom/source/libaom/aom_dsp/x86/variance_impl_avx2.c16
-rw-r--r--third_party/libaom/source/libaom/apps/aomenc.c70
-rw-r--r--third_party/libaom/source/libaom/av1/arg_defs.c21
-rw-r--r--third_party/libaom/source/libaom/av1/arg_defs.h2
-rw-r--r--third_party/libaom/source/libaom/av1/av1_cx_iface.c723
-rw-r--r--third_party/libaom/source/libaom/av1/av1_dx_iface.c35
-rw-r--r--third_party/libaom/source/libaom/av1/common/alloccommon.c227
-rw-r--r--third_party/libaom/source/libaom/av1/common/alloccommon.h8
-rw-r--r--third_party/libaom/source/libaom/av1/common/av1_common_int.h77
-rw-r--r--third_party/libaom/source/libaom/av1/common/av1_loopfilter.c268
-rw-r--r--third_party/libaom/source/libaom/av1/common/av1_loopfilter.h16
-rw-r--r--third_party/libaom/source/libaom/av1/common/blockd.h45
-rw-r--r--third_party/libaom/source/libaom/av1/common/cdef.c333
-rw-r--r--third_party/libaom/source/libaom/av1/common/cdef.h59
-rw-r--r--third_party/libaom/source/libaom/av1/common/cdef_block.h4
-rw-r--r--third_party/libaom/source/libaom/av1/common/cfl.h2
-rw-r--r--third_party/libaom/source/libaom/av1/common/common.h2
-rw-r--r--third_party/libaom/source/libaom/av1/common/common_data.h9
-rw-r--r--third_party/libaom/source/libaom/av1/common/enums.h9
-rw-r--r--third_party/libaom/source/libaom/av1/common/loopfiltermask.c41
-rw-r--r--third_party/libaom/source/libaom/av1/common/mv.h2
-rw-r--r--third_party/libaom/source/libaom/av1/common/mvref_common.c34
-rw-r--r--third_party/libaom/source/libaom/av1/common/pred_common.h4
-rw-r--r--third_party/libaom/source/libaom/av1/common/reconinter.c30
-rw-r--r--third_party/libaom/source/libaom/av1/common/reconinter.h4
-rw-r--r--third_party/libaom/source/libaom/av1/common/reconintra.c139
-rw-r--r--third_party/libaom/source/libaom/av1/common/reconintra.h15
-rw-r--r--third_party/libaom/source/libaom/av1/common/resize.c22
-rw-r--r--third_party/libaom/source/libaom/av1/common/restoration.c24
-rw-r--r--third_party/libaom/source/libaom/av1/common/thread_common.c296
-rw-r--r--third_party/libaom/source/libaom/av1/common/thread_common.h53
-rw-r--r--third_party/libaom/source/libaom/av1/common/tile_common.c26
-rw-r--r--third_party/libaom/source/libaom/av1/decoder/decodeframe.c274
-rw-r--r--third_party/libaom/source/libaom/av1/decoder/decodemv.c40
-rw-r--r--third_party/libaom/source/libaom/av1/decoder/decoder.c57
-rw-r--r--third_party/libaom/source/libaom/av1/decoder/decoder.h30
-rw-r--r--third_party/libaom/source/libaom/av1/decoder/obu.c160
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/aq_complexity.c10
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.c114
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.h24
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/aq_variance.c23
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/av1_noise_estimate.c10
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/av1_quantize.c61
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/av1_quantize.h26
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/av1_temporal_denoiser.c14
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/bitstream.c726
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/bitstream.h87
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/block.h59
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/compound_type.c125
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/context_tree.c2
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/dwt.c16
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/dwt.h5
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/enc_enums.h2
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encode_strategy.c570
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encode_strategy.h24
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodeframe.c243
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.c222
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.h147
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodemb.c58
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodemb.h15
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodemv.c7
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodemv.h4
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encoder.c1480
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encoder.h643
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encoder_alloc.h68
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encoder_utils.c125
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encoder_utils.h76
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/encodetxb.c11
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/ethread.c528
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/ethread.h11
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/external_partition.c93
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/external_partition.h55
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/firstpass.c271
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/firstpass.h38
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/global_motion_facade.c12
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/gop_structure.c165
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/gop_structure.h5
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.c24
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.h9
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/interp_search.c27
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/interp_search.h12
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/intra_mode_search.c648
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/intra_mode_search.h30
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/intra_mode_search_utils.h259
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/level.c14
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/level.h2
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/mcomp.c29
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/mcomp.h2
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/motion_search_facade.c100
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/motion_search_facade.h11
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/mv_prec.c12
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/mv_prec.h4
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/nonrd_pickmode.c119
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/optical_flow.c11
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/palette.c101
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/palette.h7
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/partition_search.c654
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/partition_search.h15
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/partition_strategy.c671
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/partition_strategy.h134
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/pass2_strategy.c1342
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/pickcdef.c38
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/pickcdef.h19
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/picklpf.c49
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/pickrst.c38
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/ratectrl.c571
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/ratectrl.h169
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/rc_utils.h55
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/rd.c104
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/rd.h49
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/rdopt.c678
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/rdopt.h6
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/rdopt_utils.h8
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/segmentation.c18
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.c3
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.h6
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/speed_features.c274
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/speed_features.h225
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/superres_scale.c39
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/svc_layercontext.c199
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/svc_layercontext.h10
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/temporal_filter.c94
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/temporal_filter.h13
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tokenize.c12
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tokenize.h4
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tpl_model.c607
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tpl_model.h151
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.c125
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.h6
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tune_vmaf.c473
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tune_vmaf.h4
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/tx_search.c129
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/txb_rdopt.c12
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/txb_rdopt.h8
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/var_based_part.c98
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_avx2.c11
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_sse2.c11
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_avx2.c11
-rw-r--r--third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_sse2.c11
-rw-r--r--third_party/libaom/source/libaom/common/args.c1
-rw-r--r--third_party/libaom/source/libaom/examples/aom_cx_set_ref.c10
-rw-r--r--third_party/libaom/source/libaom/examples/set_maps.c12
-rw-r--r--third_party/libaom/source/libaom/examples/simple_encoder.c11
-rw-r--r--third_party/libaom/source/libaom/examples/svc_encoder_rtc.c191
-rw-r--r--third_party/libaom/source/libaom/test/active_map_test.cc11
-rw-r--r--third_party/libaom/source/libaom/test/altref_test.cc4
-rw-r--r--third_party/libaom/source/libaom/test/aom_image_test.cc31
-rw-r--r--third_party/libaom/source/libaom/test/aq_segment_test.cc32
-rw-r--r--third_party/libaom/source/libaom/test/arf_freq_test.cc10
-rw-r--r--third_party/libaom/source/libaom/test/av1_convolve_scale_test.cc4
-rw-r--r--third_party/libaom/source/libaom/test/av1_convolve_test.cc4
-rw-r--r--third_party/libaom/source/libaom/test/av1_external_partition_test.cc309
-rw-r--r--third_party/libaom/source/libaom/test/av1_fwd_txfm2d_test.cc78
-rw-r--r--third_party/libaom/source/libaom/test/av1_highbd_iht_test.cc6
-rw-r--r--third_party/libaom/source/libaom/test/av1_key_value_api_test.cc11
-rw-r--r--third_party/libaom/source/libaom/test/av1_quantize_test.cc27
-rw-r--r--third_party/libaom/source/libaom/test/block_test.cc12
-rw-r--r--third_party/libaom/source/libaom/test/coding_path_sync.cc4
-rw-r--r--third_party/libaom/source/libaom/test/comp_avg_pred_test.h34
-rw-r--r--third_party/libaom/source/libaom/test/cpu_used_firstpass_test.cc113
-rw-r--r--third_party/libaom/source/libaom/test/datarate_test.cc4
-rw-r--r--third_party/libaom/source/libaom/test/datarate_test.h1
-rw-r--r--third_party/libaom/source/libaom/test/encode_api_test.cc12
-rw-r--r--third_party/libaom/source/libaom/test/encode_small_width_height_test.cc15
-rw-r--r--third_party/libaom/source/libaom/test/encode_test_driver.cc34
-rw-r--r--third_party/libaom/source/libaom/test/encode_test_driver.h11
-rw-r--r--third_party/libaom/source/libaom/test/encodemb_test.cc245
-rw-r--r--third_party/libaom/source/libaom/test/end_to_end_psnr_test.cc220
-rw-r--r--third_party/libaom/source/libaom/test/end_to_end_ssim_test.cc189
-rw-r--r--third_party/libaom/source/libaom/test/error_resilience_test.cc4
-rw-r--r--third_party/libaom/source/libaom/test/ethread_test.cc91
-rw-r--r--third_party/libaom/source/libaom/test/external_frame_buffer_test.cc30
-rw-r--r--third_party/libaom/source/libaom/test/film_grain_table_test.cc14
-rw-r--r--third_party/libaom/source/libaom/test/frame_size_tests.cc2
-rw-r--r--third_party/libaom/source/libaom/test/hbd_metrics_test.cc2
-rw-r--r--third_party/libaom/source/libaom/test/horz_superres_test.cc2
-rw-r--r--third_party/libaom/source/libaom/test/intrabc_test.cc2
-rw-r--r--third_party/libaom/source/libaom/test/invalid_file_test.cc1
-rw-r--r--third_party/libaom/source/libaom/test/kf_test.cc50
-rw-r--r--third_party/libaom/source/libaom/test/lossless_test.cc36
-rw-r--r--third_party/libaom/source/libaom/test/metadata_test.cc4
-rw-r--r--third_party/libaom/source/libaom/test/monochrome_test.cc82
-rw-r--r--third_party/libaom/source/libaom/test/noise_model_test.cc6
-rw-r--r--third_party/libaom/source/libaom/test/quant_test.cc18
-rw-r--r--third_party/libaom/source/libaom/test/quantize_func_test.cc1
-rw-r--r--third_party/libaom/source/libaom/test/rd_test.cc87
-rw-r--r--third_party/libaom/source/libaom/test/resize_test.cc40
-rw-r--r--third_party/libaom/source/libaom/test/rt_end_to_end_test.cc5
-rw-r--r--third_party/libaom/source/libaom/test/sad_test.cc29
-rw-r--r--third_party/libaom/source/libaom/test/sharpness_test.cc143
-rw-r--r--third_party/libaom/source/libaom/test/svc_datarate_test.cc59
-rw-r--r--third_party/libaom/source/libaom/test/tile_config_test.cc17
-rw-r--r--third_party/libaom/source/libaom/test/time_stamp_test.cc9
-rw-r--r--third_party/libaom/source/libaom/test/tpl_model_test.cc232
-rw-r--r--third_party/libaom/source/libaom/test/variance_test.cc4
-rw-r--r--third_party/libaom/source/libaom/test/warp_filter_test_util.cc16
-rw-r--r--third_party/libaom/source/libaom/third_party/fastfeat/fast.c30
-rw-r--r--third_party/libaom/source/libaom/third_party/fastfeat/fast.h30
-rw-r--r--third_party/libaom/source/libaom/third_party/fastfeat/fast_9.c30
-rw-r--r--third_party/libaom/source/libaom/third_party/fastfeat/nonmax.c30
-rw-r--r--third_party/libaom/source/libaom/third_party/vector/vector.c2
-rw-r--r--third_party/libaom/source/libaom/third_party/vector/vector.h2
-rw-r--r--third_party/libaom/source/libaom/tools/auto_refactor/c_files/decl_status_code.c11
-rw-r--r--third_party/libaom/source/libaom/tools/auto_refactor/c_files/func_in_out.c11
-rw-r--r--third_party/libaom/source/libaom/tools/auto_refactor/c_files/global_variable.c11
-rw-r--r--third_party/libaom/source/libaom/tools/auto_refactor/c_files/parse_lvalue.c11
-rw-r--r--third_party/libaom/source/libaom/tools/auto_refactor/c_files/simple_code.c11
-rw-r--r--third_party/libaom/source/libaom/tools/auto_refactor/c_files/struct_code.c11
-rw-r--r--third_party/libyuv/CMakeLists.txt4
-rw-r--r--third_party/libyuv/include/libyuv/compare_row.h10
-rw-r--r--third_party/libyuv/include/libyuv/convert.h13
-rw-r--r--third_party/libyuv/include/libyuv/convert_argb.h300
-rw-r--r--third_party/libyuv/include/libyuv/convert_from_argb.h24
-rw-r--r--third_party/libyuv/include/libyuv/planar_functions.h67
-rw-r--r--third_party/libyuv/include/libyuv/rotate_row.h5
-rw-r--r--third_party/libyuv/include/libyuv/row.h965
-rw-r--r--third_party/libyuv/include/libyuv/scale_row.h71
-rw-r--r--third_party/libyuv/include/libyuv/version.h2
-rw-r--r--third_party/libyuv/include/libyuv/video_common.h6
-rw-r--r--third_party/libyuv/source/compare_common.cc30
-rw-r--r--third_party/libyuv/source/compare_gcc.cc3
-rw-r--r--third_party/libyuv/source/compare_win.cc14
-rw-r--r--third_party/libyuv/source/convert.cc272
-rw-r--r--third_party/libyuv/source/convert_argb.cc923
-rw-r--r--third_party/libyuv/source/convert_from_argb.cc118
-rw-r--r--third_party/libyuv/source/planar_functions.cc900
-rw-r--r--third_party/libyuv/source/rotate_gcc.cc3
-rw-r--r--third_party/libyuv/source/rotate_win.cc5
-rw-r--r--third_party/libyuv/source/row_any.cc391
-rw-r--r--third_party/libyuv/source/row_common.cc1230
-rw-r--r--third_party/libyuv/source/row_gcc.cc1805
-rw-r--r--third_party/libyuv/source/row_mmi.cc436
-rw-r--r--third_party/libyuv/source/row_msa.cc186
-rw-r--r--third_party/libyuv/source/row_neon.cc1068
-rw-r--r--third_party/libyuv/source/row_neon64.cc1521
-rw-r--r--third_party/libyuv/source/row_win.cc736
-rw-r--r--third_party/libyuv/source/scale.cc130
-rw-r--r--third_party/libyuv/source/scale_any.cc70
-rw-r--r--third_party/libyuv/source/scale_gcc.cc346
-rw-r--r--third_party/libyuv/source/scale_neon.cc102
-rw-r--r--third_party/libyuv/source/scale_neon64.cc108
-rw-r--r--third_party/libyuv/source/scale_uv.cc6
-rw-r--r--third_party/libyuv/source/scale_win.cc5
-rw-r--r--third_party/libyuv/unit_test/color_test.cc31
-rw-r--r--third_party/libyuv/unit_test/compare_test.cc2
-rw-r--r--third_party/libyuv/unit_test/convert_test.cc1044
-rw-r--r--third_party/libyuv/unit_test/cpu_test.cc100
-rw-r--r--third_party/libyuv/unit_test/planar_test.cc399
-rw-r--r--third_party/libyuv/unit_test/rotate_argb_test.cc16
-rw-r--r--third_party/libyuv/unit_test/rotate_test.cc48
-rw-r--r--third_party/libyuv/unit_test/scale_argb_test.cc51
-rw-r--r--third_party/libyuv/unit_test/scale_test.cc280
-rw-r--r--third_party/libyuv/unit_test/scale_uv_test.cc2
-rw-r--r--third_party/libyuv/unit_test/unit_test.cc8
-rw-r--r--third_party/libyuv/unit_test/unit_test.h2
-rw-r--r--third_party/libyuv/unit_test/video_common_test.cc4
-rw-r--r--third_party/opus/src/celt/celt_lpc.c8
-rw-r--r--third_party/opus/src/src/opus_encoder.c8
-rw-r--r--third_party/pffft/pffft_unittest.cc2
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_constants.h4
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_input.c545
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_os_userspace.h2
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_pcb.c8
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.c18
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.h4
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_userspace.c116
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_usrreq.c112
-rwxr-xr-xthird_party/usrsctp/usrsctplib/usrsctplib/netinet/sctputil.c9
-rw-r--r--third_party/usrsctp/usrsctplib/usrsctplib/netinet6/sctp6_usrreq.c62
-rwxr-xr-xtools_webrtc/get_landmines.py1
-rwxr-xr-xtools_webrtc/libs/generate_licenses.py15
-rwxr-xr-xtools_webrtc/libs/generate_licenses_test.py39
-rw-r--r--tools_webrtc/mb/mb_config.pyl11
-rw-r--r--tools_webrtc/msan/suppressions.txt4
-rw-r--r--tools_webrtc/ubsan/suppressions.txt4
-rw-r--r--tools_webrtc/ubsan/vptr_suppressions.txt4
-rw-r--r--video/BUILD.gn2
-rw-r--r--video/adaptation/balanced_constraint.cc8
-rw-r--r--video/adaptation/encode_usage_resource.cc2
-rw-r--r--video/adaptation/overuse_frame_detector_unittest.cc5
-rw-r--r--video/adaptation/pixel_limit_resource.cc4
-rw-r--r--video/adaptation/quality_scaler_resource.cc2
-rw-r--r--video/adaptation/video_stream_encoder_resource_manager.cc17
-rw-r--r--video/adaptation/video_stream_encoder_resource_manager.h4
-rw-r--r--video/buffered_frame_decryptor_unittest.cc2
-rw-r--r--video/encoder_rtcp_feedback.cc62
-rw-r--r--video/encoder_rtcp_feedback.h32
-rw-r--r--video/encoder_rtcp_feedback_unittest.cc3
-rw-r--r--video/end_to_end_tests/config_tests.cc2
-rw-r--r--video/end_to_end_tests/fec_tests.cc2
-rw-r--r--video/end_to_end_tests/network_state_tests.cc46
-rw-r--r--video/end_to_end_tests/resolution_bitrate_limits_tests.cc7
-rw-r--r--video/end_to_end_tests/rtp_rtcp_tests.cc11
-rw-r--r--video/end_to_end_tests/ssrc_tests.cc15
-rw-r--r--video/end_to_end_tests/stats_tests.cc28
-rw-r--r--video/frame_encode_metadata_writer.cc2
-rw-r--r--video/g3doc/adaptation.md1
-rw-r--r--video/quality_scaling_tests.cc460
-rw-r--r--video/receive_statistics_proxy2.cc35
-rw-r--r--video/report_block_stats.cc13
-rw-r--r--video/report_block_stats.h10
-rw-r--r--video/report_block_stats_unittest.cc54
-rw-r--r--video/rtp_video_stream_receiver.cc231
-rw-r--r--video/rtp_video_stream_receiver.h28
-rw-r--r--video/rtp_video_stream_receiver2.cc153
-rw-r--r--video/rtp_video_stream_receiver2.h120
-rw-r--r--video/rtp_video_stream_receiver2_unittest.cc19
-rw-r--r--video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc38
-rw-r--r--video/rtp_video_stream_receiver_unittest.cc7
-rw-r--r--video/send_statistics_proxy.cc19
-rw-r--r--video/send_statistics_proxy.h4
-rw-r--r--video/send_statistics_proxy_unittest.cc86
-rw-r--r--video/video_analyzer.cc22
-rw-r--r--video/video_analyzer.h2
-rw-r--r--video/video_quality_test.cc30
-rw-r--r--video/video_receive_stream.cc10
-rw-r--r--video/video_receive_stream.h17
-rw-r--r--video/video_receive_stream2.cc155
-rw-r--r--video/video_receive_stream2.h64
-rw-r--r--video/video_receive_stream2_unittest.cc83
-rw-r--r--video/video_receive_stream_unittest.cc20
-rw-r--r--video/video_send_stream.cc175
-rw-r--r--video/video_send_stream.h19
-rw-r--r--video/video_send_stream_impl.cc296
-rw-r--r--video/video_send_stream_impl.h84
-rw-r--r--video/video_send_stream_impl_unittest.cc237
-rw-r--r--video/video_send_stream_tests.cc85
-rw-r--r--video/video_stream_encoder.cc12
-rw-r--r--video/video_stream_encoder_unittest.cc488
-rw-r--r--webrtc.gni9
-rw-r--r--webrtc_lib_link_test.cc14
-rw-r--r--windows_x86_64.cmake545
1494 files changed, 76418 insertions, 27329 deletions
diff --git a/.gn b/.gn
index 9c43afd2ca..01fe1b87b1 100644
--- a/.gn
+++ b/.gn
@@ -20,32 +20,11 @@ script_executable = "python3"
# in the source tree, e.g. for third party source trees.
secondary_source = "//build/secondary/"
-# These are the targets to check headers for by default. The files in targets
-# matching these patterns (see "gn help label_pattern" for format) will have
+# These are the targets to skip header checking by default. The files in targets
+# matching these patterns (see "gn help label_pattern" for format) will not have
# their includes checked for proper dependencies when you run either
# "gn check" or "gn gen --check".
-check_targets = [
- "//api/*",
- "//audio/*",
- "//backup/*",
- "//call/*",
- "//common_audio/*",
- "//common_video/*",
- "//examples/*",
- "//logging/*",
- "//media/*",
- "//modules/*",
- "//p2p/*",
- "//pc/*",
- "//rtc_base/*",
- "//rtc_tools/*",
- "//sdk/*",
- "//stats/*",
- "//system_wrappers/*",
- "//test/*",
- "//video/*",
- "//third_party/libyuv/*",
-]
+no_check_targets = [ "//third_party/icu/*" ]
# This is the list of GN files that run exec_script. This whitelist exists
# to force additional review for new uses of exec_script, which is strongly
diff --git a/.vpython b/.vpython
index 92c9c51346..df838dccf8 100644
--- a/.vpython
+++ b/.vpython
@@ -52,7 +52,7 @@ wheel: <
wheel: <
name: "infra/python/wheels/six-py2_py3"
- version: "version:1.10.0"
+ version: "version:1.15.0"
>
wheel: <
name: "infra/python/wheels/pbr-py2_py3"
diff --git a/AUTHORS b/AUTHORS
index 64488bfc62..b4d4100c6a 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -53,6 +53,7 @@ James H. Brown <jbrown@burgoyne.com>
Jan Grulich <grulja@gmail.com>
Jan Kalab <pitlicek@gmail.com>
Jens Nielsen <jens.nielsen@berotec.se>
+Jesús Leganés-Combarro <piranna@gmail.com>
Jiawei Ou <jiawei.ou@gmail.com>
Jie Mao <maojie0924@gmail.com>
Jiwon Kim <jwkim0000@gmail.com>
diff --git a/BUILD.gn b/BUILD.gn
index 75c54b4eed..7c89521bb1 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -354,6 +354,13 @@ config("common_config") {
# recognize.
cflags += [ "-Wunused-lambda-capture" ]
}
+
+ if (use_xcode_clang) {
+ # This may be removed if the clang version in xcode > 12.4 includes the
+ # fix https://reviews.llvm.org/D73007.
+ # https://bugs.llvm.org/show_bug.cgi?id=44556
+ cflags += [ "-Wno-range-loop-analysis" ]
+ }
}
if (is_win && !is_clang) {
@@ -513,6 +520,10 @@ if (!build_with_chromium) {
rtc_executable("webrtc_lib_link_test") {
testonly = true
+ # This target is only used to verify that linking succeeds, so do not check
+ # its dependencies with gn check.
+ check_includes = false # no-presubmit-check TODO(bugs.webrtc.org/12785)
+
sources = [ "webrtc_lib_link_test.cc" ]
deps = [
# NOTE: Don't add deps here. If this test fails to link, it means you
diff --git a/CMakeLists.txt b/CMakeLists.txt
index b25464297e..68eaca4f31 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -3,11 +3,12 @@ set(WEBRTC_ROOT ${CMAKE_CURRENT_LIST_DIR})
# Extract the test files needed for the unit tests.
message(STATUS "Extracting webrtc test resources")
if(${CMAKE_VERSION} VERSION_LESS "3.18.0")
- execute_process(
- COMMAND unzip -n "${WEBRTC_ROOT}/test_resources.zip" -d ${CMAKE_CURRENT_BINARY_DIR}
- WORKING_DIRECTORY ${WEBRTC_ROOT})
+ execute_process(
+ COMMAND unzip -n "${WEBRTC_ROOT}/test_resources.zip" -d
+ ${CMAKE_CURRENT_BINARY_DIR} WORKING_DIRECTORY ${WEBRTC_ROOT})
else()
- file(ARCHIVE_EXTRACT INPUT ${WEBRTC_ROOT}/test_resources.zip DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+ file(ARCHIVE_EXTRACT INPUT ${WEBRTC_ROOT}/test_resources.zip DESTINATION
+ ${CMAKE_CURRENT_BINARY_DIR})
endif()
add_library(
@@ -35,8 +36,8 @@ target_include_directories(webrtc_fft_size_256 PUBLIC ${WEBRTC_ROOT})
add_library(webrtc_fft ${WEBRTC_ROOT}/modules/third_party/fft/fft.c)
target_include_directories(webrtc_fft PUBLIC ${WEBRTC_ROOT})
-android_add_library(TARGET emulator_test_overrides NODISTRIBUTE
- SRC emu_file_utils_override.cc)
+android_add_library(TARGET emulator_test_overrides NODISTRIBUTE SRC
+ emu_file_utils_override.cc)
target_link_libraries(
emulator_test_overrides
PRIVATE android-emu-base webrtc_test_fileutils_override_api absl::optional)
@@ -59,6 +60,7 @@ add_subdirectory(third_party/usrsctp)
add_subdirectory(third_party/libsrtp)
add_subdirectory(third_party/catapult/tracing)
add_subdirectory(third_party/libyuv)
+add_subdirectory(third_party/crc32c)
if(DARWIN_X86_64)
include(darwin_x86_64.cmake)
@@ -79,12 +81,16 @@ endif()
# We manually register the tests, as some of them can be flaky.
set(WEBRTC_TESTS
webrtc_common_audio_common_audio_unittests
- webrtc_common_video_common_video_unittests
- webrtc_media_rtc_media_unittests
webrtc_modules_audio_coding_audio_decoder_unittests
# webrtc_pc_peerconnection_unittests These are very expensive, and have
- # some timing flakiness.
- webrtc_pc_rtc_pc_unittests)
+ # some timing flakiness
+)
+
+# Fewer of the tests are supported on Arm64.
+if(NOT LINUX_AARCH64)
+ list(APPEND WEBRTC_TESTS webrtc_common_video_common_video_unittests
+ webrtc_media_rtc_media_unittests webrtc_pc_rtc_pc_unittests)
+endif()
if(OPTION_ASAN)
message("Disabling webrtc tests in asan, they are too expensive/slow to run.")
@@ -92,8 +98,10 @@ if(OPTION_ASAN)
endif()
foreach(WEBRTC_TEST ${WEBRTC_TESTS})
- add_test(NAME ${WEBRTC_TEST} COMMAND $<TARGET_FILE:${WEBRTC_TEST}>
- WORKING_DIRECTORY $<TARGET_FILE_DIR:${WEBRTC_TEST}>)
+ add_test(
+ NAME ${WEBRTC_TEST}
+ COMMAND $<TARGET_FILE:${WEBRTC_TEST}>
+ WORKING_DIRECTORY $<TARGET_FILE_DIR:${WEBRTC_TEST}>)
android_install_as_debug_info(${WEBRTC_TEST})
android_add_default_test_properties(${WEBRTC_TEST})
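The CMakeLists.txt changes above wire the new third_party/crc32c dependency into the build. For reference, a minimal usage sketch, assuming the upstream google/crc32c C++ API (crc32c::Crc32c and crc32c::Extend declared in crc32c/crc32c.h); the buffer contents are arbitrary:

// Illustrative sketch only -- not code from this change.
#include <cstdint>
#include <cstdio>

#include "crc32c/crc32c.h"

int main() {
  const std::uint8_t buffer[] = {'w', 'e', 'b', 'r', 't', 'c'};

  // One-shot checksum over the whole buffer.
  const std::uint32_t crc = crc32c::Crc32c(buffer, sizeof(buffer));

  // Incremental checksum: extend a previous CRC with more data.
  std::uint32_t incremental = crc32c::Extend(0, buffer, 3);
  incremental = crc32c::Extend(incremental, buffer + 3, sizeof(buffer) - 3);

  std::printf("crc32c=%08x incremental=%08x\n", static_cast<unsigned>(crc),
              static_cast<unsigned>(incremental));
  return crc == incremental ? 0 : 1;
}

Crc32c(data, n) is defined as Extend(0, data, n), so the two results above agree.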
diff --git a/DEPS b/DEPS
index 071900a23e..e15eca8bb5 100644
--- a/DEPS
+++ b/DEPS
@@ -1,43 +1,49 @@
# This file contains dependencies for WebRTC.
gclient_gn_args_file = 'src/build/config/gclient_args.gni'
+gclient_gn_args = [
+ 'generate_location_tags',
+]
vars = {
# By default, we should check out everything needed to run on the main
# chromium waterfalls. More info at: crbug.com/570091.
'checkout_configuration': 'default',
'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"',
- 'chromium_revision': '1a13f114996fdd98a2db9da1c7872831b58a140a',
+ 'chromium_revision': '6f7025c98c6df9abfa129b61b865193f2f80be09',
+
+ # Keep the Chromium default of generating location tags.
+ 'generate_location_tags': True,
}
deps = {
# TODO(kjellander): Move this to be Android-only once the libevent dependency
# in base/third_party/libevent is solved.
'src/base':
- 'https://chromium.googlesource.com/chromium/src/base@5700691dd44e945e14b90112aa73f29405531ef2',
+ 'https://chromium.googlesource.com/chromium/src/base@39aab38bd4f39695b73376432d3763a8ad59bee2',
'src/build':
- 'https://chromium.googlesource.com/chromium/src/build@5526928992d6f14e035bad6925949232f757d695',
+ 'https://chromium.googlesource.com/chromium/src/build@a6379d4f30dd17a8268e01142ceb4a76a7f7f59a',
'src/buildtools':
- 'https://chromium.googlesource.com/chromium/src/buildtools@5dbd89c9d9c0b0ff47cefdc2bc421b8c9a1c5a21',
+ 'https://chromium.googlesource.com/chromium/src/buildtools@466954eda391b83e90f8b925767e6152318d690b',
# Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC.
'src/examples/androidtests/third_party/gradle': {
'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@f2d1fb54a951d8b11d25748e4711bec8d128d7e3',
'condition': 'checkout_android',
},
'src/ios': {
- 'url': 'https://chromium.googlesource.com/chromium/src/ios@4eb37acafe33c1490f5bafb054c220d3dc7ea733',
+ 'url': 'https://chromium.googlesource.com/chromium/src/ios@9e4ba8b69f084a5e8bee79a5dbb3beebf89df8e7',
'condition': 'checkout_ios',
},
'src/testing':
- 'https://chromium.googlesource.com/chromium/src/testing@26f265efe456a8a3b591b2224a7307d4d5f69944',
+ 'https://chromium.googlesource.com/chromium/src/testing@941fd54fffb86923406d82cd6687df52801451ca',
'src/third_party':
- 'https://chromium.googlesource.com/chromium/src/third_party@e1c6211d47658dea8b8d09ccc7840491702ddf1f',
+ 'https://chromium.googlesource.com/chromium/src/third_party@57d2a56d1427d57b185d39d6bf0a1270a6979a34',
'src/buildtools/linux64': {
'packages': [
{
'package': 'gn/gn/linux-amd64',
- 'version': 'git_revision:dba01723a441c358d843a575cb7720d54ddcdf92',
+ 'version': 'git_revision:d2dce7523036ed7c55fbb8d2f272ab3720d5cf34',
}
],
'dep_type': 'cipd',
@@ -47,7 +53,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/mac-${{arch}}',
- 'version': 'git_revision:dba01723a441c358d843a575cb7720d54ddcdf92',
+ 'version': 'git_revision:d2dce7523036ed7c55fbb8d2f272ab3720d5cf34',
}
],
'dep_type': 'cipd',
@@ -57,7 +63,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/windows-amd64',
- 'version': 'git_revision:dba01723a441c358d843a575cb7720d54ddcdf92',
+ 'version': 'git_revision:d2dce7523036ed7c55fbb8d2f272ab3720d5cf34',
}
],
'dep_type': 'cipd',
@@ -69,9 +75,9 @@ deps = {
'src/buildtools/third_party/libc++/trunk':
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@8fa87946779682841e21e2da977eccfb6cb3bded',
'src/buildtools/third_party/libc++abi/trunk':
- 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@d0f33885a2ffa7d5af74af6065b60eb48e3c70f5',
+ 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@f4328ad7c0d8242d36cb5bea530925f9fea34248',
'src/buildtools/third_party/libunwind/trunk':
- 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@08f35c8514a74817103121def05351186830d4b7',
+ 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@a38ef11ab68f975bb4fe5356aff5f0e3dabe9f23',
'src/tools/clang/dsymutil': {
'packages': [
@@ -118,20 +124,22 @@ deps = {
},
'src/third_party/boringssl/src':
- 'https://boringssl.googlesource.com/boringssl.git@49f0329110a1d93a5febc2bceceedc655d995420',
+ 'https://boringssl.googlesource.com/boringssl.git@7fffa4636cf7647daf981914286d5d32f1beab6d',
'src/third_party/breakpad/breakpad':
- 'https://chromium.googlesource.com/breakpad/breakpad.git@3bea2815bfea6e641d50aad15bde2c494ef8f34b',
+ 'https://chromium.googlesource.com/breakpad/breakpad.git@c484031f1f199ee53567241426efffee49008f82',
'src/third_party/catapult':
- 'https://chromium.googlesource.com/catapult.git@dafcf4aa951b0019da58238eea62eb2985669641',
+ 'https://chromium.googlesource.com/catapult.git@96bc38d7d5073ed80b2cb41325d07da67051f776',
'src/third_party/ced/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5',
},
'src/third_party/colorama/src':
'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8',
+ 'src/third_party/crc32c/src':
+ 'https://chromium.googlesource.com/external/github.com/google/crc32c.git@5998f8451548244de8cde7fab387a550e7c4497d',
'src/third_party/depot_tools':
- 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@9955936084dc81bd94ed54d2088918391aca38a8',
+ 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@74ef838a40d8fecac485416f1a1fb07f9aeb6fd1',
'src/third_party/ffmpeg':
- 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@280d5fd0df8b4284ad040bd29deb3241bd6dfc4a',
+ 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@7e1d53a09f9ae4d8909bc7d426368887342eca01',
'src/third_party/findbugs': {
'url': 'https://chromium.googlesource.com/chromium/deps/findbugs.git@4275d9ac8610db6b1bc9a5e887f97e41b33fac67',
'condition': 'checkout_android',
@@ -142,9 +150,9 @@ deps = {
'condition': 'checkout_linux',
},
'src/third_party/freetype/src':
- 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@b9b74f9f78565513086050acdda98ddc23cf605e',
+ 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@c6fcd61228ea349e76aa4f09442c431308da3262',
'src/third_party/harfbuzz-ng/src':
- 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@4c34478b28497acfce02b8a544fed4ae20526336',
+ 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@4811e8f5d76ef528b4cec00f241cc4ab8110db30',
'src/third_party/google_benchmark/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@ffe1342eb2faa7d2e7c35b4db2ccf99fab81ec20',
},
@@ -162,9 +170,9 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/googletest/src':
- 'https://chromium.googlesource.com/external/github.com/google/googletest.git@486a4a6be5565e89ae0d72f532c441423110bc9e',
+ 'https://chromium.googlesource.com/external/github.com/google/googletest.git@e2239ee6043f73722e7aa812a459f54a28552929',
'src/third_party/icu': {
- 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@81d656878ec611cb0b42d52c82e9dae93920d9ba',
+ 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@a0718d4f121727e30b8d52c7a189ebf5ab52421f',
},
'src/third_party/jdk': {
'packages': [
@@ -196,21 +204,21 @@ deps = {
'src/third_party/libFuzzer/src':
'https://chromium.googlesource.com/chromium/llvm-project/compiler-rt/lib/fuzzer.git@debe7d2d1982e540fbd6bd78604bf001753f9e74',
'src/third_party/libjpeg_turbo':
- 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@7b4981b6500ccba10733c352b9ed2dad14ce3c73',
+ 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@b7bef8c05b7cdb1a038ae271a2c2b6647af4c879',
'src/third_party/libsrtp':
- 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@7990ca64c616b150a9cb4714601c4a3b0c84fe91',
+ 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@5b7c744eb8310250ccc534f3f86a2015b3887a0a',
'src/third_party/libaom/source/libaom':
- 'https://aomedia.googlesource.com/aom.git@6c93db7ff63430d2e5dcdfc42e84e3a0514da608',
+ 'https://aomedia.googlesource.com/aom.git@12287adee94fc3b1f5349d3f4bd85cea4e57f62b',
'src/third_party/libunwindstack': {
'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@aab2c874731396232739889ebe8d9e122b9bc448',
'condition': 'checkout_android',
},
'src/third_party/perfetto':
- 'https://android.googlesource.com/platform/external/perfetto.git@2e2cb5197de3c0e92d931e37a13ac53604d8f787',
+ 'https://android.googlesource.com/platform/external/perfetto.git@d57b60b2a95fa369db906eaaeadc4623dab6eb90',
'src/third_party/libvpx/source/libvpx':
'https://chromium.googlesource.com/webm/libvpx.git@61edec1efbea1c02d71857e2aff9426d9cd2df4e',
'src/third_party/libyuv':
- 'https://chromium.googlesource.com/libyuv/libyuv.git@64994843e652443df2d5201c6ae3fb725097360f',
+ 'https://chromium.googlesource.com/libyuv/libyuv.git@49ebc996aa8c4bdf89c1b5ea461eb677234c61cc',
'src/third_party/lss': {
'url': 'https://chromium.googlesource.com/linux-syscall-support.git@92a65a8f5d705d1928874420c8d0d15bde8c89e5',
'condition': 'checkout_android or checkout_linux',
@@ -222,7 +230,7 @@ deps = {
# Used by boringssl.
'src/third_party/nasm': {
- 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@19f3fad68da99277b2882939d3b2fa4c4b8d51d9'
+ 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@e9be5fd6d723a435ca2da162f9e0ffcb688747c1'
},
'src/third_party/openh264/src':
@@ -231,7 +239,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/r8',
- 'version': 'wupRO-hEg2hxgKU9FC5HKY88dMpfEpdimjxcgekXH8oC',
+ 'version': 'gXyBDv_fM87KnLcxvF5AGV5lwnm-JXIALYH8zrzdoaMC',
},
],
'condition': 'checkout_android',
@@ -256,14 +264,14 @@ deps = {
'condition': 'checkout_android',
},
'src/third_party/usrsctp/usrsctplib':
- 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@70d42ae95a1de83bd317c8cc9503f894671d1392',
+ 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@22ba62ffe79c3881581ab430368bf3764d9533eb',
# Dependency used by libjpeg-turbo.
'src/third_party/yasm/binaries': {
'url': 'https://chromium.googlesource.com/chromium/deps/yasm/binaries.git@52f9b3f4b0aa06da24ef8b123058bb61ee468881',
'condition': 'checkout_win',
},
'src/tools':
- 'https://chromium.googlesource.com/chromium/src/tools@bbda6274f35cac6622c0f58b063ae2073c5bad89',
+ 'https://chromium.googlesource.com/chromium/src/tools@680815db180add328d56c772c251441ce3b358fa',
'src/tools/swarming_client':
'https://chromium.googlesource.com/infra/luci/client-py.git@a32a1607f6093d338f756c7e7c7b4333b0c50c9c',
@@ -364,7 +372,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/androidx',
- 'version': 'eXwYVabVnQThhcPnVG-yr1yweogZnSLAmAcy_kKQscsC',
+ 'version': 'X9QRQdySUF6AfnqQBWGClKiBkrEs0dsHy1AorJ0Ekt8C',
},
],
'condition': 'checkout_android',
@@ -458,31 +466,26 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/turbine',
- 'version': '_iPtB_ThhxlMOt2TsYqVppwriEEn0mp-NUNRwDwYLUAC',
+ 'version': 'Om6yIEXgJxuqghErK29h9RcMH6VaymMbxwScwXmcN6EC',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
- 'src/third_party/turbine/src': {
- 'url': 'https://chromium.googlesource.com/external/github.com/google/turbine.git' + '@' + '3c31e67ae25b5e43713fd868e3a9b535ff6298af',
- 'condition': 'checkout_android',
- },
-
'src/tools/luci-go': {
'packages': [
{
'package': 'infra/tools/luci/isolate/${{platform}}',
- 'version': 'git_revision:99ac75773c6241b6ddf82ade4c54553faa084530',
+ 'version': 'git_revision:2adc53281f4a72ecb71e84a8af5acc0fced04cc9',
},
{
'package': 'infra/tools/luci/isolated/${{platform}}',
- 'version': 'git_revision:99ac75773c6241b6ddf82ade4c54553faa084530',
+ 'version': 'git_revision:2adc53281f4a72ecb71e84a8af5acc0fced04cc9',
},
{
'package': 'infra/tools/luci/swarming/${{platform}}',
- 'version': 'git_revision:99ac75773c6241b6ddf82ade4c54553faa084530',
+ 'version': 'git_revision:2adc53281f4a72ecb71e84a8af5acc0fced04cc9',
},
],
'dep_type': 'cipd',
@@ -1004,7 +1007,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine',
- 'version': 'version:2@2.8.0.cr0',
+ 'version': 'version:2@2.8.8.cr0',
},
],
'condition': 'checkout_android',
@@ -1400,7 +1403,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation',
- 'version': 'version:2@2.4.0.cr0',
+ 'version': 'version:2@2.7.1.cr0',
},
],
'condition': 'checkout_android',
@@ -1411,7 +1414,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations',
- 'version': 'version:2@2.4.0.cr0',
+ 'version': 'version:2@2.7.1.cr0',
},
],
'condition': 'checkout_android',
@@ -1422,7 +1425,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api',
- 'version': 'version:2@2.4.0.cr0',
+ 'version': 'version:2@2.7.1.cr0',
},
],
'condition': 'checkout_android',
@@ -1433,7 +1436,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_core',
- 'version': 'version:2@2.4.0.cr0',
+ 'version': 'version:2@2.7.1.cr0',
},
],
'condition': 'checkout_android',
@@ -1444,7 +1447,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations',
- 'version': 'version:2@2.4.0.cr0',
+ 'version': 'version:2@2.7.1.cr0',
},
],
'condition': 'checkout_android',
@@ -1715,6 +1718,17 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/io_github_java_diff_utils_java_diff_utils': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/io_github_java_diff_utils_java_diff_utils',
+ 'version': 'version:2@4.0.cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/javax_annotation_javax_annotation_api': {
'packages': [
{
@@ -1994,7 +2008,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_qual',
- 'version': 'version:2@3.5.0.cr0',
+ 'version': 'version:2@3.8.0.cr0',
},
],
'condition': 'checkout_android',
@@ -2005,7 +2019,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow_shaded',
- 'version': 'version:2@3.1.2.cr0',
+ 'version': 'version:2@3.11.0.cr0',
},
],
'condition': 'checkout_android',
@@ -2056,6 +2070,17 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/org_eclipse_jgit_org_eclipse_jgit': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_eclipse_jgit_org_eclipse_jgit',
+ 'version': 'version:2@4.4.1.201607150455-r.cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/org_jetbrains_annotations': {
'packages': [
{
@@ -2071,7 +2096,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib',
- 'version': 'version:2@1.4.30.cr0',
+ 'version': 'version:2@1.5.10.cr0',
},
],
'condition': 'checkout_android',
@@ -2082,7 +2107,29 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_common',
- 'version': 'version:2@1.4.30.cr0',
+ 'version': 'version:2@1.5.10.cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_android': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_android',
+ 'version': 'version:2@1.4.3.cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_core_jvm': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_core_jvm',
+ 'version': 'version:2@1.4.3.cr0',
},
],
'condition': 'checkout_android',
@@ -2298,17 +2345,6 @@ deps = {
'dep_type': 'cipd',
},
- 'src/third_party/android_deps/libs/org_threeten_threeten_extra': {
- 'packages': [
- {
- 'package': 'chromium/third_party/android_deps/libs/org_threeten_threeten_extra',
- 'version': 'version:2@1.5.0.cr0',
- },
- ],
- 'condition': 'checkout_android',
- 'dep_type': 'cipd',
- },
-
# === ANDROID_DEPS Generated Code End ===
}
@@ -2588,6 +2624,7 @@ include_rules = [
"+absl/base/const_init.h",
"+absl/base/macros.h",
"+absl/container/inlined_vector.h",
+ "+absl/functional/bind_front.h",
"+absl/memory/memory.h",
"+absl/meta/type_traits.h",
"+absl/strings/ascii.h",
diff --git a/README.md b/README.md
index f9b92775f8..34f89bb080 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,7 @@ native API header files.
* Samples and reference apps: https://github.com/webrtc
* Mailing list: http://groups.google.com/group/discuss-webrtc
* Continuous build: https://ci.chromium.org/p/webrtc/g/ci/console
- * [Coding style guide](style-guide.md)
+ * [Coding style guide](g3doc/style-guide.md)
* [Code of conduct](CODE_OF_CONDUCT.md)
* [Reporting bugs](docs/bug-reporting.md)
diff --git a/api/BUILD.gn b/api/BUILD.gn
index 4f729d5c77..e1c45e2032 100644
--- a/api/BUILD.gn
+++ b/api/BUILD.gn
@@ -29,7 +29,10 @@ rtc_source_set("call_api") {
rtc_source_set("callfactory_api") {
visibility = [ "*" ]
sources = [ "call/call_factory_interface.h" ]
- deps = [ "../rtc_base/system:rtc_export" ]
+ deps = [
+ "../call:rtp_interfaces",
+ "../rtc_base/system:rtc_export",
+ ]
}
if (!build_with_chromium) {
@@ -90,6 +93,7 @@ rtc_library("rtp_packet_info") {
":scoped_refptr",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
+ "units:timestamp",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
@@ -134,14 +138,8 @@ rtc_library("libjingle_peerconnection_api") {
"jsep_ice_candidate.cc",
"jsep_ice_candidate.h",
"jsep_session_description.h",
- "media_stream_proxy.h",
- "media_stream_track_proxy.h",
- "peer_connection_factory_proxy.h",
"peer_connection_interface.cc",
"peer_connection_interface.h",
- "peer_connection_proxy.h",
- "proxy.cc",
- "proxy.h",
"rtp_receiver_interface.cc",
"rtp_receiver_interface.h",
"rtp_sender_interface.cc",
@@ -156,7 +154,7 @@ rtc_library("libjingle_peerconnection_api") {
"stats_types.h",
"turn_customizer.h",
"uma_metrics.h",
- "video_track_source_proxy.h",
+ "video_track_source_proxy_factory.h",
]
deps = [
":array_view",
@@ -177,6 +175,7 @@ rtc_library("libjingle_peerconnection_api") {
":rtp_transceiver_direction",
":scoped_refptr",
":sequence_checker",
+ "../call:rtp_interfaces",
"../rtc_base:network_constants",
"adaptation:resource_adaptation_api",
"audio:audio_mixer_api",
@@ -196,6 +195,7 @@ rtc_library("libjingle_peerconnection_api") {
"units:data_rate",
"units:timestamp",
"video:encoded_image",
+ "video:video_bitrate_allocator_factory",
"video:video_frame",
"video:video_rtp_headers",
@@ -561,6 +561,7 @@ rtc_source_set("rtc_stats_api") {
deps = [
":scoped_refptr",
+ "../api:refcountedbase",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
@@ -682,7 +683,10 @@ rtc_source_set("array_view") {
rtc_source_set("refcountedbase") {
visibility = [ "*" ]
sources = [ "ref_counted_base.h" ]
- deps = [ "../rtc_base:rtc_base_approved" ]
+ deps = [
+ "../rtc_base:macromagic",
+ "../rtc_base:refcount",
+ ]
}
rtc_library("ice_transport_factory") {
@@ -1053,6 +1057,7 @@ if (rtc_include_tests) {
":time_controller",
"../call",
"../call:call_interfaces",
+ "../call:rtp_interfaces",
"../test/time_controller",
]
}
diff --git a/api/DEPS b/api/DEPS
index 2e46029174..f0caea5b55 100644
--- a/api/DEPS
+++ b/api/DEPS
@@ -42,6 +42,11 @@ include_rules = [
specific_include_rules = {
# Some internal headers are allowed even in API headers:
+
+ "call_factory_interface\.h": [
+ "+call/rtp_transport_controller_send_factory_interface.h",
+ ],
+
".*\.h": [
"+rtc_base/checks.h",
"+rtc_base/system/rtc_export.h",
@@ -126,15 +131,21 @@ specific_include_rules = {
],
"peer_connection_interface\.h": [
+ "+call/rtp_transport_controller_send_factory_interface.h",
"+media/base/media_config.h",
"+media/base/media_engine.h",
+ "+p2p/base/port.h",
"+p2p/base/port_allocator.h",
+ "+rtc_base/network.h",
+ "+rtc_base/network_constants.h",
"+rtc_base/network_monitor_factory.h",
+ "+rtc_base/ref_count.h",
"+rtc_base/rtc_certificate.h",
"+rtc_base/rtc_certificate_generator.h",
"+rtc_base/socket_address.h",
"+rtc_base/ssl_certificate.h",
"+rtc_base/ssl_stream_adapter.h",
+ "+rtc_base/thread.h",
],
"proxy\.h": [
diff --git a/api/OWNERS.webrtc b/api/OWNERS.webrtc
index e18667970b..6ffb2588aa 100644
--- a/api/OWNERS.webrtc
+++ b/api/OWNERS.webrtc
@@ -11,15 +11,4 @@ per-file peer_connection*=hbos@webrtc.org
per-file DEPS=mbonadei@webrtc.org
-# Please keep this list in sync with Chromium's //base/metrics/OWNERS and
-# send a CL when you notice any difference.
-# Even if people in the list below cannot formally grant +1 on WebRTC, it
-# is good to get their LGTM before sending the CL to one of the folder OWNERS.
-per-file uma_metrics.h=asvitkine@chromium.org
-per-file uma_metrics.h=bcwhite@chromium.org
-per-file uma_metrics.h=caitlinfischer@google.com
-per-file uma_metrics.h=holte@chromium.org
-per-file uma_metrics.h=isherman@chromium.org
-per-file uma_metrics.h=jwd@chromium.org
-per-file uma_metrics.h=mpearson@chromium.org
-per-file uma_metrics.h=rkaplow@chromium.org
+per-file uma_metrics.h=kron@webrtc.org
diff --git a/api/README.md b/api/README.md
index 4cc799362d..7c1a27f512 100644
--- a/api/README.md
+++ b/api/README.md
@@ -1,6 +1,6 @@
# How to write code in the `api/` directory
-Mostly, just follow the regular [style guide](../style-guide.md), but:
+Mostly, just follow the regular [style guide](../g3doc/style-guide.md), but:
* Note that `api/` code is not exempt from the “`.h` and `.cc` files come in
pairs” rule, so if you declare something in `api/path/to/foo.h`, it should be
@@ -17,7 +17,7 @@ it from a `.cc` file, so that users of our API headers won’t transitively
For headers in `api/` that need to refer to non-public types, forward
declarations are often a lesser evil than including non-public header files. The
-usual [rules](../style-guide.md#forward-declarations) still apply, though.
+usual [rules](../g3doc/style-guide.md#forward-declarations) still apply, though.
`.cc` files in `api/` should preferably be kept reasonably small. If a
substantial implementation is needed, consider putting it with our non-public
diff --git a/api/audio/echo_canceller3_config.cc b/api/audio/echo_canceller3_config.cc
index 5f1923e90f..b38d6b5b7e 100644
--- a/api/audio/echo_canceller3_config.cc
+++ b/api/audio/echo_canceller3_config.cc
@@ -153,7 +153,7 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) {
res = res & Limit(&c->filter.config_change_duration_blocks, 0, 100000);
res = res & Limit(&c->filter.initial_state_seconds, 0.f, 100.f);
- res = res & Limit(&c->filter.coarse_reset_hangover_blocks, 0, 2500);
+ res = res & Limit(&c->filter.coarse_reset_hangover_blocks, 0, 250000);
res = res & Limit(&c->erle.min, 1.f, 100000.f);
res = res & Limit(&c->erle.max_l, 1.f, 100000.f);
@@ -229,6 +229,12 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) {
res =
res & Limit(&c->suppressor.nearend_tuning.max_dec_factor_lf, 0.f, 100.f);
+ res = res & Limit(&c->suppressor.last_permanent_lf_smoothing_band, 0, 64);
+ res = res & Limit(&c->suppressor.last_lf_smoothing_band, 0, 64);
+ res = res & Limit(&c->suppressor.last_lf_band, 0, 63);
+ res = res &
+ Limit(&c->suppressor.first_hf_band, c->suppressor.last_lf_band + 1, 64);
+
res = res & Limit(&c->suppressor.dominant_nearend_detection.enr_threshold,
0.f, 1000000.f);
res = res & Limit(&c->suppressor.dominant_nearend_detection.snr_threshold,
diff --git a/api/audio/echo_canceller3_config.h b/api/audio/echo_canceller3_config.h
index 8ffc3d9e89..087e8da439 100644
--- a/api/audio/echo_canceller3_config.h
+++ b/api/audio/echo_canceller3_config.h
@@ -43,6 +43,7 @@ struct RTC_EXPORT EchoCanceller3Config {
size_t hysteresis_limit_blocks = 1;
size_t fixed_capture_delay_samples = 0;
float delay_estimate_smoothing = 0.7f;
+ float delay_estimate_smoothing_delay_found = 0.7f;
float delay_candidate_detection_threshold = 0.2f;
struct DelaySelectionThresholds {
int initial;
@@ -193,6 +194,12 @@ struct RTC_EXPORT EchoCanceller3Config {
2.0f,
0.25f);
+ bool lf_smoothing_during_initial_phase = true;
+ int last_permanent_lf_smoothing_band = 0;
+ int last_lf_smoothing_band = 5;
+ int last_lf_band = 5;
+ int first_hf_band = 8;
+
struct DominantNearendDetection {
float enr_threshold = .25f;
float enr_exit_threshold = 10.f;
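The new suppressor band fields above are checked in the echo_canceller3_config.cc hunk earlier in this patch, which clamps last_lf_band to [0, 63] and forces first_hf_band into [last_lf_band + 1, 64]. A minimal tuning sketch, assuming the config is then handed to the echo canceller as usual; the values are illustrative only:

    #include "api/audio/echo_canceller3_config.h"

    webrtc::EchoCanceller3Config config;
    config.suppressor.lf_smoothing_during_initial_phase = false;
    config.suppressor.last_lf_band = 6;   // Illustrative values, not tuning advice.
    config.suppressor.first_hf_band = 9;  // Kept above last_lf_band, as Validate() requires.
    // Judging from the res & Limit(...) pattern above, Validate() clamps
    // out-of-range fields in place and returns true only if nothing needed fixing.
    bool was_valid = webrtc::EchoCanceller3Config::Validate(&config);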
diff --git a/api/audio/echo_canceller3_config_json.cc b/api/audio/echo_canceller3_config_json.cc
index 89256b3e68..91a3c66d33 100644
--- a/api/audio/echo_canceller3_config_json.cc
+++ b/api/audio/echo_canceller3_config_json.cc
@@ -191,6 +191,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string,
&cfg.delay.fixed_capture_delay_samples);
ReadParam(section, "delay_estimate_smoothing",
&cfg.delay.delay_estimate_smoothing);
+ ReadParam(section, "delay_estimate_smoothing_delay_found",
+ &cfg.delay.delay_estimate_smoothing_delay_found);
ReadParam(section, "delay_candidate_detection_threshold",
&cfg.delay.delay_candidate_detection_threshold);
@@ -339,6 +341,15 @@ void Aec3ConfigFromJsonString(absl::string_view json_string,
&cfg.suppressor.nearend_tuning.max_dec_factor_lf);
}
+ ReadParam(section, "lf_smoothing_during_initial_phase",
+ &cfg.suppressor.lf_smoothing_during_initial_phase);
+ ReadParam(section, "last_permanent_lf_smoothing_band",
+ &cfg.suppressor.last_permanent_lf_smoothing_band);
+ ReadParam(section, "last_lf_smoothing_band",
+ &cfg.suppressor.last_lf_smoothing_band);
+ ReadParam(section, "last_lf_band", &cfg.suppressor.last_lf_band);
+ ReadParam(section, "first_hf_band", &cfg.suppressor.first_hf_band);
+
if (rtc::GetValueFromJsonObject(section, "dominant_nearend_detection",
&subsection)) {
ReadParam(subsection, "enr_threshold",
@@ -425,6 +436,8 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) {
<< config.delay.fixed_capture_delay_samples << ",";
ost << "\"delay_estimate_smoothing\": "
<< config.delay.delay_estimate_smoothing << ",";
+ ost << "\"delay_estimate_smoothing_delay_found\": "
+ << config.delay.delay_estimate_smoothing_delay_found << ",";
ost << "\"delay_candidate_detection_threshold\": "
<< config.delay.delay_candidate_detection_threshold << ",";
@@ -647,6 +660,16 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) {
ost << "\"max_dec_factor_lf\": "
<< config.suppressor.nearend_tuning.max_dec_factor_lf;
ost << "},";
+ ost << "\"lf_smoothing_during_initial_phase\": "
+ << (config.suppressor.lf_smoothing_during_initial_phase ? "true"
+ : "false")
+ << ",";
+ ost << "\"last_permanent_lf_smoothing_band\": "
+ << config.suppressor.last_permanent_lf_smoothing_band << ",";
+ ost << "\"last_lf_smoothing_band\": "
+ << config.suppressor.last_lf_smoothing_band << ",";
+ ost << "\"last_lf_band\": " << config.suppressor.last_lf_band << ",";
+ ost << "\"first_hf_band\": " << config.suppressor.first_hf_band << ",";
ost << "\"dominant_nearend_detection\": {";
ost << "\"enr_threshold\": "
<< config.suppressor.dominant_nearend_detection.enr_threshold << ",";
diff --git a/api/audio/echo_detector_creator.cc b/api/audio/echo_detector_creator.cc
index 4c3d9e61fe..04215b0deb 100644
--- a/api/audio/echo_detector_creator.cc
+++ b/api/audio/echo_detector_creator.cc
@@ -15,7 +15,7 @@
namespace webrtc {
rtc::scoped_refptr<EchoDetector> CreateEchoDetector() {
- return new rtc::RefCountedObject<ResidualEchoDetector>();
+ return rtc::make_ref_counted<ResidualEchoDetector>();
}
} // namespace webrtc
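This is the same mechanical migration applied across the patch: ref-counted implementations are now created with rtc::make_ref_counted<T>() instead of spelling out rtc::RefCountedObject<T>. A minimal before/after sketch using the class from the hunk above:

    // Before: the ref-counting wrapper is named explicitly.
    rtc::scoped_refptr<webrtc::EchoDetector> detector(
        new rtc::RefCountedObject<webrtc::ResidualEchoDetector>());

    // After: make_ref_counted deduces the wrapper and returns a scoped_refptr.
    rtc::scoped_refptr<webrtc::EchoDetector> detector2 =
        rtc::make_ref_counted<webrtc::ResidualEchoDetector>();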
diff --git a/api/audio_codecs/audio_decoder_factory_template.h b/api/audio_codecs/audio_decoder_factory_template.h
index e628cb62dc..388668d4c6 100644
--- a/api/audio_codecs/audio_decoder_factory_template.h
+++ b/api/audio_codecs/audio_decoder_factory_template.h
@@ -123,9 +123,8 @@ rtc::scoped_refptr<AudioDecoderFactory> CreateAudioDecoderFactory() {
static_assert(sizeof...(Ts) >= 1,
"Caller must give at least one template parameter");
- return rtc::scoped_refptr<AudioDecoderFactory>(
- new rtc::RefCountedObject<
- audio_decoder_factory_template_impl::AudioDecoderFactoryT<Ts...>>());
+ return rtc::make_ref_counted<
+ audio_decoder_factory_template_impl::AudioDecoderFactoryT<Ts...>>();
}
} // namespace webrtc
diff --git a/api/audio_codecs/audio_encoder_factory_template.h b/api/audio_codecs/audio_encoder_factory_template.h
index 74cb053425..cdc7defd25 100644
--- a/api/audio_codecs/audio_encoder_factory_template.h
+++ b/api/audio_codecs/audio_encoder_factory_template.h
@@ -142,9 +142,8 @@ rtc::scoped_refptr<AudioEncoderFactory> CreateAudioEncoderFactory() {
static_assert(sizeof...(Ts) >= 1,
"Caller must give at least one template parameter");
- return rtc::scoped_refptr<AudioEncoderFactory>(
- new rtc::RefCountedObject<
- audio_encoder_factory_template_impl::AudioEncoderFactoryT<Ts...>>());
+ return rtc::make_ref_counted<
+ audio_encoder_factory_template_impl::AudioEncoderFactoryT<Ts...>>();
}
} // namespace webrtc
diff --git a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc
index 0e2e8c229f..464ecfd487 100644
--- a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc
+++ b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc
@@ -78,7 +78,7 @@ struct AudioDecoderFakeApi {
TEST(AudioDecoderFactoryTemplateTest, NoDecoderTypes) {
rtc::scoped_refptr<AudioDecoderFactory> factory(
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
audio_decoder_factory_template_impl::AudioDecoderFactoryT<>>());
EXPECT_THAT(factory->GetSupportedDecoders(), ::testing::IsEmpty());
EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1}));
diff --git a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc
index 95ea85576d..110f9930bd 100644
--- a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc
+++ b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc
@@ -78,7 +78,7 @@ struct AudioEncoderFakeApi {
TEST(AudioEncoderFactoryTemplateTest, NoEncoderTypes) {
rtc::scoped_refptr<AudioEncoderFactory> factory(
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
audio_encoder_factory_template_impl::AudioEncoderFactoryT<>>());
EXPECT_THAT(factory->GetSupportedEncoders(), ::testing::IsEmpty());
EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1}));
diff --git a/api/candidate.cc b/api/candidate.cc
index c857f89c3c..d5fe3a0672 100644
--- a/api/candidate.cc
+++ b/api/candidate.cc
@@ -12,6 +12,7 @@
#include "rtc_base/helpers.h"
#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
namespace cricket {
@@ -129,9 +130,21 @@ Candidate Candidate::ToSanitizedCopy(bool use_hostname_address,
bool filter_related_address) const {
Candidate copy(*this);
if (use_hostname_address) {
- rtc::SocketAddress hostname_only_addr(address().hostname(),
- address().port());
- copy.set_address(hostname_only_addr);
+ rtc::IPAddress ip;
+ if (address().hostname().empty()) {
+ // IP needs to be redacted, but no hostname available.
+ rtc::SocketAddress redacted_addr("redacted-ip.invalid", address().port());
+ copy.set_address(redacted_addr);
+ } else if (IPFromString(address().hostname(), &ip)) {
+ // The hostname is an IP literal, and needs to be redacted too.
+ rtc::SocketAddress redacted_addr("redacted-literal.invalid",
+ address().port());
+ copy.set_address(redacted_addr);
+ } else {
+ rtc::SocketAddress hostname_only_addr(address().hostname(),
+ address().port());
+ copy.set_address(hostname_only_addr);
+ }
}
if (filter_related_address) {
copy.set_related_address(
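With this change ToSanitizedCopy() no longer passes an IP literal through the hostname field: an empty hostname is replaced by "redacted-ip.invalid" and an IP-literal hostname by "redacted-literal.invalid". A rough sketch of the intended effect, assuming rtc::SocketAddress keeps the literal string available via hostname():

    #include "api/candidate.h"
    #include "rtc_base/socket_address.h"

    cricket::Candidate candidate;  // Hypothetical candidate, for illustration only.
    candidate.set_address(rtc::SocketAddress("192.0.2.33", 3478));

    cricket::Candidate sanitized = candidate.ToSanitizedCopy(
        /*use_hostname_address=*/true, /*filter_related_address=*/true);
    // Expected: the address is redacted rather than copied through, i.e.
    // sanitized.address().hostname() == "redacted-literal.invalid".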
diff --git a/api/data_channel_interface.h b/api/data_channel_interface.h
index 5b2b1263ab..56bb6c98fb 100644
--- a/api/data_channel_interface.h
+++ b/api/data_channel_interface.h
@@ -44,11 +44,13 @@ struct DataChannelInit {
//
// Cannot be set along with |maxRetransmits|.
// This is called |maxPacketLifeTime| in the WebRTC JS API.
+ // Negative values are ignored, and positive values are clamped to [0-65535]
absl::optional<int> maxRetransmitTime;
// The max number of retransmissions.
//
// Cannot be set along with |maxRetransmitTime|.
+ // Negative values are ignored, and positive values are clamped to [0-65535]
absl::optional<int> maxRetransmits;
// This is set by the application and opaque to the WebRTC implementation.
diff --git a/api/ice_transport_factory.cc b/api/ice_transport_factory.cc
index d507812ab7..26ef88bf1c 100644
--- a/api/ice_transport_factory.cc
+++ b/api/ice_transport_factory.cc
@@ -60,12 +60,12 @@ rtc::scoped_refptr<IceTransportInterface> CreateIceTransport(
IceTransportInit init) {
if (init.async_resolver_factory()) {
// Backwards compatibility mode
- return new rtc::RefCountedObject<IceTransportWithTransportChannel>(
+ return rtc::make_ref_counted<IceTransportWithTransportChannel>(
std::make_unique<cricket::P2PTransportChannel>(
"", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(),
init.async_resolver_factory(), init.event_log()));
} else {
- return new rtc::RefCountedObject<IceTransportWithTransportChannel>(
+ return rtc::make_ref_counted<IceTransportWithTransportChannel>(
cricket::P2PTransportChannel::Create(
"", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(),
init.async_dns_resolver_factory(), init.event_log()));
diff --git a/api/neteq/neteq.h b/api/neteq/neteq.h
index 9781377ca8..ea7079e369 100644
--- a/api/neteq/neteq.h
+++ b/api/neteq/neteq.h
@@ -214,11 +214,15 @@ class NetEq {
// |data_| in |audio_frame| is not written, but should be interpreted as being
// all zeros. For testing purposes, an override can be supplied in the
// |action_override| argument, which will cause NetEq to take this action
- // next, instead of the action it would normally choose.
+ // next, instead of the action it would normally choose. An optional output
+ // argument can be provided for fetching the current sample rate; it is set
+ // to the same value as last_output_sample_rate_hz() but avoids additional
+ // synchronization.
// Returns kOK on success, or kFail in case of an error.
virtual int GetAudio(
AudioFrame* audio_frame,
bool* muted,
+ int* current_sample_rate_hz = nullptr,
absl::optional<Operation> action_override = absl::nullopt) = 0;
// Replaces the current set of decoders with the given one.
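Callers that want the sample rate of the frame they just pulled can now get it from the same GetAudio() call instead of a separate last_output_sample_rate_hz() query. A minimal sketch, assuming |neteq| points at an existing NetEq instance:

    #include "api/audio/audio_frame.h"
    #include "api/neteq/neteq.h"

    webrtc::AudioFrame audio_frame;
    bool muted = false;
    int current_sample_rate_hz = 0;
    // The new out-parameter is optional; passing nullptr keeps the old behavior.
    int error = neteq->GetAudio(&audio_frame, &muted, &current_sample_rate_hz);
    if (error == webrtc::NetEq::kOK) {
      // current_sample_rate_hz matches last_output_sample_rate_hz() without the
      // extra synchronization mentioned in the comment above.
    }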
diff --git a/api/peer_connection_interface.cc b/api/peer_connection_interface.cc
index e1d94dd8c7..230731c42d 100644
--- a/api/peer_connection_interface.cc
+++ b/api/peer_connection_interface.cc
@@ -10,8 +10,7 @@
#include "api/peer_connection_interface.h"
-#include "api/dtls_transport_interface.h"
-#include "api/sctp_transport_interface.h"
+#include <utility>
namespace webrtc {
@@ -77,14 +76,27 @@ PeerConnectionFactoryInterface::CreatePeerConnection(
std::unique_ptr<cricket::PortAllocator> allocator,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
PeerConnectionObserver* observer) {
- return nullptr;
+ PeerConnectionDependencies dependencies(observer);
+ dependencies.allocator = std::move(allocator);
+ dependencies.cert_generator = std::move(cert_generator);
+ auto result =
+ CreatePeerConnectionOrError(configuration, std::move(dependencies));
+ if (!result.ok()) {
+ return nullptr;
+ }
+ return result.MoveValue();
}
rtc::scoped_refptr<PeerConnectionInterface>
PeerConnectionFactoryInterface::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
PeerConnectionDependencies dependencies) {
- return nullptr;
+ auto result =
+ CreatePeerConnectionOrError(configuration, std::move(dependencies));
+ if (!result.ok()) {
+ return nullptr;
+ }
+ return result.MoveValue();
}
RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>
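Both legacy CreatePeerConnection() overloads are now thin wrappers that discard the error details, so new code is expected to call CreatePeerConnectionOrError() and inspect the RTCError itself. A minimal sketch, assuming |factory|, |config| and |dependencies| are already set up:

    auto result =
        factory->CreatePeerConnectionOrError(config, std::move(dependencies));
    if (!result.ok()) {
      // Unlike the deprecated wrapper, the failure reason is available here.
      RTC_LOG(LS_ERROR) << "CreatePeerConnection failed: "
                        << result.error().message();
      return;
    }
    rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection =
        result.MoveValue();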
diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h
index 800f36cb00..83ebc37035 100644
--- a/api/peer_connection_interface.h
+++ b/api/peer_connection_interface.h
@@ -67,12 +67,16 @@
#ifndef API_PEER_CONNECTION_INTERFACE_H_
#define API_PEER_CONNECTION_INTERFACE_H_
+#include <stdint.h>
#include <stdio.h>
+#include <functional>
#include <memory>
#include <string>
#include <vector>
+#include "absl/base/attributes.h"
+#include "absl/types/optional.h"
#include "api/adaptation/resource.h"
#include "api/async_dns_resolver.h"
#include "api/async_resolver_factory.h"
@@ -81,6 +85,7 @@
#include "api/audio_codecs/audio_encoder_factory.h"
#include "api/audio_options.h"
#include "api/call/call_factory_interface.h"
+#include "api/candidate.h"
#include "api/crypto/crypto_options.h"
#include "api/data_channel_interface.h"
#include "api/dtls_transport_interface.h"
@@ -88,15 +93,18 @@
#include "api/ice_transport_interface.h"
#include "api/jsep.h"
#include "api/media_stream_interface.h"
+#include "api/media_types.h"
#include "api/neteq/neteq_factory.h"
#include "api/network_state_predictor.h"
#include "api/packet_socket_factory.h"
#include "api/rtc_error.h"
#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
#include "api/rtc_event_log_output.h"
+#include "api/rtp_parameters.h"
#include "api/rtp_receiver_interface.h"
#include "api/rtp_sender_interface.h"
#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
#include "api/sctp_transport_interface.h"
#include "api/set_local_description_observer_interface.h"
#include "api/set_remote_description_observer_interface.h"
@@ -109,19 +117,26 @@
#include "api/transport/sctp_transport_factory_interface.h"
#include "api/transport/webrtc_key_value_config.h"
#include "api/turn_customizer.h"
+#include "api/video/video_bitrate_allocator_factory.h"
+#include "call/rtp_transport_controller_send_factory_interface.h"
#include "media/base/media_config.h"
#include "media/base/media_engine.h"
// TODO(bugs.webrtc.org/7447): We plan to provide a way to let applications
// inject a PacketSocketFactory and/or NetworkManager, and not expose
-// PortAllocator in the PeerConnection api.
+// PortAllocator in the PeerConnection api. This will let us remove nogncheck.
+#include "p2p/base/port.h" // nogncheck
#include "p2p/base/port_allocator.h" // nogncheck
+#include "rtc_base/network.h"
+#include "rtc_base/network_constants.h"
#include "rtc_base/network_monitor_factory.h"
+#include "rtc_base/ref_count.h"
#include "rtc_base/rtc_certificate.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/ssl_certificate.h"
#include "rtc_base/ssl_stream_adapter.h"
#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/thread.h"
namespace rtc {
class Thread;
@@ -904,9 +919,24 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// Also, calling CreateDataChannel is the only way to get a data "m=" section
// in SDP, so it should be done before CreateOffer is called, if the
// application plans to use data channels.
+ virtual RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>
+ CreateDataChannelOrError(const std::string& label,
+ const DataChannelInit* config) {
+ return RTCError(RTCErrorType::INTERNAL_ERROR, "dummy function called");
+ }
+ // TODO(crbug.com/788659): Remove "virtual" below and default implementation
+ // above once mock in Chrome is fixed.
+ ABSL_DEPRECATED("Use CreateDataChannelOrError")
virtual rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
const std::string& label,
- const DataChannelInit* config) = 0;
+ const DataChannelInit* config) {
+ auto result = CreateDataChannelOrError(label, config);
+ if (!result.ok()) {
+ return nullptr;
+ } else {
+ return result.MoveValue();
+ }
+ }
// NOTE: For the following 6 methods, it's only safe to dereference the
// SessionDescriptionInterface on signaling_thread() (for example, calling
@@ -1371,6 +1401,8 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final {
std::unique_ptr<NetEqFactory> neteq_factory;
std::unique_ptr<SctpTransportFactoryInterface> sctp_factory;
std::unique_ptr<WebRtcKeyValueConfig> trials;
+ std::unique_ptr<RtpTransportControllerSendFactoryInterface>
+ transport_controller_send_factory;
};
// PeerConnectionFactoryInterface is the factory interface used for creating
@@ -1432,6 +1464,7 @@ class RTC_EXPORT PeerConnectionFactoryInterface
PeerConnectionDependencies dependencies);
// Deprecated creator - does not return an error code on error.
// TODO(bugs.webrtc.org:12238): Deprecate and remove.
+ ABSL_DEPRECATED("Use CreatePeerConnectionOrError")
virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
PeerConnectionDependencies dependencies);
@@ -1445,6 +1478,7 @@ class RTC_EXPORT PeerConnectionFactoryInterface
// responsibility of the caller to delete it. It can be safely deleted after
// Close has been called on the returned PeerConnection, which ensures no
// more observer callbacks will be invoked.
+ ABSL_DEPRECATED("Use CreatePeerConnectionOrError")
virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
std::unique_ptr<cricket::PortAllocator> allocator,
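The data-channel path follows the same pattern: CreateDataChannel() is deprecated in favor of CreateDataChannelOrError(), which surfaces an RTCError instead of a bare nullptr. A minimal usage sketch, assuming |peer_connection| already exists:

    webrtc::DataChannelInit config;
    config.ordered = true;  // Existing DataChannelInit fields, nothing new here.
    auto result = peer_connection->CreateDataChannelOrError("chat", &config);
    if (!result.ok()) {
      RTC_LOG(LS_ERROR) << "CreateDataChannel failed: "
                        << result.error().message();
      return;
    }
    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel =
        result.MoveValue();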
diff --git a/api/proxy.cc b/api/proxy.cc
deleted file mode 100644
index 67318e7dab..0000000000
--- a/api/proxy.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/proxy.h"
-
diff --git a/api/ref_counted_base.h b/api/ref_counted_base.h
index a1761db851..931cb20762 100644
--- a/api/ref_counted_base.h
+++ b/api/ref_counted_base.h
@@ -10,8 +10,9 @@
#ifndef API_REF_COUNTED_BASE_H_
#define API_REF_COUNTED_BASE_H_
+#include <type_traits>
+
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counter.h"
namespace rtc {
@@ -30,6 +31,10 @@ class RefCountedBase {
}
protected:
+ // Provided for internal webrtc subclasses for corner cases where it's
+ // necessary to know whether or not a reference is exclusively held.
+ bool HasOneRef() const { return ref_count_.HasOneRef(); }
+
virtual ~RefCountedBase() = default;
private:
@@ -38,6 +43,55 @@ class RefCountedBase {
RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedBase);
};
+// Template-based version of `RefCountedBase` for simple implementations that
+// do not need (or want) destruction via a virtual destructor or the overhead
+// of a vtable.
+//
+// To use:
+// struct MyInt : public rtc::RefCountedNonVirtual<MyInt> {
+// int foo_ = 0;
+// };
+//
+// rtc::scoped_refptr<MyInt> my_int(new MyInt());
+//
+// sizeof(MyInt) on a 32-bit system would then be 8 bytes (int + refcount),
+// with no vtable generated.
+template <typename T>
+class RefCountedNonVirtual {
+ public:
+ RefCountedNonVirtual() = default;
+
+ void AddRef() const { ref_count_.IncRef(); }
+ RefCountReleaseStatus Release() const {
+ // If you run into this assert, T has virtual methods. There are two
+ // options:
+ // 1) The class doesn't actually need virtual methods, the type is complete
+ // so the virtual attribute(s) can be removed.
+ // 2) The virtual methods are a part of the design of the class. In this
+ // case you can consider using `RefCountedBase` instead or alternatively
+ // use `rtc::RefCountedObject`.
+ static_assert(!std::is_polymorphic<T>::value,
+ "T has virtual methods. RefCountedBase is a better fit.");
+ const auto status = ref_count_.DecRef();
+ if (status == RefCountReleaseStatus::kDroppedLastRef) {
+ delete static_cast<const T*>(this);
+ }
+ return status;
+ }
+
+ protected:
+ // Provided for internal webrtc subclasses for corner cases where it's
+ // necessary to know whether or not a reference is exclusively held.
+ bool HasOneRef() const { return ref_count_.HasOneRef(); }
+
+ ~RefCountedNonVirtual() = default;
+
+ private:
+ mutable webrtc::webrtc_impl::RefCounter ref_count_{0};
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedNonVirtual);
+};
+
} // namespace rtc
#endif // API_REF_COUNTED_BASE_H_
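The header comment above already shows the intended pattern; a slightly fuller sketch of what the non-virtual base buys, with MyInt as a hypothetical payload type:

    #include "api/ref_counted_base.h"
    #include "api/scoped_refptr.h"

    // No vtable: Release() deletes via static_cast<const MyInt*>, which is why
    // the static_assert above rejects polymorphic types.
    struct MyInt : public rtc::RefCountedNonVirtual<MyInt> {
      int foo_ = 0;
    };

    rtc::scoped_refptr<MyInt> my_int(new MyInt());
    my_int->foo_ = 42;
    // When the last scoped_refptr goes away, Release() observes
    // kDroppedLastRef and deletes the object without any virtual dispatch.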
diff --git a/api/rtc_event_log/rtc_event.cc b/api/rtc_event_log/rtc_event.cc
index 81e6a4e6da..631188b915 100644
--- a/api/rtc_event_log/rtc_event.cc
+++ b/api/rtc_event_log/rtc_event.cc
@@ -14,6 +14,6 @@
namespace webrtc {
-RtcEvent::RtcEvent() : timestamp_us_(rtc::TimeMicros()) {}
+RtcEvent::RtcEvent() : timestamp_us_(rtc::TimeMillis() * 1000) {}
} // namespace webrtc
diff --git a/api/rtp_packet_info.cc b/api/rtp_packet_info.cc
index a9ebd9df48..db818f7657 100644
--- a/api/rtp_packet_info.cc
+++ b/api/rtp_packet_info.cc
@@ -16,7 +16,7 @@
namespace webrtc {
RtpPacketInfo::RtpPacketInfo()
- : ssrc_(0), rtp_timestamp_(0), receive_time_ms_(-1) {}
+ : ssrc_(0), rtp_timestamp_(0), receive_time_(Timestamp::MinusInfinity()) {}
RtpPacketInfo::RtpPacketInfo(
uint32_t ssrc,
@@ -24,19 +24,19 @@ RtpPacketInfo::RtpPacketInfo(
uint32_t rtp_timestamp,
absl::optional<uint8_t> audio_level,
absl::optional<AbsoluteCaptureTime> absolute_capture_time,
- int64_t receive_time_ms)
+ Timestamp receive_time)
: ssrc_(ssrc),
csrcs_(std::move(csrcs)),
rtp_timestamp_(rtp_timestamp),
audio_level_(audio_level),
absolute_capture_time_(absolute_capture_time),
- receive_time_ms_(receive_time_ms) {}
+ receive_time_(receive_time) {}
RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header,
- int64_t receive_time_ms)
+ Timestamp receive_time)
: ssrc_(rtp_header.ssrc),
rtp_timestamp_(rtp_header.timestamp),
- receive_time_ms_(receive_time_ms) {
+ receive_time_(receive_time) {
const auto& extension = rtp_header.extension;
const auto csrcs_count = std::min<size_t>(rtp_header.numCSRCs, kRtpCsrcSize);
@@ -49,12 +49,31 @@ RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header,
absolute_capture_time_ = extension.absolute_capture_time;
}
+RtpPacketInfo::RtpPacketInfo(
+ uint32_t ssrc,
+ std::vector<uint32_t> csrcs,
+ uint32_t rtp_timestamp,
+ absl::optional<uint8_t> audio_level,
+ absl::optional<AbsoluteCaptureTime> absolute_capture_time,
+ int64_t receive_time_ms)
+ : RtpPacketInfo(ssrc,
+ csrcs,
+ rtp_timestamp,
+ audio_level,
+ absolute_capture_time,
+ Timestamp::Millis(receive_time_ms)) {}
+RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header,
+ int64_t receive_time_ms)
+ : RtpPacketInfo(rtp_header, Timestamp::Millis(receive_time_ms)) {}
+
bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs) {
return (lhs.ssrc() == rhs.ssrc()) && (lhs.csrcs() == rhs.csrcs()) &&
(lhs.rtp_timestamp() == rhs.rtp_timestamp()) &&
(lhs.audio_level() == rhs.audio_level()) &&
(lhs.absolute_capture_time() == rhs.absolute_capture_time()) &&
- (lhs.receive_time_ms() == rhs.receive_time_ms());
+ (lhs.receive_time() == rhs.receive_time() &&
+ (lhs.local_capture_clock_offset() ==
+ rhs.local_capture_clock_offset()));
}
} // namespace webrtc
diff --git a/api/rtp_packet_info.h b/api/rtp_packet_info.h
index 639ba32770..605620d638 100644
--- a/api/rtp_packet_info.h
+++ b/api/rtp_packet_info.h
@@ -17,6 +17,7 @@
#include "absl/types/optional.h"
#include "api/rtp_headers.h"
+#include "api/units/timestamp.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
@@ -35,8 +36,18 @@ class RTC_EXPORT RtpPacketInfo {
uint32_t rtp_timestamp,
absl::optional<uint8_t> audio_level,
absl::optional<AbsoluteCaptureTime> absolute_capture_time,
- int64_t receive_time_ms);
+ Timestamp receive_time);
+
+ RtpPacketInfo(const RTPHeader& rtp_header, Timestamp receive_time);
+ // TODO(bugs.webrtc.org/12722): Deprecated, remove once downstream projects
+ // are updated.
+ RtpPacketInfo(uint32_t ssrc,
+ std::vector<uint32_t> csrcs,
+ uint32_t rtp_timestamp,
+ absl::optional<uint8_t> audio_level,
+ absl::optional<AbsoluteCaptureTime> absolute_capture_time,
+ int64_t receive_time_ms);
RtpPacketInfo(const RTPHeader& rtp_header, int64_t receive_time_ms);
RtpPacketInfo(const RtpPacketInfo& other) = default;
@@ -64,8 +75,19 @@ class RTC_EXPORT RtpPacketInfo {
absolute_capture_time_ = value;
}
- int64_t receive_time_ms() const { return receive_time_ms_; }
- void set_receive_time_ms(int64_t value) { receive_time_ms_ = value; }
+ const absl::optional<int64_t>& local_capture_clock_offset() const {
+ return local_capture_clock_offset_;
+ }
+
+ void set_local_capture_clock_offset(const absl::optional<int64_t>& value) {
+ local_capture_clock_offset_ = value;
+ }
+
+ Timestamp receive_time() const { return receive_time_; }
+ void set_receive_time(Timestamp value) { receive_time_ = value; }
+ // TODO(bugs.webrtc.org/12722): Deprecated, remove once downstream projects
+ // are updated.
+ int64_t receive_time_ms() const { return receive_time_.ms(); }
private:
// Fields from the RTP header:
@@ -80,10 +102,19 @@ class RTC_EXPORT RtpPacketInfo {
// Fields from the Absolute Capture Time header extension:
// http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time
+ // Not to be confused with |local_capture_clock_offset_|: the
+ // |estimated_capture_clock_offset| in |absolute_capture_time_| represents
+ // the clock offset between a remote sender and the capturer, and thus
+ // equals the corresponding value in the received RTP packets, subject to
+ // possible interpolation.
absl::optional<AbsoluteCaptureTime> absolute_capture_time_;
+ // Clock offset against capturer's clock. Should be derived from the estimated
+ // capture clock offset defined in the Absolute Capture Time header extension.
+ absl::optional<int64_t> local_capture_clock_offset_;
+
// Local |webrtc::Clock|-based timestamp of when the packet was received.
- int64_t receive_time_ms_;
+ Timestamp receive_time_;
};
bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs);
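Receive time is now a webrtc::Timestamp, and the per-packet local capture clock offset travels alongside it (and participates in operator==). A minimal construction sketch matching the new constructor, with illustrative values only:

    #include "api/rtp_packet_info.h"
    #include "api/units/timestamp.h"

    webrtc::RtpPacketInfo packet_info(
        /*ssrc=*/123, /*csrcs=*/{1, 2}, /*rtp_timestamp=*/890,
        /*audio_level=*/absl::nullopt,
        /*absolute_capture_time=*/absl::nullopt,
        /*receive_time=*/webrtc::Timestamp::Millis(7));
    packet_info.set_local_capture_clock_offset(absl::optional<int64_t>(10));

    webrtc::Timestamp receive_time = packet_info.receive_time();
    // The millisecond accessor is kept for downstream projects during migration.
    int64_t legacy_ms = packet_info.receive_time_ms();  // == 7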
diff --git a/api/rtp_packet_info_unittest.cc b/api/rtp_packet_info_unittest.cc
index fe79f6df3c..601d34f49e 100644
--- a/api/rtp_packet_info_unittest.cc
+++ b/api/rtp_packet_info_unittest.cc
@@ -37,7 +37,7 @@ TEST(RtpPacketInfoTest, Ssrc) {
rhs = RtpPacketInfo();
EXPECT_NE(rhs.ssrc(), value);
- rhs = RtpPacketInfo(value, {}, {}, {}, {}, {});
+ rhs = RtpPacketInfo(value, {}, {}, {}, {}, Timestamp::Millis(0));
EXPECT_EQ(rhs.ssrc(), value);
}
@@ -64,7 +64,7 @@ TEST(RtpPacketInfoTest, Csrcs) {
rhs = RtpPacketInfo();
EXPECT_NE(rhs.csrcs(), value);
- rhs = RtpPacketInfo({}, value, {}, {}, {}, {});
+ rhs = RtpPacketInfo({}, value, {}, {}, {}, Timestamp::Millis(0));
EXPECT_EQ(rhs.csrcs(), value);
}
@@ -91,7 +91,7 @@ TEST(RtpPacketInfoTest, RtpTimestamp) {
rhs = RtpPacketInfo();
EXPECT_NE(rhs.rtp_timestamp(), value);
- rhs = RtpPacketInfo({}, {}, value, {}, {}, {});
+ rhs = RtpPacketInfo({}, {}, value, {}, {}, Timestamp::Millis(0));
EXPECT_EQ(rhs.rtp_timestamp(), value);
}
@@ -118,7 +118,7 @@ TEST(RtpPacketInfoTest, AudioLevel) {
rhs = RtpPacketInfo();
EXPECT_NE(rhs.audio_level(), value);
- rhs = RtpPacketInfo({}, {}, {}, value, {}, {});
+ rhs = RtpPacketInfo({}, {}, {}, value, {}, Timestamp::Millis(0));
EXPECT_EQ(rhs.audio_level(), value);
}
@@ -145,12 +145,41 @@ TEST(RtpPacketInfoTest, AbsoluteCaptureTime) {
rhs = RtpPacketInfo();
EXPECT_NE(rhs.absolute_capture_time(), value);
- rhs = RtpPacketInfo({}, {}, {}, {}, value, {});
+ rhs = RtpPacketInfo({}, {}, {}, {}, value, Timestamp::Millis(0));
EXPECT_EQ(rhs.absolute_capture_time(), value);
}
+TEST(RtpPacketInfoTest, LocalCaptureClockOffset) {
+ RtpPacketInfo lhs;
+ RtpPacketInfo rhs;
+
+ EXPECT_TRUE(lhs == rhs);
+ EXPECT_FALSE(lhs != rhs);
+
+ const absl::optional<int64_t> value = 10;
+ rhs.set_local_capture_clock_offset(value);
+ EXPECT_EQ(rhs.local_capture_clock_offset(), value);
+
+ EXPECT_FALSE(lhs == rhs);
+ EXPECT_TRUE(lhs != rhs);
+
+ lhs = rhs;
+
+ EXPECT_TRUE(lhs == rhs);
+ EXPECT_FALSE(lhs != rhs);
+
+ // Default local capture clock offset is null.
+ rhs = RtpPacketInfo();
+ EXPECT_EQ(rhs.local_capture_clock_offset(), absl::nullopt);
+
+ // Default local capture clock offset is null.
+ rhs = RtpPacketInfo({}, {}, {}, {}, AbsoluteCaptureTime{12, 34},
+ Timestamp::Millis(0));
+ EXPECT_EQ(rhs.local_capture_clock_offset(), absl::nullopt);
+}
+
TEST(RtpPacketInfoTest, ReceiveTimeMs) {
- const int64_t value = 8868963877546349045LL;
+ const Timestamp timestamp = Timestamp::Micros(8868963877546349045LL);
RtpPacketInfo lhs;
RtpPacketInfo rhs;
@@ -158,8 +187,8 @@ TEST(RtpPacketInfoTest, ReceiveTimeMs) {
EXPECT_TRUE(lhs == rhs);
EXPECT_FALSE(lhs != rhs);
- rhs.set_receive_time_ms(value);
- EXPECT_EQ(rhs.receive_time_ms(), value);
+ rhs.set_receive_time(timestamp);
+ EXPECT_EQ(rhs.receive_time(), timestamp);
EXPECT_FALSE(lhs == rhs);
EXPECT_TRUE(lhs != rhs);
@@ -170,10 +199,10 @@ TEST(RtpPacketInfoTest, ReceiveTimeMs) {
EXPECT_FALSE(lhs != rhs);
rhs = RtpPacketInfo();
- EXPECT_NE(rhs.receive_time_ms(), value);
+ EXPECT_NE(rhs.receive_time(), timestamp);
- rhs = RtpPacketInfo({}, {}, {}, {}, {}, value);
- EXPECT_EQ(rhs.receive_time_ms(), value);
+ rhs = RtpPacketInfo({}, {}, {}, {}, {}, timestamp);
+ EXPECT_EQ(rhs.receive_time(), timestamp);
}
} // namespace webrtc
diff --git a/api/rtp_packet_infos_unittest.cc b/api/rtp_packet_infos_unittest.cc
index ce502ac378..e83358fc17 100644
--- a/api/rtp_packet_infos_unittest.cc
+++ b/api/rtp_packet_infos_unittest.cc
@@ -27,9 +27,12 @@ RtpPacketInfos::vector_type ToVector(Iterator begin, Iterator end) {
} // namespace
TEST(RtpPacketInfosTest, BasicFunctionality) {
- RtpPacketInfo p0(123, {1, 2}, 89, 5, AbsoluteCaptureTime{45, 78}, 7);
- RtpPacketInfo p1(456, {3, 4}, 89, 4, AbsoluteCaptureTime{13, 21}, 1);
- RtpPacketInfo p2(789, {5, 6}, 88, 1, AbsoluteCaptureTime{99, 78}, 7);
+ RtpPacketInfo p0(123, {1, 2}, 89, 5, AbsoluteCaptureTime{45, 78},
+ Timestamp::Millis(7));
+ RtpPacketInfo p1(456, {3, 4}, 89, 4, AbsoluteCaptureTime{13, 21},
+ Timestamp::Millis(1));
+ RtpPacketInfo p2(789, {5, 6}, 88, 1, AbsoluteCaptureTime{99, 78},
+ Timestamp::Millis(7));
RtpPacketInfos x({p0, p1, p2});
@@ -52,9 +55,12 @@ TEST(RtpPacketInfosTest, BasicFunctionality) {
}
TEST(RtpPacketInfosTest, CopyShareData) {
- RtpPacketInfo p0(123, {1, 2}, 89, 5, AbsoluteCaptureTime{45, 78}, 7);
- RtpPacketInfo p1(456, {3, 4}, 89, 4, AbsoluteCaptureTime{13, 21}, 1);
- RtpPacketInfo p2(789, {5, 6}, 88, 1, AbsoluteCaptureTime{99, 78}, 7);
+ RtpPacketInfo p0(123, {1, 2}, 89, 5, AbsoluteCaptureTime{45, 78},
+ Timestamp::Millis(7));
+ RtpPacketInfo p1(456, {3, 4}, 89, 4, AbsoluteCaptureTime{13, 21},
+ Timestamp::Millis(1));
+ RtpPacketInfo p2(789, {5, 6}, 88, 1, AbsoluteCaptureTime{99, 78},
+ Timestamp::Millis(7));
RtpPacketInfos lhs({p0, p1, p2});
RtpPacketInfos rhs = lhs;
diff --git a/api/rtp_parameters.cc b/api/rtp_parameters.cc
index 8a18f8983f..5ce6780753 100644
--- a/api/rtp_parameters.cc
+++ b/api/rtp_parameters.cc
@@ -170,63 +170,121 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) {
}
bool RtpExtension::IsEncryptionSupported(absl::string_view uri) {
- return uri == webrtc::RtpExtension::kAudioLevelUri ||
- uri == webrtc::RtpExtension::kTimestampOffsetUri ||
-#if !defined(ENABLE_EXTERNAL_AUTH)
- // TODO(jbauch): Figure out a way to always allow "kAbsSendTimeUri"
- // here and filter out later if external auth is really used in
- // srtpfilter. External auth is used by Chromium and replaces the
- // extension header value of "kAbsSendTimeUri", so it must not be
- // encrypted (which can't be done by Chromium).
- uri == webrtc::RtpExtension::kAbsSendTimeUri ||
+ return
+#if defined(ENABLE_EXTERNAL_AUTH)
+ // TODO(jbauch): Figure out a way to always allow "kAbsSendTimeUri"
+ // here and filter out later if external auth is really used in
+ // srtpfilter. External auth is used by Chromium and replaces the
+ // extension header value of "kAbsSendTimeUri", so it must not be
+ // encrypted (which can't be done by Chromium).
+ uri != webrtc::RtpExtension::kAbsSendTimeUri &&
#endif
- uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri ||
- uri == webrtc::RtpExtension::kVideoRotationUri ||
- uri == webrtc::RtpExtension::kTransportSequenceNumberUri ||
- uri == webrtc::RtpExtension::kTransportSequenceNumberV2Uri ||
- uri == webrtc::RtpExtension::kPlayoutDelayUri ||
- uri == webrtc::RtpExtension::kVideoContentTypeUri ||
- uri == webrtc::RtpExtension::kMidUri ||
- uri == webrtc::RtpExtension::kRidUri ||
- uri == webrtc::RtpExtension::kRepairedRidUri ||
- uri == webrtc::RtpExtension::kVideoLayersAllocationUri;
+ uri != webrtc::RtpExtension::kEncryptHeaderExtensionsUri;
}
-const RtpExtension* RtpExtension::FindHeaderExtensionByUri(
+// Returns whether a header extension with the given URI exists.
+// Note: This does not differentiate between encrypted and non-encrypted
+// extensions, so use with care!
+static bool HeaderExtensionWithUriExists(
const std::vector<RtpExtension>& extensions,
absl::string_view uri) {
for (const auto& extension : extensions) {
if (extension.uri == uri) {
+ return true;
+ }
+ }
+ return false;
+}
+
+const RtpExtension* RtpExtension::FindHeaderExtensionByUri(
+ const std::vector<RtpExtension>& extensions,
+ absl::string_view uri,
+ Filter filter) {
+ const webrtc::RtpExtension* fallback_extension = nullptr;
+ for (const auto& extension : extensions) {
+ if (extension.uri != uri) {
+ continue;
+ }
+
+ switch (filter) {
+ case kDiscardEncryptedExtension:
+ // We only accept an unencrypted extension.
+ if (!extension.encrypt) {
+ return &extension;
+ }
+ break;
+
+ case kPreferEncryptedExtension:
+ // We prefer an encrypted extension but we can fall back to an
+ // unencrypted extension.
+ if (extension.encrypt) {
+ return &extension;
+ } else {
+ fallback_extension = &extension;
+ }
+ break;
+
+ case kRequireEncryptedExtension:
+ // We only accept an encrypted extension.
+ if (extension.encrypt) {
+ return &extension;
+ }
+ break;
+ }
+ }
+
+ // Return the fallback extension, if any.
+ return fallback_extension;
+}
+
+const RtpExtension* RtpExtension::FindHeaderExtensionByUri(
+ const std::vector<RtpExtension>& extensions,
+ absl::string_view uri) {
+ return FindHeaderExtensionByUri(extensions, uri, kPreferEncryptedExtension);
+}
+
+const RtpExtension* RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ const std::vector<RtpExtension>& extensions,
+ absl::string_view uri,
+ bool encrypt) {
+ for (const auto& extension : extensions) {
+ if (extension.uri == uri && extension.encrypt == encrypt) {
return &extension;
}
}
return nullptr;
}
-std::vector<RtpExtension> RtpExtension::FilterDuplicateNonEncrypted(
- const std::vector<RtpExtension>& extensions) {
+const std::vector<RtpExtension> RtpExtension::DeduplicateHeaderExtensions(
+ const std::vector<RtpExtension>& extensions,
+ Filter filter) {
std::vector<RtpExtension> filtered;
- for (auto extension = extensions.begin(); extension != extensions.end();
- ++extension) {
- if (extension->encrypt) {
- filtered.push_back(*extension);
- continue;
- }
- // Only add non-encrypted extension if no encrypted with the same URI
- // is also present...
- if (std::any_of(extension + 1, extensions.end(),
- [&](const RtpExtension& check) {
- return extension->uri == check.uri;
- })) {
- continue;
+ // If we do not discard encrypted extensions, add them first
+ if (filter != kDiscardEncryptedExtension) {
+ for (const auto& extension : extensions) {
+ if (!extension.encrypt) {
+ continue;
+ }
+ if (!HeaderExtensionWithUriExists(filtered, extension.uri)) {
+ filtered.push_back(extension);
+ }
}
+ }
- // ...and has not been added before.
- if (!FindHeaderExtensionByUri(filtered, extension->uri)) {
- filtered.push_back(*extension);
+ // If we do not require encrypted extensions, add missing, non-encrypted
+ // extensions.
+ if (filter != kRequireEncryptedExtension) {
+ for (const auto& extension : extensions) {
+ if (extension.encrypt) {
+ continue;
+ }
+ if (!HeaderExtensionWithUriExists(filtered, extension.uri)) {
+ filtered.push_back(extension);
+ }
}
}
+
return filtered;
}
} // namespace webrtc
diff --git a/api/rtp_parameters.h b/api/rtp_parameters.h
index 7fe9f2bc83..a098bad6b0 100644
--- a/api/rtp_parameters.h
+++ b/api/rtp_parameters.h
@@ -246,6 +246,18 @@ struct RTC_EXPORT RtpHeaderExtensionCapability {
// RTP header extension, see RFC8285.
struct RTC_EXPORT RtpExtension {
+ enum Filter {
+ // Encrypted extensions will be ignored and only non-encrypted extensions
+ // will be considered.
+ kDiscardEncryptedExtension,
+ // Encrypted extensions will be preferred but will fall back to
+ // non-encrypted extensions if necessary.
+ kPreferEncryptedExtension,
+ // Encrypted extensions will be required, so any non-encrypted extensions
+ // will be discarded.
+ kRequireEncryptedExtension,
+ };
+
RtpExtension();
RtpExtension(absl::string_view uri, int id);
RtpExtension(absl::string_view uri, int id, bool encrypt);
@@ -260,17 +272,28 @@ struct RTC_EXPORT RtpExtension {
// Return "true" if the given RTP header extension URI may be encrypted.
static bool IsEncryptionSupported(absl::string_view uri);
- // Returns the named header extension if found among all extensions,
- // nullptr otherwise.
+ // Returns the header extension with the given URI or nullptr if not found.
+ static const RtpExtension* FindHeaderExtensionByUri(
+ const std::vector<RtpExtension>& extensions,
+ absl::string_view uri,
+ Filter filter);
+ ABSL_DEPRECATED(
+ "Use RtpExtension::FindHeaderExtensionByUri with filter argument")
static const RtpExtension* FindHeaderExtensionByUri(
const std::vector<RtpExtension>& extensions,
absl::string_view uri);
- // Return a list of RTP header extensions with the non-encrypted extensions
- // removed if both the encrypted and non-encrypted extension is present for
- // the same URI.
- static std::vector<RtpExtension> FilterDuplicateNonEncrypted(
- const std::vector<RtpExtension>& extensions);
+ // Returns the header extension with the given URI and encrypt parameter,
+ // if found, otherwise nullptr.
+ static const RtpExtension* FindHeaderExtensionByUriAndEncryption(
+ const std::vector<RtpExtension>& extensions,
+ absl::string_view uri,
+ bool encrypt);
+
+ // Returns a list of extensions in which each URI appears at most once.
+ static const std::vector<RtpExtension> DeduplicateHeaderExtensions(
+ const std::vector<RtpExtension>& extensions,
+ Filter filter);
// Encryption of Header Extensions, see RFC 6904 for details:
// https://tools.ietf.org/html/rfc6904
@@ -357,6 +380,11 @@ struct RTC_EXPORT RtpExtension {
static constexpr char kVideoFrameTrackingIdUri[] =
"http://www.webrtc.org/experiments/rtp-hdrext/video-frame-tracking-id";
+ // Header extension for Mixer-to-Client audio levels per CSRC as defined in
+ // https://tools.ietf.org/html/rfc6465
+ static constexpr char kCsrcAudioLevelsUri[] =
+ "urn:ietf:params:rtp-hdrext:csrc-audio-level";
+
// Inclusive min and max IDs for two-byte header extensions and one-byte
// header extensions, per RFC8285 Section 4.2-4.3.
static constexpr int kMinId = 1;
diff --git a/api/rtp_parameters_unittest.cc b/api/rtp_parameters_unittest.cc
index 5928cbda63..51ad426748 100644
--- a/api/rtp_parameters_unittest.cc
+++ b/api/rtp_parameters_unittest.cc
@@ -23,28 +23,249 @@ static const RtpExtension kExtension1(kExtensionUri1, 1);
static const RtpExtension kExtension1Encrypted(kExtensionUri1, 10, true);
static const RtpExtension kExtension2(kExtensionUri2, 2);
-TEST(RtpExtensionTest, FilterDuplicateNonEncrypted) {
+TEST(RtpExtensionTest, DeduplicateHeaderExtensions) {
std::vector<RtpExtension> extensions;
std::vector<RtpExtension> filtered;
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension1Encrypted);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kDiscardEncryptedExtension);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ(std::vector<RtpExtension>{kExtension1}, filtered);
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension1Encrypted);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kPreferEncryptedExtension);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ(std::vector<RtpExtension>{kExtension1Encrypted}, filtered);
+
+ extensions.clear();
extensions.push_back(kExtension1);
extensions.push_back(kExtension1Encrypted);
- filtered = RtpExtension::FilterDuplicateNonEncrypted(extensions);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kRequireEncryptedExtension);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ(std::vector<RtpExtension>{kExtension1Encrypted}, filtered);
+
+ extensions.clear();
+ extensions.push_back(kExtension1Encrypted);
+ extensions.push_back(kExtension1);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kDiscardEncryptedExtension);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ(std::vector<RtpExtension>{kExtension1}, filtered);
+
+ extensions.clear();
+ extensions.push_back(kExtension1Encrypted);
+ extensions.push_back(kExtension1);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kPreferEncryptedExtension);
EXPECT_EQ(1u, filtered.size());
EXPECT_EQ(std::vector<RtpExtension>{kExtension1Encrypted}, filtered);
extensions.clear();
extensions.push_back(kExtension1Encrypted);
extensions.push_back(kExtension1);
- filtered = RtpExtension::FilterDuplicateNonEncrypted(extensions);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kRequireEncryptedExtension);
EXPECT_EQ(1u, filtered.size());
EXPECT_EQ(std::vector<RtpExtension>{kExtension1Encrypted}, filtered);
extensions.clear();
extensions.push_back(kExtension1);
extensions.push_back(kExtension2);
- filtered = RtpExtension::FilterDuplicateNonEncrypted(extensions);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kDiscardEncryptedExtension);
+ EXPECT_EQ(2u, filtered.size());
+ EXPECT_EQ(extensions, filtered);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kPreferEncryptedExtension);
EXPECT_EQ(2u, filtered.size());
EXPECT_EQ(extensions, filtered);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kRequireEncryptedExtension);
+ EXPECT_EQ(0u, filtered.size());
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension2);
+ extensions.push_back(kExtension1Encrypted);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kDiscardEncryptedExtension);
+ EXPECT_EQ(2u, filtered.size());
+ EXPECT_EQ((std::vector<RtpExtension>{kExtension1, kExtension2}), filtered);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kPreferEncryptedExtension);
+ EXPECT_EQ(2u, filtered.size());
+ EXPECT_EQ((std::vector<RtpExtension>{kExtension1Encrypted, kExtension2}),
+ filtered);
+ filtered = RtpExtension::DeduplicateHeaderExtensions(
+ extensions, RtpExtension::Filter::kRequireEncryptedExtension);
+ EXPECT_EQ(1u, filtered.size());
+ EXPECT_EQ((std::vector<RtpExtension>{kExtension1Encrypted}), filtered);
+}
+
+TEST(RtpExtensionTest, FindHeaderExtensionByUriAndEncryption) {
+ std::vector<RtpExtension> extensions;
+
+ extensions.clear();
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri1, false));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri1, false));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri1, true));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri2, false));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension2);
+ extensions.push_back(kExtension1Encrypted);
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri1, false));
+ EXPECT_EQ(kExtension2, *RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri2, false));
+ EXPECT_EQ(kExtension1Encrypted,
+ *RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri1, true));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ extensions, kExtensionUri2, true));
+}
+
+TEST(RtpExtensionTest, FindHeaderExtensionByUri) {
+ std::vector<RtpExtension> extensions;
+
+ extensions.clear();
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension1Encrypted);
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension1Encrypted);
+ EXPECT_EQ(kExtension1Encrypted,
+ *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension1Encrypted);
+ EXPECT_EQ(kExtension1Encrypted,
+ *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1Encrypted);
+ extensions.push_back(kExtension1);
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1Encrypted);
+ extensions.push_back(kExtension1);
+ EXPECT_EQ(kExtension1Encrypted,
+ *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1Encrypted);
+ extensions.push_back(kExtension1);
+ EXPECT_EQ(kExtension1Encrypted,
+ *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension2);
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+ EXPECT_EQ(kExtension2, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+ EXPECT_EQ(kExtension2, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+
+ extensions.clear();
+ extensions.push_back(kExtension1);
+ extensions.push_back(kExtension2);
+ extensions.push_back(kExtension1Encrypted);
+ EXPECT_EQ(kExtension1, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+ EXPECT_EQ(kExtension1Encrypted,
+ *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+ EXPECT_EQ(kExtension1Encrypted,
+ *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri1,
+ RtpExtension::Filter::kRequireEncryptedExtension));
+ EXPECT_EQ(kExtension2, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kDiscardEncryptedExtension));
+ EXPECT_EQ(kExtension2, *RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kPreferEncryptedExtension));
+ EXPECT_EQ(nullptr, RtpExtension::FindHeaderExtensionByUri(
+ extensions, kExtensionUri2,
+ RtpExtension::Filter::kRequireEncryptedExtension));
}
} // namespace webrtc
diff --git a/api/rtp_receiver_interface.h b/api/rtp_receiver_interface.h
index e0ace545af..327c9f2fee 100644
--- a/api/rtp_receiver_interface.h
+++ b/api/rtp_receiver_interface.h
@@ -22,7 +22,6 @@
#include "api/frame_transformer_interface.h"
#include "api/media_stream_interface.h"
#include "api/media_types.h"
-#include "api/proxy.h"
#include "api/rtp_parameters.h"
#include "api/scoped_refptr.h"
#include "api/transport/rtp/rtp_source.h"
@@ -100,11 +99,13 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface {
// before it is sent across the network. This will decrypt the entire frame
// using the user provided decryption mechanism regardless of whether SRTP is
// enabled or not.
+ // TODO(bugs.webrtc.org/12772): Remove.
virtual void SetFrameDecryptor(
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor);
// Returns a pointer to the frame decryptor set previously by the
// user. This can be used to update the state of the object.
+ // TODO(bugs.webrtc.org/12772): Remove.
virtual rtc::scoped_refptr<FrameDecryptorInterface> GetFrameDecryptor() const;
// Sets a frame transformer between the depacketizer and the decoder to enable
@@ -117,32 +118,6 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface {
~RtpReceiverInterface() override = default;
};
-// Define proxy for RtpReceiverInterface.
-// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
-// are called on is an implementation detail.
-BEGIN_PRIMARY_PROXY_MAP(RtpReceiver)
-PROXY_PRIMARY_THREAD_DESTRUCTOR()
-PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
-PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtlsTransportInterface>, dtls_transport)
-PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids)
-PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
- streams)
-BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
-BYPASS_PROXY_CONSTMETHOD0(std::string, id)
-PROXY_CONSTMETHOD0(RtpParameters, GetParameters)
-PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*)
-PROXY_METHOD1(void, SetJitterBufferMinimumDelay, absl::optional<double>)
-PROXY_CONSTMETHOD0(std::vector<RtpSource>, GetSources)
-PROXY_METHOD1(void,
- SetFrameDecryptor,
- rtc::scoped_refptr<FrameDecryptorInterface>)
-PROXY_CONSTMETHOD0(rtc::scoped_refptr<FrameDecryptorInterface>,
- GetFrameDecryptor)
-PROXY_METHOD1(void,
- SetDepacketizerToDecoderFrameTransformer,
- rtc::scoped_refptr<FrameTransformerInterface>)
-END_PROXY_MAP()
-
} // namespace webrtc
#endif // API_RTP_RECEIVER_INTERFACE_H_
diff --git a/api/rtp_sender_interface.h b/api/rtp_sender_interface.h
index dd93792a07..9ffad68644 100644
--- a/api/rtp_sender_interface.h
+++ b/api/rtp_sender_interface.h
@@ -23,7 +23,6 @@
#include "api/frame_transformer_interface.h"
#include "api/media_stream_interface.h"
#include "api/media_types.h"
-#include "api/proxy.h"
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
#include "api/scoped_refptr.h"
@@ -101,33 +100,6 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface {
~RtpSenderInterface() override = default;
};
-// Define proxy for RtpSenderInterface.
-// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
-// are called on is an implementation detail.
-BEGIN_PRIMARY_PROXY_MAP(RtpSender)
-PROXY_PRIMARY_THREAD_DESTRUCTOR()
-PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
-PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
-PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtlsTransportInterface>, dtls_transport)
-PROXY_CONSTMETHOD0(uint32_t, ssrc)
-BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
-BYPASS_PROXY_CONSTMETHOD0(std::string, id)
-PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids)
-PROXY_CONSTMETHOD0(std::vector<RtpEncodingParameters>, init_send_encodings)
-PROXY_CONSTMETHOD0(RtpParameters, GetParameters)
-PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&)
-PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtmfSenderInterface>, GetDtmfSender)
-PROXY_METHOD1(void,
- SetFrameEncryptor,
- rtc::scoped_refptr<FrameEncryptorInterface>)
-PROXY_CONSTMETHOD0(rtc::scoped_refptr<FrameEncryptorInterface>,
- GetFrameEncryptor)
-PROXY_METHOD1(void, SetStreams, const std::vector<std::string>&)
-PROXY_METHOD1(void,
- SetEncoderToPacketizerFrameTransformer,
- rtc::scoped_refptr<FrameTransformerInterface>)
-END_PROXY_MAP()
-
} // namespace webrtc
#endif // API_RTP_SENDER_INTERFACE_H_
diff --git a/api/sequence_checker_unittest.cc b/api/sequence_checker_unittest.cc
index 4029b8c9a0..21a0894a8e 100644
--- a/api/sequence_checker_unittest.cc
+++ b/api/sequence_checker_unittest.cc
@@ -40,21 +40,14 @@ class CompileTimeTestForGuardedBy {
};
void RunOnDifferentThread(rtc::FunctionView<void()> run) {
- struct Object {
- static void Run(void* obj) {
- auto* me = static_cast<Object*>(obj);
- me->run();
- me->thread_has_run_event.Set();
- }
-
- rtc::FunctionView<void()> run;
- rtc::Event thread_has_run_event;
- } object{run};
-
- rtc::PlatformThread thread(&Object::Run, &object, "thread");
- thread.Start();
- EXPECT_TRUE(object.thread_has_run_event.Wait(1000));
- thread.Stop();
+ rtc::Event thread_has_run_event;
+ rtc::PlatformThread::SpawnJoinable(
+ [&] {
+ run();
+ thread_has_run_event.Set();
+ },
+ "thread");
+ EXPECT_TRUE(thread_has_run_event.Wait(1000));
}
} // namespace
diff --git a/api/stats/rtc_stats.h b/api/stats/rtc_stats.h
index 5de5b7fbb0..9290e803fa 100644
--- a/api/stats/rtc_stats.h
+++ b/api/stats/rtc_stats.h
@@ -14,6 +14,7 @@
#include <stddef.h>
#include <stdint.h>
+#include <map>
#include <memory>
#include <string>
#include <utility>
@@ -237,6 +238,9 @@ class RTCStatsMemberInterface {
kSequenceUint64, // std::vector<uint64_t>
kSequenceDouble, // std::vector<double>
kSequenceString, // std::vector<std::string>
+
+ kMapStringUint64, // std::map<std::string, uint64_t>
+ kMapStringDouble, // std::map<std::string, double>
};
virtual ~RTCStatsMemberInterface() {}
@@ -363,6 +367,13 @@ class RTCStatsMember : public RTCStatsMemberInterface {
T value_;
};
+namespace rtc_stats_internal {
+
+typedef std::map<std::string, uint64_t> MapStringUint64;
+typedef std::map<std::string, double> MapStringDouble;
+
+} // namespace rtc_stats_internal
+
#define WEBRTC_DECLARE_RTCSTATSMEMBER(T) \
template <> \
RTC_EXPORT RTCStatsMemberInterface::Type RTCStatsMember<T>::StaticType(); \
@@ -391,6 +402,8 @@ WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector<int64_t>);
WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector<uint64_t>);
WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector<double>);
WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector<std::string>);
+WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringUint64);
+WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringDouble);
// Using inheritance just so that it's obvious from the member's declaration
// whether it's standardized or not.
@@ -455,6 +468,10 @@ extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<double>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<std::string>>;
+extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
+ RTCNonStandardStatsMember<std::map<std::string, uint64_t>>;
+extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
+ RTCNonStandardStatsMember<std::map<std::string, double>>;
} // namespace webrtc
diff --git a/api/stats/rtc_stats_collector_callback.h b/api/stats/rtc_stats_collector_callback.h
index c3e08245ea..506cc63e6f 100644
--- a/api/stats/rtc_stats_collector_callback.h
+++ b/api/stats/rtc_stats_collector_callback.h
@@ -17,7 +17,7 @@
namespace webrtc {
-class RTCStatsCollectorCallback : public virtual rtc::RefCountInterface {
+class RTCStatsCollectorCallback : public rtc::RefCountInterface {
public:
~RTCStatsCollectorCallback() override = default;
diff --git a/api/stats/rtc_stats_report.h b/api/stats/rtc_stats_report.h
index 94bd813b07..0fe5ce91f9 100644
--- a/api/stats/rtc_stats_report.h
+++ b/api/stats/rtc_stats_report.h
@@ -19,9 +19,11 @@
#include <string>
#include <vector>
+#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "api/stats/rtc_stats.h"
-#include "rtc_base/ref_count.h"
+// TODO(tommi): Remove this include after fixing iwyu issue in chromium.
+// See: third_party/blink/renderer/platform/peerconnection/rtc_stats.cc
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/system/rtc_export.h"
@@ -29,7 +31,8 @@ namespace webrtc {
// A collection of stats.
// This is accessible as a map from |RTCStats::id| to |RTCStats|.
-class RTC_EXPORT RTCStatsReport : public rtc::RefCountInterface {
+class RTC_EXPORT RTCStatsReport final
+ : public rtc::RefCountedNonVirtual<RTCStatsReport> {
public:
typedef std::map<std::string, std::unique_ptr<const RTCStats>> StatsMap;
@@ -107,11 +110,11 @@ class RTC_EXPORT RTCStatsReport : public rtc::RefCountInterface {
// listing all of its stats objects.
std::string ToJson() const;
- friend class rtc::RefCountedObject<RTCStatsReport>;
+ protected:
+ friend class rtc::RefCountedNonVirtual<RTCStatsReport>;
+ ~RTCStatsReport() = default;
private:
- ~RTCStatsReport() override;
-
int64_t timestamp_us_;
StatsMap stats_;
};
diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h
index 60ff8c29fe..fe6d65832b 100644
--- a/api/stats/rtcstats_objects.h
+++ b/api/stats/rtcstats_objects.h
@@ -13,6 +13,7 @@
#include <stdint.h>
+#include <map>
#include <memory>
#include <string>
#include <vector>
@@ -542,10 +543,8 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats {
// implement it for audio as well.
RTCStatsMember<double> total_packet_send_delay;
// Enum type RTCQualityLimitationReason
- // TODO(https://crbug.com/webrtc/10686): Also expose
- // qualityLimitationDurations. Requires RTCStatsMember support for
- // "record<DOMString, double>", see https://crbug.com/webrtc/10685.
RTCStatsMember<std::string> quality_limitation_reason;
+ RTCStatsMember<std::map<std::string, double>> quality_limitation_durations;
// https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationresolutionchanges
RTCStatsMember<uint32_t> quality_limitation_resolution_changes;
// https://henbos.github.io/webrtc-provisional-stats/#dom-rtcoutboundrtpstreamstats-contenttype
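A rough sketch of how the new map-valued member is meant to be read on the consuming side; only the RTCOutboundRTPStreamStats type and the quality_limitation_durations member above come from the patch, the rest is illustrative:

#include <cstdio>

#include "api/stats/rtcstats_objects.h"

// Minimal sketch: print qualityLimitationDurations from an already-collected
// RTCOutboundRTPStreamStats object. Keys are limitation-reason strings such
// as "cpu", "bandwidth", "other" and "none"; values are cumulative durations
// (seconds, per the webrtc-stats spec).
void PrintQualityLimitationDurations(
    const webrtc::RTCOutboundRTPStreamStats& stats) {
  if (!stats.quality_limitation_durations.is_defined())
    return;
  for (const auto& entry : *stats.quality_limitation_durations) {
    std::printf("%s: %.3f s\n", entry.first.c_str(), entry.second);
  }
}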
diff --git a/api/test/create_time_controller.cc b/api/test/create_time_controller.cc
index a2c0cb713f..f7faeaab42 100644
--- a/api/test/create_time_controller.cc
+++ b/api/test/create_time_controller.cc
@@ -13,6 +13,8 @@
#include <memory>
#include "call/call.h"
+#include "call/rtp_transport_config.h"
+#include "call/rtp_transport_controller_send_factory_interface.h"
#include "test/time_controller/external_time_controller.h"
#include "test/time_controller/simulated_time_controller.h"
@@ -40,8 +42,13 @@ std::unique_ptr<CallFactoryInterface> CreateTimeControllerBasedCallFactory(
time_controller_->CreateProcessThread("CallModules"),
[this]() { module_thread_ = nullptr; });
}
+
+ RtpTransportConfig transportConfig = config.ExtractTransportConfig();
+
return Call::Create(config, time_controller_->GetClock(), module_thread_,
- time_controller_->CreateProcessThread("Pacer"));
+ config.rtp_transport_controller_send_factory->Create(
+ transportConfig, time_controller_->GetClock(),
+ time_controller_->CreateProcessThread("Pacer")));
}
private:
diff --git a/api/test/dummy_peer_connection.h b/api/test/dummy_peer_connection.h
index 4d17aeddd0..80ae20c3c7 100644
--- a/api/test/dummy_peer_connection.h
+++ b/api/test/dummy_peer_connection.h
@@ -114,10 +114,10 @@ class DummyPeerConnection : public PeerConnectionInterface {
}
void ClearStatsCache() override {}
- rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>> CreateDataChannelOrError(
const std::string& label,
const DataChannelInit* config) override {
- return nullptr;
+ return RTCError(RTCErrorType::INTERNAL_ERROR, "Dummy function called");
}
const SessionDescriptionInterface* local_description() const override {
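For callers migrating off the old CreateDataChannel signature, a minimal sketch of unwrapping the new RTCErrorOr return value; the wrapper function and the "chat" label are hypothetical, only CreateDataChannelOrError comes from the patch:

#include "api/peer_connection_interface.h"
#include "rtc_base/logging.h"

// Minimal sketch: create a data channel via the error-carrying API and unwrap
// the result, logging the error message on failure.
rtc::scoped_refptr<webrtc::DataChannelInterface> CreateChatChannel(
    webrtc::PeerConnectionInterface* pc) {
  auto result = pc->CreateDataChannelOrError("chat", /*config=*/nullptr);
  if (!result.ok()) {
    RTC_LOG(LS_ERROR) << "CreateDataChannelOrError failed: "
                      << result.error().message();
    return nullptr;
  }
  return result.MoveValue();
}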
diff --git a/api/test/mock_peerconnectioninterface.h b/api/test/mock_peerconnectioninterface.h
index be34df0b32..b5d94238c8 100644
--- a/api/test/mock_peerconnectioninterface.h
+++ b/api/test/mock_peerconnectioninterface.h
@@ -100,8 +100,8 @@ class MockPeerConnectionInterface
GetSctpTransport,
(),
(const override));
- MOCK_METHOD(rtc::scoped_refptr<DataChannelInterface>,
- CreateDataChannel,
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>,
+ CreateDataChannelOrError,
(const std::string&, const DataChannelInit*),
(override));
MOCK_METHOD(const SessionDescriptionInterface*,
diff --git a/api/transport/data_channel_transport_interface.h b/api/transport/data_channel_transport_interface.h
index 7b8c653c39..550fabaacd 100644
--- a/api/transport/data_channel_transport_interface.h
+++ b/api/transport/data_channel_transport_interface.h
@@ -47,15 +47,15 @@ struct SendDataParams {
// If set, the maximum number of times this message may be
// retransmitted by the transport before it is dropped.
// Setting this value to zero disables retransmission.
- // Must be non-negative. |max_rtx_count| and |max_rtx_ms| may not be set
- // simultaneously.
+ // Valid values are in the range [0, UINT16_MAX].
+ // |max_rtx_count| and |max_rtx_ms| may not be set simultaneously.
absl::optional<int> max_rtx_count;
// If set, the maximum number of milliseconds for which the transport
// may retransmit this message before it is dropped.
// Setting this value to zero disables retransmission.
- // Must be non-negative. |max_rtx_count| and |max_rtx_ms| may not be set
- // simultaneously.
+ // Valid values are in the range [0, UINT16_MAX].
+ // |max_rtx_count| and |max_rtx_ms| may not be set simultaneously.
absl::optional<int> max_rtx_ms;
};
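A minimal sketch of the mutual-exclusion rule spelled out above; the helper name MakeLossyParams is illustrative:

#include "api/transport/data_channel_transport_interface.h"

// Minimal sketch: limit retransmissions by count. Per the comments above, at
// most one of max_rtx_count / max_rtx_ms may be set, and zero disables
// retransmission entirely.
webrtc::SendDataParams MakeLossyParams() {
  webrtc::SendDataParams params;
  params.max_rtx_count = 3;  // Drop the message after three retransmissions.
  return params;
}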
diff --git a/api/transport/network_types.cc b/api/transport/network_types.cc
index 88b67b3a47..7451940151 100644
--- a/api/transport/network_types.cc
+++ b/api/transport/network_types.cc
@@ -48,7 +48,7 @@ std::vector<PacketResult> TransportPacketsFeedback::ReceivedWithSendInfo()
const {
std::vector<PacketResult> res;
for (const PacketResult& fb : packet_feedbacks) {
- if (fb.receive_time.IsFinite()) {
+ if (fb.IsReceived()) {
res.push_back(fb);
}
}
@@ -58,7 +58,7 @@ std::vector<PacketResult> TransportPacketsFeedback::ReceivedWithSendInfo()
std::vector<PacketResult> TransportPacketsFeedback::LostWithSendInfo() const {
std::vector<PacketResult> res;
for (const PacketResult& fb : packet_feedbacks) {
- if (fb.receive_time.IsPlusInfinity()) {
+ if (!fb.IsReceived()) {
res.push_back(fb);
}
}
@@ -74,7 +74,7 @@ std::vector<PacketResult> TransportPacketsFeedback::SortedByReceiveTime()
const {
std::vector<PacketResult> res;
for (const PacketResult& fb : packet_feedbacks) {
- if (fb.receive_time.IsFinite()) {
+ if (fb.IsReceived()) {
res.push_back(fb);
}
}
diff --git a/api/transport/network_types.h b/api/transport/network_types.h
index 38a8917f1e..4e96b0f12e 100644
--- a/api/transport/network_types.h
+++ b/api/transport/network_types.h
@@ -158,6 +158,8 @@ struct PacketResult {
PacketResult(const PacketResult&);
~PacketResult();
+ inline bool IsReceived() const { return !receive_time.IsPlusInfinity(); }
+
SentPacket sent_packet;
Timestamp receive_time = Timestamp::PlusInfinity();
};
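A minimal sketch using the new helper to classify a feedback report, mirroring the call sites changed above; the function name is illustrative:

#include "api/transport/network_types.h"

// Minimal sketch: compute the fraction of packets reported lost in a
// TransportPacketsFeedback, using PacketResult::IsReceived().
double LostFraction(const webrtc::TransportPacketsFeedback& feedback) {
  if (feedback.packet_feedbacks.empty())
    return 0.0;
  int lost = 0;
  for (const webrtc::PacketResult& fb : feedback.packet_feedbacks) {
    if (!fb.IsReceived())
      ++lost;
  }
  return static_cast<double>(lost) / feedback.packet_feedbacks.size();
}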
diff --git a/api/uma_metrics.h b/api/uma_metrics.h
index 3e0deb0093..a975b82aeb 100644
--- a/api/uma_metrics.h
+++ b/api/uma_metrics.h
@@ -202,6 +202,17 @@ enum BundlePolicyUsage {
kBundlePolicyUsageMax
};
+// Metrics for provisional answers as described in
+// https://datatracker.ietf.org/doc/html/rfc8829#section-4.1.10.1
+// These values are persisted to logs. Entries should not be renumbered and
+// numeric values should never be reused.
+enum ProvisionalAnswerUsage {
+ kProvisionalAnswerNotUsed = 0,
+ kProvisionalAnswerLocal = 1,
+ kProvisionalAnswerRemote = 2,
+ kProvisionalAnswerMax
+};
+
// When adding new metrics please consider using the style described in
// https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md#usage
// instead of the legacy enums used above.
diff --git a/api/video/encoded_image.cc b/api/video/encoded_image.cc
index 61d921c4fd..fc77b9415b 100644
--- a/api/video/encoded_image.cc
+++ b/api/video/encoded_image.cc
@@ -32,13 +32,13 @@ EncodedImageBuffer::~EncodedImageBuffer() {
// static
rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(size_t size) {
- return new rtc::RefCountedObject<EncodedImageBuffer>(size);
+ return rtc::make_ref_counted<EncodedImageBuffer>(size);
}
// static
rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(
const uint8_t* data,
size_t size) {
- return new rtc::RefCountedObject<EncodedImageBuffer>(data, size);
+ return rtc::make_ref_counted<EncodedImageBuffer>(data, size);
}
const uint8_t* EncodedImageBuffer::data() const {
diff --git a/api/video/i010_buffer.cc b/api/video/i010_buffer.cc
index 7286676ded..74d37d1b57 100644
--- a/api/video/i010_buffer.cc
+++ b/api/video/i010_buffer.cc
@@ -56,8 +56,8 @@ I010Buffer::~I010Buffer() {}
// static
rtc::scoped_refptr<I010Buffer> I010Buffer::Create(int width, int height) {
- return new rtc::RefCountedObject<I010Buffer>(
- width, height, width, (width + 1) / 2, (width + 1) / 2);
+ return rtc::make_ref_counted<I010Buffer>(width, height, width,
+ (width + 1) / 2, (width + 1) / 2);
}
// static
diff --git a/api/video/i420_buffer.cc b/api/video/i420_buffer.cc
index 2a52217ce3..8783a4a313 100644
--- a/api/video/i420_buffer.cc
+++ b/api/video/i420_buffer.cc
@@ -60,7 +60,7 @@ I420Buffer::~I420Buffer() {}
// static
rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) {
- return new rtc::RefCountedObject<I420Buffer>(width, height);
+ return rtc::make_ref_counted<I420Buffer>(width, height);
}
// static
@@ -69,8 +69,8 @@ rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width,
int stride_y,
int stride_u,
int stride_v) {
- return new rtc::RefCountedObject<I420Buffer>(width, height, stride_y,
- stride_u, stride_v);
+ return rtc::make_ref_counted<I420Buffer>(width, height, stride_y, stride_u,
+ stride_v);
}
// static
diff --git a/api/video/nv12_buffer.cc b/api/video/nv12_buffer.cc
index cfa85ac52e..37d688b88b 100644
--- a/api/video/nv12_buffer.cc
+++ b/api/video/nv12_buffer.cc
@@ -49,7 +49,7 @@ NV12Buffer::~NV12Buffer() = default;
// static
rtc::scoped_refptr<NV12Buffer> NV12Buffer::Create(int width, int height) {
- return new rtc::RefCountedObject<NV12Buffer>(width, height);
+ return rtc::make_ref_counted<NV12Buffer>(width, height);
}
// static
@@ -57,8 +57,7 @@ rtc::scoped_refptr<NV12Buffer> NV12Buffer::Create(int width,
int height,
int stride_y,
int stride_uv) {
- return new rtc::RefCountedObject<NV12Buffer>(width, height, stride_y,
- stride_uv);
+ return rtc::make_ref_counted<NV12Buffer>(width, height, stride_y, stride_uv);
}
// static
@@ -145,11 +144,10 @@ void NV12Buffer::CropAndScaleFrom(const NV12BufferInterface& src,
const uint8_t* uv_plane =
src.DataUV() + src.StrideUV() * uv_offset_y + uv_offset_x * 2;
- // kFilterBox is unsupported in libyuv, so using kFilterBilinear instead.
int res = libyuv::NV12Scale(y_plane, src.StrideY(), uv_plane, src.StrideUV(),
crop_width, crop_height, MutableDataY(),
StrideY(), MutableDataUV(), StrideUV(), width(),
- height(), libyuv::kFilterBilinear);
+ height(), libyuv::kFilterBox);
RTC_DCHECK_EQ(res, 0);
}
diff --git a/api/video/video_timing.h b/api/video/video_timing.h
index fbd92254a0..80320daa83 100644
--- a/api/video/video_timing.h
+++ b/api/video/video_timing.h
@@ -41,7 +41,7 @@ struct VideoSendTiming {
uint16_t pacer_exit_delta_ms;
uint16_t network_timestamp_delta_ms;
uint16_t network2_timestamp_delta_ms;
- uint8_t flags;
+ uint8_t flags = TimingFrameFlags::kInvalid;
};
// Used to report precise timings of a 'timing frames'. Contains all important
diff --git a/api/video_codecs/BUILD.gn b/api/video_codecs/BUILD.gn
index 4e28cd7e1c..83d67fcac4 100644
--- a/api/video_codecs/BUILD.gn
+++ b/api/video_codecs/BUILD.gn
@@ -25,7 +25,6 @@ rtc_library("video_codecs_api") {
"video_codec.h",
"video_decoder.cc",
"video_decoder.h",
- "video_decoder_factory.cc",
"video_decoder_factory.h",
"video_encoder.cc",
"video_encoder.h",
@@ -44,6 +43,7 @@ rtc_library("video_codecs_api") {
deps = [
"..:fec_controller_api",
"..:scoped_refptr",
+ "../../api:array_view",
"../../modules/video_coding:codec_globals_headers",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
diff --git a/api/video_codecs/builtin_video_encoder_factory.cc b/api/video_codecs/builtin_video_encoder_factory.cc
index 2f722a4a5c..9463a9cdf2 100644
--- a/api/video_codecs/builtin_video_encoder_factory.cc
+++ b/api/video_codecs/builtin_video_encoder_factory.cc
@@ -26,18 +26,6 @@ namespace webrtc {
namespace {
-bool IsFormatSupported(const std::vector<SdpVideoFormat>& supported_formats,
- const SdpVideoFormat& format) {
- for (const SdpVideoFormat& supported_format : supported_formats) {
- if (cricket::IsSameCodec(format.name, format.parameters,
- supported_format.name,
- supported_format.parameters)) {
- return true;
- }
- }
- return false;
-}
-
// This class wraps the internal factory and adds simulcast.
class BuiltinVideoEncoderFactory : public VideoEncoderFactory {
public:
@@ -47,8 +35,8 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory {
VideoEncoderFactory::CodecInfo QueryVideoEncoder(
const SdpVideoFormat& format) const override {
// Format must be one of the internal formats.
- RTC_DCHECK(IsFormatSupported(
- internal_encoder_factory_->GetSupportedFormats(), format));
+ RTC_DCHECK(
+ format.IsCodecInList(internal_encoder_factory_->GetSupportedFormats()));
VideoEncoderFactory::CodecInfo info;
return info;
}
@@ -57,8 +45,8 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory {
const SdpVideoFormat& format) override {
// Try creating internal encoder.
std::unique_ptr<VideoEncoder> internal_encoder;
- if (IsFormatSupported(internal_encoder_factory_->GetSupportedFormats(),
- format)) {
+ if (format.IsCodecInList(
+ internal_encoder_factory_->GetSupportedFormats())) {
internal_encoder = std::make_unique<EncoderSimulcastProxy>(
internal_encoder_factory_.get(), format);
}
diff --git a/api/video_codecs/sdp_video_format.cc b/api/video_codecs/sdp_video_format.cc
index f8901492ee..689c337ced 100644
--- a/api/video_codecs/sdp_video_format.cc
+++ b/api/video_codecs/sdp_video_format.cc
@@ -10,10 +10,57 @@
#include "api/video_codecs/sdp_video_format.h"
+#include "absl/strings/match.h"
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/vp9_profile.h"
+#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
+namespace {
+
+std::string H264GetPacketizationModeOrDefault(
+ const SdpVideoFormat::Parameters& params) {
+ constexpr char kH264FmtpPacketizationMode[] = "packetization-mode";
+ const auto it = params.find(kH264FmtpPacketizationMode);
+ if (it != params.end()) {
+ return it->second;
+ }
+ // If packetization-mode is not present, default to "0".
+ // https://tools.ietf.org/html/rfc6184#section-6.2
+ return "0";
+}
+
+bool H264IsSamePacketizationMode(const SdpVideoFormat::Parameters& left,
+ const SdpVideoFormat::Parameters& right) {
+ return H264GetPacketizationModeOrDefault(left) ==
+ H264GetPacketizationModeOrDefault(right);
+}
+
+// Some (video) codecs are actually families of codecs and rely on parameters
+// to distinguish different incompatible family members.
+bool IsSameCodecSpecific(const SdpVideoFormat& format1,
+ const SdpVideoFormat& format2) {
+ // The assumption when calling this function is that the two formats have the
+ // same name.
+ RTC_DCHECK(absl::EqualsIgnoreCase(format1.name, format2.name));
+
+ VideoCodecType codec_type = PayloadStringToCodecType(format1.name);
+ switch (codec_type) {
+ case kVideoCodecH264:
+ return H264IsSameProfile(format1.parameters, format2.parameters) &&
+ H264IsSamePacketizationMode(format1.parameters,
+ format2.parameters);
+ case kVideoCodecVP9:
+ return VP9IsSameProfile(format1.parameters, format2.parameters);
+ default:
+ return true;
+ }
+}
+} // namespace
+
SdpVideoFormat::SdpVideoFormat(const std::string& name) : name(name) {}
SdpVideoFormat::SdpVideoFormat(const std::string& name,
@@ -37,6 +84,23 @@ std::string SdpVideoFormat::ToString() const {
return builder.str();
}
+bool SdpVideoFormat::IsSameCodec(const SdpVideoFormat& other) const {
+ // Two codecs are considered the same if the name matches (case insensitive)
+ // and certain codec-specific parameters match.
+ return absl::EqualsIgnoreCase(name, other.name) &&
+ IsSameCodecSpecific(*this, other);
+}
+
+bool SdpVideoFormat::IsCodecInList(
+ rtc::ArrayView<const webrtc::SdpVideoFormat> formats) const {
+ for (const auto& format : formats) {
+ if (IsSameCodec(format)) {
+ return true;
+ }
+ }
+ return false;
+}
+
bool operator==(const SdpVideoFormat& a, const SdpVideoFormat& b) {
return a.name == b.name && a.parameters == b.parameters;
}
diff --git a/api/video_codecs/sdp_video_format.h b/api/video_codecs/sdp_video_format.h
index 97bb75489d..a1e23f4f9c 100644
--- a/api/video_codecs/sdp_video_format.h
+++ b/api/video_codecs/sdp_video_format.h
@@ -14,6 +14,7 @@
#include <map>
#include <string>
+#include "api/array_view.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
@@ -32,6 +33,13 @@ struct RTC_EXPORT SdpVideoFormat {
~SdpVideoFormat();
+ // Returns true if the SdpVideoFormats have the same names as well as codec
+ // specific parameters. Please note that two SdpVideoFormats can represent the
+ // same codec even though not all parameters are the same.
+ bool IsSameCodec(const SdpVideoFormat& other) const;
+ bool IsCodecInList(
+ rtc::ArrayView<const webrtc::SdpVideoFormat> formats) const;
+
std::string ToString() const;
friend RTC_EXPORT bool operator==(const SdpVideoFormat& a,
diff --git a/api/video_codecs/test/BUILD.gn b/api/video_codecs/test/BUILD.gn
index ea8e6d5bee..c082dbc562 100644
--- a/api/video_codecs/test/BUILD.gn
+++ b/api/video_codecs/test/BUILD.gn
@@ -14,6 +14,7 @@ if (rtc_include_tests) {
sources = [
"builtin_video_encoder_factory_unittest.cc",
"h264_profile_level_id_unittest.cc",
+ "sdp_video_format_unittest.cc",
"video_decoder_software_fallback_wrapper_unittest.cc",
"video_encoder_software_fallback_wrapper_unittest.cc",
]
diff --git a/api/video_codecs/test/sdp_video_format_unittest.cc b/api/video_codecs/test/sdp_video_format_unittest.cc
new file mode 100644
index 0000000000..d55816690e
--- /dev/null
+++ b/api/video_codecs/test/sdp_video_format_unittest.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/sdp_video_format.h"
+
+#include <stdint.h>
+
+#include "test/gtest.h"
+
+namespace webrtc {
+
+typedef SdpVideoFormat Sdp;
+typedef SdpVideoFormat::Parameters Params;
+
+TEST(SdpVideoFormatTest, SameCodecNameNoParameters) {
+ EXPECT_TRUE(Sdp("H264").IsSameCodec(Sdp("h264")));
+ EXPECT_TRUE(Sdp("VP8").IsSameCodec(Sdp("vp8")));
+ EXPECT_TRUE(Sdp("Vp9").IsSameCodec(Sdp("vp9")));
+ EXPECT_TRUE(Sdp("AV1").IsSameCodec(Sdp("Av1")));
+}
+TEST(SdpVideoFormatTest, DifferentCodecNameNoParameters) {
+ EXPECT_FALSE(Sdp("H264").IsSameCodec(Sdp("VP8")));
+ EXPECT_FALSE(Sdp("VP8").IsSameCodec(Sdp("VP9")));
+ EXPECT_FALSE(Sdp("AV1").IsSameCodec(Sdp("")));
+}
+TEST(SdpVideoFormatTest, SameCodecNameSameParameters) {
+ EXPECT_TRUE(Sdp("VP9").IsSameCodec(Sdp("VP9", Params{{"profile-id", "0"}})));
+ EXPECT_TRUE(Sdp("VP9", Params{{"profile-id", "0"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "0"}})));
+ EXPECT_TRUE(Sdp("VP9", Params{{"profile-id", "2"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "2"}})));
+ EXPECT_TRUE(
+ Sdp("H264", Params{{"profile-level-id", "42e01f"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "42e01f"}})));
+ EXPECT_TRUE(
+ Sdp("H264", Params{{"profile-level-id", "640c34"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "640c34"}})));
+}
+
+TEST(SdpVideoFormatTest, SameCodecNameDifferentParameters) {
+ EXPECT_FALSE(Sdp("VP9").IsSameCodec(Sdp("VP9", Params{{"profile-id", "2"}})));
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "0"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "1"}})));
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "2"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "0"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "42e01f"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "640c34"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "640c34"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "42f00b"}})));
+}
+
+TEST(SdpVideoFormatTest, DifferentCodecNameSameParameters) {
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "0"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-id", "0"}})));
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "2"}})
+ .IsSameCodec(Sdp("VP8", Params{{"profile-id", "2"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "42e01f"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-level-id", "42e01f"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "640c34"}})
+ .IsSameCodec(Sdp("VP8", Params{{"profile-level-id", "640c34"}})));
+}
+
+} // namespace webrtc
diff --git a/api/video_codecs/video_decoder_factory.cc b/api/video_codecs/video_decoder_factory.cc
deleted file mode 100644
index 511a3c7e92..0000000000
--- a/api/video_codecs/video_decoder_factory.cc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/video_codecs/video_decoder_factory.h"
-
-#include "api/video_codecs/video_decoder.h"
-
-namespace webrtc {
-
-std::unique_ptr<VideoDecoder> VideoDecoderFactory::LegacyCreateVideoDecoder(
- const SdpVideoFormat& format,
- const std::string& receive_stream_id) {
- return CreateVideoDecoder(format);
-}
-
-} // namespace webrtc
diff --git a/api/video_codecs/video_decoder_factory.h b/api/video_codecs/video_decoder_factory.h
index e4d83c2465..0b6ea4f9f2 100644
--- a/api/video_codecs/video_decoder_factory.h
+++ b/api/video_codecs/video_decoder_factory.h
@@ -15,31 +15,51 @@
#include <string>
#include <vector>
+#include "absl/types/optional.h"
+#include "api/video_codecs/sdp_video_format.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
class VideoDecoder;
-struct SdpVideoFormat;
// A factory that creates VideoDecoders.
// NOTE: This class is still under development and may change without notice.
class RTC_EXPORT VideoDecoderFactory {
public:
+ struct CodecSupport {
+ bool is_supported = false;
+ bool is_power_efficient = false;
+ };
+
// Returns a list of supported video formats in order of preference, to use
// for signaling etc.
virtual std::vector<SdpVideoFormat> GetSupportedFormats() const = 0;
+ // Query whether the specified format is supported and whether it will be
+ // power efficient, which is currently interpreted as whether there is
+ // support for hardware acceleration.
+ // See https://w3c.github.io/webrtc-svc/#scalabilitymodes* for a specification
+ // of valid values for |scalability_mode|.
+ // NOTE: QueryCodecSupport is currently an experimental feature that is
+ // subject to change without notice.
+ virtual CodecSupport QueryCodecSupport(
+ const SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const {
+ // Default implementation: query the supported formats and check whether the
+ // specified format is among them. Reports the format as unsupported if a
+ // scalability_mode is specified.
+ CodecSupport codec_support;
+ if (!scalability_mode) {
+ codec_support.is_supported = format.IsCodecInList(GetSupportedFormats());
+ }
+ return codec_support;
+ }
+
// Creates a VideoDecoder for the specified format.
virtual std::unique_ptr<VideoDecoder> CreateVideoDecoder(
const SdpVideoFormat& format) = 0;
- // Note: Do not call or override this method! This method is a legacy
- // workaround and is scheduled for removal without notice.
- virtual std::unique_ptr<VideoDecoder> LegacyCreateVideoDecoder(
- const SdpVideoFormat& format,
- const std::string& receive_stream_id);
-
virtual ~VideoDecoderFactory() {}
};
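A rough sketch of calling the new, experimental query; the probe function is hypothetical, and "L3T3" is one of the webrtc-svc scalability-mode strings the comment above points to:

#include <string>

#include "absl/types/optional.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "rtc_base/logging.h"

// Minimal sketch: ask a decoder factory about plain VP9 and about VP9 with a
// requested scalability mode. The default implementation above reports any
// request with a scalability mode as unsupported; concrete factories may
// override that.
void ProbeVp9Support(const webrtc::VideoDecoderFactory& factory) {
  const webrtc::SdpVideoFormat vp9("VP9");
  const auto basic = factory.QueryCodecSupport(vp9, absl::nullopt);
  const auto svc =
      factory.QueryCodecSupport(vp9, absl::optional<std::string>("L3T3"));
  RTC_LOG(LS_INFO) << "VP9 supported=" << basic.is_supported
                   << " power_efficient=" << basic.is_power_efficient
                   << ", with L3T3 supported=" << svc.is_supported;
}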
diff --git a/api/video_codecs/video_encoder.cc b/api/video_codecs/video_encoder.cc
index 486200bc82..a7e9d7487c 100644
--- a/api/video_codecs/video_encoder.cc
+++ b/api/video_codecs/video_encoder.cc
@@ -135,8 +135,17 @@ std::string VideoEncoder::EncoderInfo::ToString() const {
<< ", is_hardware_accelerated = " << is_hardware_accelerated
<< ", has_internal_source = " << has_internal_source
<< ", fps_allocation = [";
+ size_t num_spatial_layer_with_fps_allocation = 0;
+ for (size_t i = 0; i < kMaxSpatialLayers; ++i) {
+ if (!fps_allocation[i].empty()) {
+ num_spatial_layer_with_fps_allocation = i + 1;
+ }
+ }
bool first = true;
- for (size_t i = 0; i < fps_allocation->size(); ++i) {
+ for (size_t i = 0; i < num_spatial_layer_with_fps_allocation; ++i) {
+ if (fps_allocation[i].empty()) {
+ break;
+ }
if (!first) {
oss << ", ";
}
diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h
index 22430eb19d..c2d66cfa86 100644
--- a/api/video_codecs/video_encoder_factory.h
+++ b/api/video_codecs/video_encoder_factory.h
@@ -12,6 +12,7 @@
#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_H_
#include <memory>
+#include <string>
#include <vector>
#include "absl/types/optional.h"
@@ -36,6 +37,11 @@ class VideoEncoderFactory {
bool has_internal_source = false;
};
+ struct CodecSupport {
+ bool is_supported = false;
+ bool is_power_efficient = false;
+ };
+
// An injectable class that is continuously updated with encoding conditions
// and selects the best encoder given those conditions.
class EncoderSelectorInterface {
@@ -78,6 +84,26 @@ class VideoEncoderFactory {
return CodecInfo();
}
+ // Query whether the specified format is supported and whether it will be
+ // power efficient, which is currently interpreted as whether there is
+ // support for hardware acceleration.
+ // See https://w3c.github.io/webrtc-svc/#scalabilitymodes* for a specification
+ // of valid values for |scalability_mode|.
+ // NOTE: QueryCodecSupport is currently an experimental feature that is
+ // subject to change without notice.
+ virtual CodecSupport QueryCodecSupport(
+ const SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const {
+ // Default implementation: query the supported formats and check whether the
+ // specified format is among them. Reports the format as unsupported if a
+ // scalability_mode is specified.
+ CodecSupport codec_support;
+ if (!scalability_mode) {
+ codec_support.is_supported = format.IsCodecInList(GetSupportedFormats());
+ }
+ return codec_support;
+ }
+
// Creates a VideoEncoder for the specified format.
virtual std::unique_ptr<VideoEncoder> CreateVideoEncoder(
const SdpVideoFormat& format) = 0;
diff --git a/api/video_track_source_proxy_factory.h b/api/video_track_source_proxy_factory.h
new file mode 100644
index 0000000000..974720d50b
--- /dev/null
+++ b/api/video_track_source_proxy_factory.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_
+#define API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_
+
+#include "api/media_stream_interface.h"
+
+namespace webrtc {
+
+// Creates a proxy source for |source| which makes sure the real
+// VideoTrackSourceInterface implementation is destroyed on the signaling thread
+// and marshals calls to |worker_thread| and |signaling_thread|.
+rtc::scoped_refptr<VideoTrackSourceInterface> RTC_EXPORT
+CreateVideoTrackSourceProxy(rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ VideoTrackSourceInterface* source);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_
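A minimal usage sketch for the new factory function, assuming the caller already owns a VideoTrackSourceInterface implementation and the two rtc::Thread instances; only CreateVideoTrackSourceProxy itself comes from the patch:

#include "api/media_stream_interface.h"
#include "api/scoped_refptr.h"
#include "api/video_track_source_proxy_factory.h"
#include "rtc_base/thread.h"

// Minimal sketch: wrap a raw source so calls are marshalled to the right
// threads before it is handed to CreateVideoTrack().
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> WrapSource(
    rtc::Thread* signaling_thread,
    rtc::Thread* worker_thread,
    webrtc::VideoTrackSourceInterface* raw_source) {
  return webrtc::CreateVideoTrackSourceProxy(signaling_thread, worker_thread,
                                             raw_source);
}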
diff --git a/api/voip/test/voip_engine_factory_unittest.cc b/api/voip/test/voip_engine_factory_unittest.cc
index 84b474f3b8..f967a0ba8f 100644
--- a/api/voip/test/voip_engine_factory_unittest.cc
+++ b/api/voip/test/voip_engine_factory_unittest.cc
@@ -24,11 +24,11 @@ namespace {
// Create voip engine with mock modules as normal use case.
TEST(VoipEngineFactoryTest, CreateEngineWithMockModules) {
VoipEngineConfig config;
- config.encoder_factory = new rtc::RefCountedObject<MockAudioEncoderFactory>();
- config.decoder_factory = new rtc::RefCountedObject<MockAudioDecoderFactory>();
+ config.encoder_factory = rtc::make_ref_counted<MockAudioEncoderFactory>();
+ config.decoder_factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
config.task_queue_factory = CreateDefaultTaskQueueFactory();
config.audio_processing =
- new rtc::RefCountedObject<testing::NiceMock<test::MockAudioProcessing>>();
+ rtc::make_ref_counted<testing::NiceMock<test::MockAudioProcessing>>();
config.audio_device_module = test::MockAudioDeviceModule::CreateNice();
auto voip_engine = CreateVoipEngine(std::move(config));
@@ -38,8 +38,8 @@ TEST(VoipEngineFactoryTest, CreateEngineWithMockModules) {
// Create voip engine without setting audio processing as optional component.
TEST(VoipEngineFactoryTest, UseNoAudioProcessing) {
VoipEngineConfig config;
- config.encoder_factory = new rtc::RefCountedObject<MockAudioEncoderFactory>();
- config.decoder_factory = new rtc::RefCountedObject<MockAudioDecoderFactory>();
+ config.encoder_factory = rtc::make_ref_counted<MockAudioEncoderFactory>();
+ config.decoder_factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
config.task_queue_factory = CreateDefaultTaskQueueFactory();
config.audio_device_module = test::MockAudioDeviceModule::CreateNice();
diff --git a/audio/BUILD.gn b/audio/BUILD.gn
index ccbf9fd2e3..054e090ba6 100644
--- a/audio/BUILD.gn
+++ b/audio/BUILD.gn
@@ -95,6 +95,7 @@ rtc_library("audio") {
"../rtc_base/experiments:field_trial_parser",
"../rtc_base/synchronization:mutex",
"../rtc_base/system:no_unique_address",
+ "../rtc_base/task_utils:pending_task_safety_flag",
"../rtc_base/task_utils:to_queued_task",
"../system_wrappers",
"../system_wrappers:field_trial",
@@ -151,6 +152,7 @@ if (rtc_include_tests) {
"../api/audio_codecs:audio_codecs_api",
"../api/audio_codecs/opus:audio_decoder_opus",
"../api/audio_codecs/opus:audio_encoder_opus",
+ "../api/crypto:frame_decryptor_interface",
"../api/rtc_event_log",
"../api/task_queue:default_task_queue_factory",
"../api/units:time_delta",
diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc
index 467647be5e..6ec266b68d 100644
--- a/audio/audio_receive_stream.cc
+++ b/audio/audio_receive_stream.cc
@@ -18,6 +18,7 @@
#include "api/audio_codecs/audio_format.h"
#include "api/call/audio_sink.h"
#include "api/rtp_parameters.h"
+#include "api/sequence_checker.h"
#include "audio/audio_send_stream.h"
#include "audio/audio_state.h"
#include "audio/channel_receive.h"
@@ -83,14 +84,13 @@ std::unique_ptr<voe::ChannelReceiveInterface> CreateChannelReceive(
config.jitter_buffer_max_packets, config.jitter_buffer_fast_accelerate,
config.jitter_buffer_min_delay_ms,
config.jitter_buffer_enable_rtx_handling, config.decoder_factory,
- config.codec_pair_id, config.frame_decryptor, config.crypto_options,
- std::move(config.frame_transformer));
+ config.codec_pair_id, std::move(config.frame_decryptor),
+ config.crypto_options, std::move(config.frame_transformer));
}
} // namespace
AudioReceiveStream::AudioReceiveStream(
Clock* clock,
- RtpStreamReceiverControllerInterface* receiver_controller,
PacketRouter* packet_router,
ProcessThread* module_process_thread,
NetEqFactory* neteq_factory,
@@ -98,7 +98,6 @@ AudioReceiveStream::AudioReceiveStream(
const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
webrtc::RtcEventLog* event_log)
: AudioReceiveStream(clock,
- receiver_controller,
packet_router,
config,
audio_state,
@@ -112,13 +111,13 @@ AudioReceiveStream::AudioReceiveStream(
AudioReceiveStream::AudioReceiveStream(
Clock* clock,
- RtpStreamReceiverControllerInterface* receiver_controller,
PacketRouter* packet_router,
const webrtc::AudioReceiveStream::Config& config,
const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
webrtc::RtcEventLog* event_log,
std::unique_ptr<voe::ChannelReceiveInterface> channel_receive)
- : audio_state_(audio_state),
+ : config_(config),
+ audio_state_(audio_state),
source_tracker_(clock),
channel_receive_(std::move(channel_receive)) {
RTC_LOG(LS_INFO) << "AudioReceiveStream: " << config.rtp.remote_ssrc;
@@ -127,7 +126,8 @@ AudioReceiveStream::AudioReceiveStream(
RTC_DCHECK(audio_state_);
RTC_DCHECK(channel_receive_);
- RTC_DCHECK(receiver_controller);
+ packet_sequence_checker_.Detach();
+
RTC_DCHECK(packet_router);
// Configure bandwidth estimation.
channel_receive_->RegisterReceiverCongestionControlObjects(packet_router);
@@ -137,10 +137,14 @@ AudioReceiveStream::AudioReceiveStream(
// be updated.
channel_receive_->SetSourceTracker(&source_tracker_);
- // Register with transport.
- rtp_stream_receiver_ = receiver_controller->CreateReceiver(
- config.rtp.remote_ssrc, channel_receive_.get());
- ConfigureStream(this, config, true);
+ // Complete configuration.
+ // TODO(solenberg): Config NACK history window (which is a packet count),
+ // using the actual packet size for the configured codec.
+ channel_receive_->SetNACKStatus(config.rtp.nack.rtp_history_ms != 0,
+ config.rtp.nack.rtp_history_ms / 20);
+ channel_receive_->SetReceiveCodecs(config.decoder_map);
+ // `frame_transformer` and `frame_decryptor` have been given to
+ // `channel_receive_` already.
}
AudioReceiveStream::~AudioReceiveStream() {
@@ -151,10 +155,43 @@ AudioReceiveStream::~AudioReceiveStream() {
channel_receive_->ResetReceiverCongestionControlObjects();
}
-void AudioReceiveStream::Reconfigure(
+void AudioReceiveStream::RegisterWithTransport(
+ RtpStreamReceiverControllerInterface* receiver_controller) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ RTC_DCHECK(!rtp_stream_receiver_);
+ rtp_stream_receiver_ = receiver_controller->CreateReceiver(
+ config_.rtp.remote_ssrc, channel_receive_.get());
+}
+
+void AudioReceiveStream::UnregisterFromTransport() {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ rtp_stream_receiver_.reset();
+}
+
+void AudioReceiveStream::ReconfigureForTesting(
const webrtc::AudioReceiveStream::Config& config) {
- RTC_DCHECK(worker_thread_checker_.IsCurrent());
- ConfigureStream(this, config, false);
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+
+ // SSRC can't be changed mid-stream.
+ RTC_DCHECK_EQ(config_.rtp.remote_ssrc, config.rtp.remote_ssrc);
+ RTC_DCHECK_EQ(config_.rtp.local_ssrc, config.rtp.local_ssrc);
+
+ // Configuration parameters which cannot be changed.
+ RTC_DCHECK_EQ(config_.rtcp_send_transport, config.rtcp_send_transport);
+ // Decoder factory cannot be changed because it is configured at
+ // voe::Channel construction time.
+ RTC_DCHECK_EQ(config_.decoder_factory, config.decoder_factory);
+
+ // TODO(solenberg): Config NACK history window (which is a packet count),
+ // using the actual packet size for the configured codec.
+ RTC_DCHECK_EQ(config_.rtp.nack.rtp_history_ms, config.rtp.nack.rtp_history_ms)
+ << "Use SetUseTransportCcAndNackHistory";
+
+ RTC_DCHECK(config_.decoder_map == config.decoder_map) << "Use SetDecoderMap";
+ RTC_DCHECK_EQ(config_.frame_transformer, config.frame_transformer)
+ << "Use SetDepacketizerToDecoderFrameTransformer";
+
+ config_ = config;
}
void AudioReceiveStream::Start() {
@@ -182,6 +219,49 @@ bool AudioReceiveStream::IsRunning() const {
return playing_;
}
+void AudioReceiveStream::SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ channel_receive_->SetDepacketizerToDecoderFrameTransformer(
+ std::move(frame_transformer));
+}
+
+void AudioReceiveStream::SetDecoderMap(
+ std::map<int, SdpAudioFormat> decoder_map) {
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ config_.decoder_map = std::move(decoder_map);
+ channel_receive_->SetReceiveCodecs(config_.decoder_map);
+}
+
+void AudioReceiveStream::SetUseTransportCcAndNackHistory(bool use_transport_cc,
+ int history_ms) {
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ RTC_DCHECK_GE(history_ms, 0);
+ config_.rtp.transport_cc = use_transport_cc;
+ if (config_.rtp.nack.rtp_history_ms != history_ms) {
+ config_.rtp.nack.rtp_history_ms = history_ms;
+ // TODO(solenberg): Config NACK history window (which is a packet count),
+ // using the actual packet size for the configured codec.
+ channel_receive_->SetNACKStatus(history_ms != 0, history_ms / 20);
+ }
+}
+
+void AudioReceiveStream::SetFrameDecryptor(
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
+ // TODO(bugs.webrtc.org/11993): This is called via WebRtcAudioReceiveStream,
+ // expect to be called on the network thread.
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ channel_receive_->SetFrameDecryptor(std::move(frame_decryptor));
+}
+
+void AudioReceiveStream::SetRtpExtensions(
+ std::vector<RtpExtension> extensions) {
+ // TODO(bugs.webrtc.org/11993): This is called via WebRtcAudioReceiveStream,
+ // expect to be called on the network thread.
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ config_.rtp.extensions = std::move(extensions);
+}
+
webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats(
bool get_and_clear_legacy_stats) const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
@@ -351,8 +431,7 @@ bool AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
}
void AudioReceiveStream::AssociateSendStream(AudioSendStream* send_stream) {
- // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread.
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
channel_receive_->SetAssociatedSendChannel(
send_stream ? send_stream->GetChannel() : nullptr);
associated_send_stream_ = send_stream;
@@ -366,6 +445,24 @@ void AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
channel_receive_->ReceivedRTCPPacket(packet, length);
}
+void AudioReceiveStream::SetSyncGroup(const std::string& sync_group) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ config_.sync_group = sync_group;
+}
+
+void AudioReceiveStream::SetLocalSsrc(uint32_t local_ssrc) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ // TODO(tommi): Consider storing local_ssrc in one place.
+ config_.rtp.local_ssrc = local_ssrc;
+ channel_receive_->OnLocalSsrcChange(local_ssrc);
+}
+
+uint32_t AudioReceiveStream::local_ssrc() const {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ RTC_DCHECK_EQ(config_.rtp.local_ssrc, channel_receive_->GetLocalSsrc());
+ return config_.rtp.local_ssrc;
+}
+
const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
return config_;
@@ -373,9 +470,7 @@ const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const {
const AudioSendStream* AudioReceiveStream::GetAssociatedSendStreamForTesting()
const {
- // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread or
- // remove test method and |associated_send_stream_| variable.
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
return associated_send_stream_;
}
@@ -384,50 +479,5 @@ internal::AudioState* AudioReceiveStream::audio_state() const {
RTC_DCHECK(audio_state);
return audio_state;
}
-
-void AudioReceiveStream::ConfigureStream(AudioReceiveStream* stream,
- const Config& new_config,
- bool first_time) {
- RTC_LOG(LS_INFO) << "AudioReceiveStream::ConfigureStream: "
- << new_config.ToString();
- RTC_DCHECK(stream);
- const auto& channel_receive = stream->channel_receive_;
- const auto& old_config = stream->config_;
-
- // Configuration parameters which cannot be changed.
- RTC_DCHECK(first_time ||
- old_config.rtp.remote_ssrc == new_config.rtp.remote_ssrc);
- RTC_DCHECK(first_time ||
- old_config.rtcp_send_transport == new_config.rtcp_send_transport);
- // Decoder factory cannot be changed because it is configured at
- // voe::Channel construction time.
- RTC_DCHECK(first_time ||
- old_config.decoder_factory == new_config.decoder_factory);
-
- if (!first_time) {
- // SSRC can't be changed mid-stream.
- RTC_DCHECK_EQ(old_config.rtp.local_ssrc, new_config.rtp.local_ssrc);
- RTC_DCHECK_EQ(old_config.rtp.remote_ssrc, new_config.rtp.remote_ssrc);
- }
-
- // TODO(solenberg): Config NACK history window (which is a packet count),
- // using the actual packet size for the configured codec.
- if (first_time || old_config.rtp.nack.rtp_history_ms !=
- new_config.rtp.nack.rtp_history_ms) {
- channel_receive->SetNACKStatus(new_config.rtp.nack.rtp_history_ms != 0,
- new_config.rtp.nack.rtp_history_ms / 20);
- }
- if (first_time || old_config.decoder_map != new_config.decoder_map) {
- channel_receive->SetReceiveCodecs(new_config.decoder_map);
- }
-
- if (first_time ||
- old_config.frame_transformer != new_config.frame_transformer) {
- channel_receive->SetDepacketizerToDecoderFrameTransformer(
- new_config.frame_transformer);
- }
-
- stream->config_ = new_config;
-}
} // namespace internal
} // namespace webrtc
diff --git a/audio/audio_receive_stream.h b/audio/audio_receive_stream.h
index a8438c252a..dc64e94510 100644
--- a/audio/audio_receive_stream.h
+++ b/audio/audio_receive_stream.h
@@ -11,7 +11,9 @@
#ifndef AUDIO_AUDIO_RECEIVE_STREAM_H_
#define AUDIO_AUDIO_RECEIVE_STREAM_H_
+#include <map>
#include <memory>
+#include <string>
#include <vector>
#include "api/audio/audio_mixer.h"
@@ -22,6 +24,7 @@
#include "call/audio_receive_stream.h"
#include "call/syncable.h"
#include "modules/rtp_rtcp/source/source_tracker.h"
+#include "rtc_base/system/no_unique_address.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -44,7 +47,6 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream,
public Syncable {
public:
AudioReceiveStream(Clock* clock,
- RtpStreamReceiverControllerInterface* receiver_controller,
PacketRouter* packet_router,
ProcessThread* module_process_thread,
NetEqFactory* neteq_factory,
@@ -54,7 +56,6 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream,
// For unit tests, which need to supply a mock channel receive.
AudioReceiveStream(
Clock* clock,
- RtpStreamReceiverControllerInterface* receiver_controller,
PacketRouter* packet_router,
const webrtc::AudioReceiveStream::Config& config,
const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
@@ -65,13 +66,36 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream,
AudioReceiveStream(const AudioReceiveStream&) = delete;
AudioReceiveStream& operator=(const AudioReceiveStream&) = delete;
+ // Destruction happens on the worker thread. Prior to destruction the caller
+ // must ensure that a registration with the transport has been cleared. See
+ // `RegisterWithTransport` for details.
+ // TODO(tommi): As a further improvement to this, performing the full
+ // destruction on the network thread could be made the default.
~AudioReceiveStream() override;
+ // Called on the network thread to register/unregister with the network
+ // transport.
+ void RegisterWithTransport(
+ RtpStreamReceiverControllerInterface* receiver_controller);
+ // If registration has previously been done (via `RegisterWithTransport`) then
+ // `UnregisterFromTransport` must be called prior to destruction, on the
+ // network thread.
+ void UnregisterFromTransport();
+
// webrtc::AudioReceiveStream implementation.
- void Reconfigure(const webrtc::AudioReceiveStream::Config& config) override;
void Start() override;
void Stop() override;
+ const RtpConfig& rtp_config() const override { return config_.rtp; }
bool IsRunning() const override;
+ void SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+ override;
+ void SetDecoderMap(std::map<int, SdpAudioFormat> decoder_map) override;
+ void SetUseTransportCcAndNackHistory(bool use_transport_cc,
+ int history_ms) override;
+ void SetFrameDecryptor(rtc::scoped_refptr<webrtc::FrameDecryptorInterface>
+ frame_decryptor) override;
+ void SetRtpExtensions(std::vector<RtpExtension> extensions) override;
webrtc::AudioReceiveStream::Stats GetStats(
bool get_and_clear_legacy_stats) const override;
@@ -98,26 +122,48 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream,
void AssociateSendStream(AudioSendStream* send_stream);
void DeliverRtcp(const uint8_t* packet, size_t length);
+
+ void SetSyncGroup(const std::string& sync_group);
+
+ void SetLocalSsrc(uint32_t local_ssrc);
+
+ uint32_t local_ssrc() const;
+
+ uint32_t remote_ssrc() const {
+ // The remote_ssrc member variable of config_ will never change and can be
+ // considered const.
+ return config_.rtp.remote_ssrc;
+ }
+
const webrtc::AudioReceiveStream::Config& config() const;
const AudioSendStream* GetAssociatedSendStreamForTesting() const;
- private:
- static void ConfigureStream(AudioReceiveStream* stream,
- const Config& new_config,
- bool first_time);
+ // TODO(tommi): Remove this method.
+ void ReconfigureForTesting(const webrtc::AudioReceiveStream::Config& config);
+ private:
AudioState* audio_state() const;
- SequenceChecker worker_thread_checker_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_;
+ // TODO(bugs.webrtc.org/11993): This checker conceptually represents
+ // operations that belong to the network thread. The Call class is currently
+ // moving towards handling network packets on the network thread and while
+ // that work is ongoing, this checker may in practice represent the worker
+ // thread, but still serves as a mechanism of grouping together concepts
+ // that belong to the network thread. Once the packets are fully delivered
+ // on the network thread, this comment will be deleted.
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_;
webrtc::AudioReceiveStream::Config config_;
rtc::scoped_refptr<webrtc::AudioState> audio_state_;
SourceTracker source_tracker_;
const std::unique_ptr<voe::ChannelReceiveInterface> channel_receive_;
- AudioSendStream* associated_send_stream_ = nullptr;
+ AudioSendStream* associated_send_stream_
+ RTC_GUARDED_BY(packet_sequence_checker_) = nullptr;
bool playing_ RTC_GUARDED_BY(worker_thread_checker_) = false;
- std::unique_ptr<RtpStreamReceiverInterface> rtp_stream_receiver_;
+ std::unique_ptr<RtpStreamReceiverInterface> rtp_stream_receiver_
+ RTC_GUARDED_BY(packet_sequence_checker_);
};
} // namespace internal
} // namespace webrtc
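A minimal lifecycle sketch of the new registration split, as also exercised by the test changes below; the wrapper function is illustrative, and in production code registration runs on the network thread while Start/Stop run on the worker thread:

#include "audio/audio_receive_stream.h"
#include "call/rtp_stream_receiver_controller_interface.h"

// Minimal sketch: registration must bracket use of the stream and be cleared
// before the stream is destroyed.
void RunReceiveStream(
    webrtc::internal::AudioReceiveStream& stream,
    webrtc::RtpStreamReceiverControllerInterface& receiver_controller) {
  stream.RegisterWithTransport(&receiver_controller);  // Network thread.
  stream.Start();                                      // Worker thread.
  // ... deliver packets, query stats ...
  stream.Stop();                                       // Worker thread.
  stream.UnregisterFromTransport();                    // Network thread.
}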
diff --git a/audio/audio_receive_stream_unittest.cc b/audio/audio_receive_stream_unittest.cc
index 99e3a56e1b..fb5f1cb876 100644
--- a/audio/audio_receive_stream_unittest.cc
+++ b/audio/audio_receive_stream_unittest.cc
@@ -74,7 +74,7 @@ const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest();
struct ConfigHelper {
explicit ConfigHelper(bool use_null_audio_processing)
- : ConfigHelper(new rtc::RefCountedObject<MockAudioMixer>(),
+ : ConfigHelper(rtc::make_ref_counted<MockAudioMixer>(),
use_null_audio_processing) {}
ConfigHelper(rtc::scoped_refptr<MockAudioMixer> audio_mixer,
@@ -87,9 +87,9 @@ struct ConfigHelper {
config.audio_processing =
use_null_audio_processing
? nullptr
- : new rtc::RefCountedObject<NiceMock<MockAudioProcessing>>();
+ : rtc::make_ref_counted<NiceMock<MockAudioProcessing>>();
config.audio_device_module =
- new rtc::RefCountedObject<testing::NiceMock<MockAudioDeviceModule>>();
+ rtc::make_ref_counted<testing::NiceMock<MockAudioDeviceModule>>();
audio_state_ = AudioState::Create(config);
channel_receive_ = new ::testing::StrictMock<MockChannelReceive>();
@@ -104,8 +104,6 @@ struct ConfigHelper {
.WillRepeatedly(Invoke([](const std::map<int, SdpAudioFormat>& codecs) {
EXPECT_THAT(codecs, ::testing::IsEmpty());
}));
- EXPECT_CALL(*channel_receive_, SetDepacketizerToDecoderFrameTransformer(_))
- .Times(1);
EXPECT_CALL(*channel_receive_, SetSourceTracker(_));
stream_config_.rtp.local_ssrc = kLocalSsrc;
@@ -117,15 +115,16 @@ struct ConfigHelper {
RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId));
stream_config_.rtcp_send_transport = &rtcp_send_transport_;
stream_config_.decoder_factory =
- new rtc::RefCountedObject<MockAudioDecoderFactory>;
+ rtc::make_ref_counted<MockAudioDecoderFactory>();
}
std::unique_ptr<internal::AudioReceiveStream> CreateAudioReceiveStream() {
- return std::unique_ptr<internal::AudioReceiveStream>(
- new internal::AudioReceiveStream(
- Clock::GetRealTimeClock(), &rtp_stream_receiver_controller_,
- &packet_router_, stream_config_, audio_state_, &event_log_,
- std::unique_ptr<voe::ChannelReceiveInterface>(channel_receive_)));
+ auto ret = std::make_unique<internal::AudioReceiveStream>(
+ Clock::GetRealTimeClock(), &packet_router_, stream_config_,
+ audio_state_, &event_log_,
+ std::unique_ptr<voe::ChannelReceiveInterface>(channel_receive_));
+ ret->RegisterWithTransport(&rtp_stream_receiver_controller_);
+ return ret;
}
AudioReceiveStream::Config& config() { return stream_config_; }
@@ -199,6 +198,7 @@ TEST(AudioReceiveStreamTest, ConstructDestruct) {
for (bool use_null_audio_processing : {false, true}) {
ConfigHelper helper(use_null_audio_processing);
auto recv_stream = helper.CreateAudioReceiveStream();
+ recv_stream->UnregisterFromTransport();
}
}
@@ -212,6 +212,7 @@ TEST(AudioReceiveStreamTest, ReceiveRtcpPacket) {
ReceivedRTCPPacket(&rtcp_packet[0], rtcp_packet.size()))
.WillOnce(Return());
recv_stream->DeliverRtcp(&rtcp_packet[0], rtcp_packet.size());
+ recv_stream->UnregisterFromTransport();
}
}
@@ -276,6 +277,7 @@ TEST(AudioReceiveStreamTest, GetStats) {
EXPECT_EQ(kCallStats.capture_start_ntp_time_ms_,
stats.capture_start_ntp_time_ms);
EXPECT_EQ(kPlayoutNtpTimestampMs, stats.estimated_playout_ntp_timestamp_ms);
+ recv_stream->UnregisterFromTransport();
}
}
@@ -286,6 +288,7 @@ TEST(AudioReceiveStreamTest, SetGain) {
EXPECT_CALL(*helper.channel_receive(),
SetChannelOutputVolumeScaling(FloatEq(0.765f)));
recv_stream->SetGain(0.765f);
+ recv_stream->UnregisterFromTransport();
}
}
@@ -317,14 +320,9 @@ TEST(AudioReceiveStreamTest, StreamsShouldBeAddedToMixerOnceOnStart) {
// Stop stream before it is being destructed.
recv_stream2->Stop();
- }
-}
-TEST(AudioReceiveStreamTest, ReconfigureWithSameConfig) {
- for (bool use_null_audio_processing : {false, true}) {
- ConfigHelper helper(use_null_audio_processing);
- auto recv_stream = helper.CreateAudioReceiveStream();
- recv_stream->Reconfigure(helper.config());
+ recv_stream1->UnregisterFromTransport();
+ recv_stream2->UnregisterFromTransport();
}
}
@@ -334,20 +332,32 @@ TEST(AudioReceiveStreamTest, ReconfigureWithUpdatedConfig) {
auto recv_stream = helper.CreateAudioReceiveStream();
auto new_config = helper.config();
- new_config.rtp.nack.rtp_history_ms = 300 + 20;
+
new_config.rtp.extensions.clear();
new_config.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelId + 1));
new_config.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberId + 1));
- new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1));
MockChannelReceive& channel_receive = *helper.channel_receive();
- EXPECT_CALL(channel_receive, SetNACKStatus(true, 15 + 1)).Times(1);
+
+ // TODO(tommi, nisse): This applies new extensions to the internal config,
+ // but there's nothing that actually verifies that the changes take effect.
+ // In fact Call manages the extensions separately in Call::ReceiveRtpConfig
+ // and changing this config value (there seem to be a few copies) doesn't
+ // affect that logic.
+ recv_stream->ReconfigureForTesting(new_config);
+
+ new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1));
EXPECT_CALL(channel_receive, SetReceiveCodecs(new_config.decoder_map));
+ recv_stream->SetDecoderMap(new_config.decoder_map);
+
+ EXPECT_CALL(channel_receive, SetNACKStatus(true, 15 + 1)).Times(1);
+ recv_stream->SetUseTransportCcAndNackHistory(new_config.rtp.transport_cc,
+ 300 + 20);
- recv_stream->Reconfigure(new_config);
+ recv_stream->UnregisterFromTransport();
}
}
@@ -358,17 +368,23 @@ TEST(AudioReceiveStreamTest, ReconfigureWithFrameDecryptor) {
auto new_config_0 = helper.config();
rtc::scoped_refptr<FrameDecryptorInterface> mock_frame_decryptor_0(
- new rtc::RefCountedObject<MockFrameDecryptor>());
+ rtc::make_ref_counted<MockFrameDecryptor>());
new_config_0.frame_decryptor = mock_frame_decryptor_0;
- recv_stream->Reconfigure(new_config_0);
+ // TODO(tommi): While this changes the internal config value, it doesn't
+ // actually change which frame_decryptor is used. WebRtcAudioReceiveStream
+ // recreates the whole instance in order to change this value, so it's not
+ // clear whether changing it post-initialization needs to be supported.
+ recv_stream->ReconfigureForTesting(new_config_0);
auto new_config_1 = helper.config();
rtc::scoped_refptr<FrameDecryptorInterface> mock_frame_decryptor_1(
- new rtc::RefCountedObject<MockFrameDecryptor>());
+ rtc::make_ref_counted<MockFrameDecryptor>());
new_config_1.frame_decryptor = mock_frame_decryptor_1;
new_config_1.crypto_options.sframe.require_frame_encryption = true;
- recv_stream->Reconfigure(new_config_1);
+ recv_stream->ReconfigureForTesting(new_config_1);
+ recv_stream->UnregisterFromTransport();
}
}
diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc
index b769569fd5..aca7cd38b8 100644
--- a/audio/audio_send_stream.cc
+++ b/audio/audio_send_stream.cc
@@ -142,7 +142,7 @@ AudioSendStream::AudioSendStream(
const absl::optional<RtpState>& suspended_rtp_state,
std::unique_ptr<voe::ChannelSendInterface> channel_send)
: clock_(clock),
- worker_queue_(rtp_transport->GetWorkerQueue()),
+ rtp_transport_queue_(rtp_transport->GetWorkerQueue()),
allocate_audio_without_feedback_(
field_trial::IsEnabled("WebRTC-Audio-ABWENoTWCC")),
enable_audio_alr_probing_(
@@ -160,7 +160,7 @@ AudioSendStream::AudioSendStream(
rtp_rtcp_module_(channel_send_->GetRtpRtcp()),
suspended_rtp_state_(suspended_rtp_state) {
RTC_LOG(LS_INFO) << "AudioSendStream: " << config.rtp.ssrc;
- RTC_DCHECK(worker_queue_);
+ RTC_DCHECK(rtp_transport_queue_);
RTC_DCHECK(audio_state_);
RTC_DCHECK(channel_send_);
RTC_DCHECK(bitrate_allocator_);
@@ -182,7 +182,7 @@ AudioSendStream::~AudioSendStream() {
// Blocking call to synchronize state with worker queue to ensure that there
// are no pending tasks left that keep references to audio.
rtc::Event thread_sync_event;
- worker_queue_->PostTask([&] { thread_sync_event.Set(); });
+ rtp_transport_queue_->PostTask([&] { thread_sync_event.Set(); });
thread_sync_event.Wait(rtc::Event::kForever);
}
@@ -517,7 +517,7 @@ void AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
}
uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
// Pick a target bitrate between the constraints. Overrules the allocator if
// it 1) allocated a bitrate of zero to disable the stream or 2) allocated a
@@ -855,9 +855,10 @@ void AudioSendStream::ConfigureBitrateObserver() {
if (allocation_settings_.priority_bitrate_raw)
priority_bitrate = *allocation_settings_.priority_bitrate_raw;
- worker_queue_->PostTask([this, constraints, priority_bitrate,
- config_bitrate_priority = config_.bitrate_priority] {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ rtp_transport_queue_->PostTask([this, constraints, priority_bitrate,
+ config_bitrate_priority =
+ config_.bitrate_priority] {
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
bitrate_allocator_->AddObserver(
this,
MediaStreamAllocationConfig{
@@ -872,8 +873,8 @@ void AudioSendStream::ConfigureBitrateObserver() {
void AudioSendStream::RemoveBitrateObserver() {
registered_with_allocator_ = false;
rtc::Event thread_sync_event;
- worker_queue_->PostTask([this, &thread_sync_event] {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ rtp_transport_queue_->PostTask([this, &thread_sync_event] {
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
bitrate_allocator_->RemoveObserver(this);
thread_sync_event.Set();
});
@@ -940,8 +941,8 @@ void AudioSendStream::UpdateCachedTargetAudioBitrateConstraints() {
if (!new_constraints.has_value()) {
return;
}
- worker_queue_->PostTask([this, new_constraints]() {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ rtp_transport_queue_->PostTask([this, new_constraints]() {
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
cached_constraints_ = new_constraints;
});
}
diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h
index 25346ae373..223328b26b 100644
--- a/audio/audio_send_stream.h
+++ b/audio/audio_send_stream.h
@@ -165,7 +165,7 @@ class AudioSendStream final : public webrtc::AudioSendStream,
SequenceChecker worker_thread_checker_;
SequenceChecker pacer_thread_checker_;
rtc::RaceChecker audio_capture_race_checker_;
- rtc::TaskQueue* worker_queue_;
+ rtc::TaskQueue* rtp_transport_queue_;
const bool allocate_audio_without_feedback_;
const bool force_no_audio_feedback_ = allocate_audio_without_feedback_;
@@ -189,10 +189,10 @@ class AudioSendStream final : public webrtc::AudioSendStream,
webrtc::voe::AudioLevel audio_level_ RTC_GUARDED_BY(audio_level_lock_);
BitrateAllocatorInterface* const bitrate_allocator_
- RTC_GUARDED_BY(worker_queue_);
- // Constrains cached to be accessed from |worker_queue_|.
+ RTC_GUARDED_BY(rtp_transport_queue_);
+ // Constrains |cached_constraints_| to be accessed from |rtp_transport_queue_|.
absl::optional<AudioSendStream::TargetAudioBitrateConstraints>
- cached_constraints_ RTC_GUARDED_BY(worker_queue_) = absl::nullopt;
+ cached_constraints_ RTC_GUARDED_BY(rtp_transport_queue_) = absl::nullopt;
RtpTransportControllerSendInterface* const rtp_transport_;
RtpRtcpInterface* const rtp_rtcp_module_;
diff --git a/audio/audio_send_stream_unittest.cc b/audio/audio_send_stream_unittest.cc
index f76a8fa255..357e08040c 100644
--- a/audio/audio_send_stream_unittest.cc
+++ b/audio/audio_send_stream_unittest.cc
@@ -121,7 +121,7 @@ std::unique_ptr<MockAudioEncoder> SetupAudioEncoderMock(
rtc::scoped_refptr<MockAudioEncoderFactory> SetupEncoderFactoryMock() {
rtc::scoped_refptr<MockAudioEncoderFactory> factory =
- new rtc::RefCountedObject<MockAudioEncoderFactory>();
+ rtc::make_ref_counted<MockAudioEncoderFactory>();
ON_CALL(*factory.get(), GetSupportedEncoders())
.WillByDefault(Return(std::vector<AudioCodecSpec>(
std::begin(kCodecSpecs), std::end(kCodecSpecs))));
@@ -154,7 +154,7 @@ struct ConfigHelper {
audio_processing_(
use_null_audio_processing
? nullptr
- : new rtc::RefCountedObject<NiceMock<MockAudioProcessing>>()),
+ : rtc::make_ref_counted<NiceMock<MockAudioProcessing>>()),
bitrate_allocator_(&limit_observer_),
worker_queue_(task_queue_factory_->CreateTaskQueue(
"ConfigHelper_worker_queue",
@@ -165,8 +165,7 @@ struct ConfigHelper {
AudioState::Config config;
config.audio_mixer = AudioMixerImpl::Create();
config.audio_processing = audio_processing_;
- config.audio_device_module =
- new rtc::RefCountedObject<MockAudioDeviceModule>();
+ config.audio_device_module = rtc::make_ref_counted<MockAudioDeviceModule>();
audio_state_ = AudioState::Create(config);
SetupDefaultChannelSend(audio_bwe_enabled);
@@ -923,7 +922,7 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) {
auto new_config = helper.config();
rtc::scoped_refptr<FrameEncryptorInterface> mock_frame_encryptor_0(
- new rtc::RefCountedObject<MockFrameEncryptor>());
+ rtc::make_ref_counted<MockFrameEncryptor>());
new_config.frame_encryptor = mock_frame_encryptor_0;
EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr)))
.Times(1);
@@ -936,7 +935,7 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) {
// Updating frame encryptor to a new object should force a call to the
// proxy.
rtc::scoped_refptr<FrameEncryptorInterface> mock_frame_encryptor_1(
- new rtc::RefCountedObject<MockFrameEncryptor>());
+ rtc::make_ref_counted<MockFrameEncryptor>());
new_config.frame_encryptor = mock_frame_encryptor_1;
new_config.crypto_options.sframe.require_frame_encryption = true;
EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr)))
diff --git a/audio/audio_state.cc b/audio/audio_state.cc
index 566bae1311..0e60f0372b 100644
--- a/audio/audio_state.cc
+++ b/audio/audio_state.cc
@@ -187,6 +187,6 @@ void AudioState::UpdateNullAudioPollerState() {
rtc::scoped_refptr<AudioState> AudioState::Create(
const AudioState::Config& config) {
- return new rtc::RefCountedObject<internal::AudioState>(config);
+ return rtc::make_ref_counted<internal::AudioState>(config);
}
} // namespace webrtc
diff --git a/audio/audio_state_unittest.cc b/audio/audio_state_unittest.cc
index 02fc04e6dc..5f07a7b339 100644
--- a/audio/audio_state_unittest.cc
+++ b/audio/audio_state_unittest.cc
@@ -90,7 +90,7 @@ struct FakeAsyncAudioProcessingHelper {
FakeTaskQueueFactory task_queue_factory_;
rtc::scoped_refptr<AsyncAudioProcessing::Factory> CreateFactory() {
- return new rtc::RefCountedObject<AsyncAudioProcessing::Factory>(
+ return rtc::make_ref_counted<AsyncAudioProcessing::Factory>(
audio_frame_processor_, task_queue_factory_);
}
};
@@ -107,10 +107,9 @@ struct ConfigHelper {
audio_state_config.audio_processing =
params.use_null_audio_processing
? nullptr
- : new rtc::RefCountedObject<
- testing::NiceMock<MockAudioProcessing>>();
+ : rtc::make_ref_counted<testing::NiceMock<MockAudioProcessing>>();
audio_state_config.audio_device_module =
- new rtc::RefCountedObject<NiceMock<MockAudioDeviceModule>>();
+ rtc::make_ref_counted<NiceMock<MockAudioDeviceModule>>();
if (params.use_async_audio_processing) {
audio_state_config.async_audio_processing_factory =
async_audio_processing_helper_.CreateFactory();
@@ -183,7 +182,7 @@ TEST_P(AudioStateTest, Create) {
TEST_P(AudioStateTest, ConstructDestruct) {
ConfigHelper helper(GetParam());
rtc::scoped_refptr<internal::AudioState> audio_state(
- new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+ rtc::make_ref_counted<internal::AudioState>(helper.config()));
}
TEST_P(AudioStateTest, RecordedAudioArrivesAtSingleStream) {
@@ -196,7 +195,7 @@ TEST_P(AudioStateTest, RecordedAudioArrivesAtSingleStream) {
}
rtc::scoped_refptr<internal::AudioState> audio_state(
- new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+ rtc::make_ref_counted<internal::AudioState>(helper.config()));
MockAudioSendStream stream;
audio_state->AddSendingStream(&stream, 8000, 2);
@@ -245,7 +244,7 @@ TEST_P(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) {
}
rtc::scoped_refptr<internal::AudioState> audio_state(
- new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+ rtc::make_ref_counted<internal::AudioState>(helper.config()));
MockAudioSendStream stream_1;
MockAudioSendStream stream_2;
@@ -308,7 +307,7 @@ TEST_P(AudioStateTest, EnableChannelSwap) {
}
rtc::scoped_refptr<internal::AudioState> audio_state(
- new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+ rtc::make_ref_counted<internal::AudioState>(helper.config()));
audio_state->SetStereoChannelSwapping(true);
diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc
index 44a647b7a6..150e2074e4 100644
--- a/audio/channel_receive.cc
+++ b/audio/channel_receive.cc
@@ -10,8 +10,6 @@
#include "audio/channel_receive.h"
-#include <assert.h>
-
#include <algorithm>
#include <map>
#include <memory>
@@ -23,6 +21,7 @@
#include "api/frame_transformer_interface.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/sequence_checker.h"
+#include "api/task_queue/task_queue_base.h"
#include "audio/audio_level.h"
#include "audio/channel_receive_frame_transformer_delegate.h"
#include "audio/channel_send.h"
@@ -34,7 +33,8 @@
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
-#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h"
+#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
@@ -47,6 +47,9 @@
#include "rtc_base/numerics/safe_minmax.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/metrics.h"
@@ -174,44 +177,51 @@ class ChannelReceive : public ChannelReceiveInterface {
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
override;
+ void SetFrameDecryptor(rtc::scoped_refptr<webrtc::FrameDecryptorInterface>
+ frame_decryptor) override;
+
+ void OnLocalSsrcChange(uint32_t local_ssrc) override;
+ uint32_t GetLocalSsrc() const override;
+
private:
void ReceivePacket(const uint8_t* packet,
size_t packet_length,
- const RTPHeader& header);
+ const RTPHeader& header)
+ RTC_RUN_ON(worker_thread_checker_);
int ResendPackets(const uint16_t* sequence_numbers, int length);
- void UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms);
+ void UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms)
+ RTC_RUN_ON(worker_thread_checker_);
int GetRtpTimestampRateHz() const;
int64_t GetRTT() const;
void OnReceivedPayloadData(rtc::ArrayView<const uint8_t> payload,
- const RTPHeader& rtpHeader);
+ const RTPHeader& rtpHeader)
+ RTC_RUN_ON(worker_thread_checker_);
void InitFrameTransformerDelegate(
- rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer);
-
- bool Playing() const {
- MutexLock lock(&playing_lock_);
- return playing_;
- }
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+ RTC_RUN_ON(worker_thread_checker_);
// Thread checkers document and lock usage of some methods to specific threads
// we know about. The goal is to eventually split up voe::ChannelReceive into
// parts with single-threaded semantics, and thereby reduce the need for
// locks.
- SequenceChecker worker_thread_checker_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker network_thread_checker_;
+
+ TaskQueueBase* const worker_thread_;
+ ScopedTaskSafety worker_safety_;
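+ // Tasks posted to |worker_thread_| via ToQueuedTask(worker_safety_, ...)
+ // become no-ops if the channel has been destroyed by the time they run.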
// Methods accessed from audio and video threads are checked for sequential-
// only access. We don't necessarily own and control these threads, so thread
// checkers cannot be used. E.g. Chromium may transfer "ownership" from one
// audio thread to another, but access is still sequential.
rtc::RaceChecker audio_thread_race_checker_;
- rtc::RaceChecker video_capture_thread_race_checker_;
Mutex callback_mutex_;
Mutex volume_settings_mutex_;
- mutable Mutex playing_lock_;
- bool playing_ RTC_GUARDED_BY(&playing_lock_) = false;
+ bool playing_ RTC_GUARDED_BY(worker_thread_checker_) = false;
RtcEventLog* const event_log_;
@@ -225,11 +235,10 @@ class ChannelReceive : public ChannelReceiveInterface {
// Info for GetSyncInfo is updated on network or worker thread, and queried on
// the worker thread.
- mutable Mutex sync_info_lock_;
absl::optional<uint32_t> last_received_rtp_timestamp_
- RTC_GUARDED_BY(&sync_info_lock_);
+ RTC_GUARDED_BY(&worker_thread_checker_);
absl::optional<int64_t> last_received_rtp_system_time_ms_
- RTC_GUARDED_BY(&sync_info_lock_);
+ RTC_GUARDED_BY(&worker_thread_checker_);
// The AcmReceiver is thread safe, using its own lock.
acm2::AcmReceiver acm_receiver_;
@@ -242,15 +251,14 @@ class ChannelReceive : public ChannelReceiveInterface {
// Timestamp of the audio pulled from NetEq.
absl::optional<uint32_t> jitter_buffer_playout_timestamp_;
- mutable Mutex video_sync_lock_;
- uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(video_sync_lock_);
+ uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(worker_thread_checker_);
absl::optional<int64_t> playout_timestamp_rtp_time_ms_
- RTC_GUARDED_BY(video_sync_lock_);
- uint32_t playout_delay_ms_ RTC_GUARDED_BY(video_sync_lock_);
+ RTC_GUARDED_BY(worker_thread_checker_);
+ uint32_t playout_delay_ms_ RTC_GUARDED_BY(worker_thread_checker_);
absl::optional<int64_t> playout_timestamp_ntp_
- RTC_GUARDED_BY(video_sync_lock_);
+ RTC_GUARDED_BY(worker_thread_checker_);
absl::optional<int64_t> playout_timestamp_ntp_time_ms_
- RTC_GUARDED_BY(video_sync_lock_);
+ RTC_GUARDED_BY(worker_thread_checker_);
mutable Mutex ts_stats_lock_;
@@ -266,26 +274,39 @@ class ChannelReceive : public ChannelReceiveInterface {
float _outputGain RTC_GUARDED_BY(volume_settings_mutex_);
const ChannelSendInterface* associated_send_channel_
- RTC_GUARDED_BY(worker_thread_checker_);
+ RTC_GUARDED_BY(network_thread_checker_);
PacketRouter* packet_router_ = nullptr;
SequenceChecker construction_thread_;
// E2EE Audio Frame Decryption
- rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_;
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_
+ RTC_GUARDED_BY(worker_thread_checker_);
webrtc::CryptoOptions crypto_options_;
- webrtc::AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_;
+ webrtc::AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_
+ RTC_GUARDED_BY(worker_thread_checker_);
+
+ webrtc::CaptureClockOffsetUpdater capture_clock_offset_updater_;
rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate>
frame_transformer_delegate_;
+
+ // Counter that's used to control the frequency of reporting histograms
+ // from the `GetAudioFrameWithInfo` callback.
+ int audio_frame_interval_count_ RTC_GUARDED_BY(audio_thread_race_checker_) =
+ 0;
+ // Controls how many callbacks we let pass by before reporting callback stats.
+ // A value of 100 means 100 callbacks, each one of which represents 10ms worth
+ // of data, so the stats reporting frequency will be 1Hz (modulo failures).
+ constexpr static int kHistogramReportingInterval = 100;
};
void ChannelReceive::OnReceivedPayloadData(
rtc::ArrayView<const uint8_t> payload,
const RTPHeader& rtpHeader) {
- if (!Playing()) {
+ if (!playing_) {
// Avoid inserting into NetEQ when we are not playing. Count the
// packet as discarded.
@@ -299,7 +320,7 @@ void ChannelReceive::OnReceivedPayloadData(
// updating RtpSource information.
if (source_tracker_) {
RtpPacketInfos::vector_type packet_vector = {
- RtpPacketInfo(rtpHeader, clock_->TimeInMilliseconds())};
+ RtpPacketInfo(rtpHeader, clock_->CurrentTime())};
source_tracker_->OnFrameDelivered(RtpPacketInfos(packet_vector));
}
@@ -328,18 +349,20 @@ void ChannelReceive::InitFrameTransformerDelegate(
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {
RTC_DCHECK(frame_transformer);
RTC_DCHECK(!frame_transformer_delegate_);
+ RTC_DCHECK(worker_thread_->IsCurrent());
// Pass a callback to ChannelReceive::OnReceivedPayloadData, to be called by
// the delegate to receive transformed audio.
ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback
receive_audio_callback = [this](rtc::ArrayView<const uint8_t> packet,
const RTPHeader& header) {
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
OnReceivedPayloadData(packet, header);
};
frame_transformer_delegate_ =
- new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
std::move(receive_audio_callback), std::move(frame_transformer),
- rtc::Thread::Current());
+ worker_thread_);
frame_transformer_delegate_->Init();
}
@@ -434,17 +457,37 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo(
}
}
- {
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs",
- acm_receiver_.TargetDelayMs());
- const int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs();
- MutexLock lock(&video_sync_lock_);
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDelayEstimateMs",
- jitter_buffer_delay + playout_delay_ms_);
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverJitterBufferDelayMs",
- jitter_buffer_delay);
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDeviceDelayMs",
- playout_delay_ms_);
+ // Fill in local capture clock offset in |audio_frame->packet_infos_|.
+ RtpPacketInfos::vector_type packet_infos;
+ for (auto& packet_info : audio_frame->packet_infos_) {
+ absl::optional<int64_t> local_capture_clock_offset;
+ if (packet_info.absolute_capture_time().has_value()) {
+ local_capture_clock_offset =
+ capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset(
+ packet_info.absolute_capture_time()
+ ->estimated_capture_clock_offset);
+ }
+ RtpPacketInfo new_packet_info(packet_info);
+ new_packet_info.set_local_capture_clock_offset(local_capture_clock_offset);
+ packet_infos.push_back(std::move(new_packet_info));
+ }
+ audio_frame->packet_infos_ = RtpPacketInfos(packet_infos);
+
+ ++audio_frame_interval_count_;
+ if (audio_frame_interval_count_ >= kHistogramReportingInterval) {
+ audio_frame_interval_count_ = 0;
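+ // Report the delay histograms from the worker thread, where the delay
+ // state (e.g. playout_delay_ms_) is now accessed.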
+ worker_thread_->PostTask(ToQueuedTask(worker_safety_, [this]() {
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs",
+ acm_receiver_.TargetDelayMs());
+ const int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs();
+ RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDelayEstimateMs",
+ jitter_buffer_delay + playout_delay_ms_);
+ RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverJitterBufferDelayMs",
+ jitter_buffer_delay);
+ RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDeviceDelayMs",
+ playout_delay_ms_);
+ }));
}
return muted ? AudioMixer::Source::AudioFrameInfo::kMuted
@@ -480,7 +523,8 @@ ChannelReceive::ChannelReceive(
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
const webrtc::CryptoOptions& crypto_options,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
- : event_log_(rtc_event_log),
+ : worker_thread_(TaskQueueBase::Current()),
+ event_log_(rtc_event_log),
rtp_receive_statistics_(ReceiveStatistics::Create(clock)),
remote_ssrc_(remote_ssrc),
acm_receiver_(AcmConfig(neteq_factory,
@@ -502,10 +546,13 @@ ChannelReceive::ChannelReceive(
associated_send_channel_(nullptr),
frame_decryptor_(frame_decryptor),
crypto_options_(crypto_options),
- absolute_capture_time_receiver_(clock) {
+ absolute_capture_time_interpolator_(clock) {
+ RTC_DCHECK(worker_thread_);
RTC_DCHECK(module_process_thread_);
RTC_DCHECK(audio_device_module);
+ network_thread_checker_.Detach();
+
acm_receiver_.ResetInitialDelay();
acm_receiver_.SetMinimumDelay(0);
acm_receiver_.SetMaximumDelay(0);
@@ -540,7 +587,7 @@ ChannelReceive::ChannelReceive(
}
ChannelReceive::~ChannelReceive() {
- RTC_DCHECK(construction_thread_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&construction_thread_);
// Unregister the module before stopping playout etc, to match the order
// things were set up in the ctor.
@@ -561,13 +608,11 @@ void ChannelReceive::SetSink(AudioSinkInterface* sink) {
void ChannelReceive::StartPlayout() {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- MutexLock lock(&playing_lock_);
playing_ = true;
}
void ChannelReceive::StopPlayout() {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- MutexLock lock(&playing_lock_);
playing_ = false;
_outputAudioLevel.ResetLevelFullRange();
}
@@ -595,11 +640,8 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) {
// UpdatePlayoutTimestamp and
int64_t now_ms = rtc::TimeMillis();
- {
- MutexLock lock(&sync_info_lock_);
- last_received_rtp_timestamp_ = packet.Timestamp();
- last_received_rtp_system_time_ms_ = now_ms;
- }
+ last_received_rtp_timestamp_ = packet.Timestamp();
+ last_received_rtp_system_time_ms_ = now_ms;
// Store playout timestamp for the received RTP packet
UpdatePlayoutTimestamp(false, now_ms);
@@ -618,9 +660,9 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) {
// Interpolates absolute capture timestamp RTP header extension.
header.extension.absolute_capture_time =
- absolute_capture_time_receiver_.OnReceivePacket(
- AbsoluteCaptureTimeReceiver::GetSource(header.ssrc,
- header.arrOfCSRCs),
+ absolute_capture_time_interpolator_.OnReceivePacket(
+ AbsoluteCaptureTimeInterpolator::GetSource(header.ssrc,
+ header.arrOfCSRCs),
header.timestamp,
rtc::saturated_cast<uint32_t>(packet_copy.payload_type_frequency()),
header.extension.absolute_capture_time);
@@ -632,7 +674,7 @@ void ChannelReceive::ReceivePacket(const uint8_t* packet,
size_t packet_length,
const RTPHeader& header) {
const uint8_t* payload = packet + header.headerLength;
- assert(packet_length >= header.headerLength);
+ RTC_DCHECK_GE(packet_length, header.headerLength);
size_t payload_length = packet_length - header.headerLength;
size_t payload_data_length = payload_length - header.paddingLength;
@@ -713,7 +755,7 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
absl::optional<int64_t> remote_to_local_clock_offset_ms =
ntp_estimator_.EstimateRemoteToLocalClockOffsetMs();
if (remote_to_local_clock_offset_ms.has_value()) {
- absolute_capture_time_receiver_.SetRemoteToLocalClockOffset(
+ capture_clock_offset_updater_.SetRemoteToLocalClockOffset(
Int64MsToQ32x32(*remote_to_local_clock_offset_ms));
}
}
@@ -838,8 +880,7 @@ int ChannelReceive::ResendPackets(const uint16_t* sequence_numbers,
void ChannelReceive::SetAssociatedSendChannel(
const ChannelSendInterface* channel) {
- // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread.
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ RTC_DCHECK_RUN_ON(&network_thread_checker_);
associated_send_channel_ = channel;
}
@@ -848,11 +889,33 @@ void ChannelReceive::SetDepacketizerToDecoderFrameTransformer(
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
// Depending on when the channel is created, the transformer might be set
// twice. Don't replace the delegate if it was already initialized.
- if (!frame_transformer || frame_transformer_delegate_)
+ if (!frame_transformer || frame_transformer_delegate_) {
+ RTC_NOTREACHED() << "Not setting the transformer?";
return;
+ }
+
InitFrameTransformerDelegate(std::move(frame_transformer));
}
+void ChannelReceive::SetFrameDecryptor(
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
+ // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread.
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ frame_decryptor_ = std::move(frame_decryptor);
+}
+
+void ChannelReceive::OnLocalSsrcChange(uint32_t local_ssrc) {
+ // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread.
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ rtp_rtcp_->SetLocalSsrc(local_ssrc);
+}
+
+uint32_t ChannelReceive::GetLocalSsrc() const {
+ // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread.
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ return rtp_rtcp_->local_media_ssrc();
+}
+
NetworkStatistics ChannelReceive::GetNetworkStatistics(
bool get_and_clear_legacy_stats) const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
@@ -870,14 +933,8 @@ AudioDecodingCallStats ChannelReceive::GetDecodingCallStatistics() const {
uint32_t ChannelReceive::GetDelayEstimate() const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
-
- uint32_t playout_delay;
- {
- MutexLock lock(&video_sync_lock_);
- playout_delay = playout_delay_ms_;
- }
// Return the current jitter buffer delay + playout delay.
- return acm_receiver_.FilteredCurrentDelayMs() + playout_delay;
+ return acm_receiver_.FilteredCurrentDelayMs() + playout_delay_ms_;
}
bool ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) {
@@ -899,21 +956,17 @@ bool ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) {
bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
int64_t* time_ms) const {
- RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
- {
- MutexLock lock(&video_sync_lock_);
- if (!playout_timestamp_rtp_time_ms_)
- return false;
- *rtp_timestamp = playout_timestamp_rtp_;
- *time_ms = playout_timestamp_rtp_time_ms_.value();
- return true;
- }
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ if (!playout_timestamp_rtp_time_ms_)
+ return false;
+ *rtp_timestamp = playout_timestamp_rtp_;
+ *time_ms = playout_timestamp_rtp_time_ms_.value();
+ return true;
}
void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
int64_t time_ms) {
- RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
- MutexLock lock(&video_sync_lock_);
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
playout_timestamp_ntp_ = ntp_timestamp_ms;
playout_timestamp_ntp_time_ms_ = time_ms;
}
@@ -921,7 +974,6 @@ void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
absl::optional<int64_t>
ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- MutexLock lock(&video_sync_lock_);
if (!playout_timestamp_ntp_ || !playout_timestamp_ntp_time_ms_)
return absl::nullopt;
@@ -951,24 +1003,19 @@ absl::optional<Syncable::Info> ChannelReceive::GetSyncInfo() const {
return absl::nullopt;
}
- {
- MutexLock lock(&sync_info_lock_);
- if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) {
- return absl::nullopt;
- }
- info.latest_received_capture_timestamp = *last_received_rtp_timestamp_;
- info.latest_receive_time_ms = *last_received_rtp_system_time_ms_;
+ if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) {
+ return absl::nullopt;
}
+ info.latest_received_capture_timestamp = *last_received_rtp_timestamp_;
+ info.latest_receive_time_ms = *last_received_rtp_system_time_ms_;
int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs();
- {
- MutexLock lock(&video_sync_lock_);
- info.current_delay_ms = jitter_buffer_delay + playout_delay_ms_;
- }
+ info.current_delay_ms = jitter_buffer_delay + playout_delay_ms_;
return info;
}
+// RTC_RUN_ON(worker_thread_checker_)
void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) {
// TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the
// network thread. Once that's done, we won't need video_sync_lock_.
@@ -995,14 +1042,11 @@ void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) {
// Remove the playout delay.
playout_timestamp -= (delay_ms * (GetRtpTimestampRateHz() / 1000));
- {
- MutexLock lock(&video_sync_lock_);
- if (!rtcp && playout_timestamp != playout_timestamp_rtp_) {
- playout_timestamp_rtp_ = playout_timestamp;
- playout_timestamp_rtp_time_ms_ = now_ms;
- }
- playout_delay_ms_ = delay_ms;
+ if (!rtcp && playout_timestamp != playout_timestamp_rtp_) {
+ playout_timestamp_rtp_ = playout_timestamp;
+ playout_timestamp_rtp_time_ms_ = now_ms;
}
+ playout_delay_ms_ = delay_ms;
}
int ChannelReceive::GetRtpTimestampRateHz() const {
@@ -1020,7 +1064,7 @@ int ChannelReceive::GetRtpTimestampRateHz() const {
}
int64_t ChannelReceive::GetRTT() const {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ RTC_DCHECK_RUN_ON(&network_thread_checker_);
std::vector<ReportBlockData> report_blocks =
rtp_rtcp_->GetLatestReportBlockData();
@@ -1067,8 +1111,8 @@ std::unique_ptr<ChannelReceiveInterface> CreateChannelReceive(
rtcp_send_transport, rtc_event_log, local_ssrc, remote_ssrc,
jitter_buffer_max_packets, jitter_buffer_fast_playout,
jitter_buffer_min_delay_ms, jitter_buffer_enable_rtx_handling,
- decoder_factory, codec_pair_id, frame_decryptor, crypto_options,
- std::move(frame_transformer));
+ decoder_factory, codec_pair_id, std::move(frame_decryptor),
+ crypto_options, std::move(frame_transformer));
}
} // namespace voe
diff --git a/audio/channel_receive.h b/audio/channel_receive.h
index c55968b55f..196e441fac 100644
--- a/audio/channel_receive.h
+++ b/audio/channel_receive.h
@@ -159,6 +159,12 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface {
virtual void SetDepacketizerToDecoderFrameTransformer(
rtc::scoped_refptr<webrtc::FrameTransformerInterface>
frame_transformer) = 0;
+
+ virtual void SetFrameDecryptor(
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) = 0;
+
+ virtual void OnLocalSsrcChange(uint32_t local_ssrc) = 0;
+ virtual uint32_t GetLocalSsrc() const = 0;
};
std::unique_ptr<ChannelReceiveInterface> CreateChannelReceive(
diff --git a/audio/channel_receive_frame_transformer_delegate.cc b/audio/channel_receive_frame_transformer_delegate.cc
index 261afbb100..7e617df780 100644
--- a/audio/channel_receive_frame_transformer_delegate.cc
+++ b/audio/channel_receive_frame_transformer_delegate.cc
@@ -47,7 +47,7 @@ class TransformableAudioFrame : public TransformableAudioFrameInterface {
ChannelReceiveFrameTransformerDelegate::ChannelReceiveFrameTransformerDelegate(
ReceiveFrameCallback receive_frame_callback,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
- rtc::Thread* channel_receive_thread)
+ TaskQueueBase* channel_receive_thread)
: receive_frame_callback_(receive_frame_callback),
frame_transformer_(std::move(frame_transformer)),
channel_receive_thread_(channel_receive_thread) {}
diff --git a/audio/channel_receive_frame_transformer_delegate.h b/audio/channel_receive_frame_transformer_delegate.h
index 0af748e37f..f59834d24e 100644
--- a/audio/channel_receive_frame_transformer_delegate.h
+++ b/audio/channel_receive_frame_transformer_delegate.h
@@ -32,7 +32,7 @@ class ChannelReceiveFrameTransformerDelegate : public TransformedFrameCallback {
ChannelReceiveFrameTransformerDelegate(
ReceiveFrameCallback receive_frame_callback,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
- rtc::Thread* channel_receive_thread);
+ TaskQueueBase* channel_receive_thread);
// Registers |this| as callback for |frame_transformer_|, to get the
// transformed frames.
@@ -67,7 +67,7 @@ class ChannelReceiveFrameTransformerDelegate : public TransformedFrameCallback {
RTC_GUARDED_BY(sequence_checker_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
RTC_GUARDED_BY(sequence_checker_);
- rtc::Thread* channel_receive_thread_;
+ TaskQueueBase* const channel_receive_thread_;
};
} // namespace webrtc
diff --git a/audio/channel_receive_frame_transformer_delegate_unittest.cc b/audio/channel_receive_frame_transformer_delegate_unittest.cc
index e7f5a454b8..01aac45b24 100644
--- a/audio/channel_receive_frame_transformer_delegate_unittest.cc
+++ b/audio/channel_receive_frame_transformer_delegate_unittest.cc
@@ -41,9 +41,9 @@ class MockChannelReceive {
TEST(ChannelReceiveFrameTransformerDelegateTest,
RegisterTransformedFrameCallbackOnInit) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<MockFrameTransformer>();
+ rtc::make_ref_counted<MockFrameTransformer>();
rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(),
mock_frame_transformer, nullptr);
EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback);
@@ -55,9 +55,9 @@ TEST(ChannelReceiveFrameTransformerDelegateTest,
TEST(ChannelReceiveFrameTransformerDelegateTest,
UnregisterTransformedFrameCallbackOnReset) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<MockFrameTransformer>();
+ rtc::make_ref_counted<MockFrameTransformer>();
rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(),
mock_frame_transformer, nullptr);
EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback);
@@ -69,10 +69,10 @@ TEST(ChannelReceiveFrameTransformerDelegateTest,
TEST(ChannelReceiveFrameTransformerDelegateTest,
TransformRunsChannelReceiveCallback) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+ rtc::make_ref_counted<NiceMock<MockFrameTransformer>>();
MockChannelReceive mock_channel;
rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
mock_channel.callback(), mock_frame_transformer,
rtc::Thread::Current());
rtc::scoped_refptr<TransformedFrameCallback> callback;
@@ -100,10 +100,10 @@ TEST(ChannelReceiveFrameTransformerDelegateTest,
TEST(ChannelReceiveFrameTransformerDelegateTest,
OnTransformedDoesNotRunChannelReceiveCallbackAfterReset) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+ rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
MockChannelReceive mock_channel;
rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
mock_channel.callback(), mock_frame_transformer,
rtc::Thread::Current());
diff --git a/audio/channel_send.cc b/audio/channel_send.cc
index 0434e484cb..47afc7982b 100644
--- a/audio/channel_send.cc
+++ b/audio/channel_send.cc
@@ -919,7 +919,7 @@ void ChannelSend::InitFrameTransformerDelegate(
absolute_capture_timestamp_ms);
};
frame_transformer_delegate_ =
- new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
std::move(send_audio_callback), std::move(frame_transformer),
&encoder_queue_);
frame_transformer_delegate_->Init();
diff --git a/audio/channel_send_frame_transformer_delegate_unittest.cc b/audio/channel_send_frame_transformer_delegate_unittest.cc
index e2f3647c0a..2ec78f8922 100644
--- a/audio/channel_send_frame_transformer_delegate_unittest.cc
+++ b/audio/channel_send_frame_transformer_delegate_unittest.cc
@@ -53,9 +53,9 @@ class MockChannelSend {
TEST(ChannelSendFrameTransformerDelegateTest,
RegisterTransformedFrameCallbackOnInit) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<MockFrameTransformer>();
+ rtc::make_ref_counted<MockFrameTransformer>();
rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
ChannelSendFrameTransformerDelegate::SendFrameCallback(),
mock_frame_transformer, nullptr);
EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback);
@@ -67,9 +67,9 @@ TEST(ChannelSendFrameTransformerDelegateTest,
TEST(ChannelSendFrameTransformerDelegateTest,
UnregisterTransformedFrameCallbackOnReset) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<MockFrameTransformer>();
+ rtc::make_ref_counted<MockFrameTransformer>();
rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
ChannelSendFrameTransformerDelegate::SendFrameCallback(),
mock_frame_transformer, nullptr);
EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback);
@@ -82,10 +82,10 @@ TEST(ChannelSendFrameTransformerDelegateTest,
TransformRunsChannelSendCallback) {
TaskQueueForTest channel_queue("channel_queue");
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+ rtc::make_ref_counted<NiceMock<MockFrameTransformer>>();
MockChannelSend mock_channel;
rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
mock_channel.callback(), mock_frame_transformer, &channel_queue);
rtc::scoped_refptr<TransformedFrameCallback> callback;
EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback)
@@ -112,10 +112,10 @@ TEST(ChannelSendFrameTransformerDelegateTest,
OnTransformedDoesNotRunChannelSendCallbackAfterReset) {
TaskQueueForTest channel_queue("channel_queue");
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+ rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
MockChannelSend mock_channel;
rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+ rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
mock_channel.callback(), mock_frame_transformer, &channel_queue);
delegate->Reset();
diff --git a/audio/mock_voe_channel_proxy.h b/audio/mock_voe_channel_proxy.h
index 7f140d400d..ea2a2ac3f0 100644
--- a/audio/mock_voe_channel_proxy.h
+++ b/audio/mock_voe_channel_proxy.h
@@ -17,6 +17,7 @@
#include <utility>
#include <vector>
+#include "api/crypto/frame_decryptor_interface.h"
#include "api/test/mock_frame_encryptor.h"
#include "audio/channel_receive.h"
#include "audio/channel_send.h"
@@ -98,6 +99,13 @@ class MockChannelReceive : public voe::ChannelReceiveInterface {
SetDepacketizerToDecoderFrameTransformer,
(rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
(override));
+ MOCK_METHOD(
+ void,
+ SetFrameDecryptor,
+ (rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor),
+ (override));
+ MOCK_METHOD(void, OnLocalSsrcChange, (uint32_t local_ssrc), (override));
+ MOCK_METHOD(uint32_t, GetLocalSsrc, (), (const, override));
};
class MockChannelSend : public voe::ChannelSendInterface {
diff --git a/audio/voip/test/audio_channel_unittest.cc b/audio/voip/test/audio_channel_unittest.cc
index e0244c76b7..f99d163022 100644
--- a/audio/voip/test/audio_channel_unittest.cc
+++ b/audio/voip/test/audio_channel_unittest.cc
@@ -65,7 +65,7 @@ class AudioChannelTest : public ::testing::Test {
// Also this uses the same transport object for different audio channel to
// simplify network routing logic.
rtc::scoped_refptr<AudioChannel> audio_channel =
- new rtc::RefCountedObject<AudioChannel>(
+ rtc::make_ref_counted<AudioChannel>(
&transport_, ssrc, task_queue_factory_.get(), process_thread_.get(),
audio_mixer_.get(), decoder_factory_);
audio_channel->SetEncoder(kPcmuPayload, kPcmuFormat,
diff --git a/audio/voip/test/voip_core_unittest.cc b/audio/voip/test/voip_core_unittest.cc
index d290bd6ec3..0d407601a3 100644
--- a/audio/voip/test/voip_core_unittest.cc
+++ b/audio/voip/test/voip_core_unittest.cc
@@ -39,7 +39,7 @@ class VoipCoreTest : public ::testing::Test {
auto encoder_factory = CreateBuiltinAudioEncoderFactory();
auto decoder_factory = CreateBuiltinAudioDecoderFactory();
rtc::scoped_refptr<AudioProcessing> audio_processing =
- new rtc::RefCountedObject<NiceMock<test::MockAudioProcessing>>();
+ rtc::make_ref_counted<NiceMock<test::MockAudioProcessing>>();
auto process_thread = std::make_unique<NiceMock<MockProcessThread>>();
// Hold the pointer to use for testing.
diff --git a/audio/voip/voip_core.cc b/audio/voip/voip_core.cc
index 33dadbc9af..67ae4c6521 100644
--- a/audio/voip/voip_core.cc
+++ b/audio/voip/voip_core.cc
@@ -138,7 +138,7 @@ ChannelId VoipCore::CreateChannel(Transport* transport,
}
rtc::scoped_refptr<AudioChannel> channel =
- new rtc::RefCountedObject<AudioChannel>(
+ rtc::make_ref_counted<AudioChannel>(
transport, local_ssrc.value(), task_queue_factory_.get(),
process_thread_.get(), audio_mixer_.get(), decoder_factory_);
diff --git a/build_overrides/build.gni b/build_overrides/build.gni
index c21069535b..137b6a40b2 100644
--- a/build_overrides/build.gni
+++ b/build_overrides/build.gni
@@ -20,11 +20,11 @@ checkout_google_benchmark = true
asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
lsan_suppressions_file = "//tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc"
tsan_suppressions_file = "//tools_webrtc/sanitizers/tsan_suppressions_webrtc.cc"
-msan_blacklist_path =
+msan_ignorelist_path =
rebase_path("//tools_webrtc/msan/suppressions.txt", root_build_dir)
-ubsan_blacklist_path =
+ubsan_ignorelist_path =
rebase_path("//tools_webrtc/ubsan/suppressions.txt", root_build_dir)
-ubsan_vptr_blacklist_path =
+ubsan_vptr_ignorelist_path =
rebase_path("//tools_webrtc/ubsan/vptr_suppressions.txt", root_build_dir)
# For Chromium, Android 32-bit non-component, non-clang builds hit a 4GiB size
diff --git a/call/BUILD.gn b/call/BUILD.gn
index 0e3e89d5d4..1e920b80b7 100644
--- a/call/BUILD.gn
+++ b/call/BUILD.gn
@@ -35,8 +35,10 @@ rtc_library("call_interfaces") {
if (!build_with_mozilla) {
sources += [ "audio_send_stream.cc" ]
}
+
deps = [
":audio_sender_interface",
+ ":receive_stream_interface",
":rtp_interfaces",
":video_stream_api",
"../api:fec_controller_api",
@@ -51,7 +53,6 @@ rtc_library("call_interfaces") {
"../api/audio:audio_frame_processor",
"../api/audio:audio_mixer_api",
"../api/audio_codecs:audio_codecs_api",
- "../api/crypto:frame_decryptor_interface",
"../api/crypto:frame_encryptor_interface",
"../api/crypto:options",
"../api/neteq:neteq_api",
@@ -59,7 +60,6 @@ rtc_library("call_interfaces") {
"../api/transport:bitrate_settings",
"../api/transport:network_control",
"../api/transport:webrtc_key_value_config",
- "../api/transport/rtp:rtp_source",
"../modules/async_audio_processing",
"../modules/audio_device",
"../modules/audio_processing",
@@ -73,7 +73,10 @@ rtc_library("call_interfaces") {
"../rtc_base:rtc_base_approved",
"../rtc_base/network:sent_packet",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/functional:bind_front",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_source_set("audio_sender_interface") {
@@ -95,22 +98,29 @@ rtc_library("rtp_interfaces") {
"rtp_config.h",
"rtp_packet_sink_interface.h",
"rtp_stream_receiver_controller_interface.h",
+ "rtp_transport_config.h",
+ "rtp_transport_controller_send_factory_interface.h",
"rtp_transport_controller_send_interface.h",
]
deps = [
"../api:array_view",
"../api:fec_controller_api",
"../api:frame_transformer_interface",
+ "../api:network_state_predictor_api",
"../api:rtp_headers",
"../api:rtp_parameters",
"../api/crypto:options",
"../api/rtc_event_log",
"../api/transport:bitrate_settings",
+ "../api/transport:network_control",
+ "../api/transport:webrtc_key_value_config",
"../api/units:timestamp",
"../common_video:frame_counts",
"../modules/rtp_rtcp:rtp_rtcp_format",
+ "../modules/utility",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ "../rtc_base:rtc_task_queue",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
@@ -147,6 +157,7 @@ rtc_library("rtp_sender") {
"rtp_payload_params.h",
"rtp_transport_controller_send.cc",
"rtp_transport_controller_send.h",
+ "rtp_transport_controller_send_factory.h",
"rtp_video_sender.cc",
"rtp_video_sender.h",
"rtp_video_sender_interface.h",
@@ -304,7 +315,21 @@ rtc_library("call") {
"../video",
"adaptation:resource_adaptation",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/functional:bind_front",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("receive_stream_interface") {
+ sources = [ "receive_stream.h" ]
+ deps = [
+ "../api:frame_transformer_interface",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/transport/rtp:rtp_source",
+ ]
}
rtc_library("video_stream_api") {
@@ -315,6 +340,7 @@ rtc_library("video_stream_api") {
"video_send_stream.h",
]
deps = [
+ ":receive_stream_interface",
":rtp_interfaces",
"../api:frame_transformer_interface",
"../api:rtp_headers",
@@ -322,10 +348,8 @@ rtc_library("video_stream_api") {
"../api:scoped_refptr",
"../api:transport_api",
"../api/adaptation:resource_adaptation_api",
- "../api/crypto:frame_decryptor_interface",
"../api/crypto:frame_encryptor_interface",
"../api/crypto:options",
- "../api/transport/rtp:rtp_source",
"../api/video:recordable_encoded_frame",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
@@ -454,6 +478,7 @@ if (rtc_include_tests) {
"../test:audio_codec_mocks",
"../test:direct_transport",
"../test:encoder_settings",
+ "../test:explicit_key_value_config",
"../test:fake_video_codecs",
"../test:field_trial",
"../test:mock_frame_transformer",
diff --git a/call/adaptation/broadcast_resource_listener.cc b/call/adaptation/broadcast_resource_listener.cc
index 59bd1e0c7f..876d4c0bf6 100644
--- a/call/adaptation/broadcast_resource_listener.cc
+++ b/call/adaptation/broadcast_resource_listener.cc
@@ -83,8 +83,8 @@ BroadcastResourceListener::CreateAdapterResource() {
MutexLock lock(&lock_);
RTC_DCHECK(is_listening_);
rtc::scoped_refptr<AdapterResource> adapter =
- new rtc::RefCountedObject<AdapterResource>(source_resource_->Name() +
- "Adapter");
+ rtc::make_ref_counted<AdapterResource>(source_resource_->Name() +
+ "Adapter");
adapters_.push_back(adapter);
return adapter;
}
diff --git a/call/adaptation/resource_adaptation_processor.cc b/call/adaptation/resource_adaptation_processor.cc
index 4925b6410a..741575ae38 100644
--- a/call/adaptation/resource_adaptation_processor.cc
+++ b/call/adaptation/resource_adaptation_processor.cc
@@ -72,7 +72,7 @@ ResourceAdaptationProcessor::ResourceAdaptationProcessor(
VideoStreamAdapter* stream_adapter)
: task_queue_(nullptr),
resource_listener_delegate_(
- new rtc::RefCountedObject<ResourceListenerDelegate>(this)),
+ rtc::make_ref_counted<ResourceListenerDelegate>(this)),
resources_(),
stream_adapter_(stream_adapter),
last_reported_source_restrictions_(),
diff --git a/call/adaptation/test/fake_resource.cc b/call/adaptation/test/fake_resource.cc
index fa69e886bf..d125468cb6 100644
--- a/call/adaptation/test/fake_resource.cc
+++ b/call/adaptation/test/fake_resource.cc
@@ -19,7 +19,7 @@ namespace webrtc {
// static
rtc::scoped_refptr<FakeResource> FakeResource::Create(std::string name) {
- return new rtc::RefCountedObject<FakeResource>(name);
+ return rtc::make_ref_counted<FakeResource>(name);
}
FakeResource::FakeResource(std::string name)
diff --git a/call/adaptation/video_stream_adapter.cc b/call/adaptation/video_stream_adapter.cc
index 6620eff311..64e1a77786 100644
--- a/call/adaptation/video_stream_adapter.cc
+++ b/call/adaptation/video_stream_adapter.cc
@@ -416,8 +416,10 @@ VideoStreamAdapter::AdaptIfFpsDiffInsufficient(
const VideoStreamInputState& input_state,
const RestrictionsWithCounters& restrictions) const {
RTC_DCHECK_EQ(degradation_preference_, DegradationPreference::BALANCED);
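+ // Prefer the pixel count of the single active stream, if any; otherwise
+ // fall back to the full input frame size.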
+ int frame_size_pixels = input_state.single_active_stream_pixels().value_or(
+ input_state.frame_size_pixels().value());
absl::optional<int> min_fps_diff =
- balanced_settings_.MinFpsDiff(input_state.frame_size_pixels().value());
+ balanced_settings_.MinFpsDiff(frame_size_pixels);
if (current_restrictions_.counters.fps_adaptations <
restrictions.counters.fps_adaptations &&
min_fps_diff && input_state.frames_per_second() > 0) {
@@ -502,9 +504,10 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseFramerate(
if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) {
max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second());
} else if (degradation_preference_ == DegradationPreference::BALANCED) {
- max_frame_rate =
- balanced_settings_.MinFps(input_state.video_codec_type(),
- input_state.frame_size_pixels().value());
+ int frame_size_pixels = input_state.single_active_stream_pixels().value_or(
+ input_state.frame_size_pixels().value());
+ max_frame_rate = balanced_settings_.MinFps(input_state.video_codec_type(),
+ frame_size_pixels);
} else {
RTC_NOTREACHED();
max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second());
@@ -561,12 +564,21 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseFramerate(
if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) {
max_frame_rate = GetHigherFrameRateThan(input_state.frames_per_second());
} else if (degradation_preference_ == DegradationPreference::BALANCED) {
- max_frame_rate =
- balanced_settings_.MaxFps(input_state.video_codec_type(),
- input_state.frame_size_pixels().value());
+ int frame_size_pixels = input_state.single_active_stream_pixels().value_or(
+ input_state.frame_size_pixels().value());
+ max_frame_rate = balanced_settings_.MaxFps(input_state.video_codec_type(),
+ frame_size_pixels);
+ // Temporary fix for cases when there are fewer framerate adaptation steps
+ // up than down. Make number of down/up steps equal.
+ if (max_frame_rate == std::numeric_limits<int>::max() &&
+ current_restrictions.counters.fps_adaptations > 1) {
+ // Do not unrestrict framerate to allow additional adaptation up steps.
+ RTC_LOG(LS_INFO) << "Modifying framerate due to remaining fps count.";
+ max_frame_rate -= current_restrictions.counters.fps_adaptations;
+ }
// In BALANCED, the max_frame_rate must be checked before proceeding. This
// is because the MaxFps might be the current Fps and so the balanced
- // settings may want to scale up the resolution.=
+ // settings may want to scale up the resolution.
if (!CanIncreaseFrameRateTo(max_frame_rate,
current_restrictions.restrictions)) {
return Adaptation::Status::kLimitReached;
diff --git a/call/audio_receive_stream.h b/call/audio_receive_stream.h
index 6f74492927..54729d0dc0 100644
--- a/call/audio_receive_stream.h
+++ b/call/audio_receive_stream.h
@@ -20,17 +20,14 @@
#include "api/audio_codecs/audio_decoder_factory.h"
#include "api/call/transport.h"
#include "api/crypto/crypto_options.h"
-#include "api/crypto/frame_decryptor_interface.h"
-#include "api/frame_transformer_interface.h"
#include "api/rtp_parameters.h"
-#include "api/scoped_refptr.h"
-#include "api/transport/rtp/rtp_source.h"
+#include "call/receive_stream.h"
#include "call/rtp_config.h"
namespace webrtc {
class AudioSinkInterface;
-class AudioReceiveStream {
+class AudioReceiveStream : public MediaReceiveStream {
public:
struct Stats {
Stats();
@@ -106,29 +103,14 @@ class AudioReceiveStream {
std::string ToString() const;
// Receive-stream specific RTP settings.
- struct Rtp {
+ struct Rtp : public RtpConfig {
Rtp();
~Rtp();
std::string ToString() const;
- // Synchronization source (stream identifier) to be received.
- uint32_t remote_ssrc = 0;
-
- // Sender SSRC used for sending RTCP (such as receiver reports).
- uint32_t local_ssrc = 0;
-
- // Enable feedback for send side bandwidth estimation.
- // See
- // https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions
- // for details.
- bool transport_cc = false;
-
// See NackConfig for description.
NackConfig nack;
-
- // RTP header extensions used for the received stream.
- std::vector<RtpExtension> extensions;
} rtp;
Transport* rtcp_send_transport = nullptr;
@@ -157,22 +139,26 @@ class AudioReceiveStream {
// An optional custom frame decryptor that allows the entire frame to be
// decrypted in whatever way the caller chooses. This is not required by
// default.
+ // TODO(tommi): Remove this member variable from the struct. It's not
+ // a part of the AudioReceiveStream state but rather a pass-through
+ // variable.
rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor;
// An optional frame transformer used by insertable streams to transform
// encoded frames.
+ // TODO(tommi): Remove this member variable from the struct. It's not
+ // a part of the AudioReceiveStream state but rather a pass-through
+ // variable.
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer;
};
- // Reconfigure the stream according to the Configuration.
- virtual void Reconfigure(const Config& config) = 0;
-
- // Starts stream activity.
- // When a stream is active, it can receive, process and deliver packets.
- virtual void Start() = 0;
- // Stops stream activity.
- // When a stream is stopped, it can't receive, process or deliver packets.
- virtual void Stop() = 0;
+ // Methods that support reconfiguring the stream post initialization.
+ virtual void SetDecoderMap(std::map<int, SdpAudioFormat> decoder_map) = 0;
+ virtual void SetUseTransportCcAndNackHistory(bool use_transport_cc,
+ int history_ms) = 0;
+ // Set/change the rtp header extensions. Must be called on the packet
+ // delivery thread.
+ virtual void SetRtpExtensions(std::vector<RtpExtension> extensions) = 0;
// Returns true if the stream has been started.
virtual bool IsRunning() const = 0;
@@ -202,8 +188,6 @@ class AudioReceiveStream {
// Returns current value of base minimum delay in milliseconds.
virtual int GetBaseMinimumPlayoutDelayMs() const = 0;
- virtual std::vector<RtpSource> GetSources() const = 0;
-
protected:
virtual ~AudioReceiveStream() {}
};
diff --git a/call/call.cc b/call/call.cc
index 437f4df2fb..f4a7d7cc9e 100644
--- a/call/call.cc
+++ b/call/call.cc
@@ -13,12 +13,14 @@
#include <string.h>
#include <algorithm>
+#include <atomic>
#include <map>
#include <memory>
#include <set>
#include <utility>
#include <vector>
+#include "absl/functional/bind_front.h"
#include "absl/types/optional.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/sequence_checker.h"
@@ -32,6 +34,7 @@
#include "call/receive_time_calculator.h"
#include "call/rtp_stream_receiver_controller.h"
#include "call/rtp_transport_controller_send.h"
+#include "call/rtp_transport_controller_send_factory.h"
#include "call/version.h"
#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h"
#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h"
@@ -78,12 +81,10 @@ bool SendPeriodicFeedback(const std::vector<RtpExtension>& extensions) {
return true;
}
-// TODO(nisse): This really begs for a shared context struct.
-bool UseSendSideBwe(const std::vector<RtpExtension>& extensions,
- bool transport_cc) {
- if (!transport_cc)
+bool UseSendSideBwe(const ReceiveStream::RtpConfig& rtp) {
+ if (!rtp.transport_cc)
return false;
- for (const auto& extension : extensions) {
+ for (const auto& extension : rtp.extensions) {
if (extension.uri == RtpExtension::kTransportSequenceNumberUri ||
extension.uri == RtpExtension::kTransportSequenceNumberV2Uri)
return true;
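
The same predicate can be sketched standalone. RtpConfigLike, ExtensionLike and the URI literals below are stand-ins for ReceiveStream::RtpConfig, RtpExtension and its kTransportSequenceNumberUri/kTransportSequenceNumberV2Uri constants: send-side BWE is only used when transport-cc is negotiated and a transport sequence number extension is registered.

// Sketch of the UseSendSideBwe() check above, with stand-in types.
#include <iostream>
#include <string>
#include <vector>

struct ExtensionLike {
  std::string uri;
};

struct RtpConfigLike {
  bool transport_cc = false;
  std::vector<ExtensionLike> extensions;
};

// Placeholders for the real transport sequence number extension URIs.
const char kTransportSequenceNumberUriSketch[] = "transport-wide-cc-01";
const char kTransportSequenceNumberV2UriSketch[] = "transport-wide-cc-02";

// Transport-cc must be negotiated and a transport sequence number header
// extension must be registered; otherwise fall back to receive-side BWE.
bool UseSendSideBweSketch(const RtpConfigLike& rtp) {
  if (!rtp.transport_cc)
    return false;
  for (const auto& extension : rtp.extensions) {
    if (extension.uri == kTransportSequenceNumberUriSketch ||
        extension.uri == kTransportSequenceNumberV2UriSketch)
      return true;
  }
  return false;
}

int main() {
  RtpConfigLike rtp;
  rtp.transport_cc = true;
  rtp.extensions.push_back({"transport-wide-cc-02"});
  std::cout << std::boolalpha << UseSendSideBweSketch(rtp) << "\n";  // true
  return 0;
}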
@@ -91,18 +92,6 @@ bool UseSendSideBwe(const std::vector<RtpExtension>& extensions,
return false;
}
-bool UseSendSideBwe(const VideoReceiveStream::Config& config) {
- return UseSendSideBwe(config.rtp.extensions, config.rtp.transport_cc);
-}
-
-bool UseSendSideBwe(const AudioReceiveStream::Config& config) {
- return UseSendSideBwe(config.rtp.extensions, config.rtp.transport_cc);
-}
-
-bool UseSendSideBwe(const FlexfecReceiveStream::Config& config) {
- return UseSendSideBwe(config.rtp_header_extensions, config.transport_cc);
-}
-
const int* FindKeyByValue(const std::map<int, int>& m, int v) {
for (const auto& kv : m) {
if (kv.second == v)
@@ -167,34 +156,6 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() {
return current;
}
-// Called from the destructor of Call to report the collected send histograms.
-void UpdateSendHistograms(Timestamp now,
- Timestamp first_sent_packet,
- AvgCounter& estimated_send_bitrate_kbps_counter,
- AvgCounter& pacer_bitrate_kbps_counter) {
- TimeDelta elapsed = now - first_sent_packet;
- if (elapsed.seconds() < metrics::kMinRunTimeInSeconds)
- return;
-
- const int kMinRequiredPeriodicSamples = 5;
- AggregatedStats send_bitrate_stats =
- estimated_send_bitrate_kbps_counter.ProcessAndGetStats();
- if (send_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.EstimatedSendBitrateInKbps",
- send_bitrate_stats.average);
- RTC_LOG(LS_INFO) << "WebRTC.Call.EstimatedSendBitrateInKbps, "
- << send_bitrate_stats.ToString();
- }
- AggregatedStats pacer_bitrate_stats =
- pacer_bitrate_kbps_counter.ProcessAndGetStats();
- if (pacer_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.PacerBitrateInKbps",
- pacer_bitrate_stats.average);
- RTC_LOG(LS_INFO) << "WebRTC.Call.PacerBitrateInKbps, "
- << pacer_bitrate_stats.ToString();
- }
-}
-
} // namespace
namespace internal {
@@ -299,10 +260,6 @@ class Call final : public webrtc::Call,
DeliveryStatus DeliverPacket(MediaType media_type,
rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) override;
- void DeliverPacketAsync(MediaType media_type,
- rtc::CopyOnWriteBuffer packet,
- int64_t packet_time_us,
- PacketCallback callback) override;
// Implements RecoveredPacketReceiver.
void OnRecoveredPacket(const uint8_t* packet, size_t length) override;
@@ -312,6 +269,12 @@ class Call final : public webrtc::Call,
void OnAudioTransportOverheadChanged(
int transport_overhead_per_packet) override;
+ void OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream,
+ uint32_t local_ssrc) override;
+
+ void OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream,
+ const std::string& sync_group) override;
+
void OnSentPacket(const rtc::SentPacket& sent_packet) override;
// Implements TargetTransferRateObserver,
@@ -324,45 +287,96 @@ class Call final : public webrtc::Call,
void SetClientBitratePreferences(const BitrateSettings& preferences) override;
private:
- DeliveryStatus DeliverRtcp(MediaType media_type,
- const uint8_t* packet,
- size_t length)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+ // Thread-compatible class that collects received packet stats and exposes
+ // them as UMA histograms on destruction.
+ class ReceiveStats {
+ public:
+ explicit ReceiveStats(Clock* clock);
+ ~ReceiveStats();
+
+ void AddReceivedRtcpBytes(int bytes);
+ void AddReceivedAudioBytes(int bytes, webrtc::Timestamp arrival_time);
+ void AddReceivedVideoBytes(int bytes, webrtc::Timestamp arrival_time);
+
+ private:
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ RateCounter received_bytes_per_second_counter_
+ RTC_GUARDED_BY(sequence_checker_);
+ RateCounter received_audio_bytes_per_second_counter_
+ RTC_GUARDED_BY(sequence_checker_);
+ RateCounter received_video_bytes_per_second_counter_
+ RTC_GUARDED_BY(sequence_checker_);
+ RateCounter received_rtcp_bytes_per_second_counter_
+ RTC_GUARDED_BY(sequence_checker_);
+ absl::optional<Timestamp> first_received_rtp_audio_timestamp_
+ RTC_GUARDED_BY(sequence_checker_);
+ absl::optional<Timestamp> last_received_rtp_audio_timestamp_
+ RTC_GUARDED_BY(sequence_checker_);
+ absl::optional<Timestamp> first_received_rtp_video_timestamp_
+ RTC_GUARDED_BY(sequence_checker_);
+ absl::optional<Timestamp> last_received_rtp_video_timestamp_
+ RTC_GUARDED_BY(sequence_checker_);
+ };
+
+ // Thread-compatible class that collects sent packet stats and exposes
+ // them as UMA histograms on destruction, provided SetFirstPacketTime was
+ // called with a non-empty packet timestamp before the destructor runs.
+ class SendStats {
+ public:
+ explicit SendStats(Clock* clock);
+ ~SendStats();
+
+ void SetFirstPacketTime(absl::optional<Timestamp> first_sent_packet_time);
+ void PauseSendAndPacerBitrateCounters();
+ void AddTargetBitrateSample(uint32_t target_bitrate_bps);
+ void SetMinAllocatableRate(BitrateAllocationLimits limits);
+
+ private:
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker destructor_sequence_checker_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ Clock* const clock_ RTC_GUARDED_BY(destructor_sequence_checker_);
+ AvgCounter estimated_send_bitrate_kbps_counter_
+ RTC_GUARDED_BY(sequence_checker_);
+ AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(sequence_checker_);
+ uint32_t min_allocated_send_bitrate_bps_ RTC_GUARDED_BY(sequence_checker_){
+ 0};
+ absl::optional<Timestamp> first_sent_packet_time_
+ RTC_GUARDED_BY(destructor_sequence_checker_);
+ };
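
A rough standalone analogue of the report-on-destruction idea behind ReceiveStats and SendStats, using only the standard library. The class name, the averaging, and the plain stdout "histogram" are assumptions for illustration; the real classes feed RTC_HISTOGRAM_* macros and are guarded by sequence checkers.

// Sketch of a destruction-time reporter; names and the stdout sink are
// hypothetical stand-ins for the UMA macros used in call.cc.
#include <chrono>
#include <cstdint>
#include <iostream>
#include <optional>

class SendStatsSketch {
 public:
  using Clock = std::chrono::steady_clock;

  void SetFirstPacketTime(std::optional<Clock::time_point> t) {
    first_packet_time_ = t;
  }
  void AddTargetBitrateSample(uint32_t target_bitrate_bps) {
    ++num_samples_;
    bitrate_sum_kbps_ += target_bitrate_bps / 1000;
  }

  // Reports only if a first-packet time was ever set, mirroring the
  // SetFirstPacketTime()-before-destructor contract described above.
  ~SendStatsSketch() {
    if (!first_packet_time_ || num_samples_ == 0)
      return;
    std::cout << "AvgTargetBitrateKbps=" << bitrate_sum_kbps_ / num_samples_
              << "\n";
  }

 private:
  std::optional<Clock::time_point> first_packet_time_;
  uint64_t num_samples_ = 0;
  uint64_t bitrate_sum_kbps_ = 0;
};

int main() {
  SendStatsSketch stats;
  stats.AddTargetBitrateSample(300'000);
  stats.AddTargetBitrateSample(500'000);
  stats.SetFirstPacketTime(SendStatsSketch::Clock::now());
  return 0;  // Destructor prints AvgTargetBitrateKbps=400.
}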
+
+ void DeliverRtcp(MediaType media_type, rtc::CopyOnWriteBuffer packet)
+ RTC_RUN_ON(network_thread_);
DeliveryStatus DeliverRtp(MediaType media_type,
rtc::CopyOnWriteBuffer packet,
- int64_t packet_time_us)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
- void ConfigureSync(const std::string& sync_group)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+ int64_t packet_time_us) RTC_RUN_ON(worker_thread_);
+ void ConfigureSync(const std::string& sync_group) RTC_RUN_ON(worker_thread_);
void NotifyBweOfReceivedPacket(const RtpPacketReceived& packet,
MediaType media_type)
- RTC_SHARED_LOCKS_REQUIRED(worker_thread_);
+ RTC_RUN_ON(worker_thread_);
- void UpdateReceiveHistograms();
void UpdateAggregateNetworkState();
// Ensure that necessary process threads are started, and any required
// callbacks have been registered.
- void EnsureStarted() RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
-
- rtc::TaskQueue* send_transport_queue() const {
- return transport_send_ptr_->GetWorkerQueue();
- }
+ void EnsureStarted() RTC_RUN_ON(worker_thread_);
Clock* const clock_;
TaskQueueFactory* const task_queue_factory_;
TaskQueueBase* const worker_thread_;
TaskQueueBase* const network_thread_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker send_transport_sequence_checker_;
const int num_cpu_cores_;
const rtc::scoped_refptr<SharedModuleThread> module_process_thread_;
const std::unique_ptr<CallStats> call_stats_;
const std::unique_ptr<BitrateAllocator> bitrate_allocator_;
- Call::Config config_;
+ const Call::Config config_ RTC_GUARDED_BY(worker_thread_);
+ // Maps to config_.trials, can be used from any thread via `trials()`.
+ const WebRtcKeyValueConfig& trials_;
- NetworkState audio_network_state_;
- NetworkState video_network_state_;
+ NetworkState audio_network_state_ RTC_GUARDED_BY(worker_thread_);
+ NetworkState video_network_state_ RTC_GUARDED_BY(worker_thread_);
// TODO(bugs.webrtc.org/11993): Move aggregate_network_up_ over to the
// network thread.
bool aggregate_network_up_ RTC_GUARDED_BY(worker_thread_);
@@ -380,39 +394,17 @@ class Call final : public webrtc::Call,
// TODO(nisse): Should eventually be injected at creation,
// with a single object in the bundled case.
- RtpStreamReceiverController audio_receiver_controller_;
- RtpStreamReceiverController video_receiver_controller_;
+ RtpStreamReceiverController audio_receiver_controller_
+ RTC_GUARDED_BY(worker_thread_);
+ RtpStreamReceiverController video_receiver_controller_
+ RTC_GUARDED_BY(worker_thread_);
// This extra map is used for receive processing which is
// independent of media type.
- // TODO(nisse): In the RTP transport refactoring, we should have a
- // single mapping from ssrc to a more abstract receive stream, with
- // accessor methods for all configuration we need at this level.
- struct ReceiveRtpConfig {
- explicit ReceiveRtpConfig(const webrtc::AudioReceiveStream::Config& config)
- : extensions(config.rtp.extensions),
- use_send_side_bwe(UseSendSideBwe(config)) {}
- explicit ReceiveRtpConfig(const webrtc::VideoReceiveStream::Config& config)
- : extensions(config.rtp.extensions),
- use_send_side_bwe(UseSendSideBwe(config)) {}
- explicit ReceiveRtpConfig(const FlexfecReceiveStream::Config& config)
- : extensions(config.rtp_header_extensions),
- use_send_side_bwe(UseSendSideBwe(config)) {}
-
- // Registered RTP header extensions for each stream. Note that RTP header
- // extensions are negotiated per track ("m= line") in the SDP, but we have
- // no notion of tracks at the Call level. We therefore store the RTP header
- // extensions per SSRC instead, which leads to some storage overhead.
- const RtpHeaderExtensionMap extensions;
- // Set if both RTP extension the RTCP feedback message needed for
- // send side BWE are negotiated.
- const bool use_send_side_bwe;
- };
-
// TODO(bugs.webrtc.org/11993): Move receive_rtp_config_ over to the
// network thread.
- std::map<uint32_t, ReceiveRtpConfig> receive_rtp_config_
+ std::map<uint32_t, ReceiveStream*> receive_rtp_config_
RTC_GUARDED_BY(worker_thread_);
// Audio and Video send streams are owned by the client that creates them.
@@ -421,6 +413,10 @@ class Call final : public webrtc::Call,
std::map<uint32_t, VideoSendStream*> video_send_ssrcs_
RTC_GUARDED_BY(worker_thread_);
std::set<VideoSendStream*> video_send_streams_ RTC_GUARDED_BY(worker_thread_);
+ // True if |video_send_streams_| is empty, false if not. The atomic variable
+ // is used to decide UMA send statistics behavior and avoids a PostTask().
+ std::atomic<bool> video_send_streams_empty_{true};
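
A self-contained sketch of that pattern (names are illustrative): the owning thread keeps the authoritative container and mirrors its emptiness into a relaxed atomic, so a reader on another sequence can consult it without a PostTask, accepting a momentarily stale value.

// Sketch of mirroring "is the set empty?" into a relaxed atomic so a reader
// on another sequence can skip stats work without posting a task.
#include <atomic>
#include <iostream>
#include <set>

class SendStreamsSketch {
 public:
  void Insert(int stream_id) {  // Owner-thread only.
    streams_.insert(stream_id);
    empty_.store(false, std::memory_order_relaxed);
  }
  void Erase(int stream_id) {  // Owner-thread only.
    streams_.erase(stream_id);
    if (streams_.empty())
      empty_.store(true, std::memory_order_relaxed);
  }
  // Safe to call from any thread; may be momentarily stale, which is
  // acceptable when deciding whether to record send statistics.
  bool EmptyHint() const { return empty_.load(std::memory_order_relaxed); }

 private:
  std::set<int> streams_;  // Guarded by the owner thread.
  std::atomic<bool> empty_{true};
};

int main() {
  SendStreamsSketch streams;
  std::cout << std::boolalpha << streams.EmptyHint() << "\n";  // true
  streams.Insert(42);
  std::cout << streams.EmptyHint() << "\n";  // false
  streams.Erase(42);
  std::cout << streams.EmptyHint() << "\n";  // true
  return 0;
}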
// Each forwarder wraps an adaptation resource that was added to the call.
std::vector<std::unique_ptr<ResourceVideoSendStreamForwarder>>
@@ -434,49 +430,41 @@ class Call final : public webrtc::Call,
RtpPayloadStateMap suspended_video_payload_states_
RTC_GUARDED_BY(worker_thread_);
- webrtc::RtcEventLog* event_log_;
-
- // The following members are only accessed (exclusively) from one thread and
- // from the destructor, and therefore doesn't need any explicit
- // synchronization.
- RateCounter received_bytes_per_second_counter_;
- RateCounter received_audio_bytes_per_second_counter_;
- RateCounter received_video_bytes_per_second_counter_;
- RateCounter received_rtcp_bytes_per_second_counter_;
- absl::optional<int64_t> first_received_rtp_audio_ms_;
- absl::optional<int64_t> last_received_rtp_audio_ms_;
- absl::optional<int64_t> first_received_rtp_video_ms_;
- absl::optional<int64_t> last_received_rtp_video_ms_;
-
- uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(worker_thread_);
- // TODO(holmer): Remove this lock once BitrateController no longer calls
- // OnNetworkChanged from multiple threads.
- uint32_t min_allocated_send_bitrate_bps_ RTC_GUARDED_BY(worker_thread_);
- uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(worker_thread_);
- AvgCounter estimated_send_bitrate_kbps_counter_
- RTC_GUARDED_BY(worker_thread_);
- AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(worker_thread_);
+ webrtc::RtcEventLog* const event_log_;
+
+ // TODO(bugs.webrtc.org/11993): Ready to move stats access to the network
+ // thread.
+ ReceiveStats receive_stats_ RTC_GUARDED_BY(worker_thread_);
+ SendStats send_stats_ RTC_GUARDED_BY(send_transport_sequence_checker_);
+ // |last_bandwidth_bps_| and |configured_max_padding_bitrate_bps_| being
+ // atomic avoids a PostTask. The variables are used for stats gathering.
+ std::atomic<uint32_t> last_bandwidth_bps_{0};
+ std::atomic<uint32_t> configured_max_padding_bitrate_bps_{0};
ReceiveSideCongestionController receive_side_cc_;
const std::unique_ptr<ReceiveTimeCalculator> receive_time_calculator_;
const std::unique_ptr<SendDelayStats> video_send_delay_stats_;
- const int64_t start_ms_;
+ const Timestamp start_of_call_;
// Note that |task_safety_| needs to be at a greater scope than the task queue
// owned by |transport_send_| since calls might arrive on the network thread
// while Call is being deleted and the task queue is being torn down.
- ScopedTaskSafety task_safety_;
+ const ScopedTaskSafety task_safety_;
// Caches transport_send_.get(), to avoid racing with destructor.
// Note that this is declared before transport_send_ to ensure that it is not
// invalidated until no more tasks can be running on the transport_send_ task
// queue.
- RtpTransportControllerSendInterface* const transport_send_ptr_;
+ // For more details on the background of this member variable, see:
+ // https://webrtc-review.googlesource.com/c/src/+/63023/9/call/call.cc
+ // https://bugs.chromium.org/p/chromium/issues/detail?id=992640
+ RtpTransportControllerSendInterface* const transport_send_ptr_
+ RTC_GUARDED_BY(send_transport_sequence_checker_);
// Declared last since it will issue callbacks from a task queue. Declaring it
// last ensures that it is destroyed first and any running tasks are finished.
- std::unique_ptr<RtpTransportControllerSendInterface> transport_send_;
+ const std::unique_ptr<RtpTransportControllerSendInterface> transport_send_;
bool is_started_ RTC_GUARDED_BY(worker_thread_) = false;
@@ -501,11 +489,6 @@ Call* Call::Create(const Call::Config& config) {
rtc::scoped_refptr<SharedModuleThread> call_thread =
SharedModuleThread::Create(ProcessThread::Create("ModuleProcessThread"),
nullptr);
- return Create(config, std::move(call_thread));
-}
-
-Call* Call::Create(const Call::Config& config,
- rtc::scoped_refptr<SharedModuleThread> call_thread) {
return Create(config, Clock::GetRealTimeClock(), std::move(call_thread),
ProcessThread::Create("PacerThread"));
}
@@ -515,15 +498,28 @@ Call* Call::Create(const Call::Config& config,
rtc::scoped_refptr<SharedModuleThread> call_thread,
std::unique_ptr<ProcessThread> pacer_thread) {
RTC_DCHECK(config.task_queue_factory);
+
+ RtpTransportControllerSendFactory transport_controller_factory_;
+
+ RtpTransportConfig transportConfig = config.ExtractTransportConfig();
+
return new internal::Call(
clock, config,
- std::make_unique<RtpTransportControllerSend>(
- clock, config.event_log, config.network_state_predictor_factory,
- config.network_controller_factory, config.bitrate_config,
- std::move(pacer_thread), config.task_queue_factory, config.trials),
+ transport_controller_factory_.Create(transportConfig, clock,
+ std::move(pacer_thread)),
std::move(call_thread), config.task_queue_factory);
}
+Call* Call::Create(const Call::Config& config,
+ Clock* clock,
+ rtc::scoped_refptr<SharedModuleThread> call_thread,
+ std::unique_ptr<RtpTransportControllerSendInterface>
+ transportControllerSend) {
+ RTC_DCHECK(config.task_queue_factory);
+ return new internal::Call(clock, config, std::move(transportControllerSend),
+ std::move(call_thread), config.task_queue_factory);
+}
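
A compressed sketch of the construction shape introduced here: the production path builds the send controller through a factory from an extracted transport config, while the new overload lets callers (for example tests) inject a ready-made controller. Every type below is a stand-in; only the two-overload idea is taken from the diff.

// Sketch of factory-based construction vs. direct injection; all names here
// are illustrative stand-ins, not the WebRTC classes.
#include <iostream>
#include <memory>
#include <string>

struct TransportConfigSketch {
  std::string field_trials;
};

class SendControllerSketch {
 public:
  virtual ~SendControllerSketch() = default;
  virtual void EnsureStarted() = 0;
};

class DefaultSendController : public SendControllerSketch {
 public:
  void EnsureStarted() override { std::cout << "controller started\n"; }
};

class SendControllerFactorySketch {
 public:
  std::unique_ptr<SendControllerSketch> Create(const TransportConfigSketch&) {
    return std::make_unique<DefaultSendController>();
  }
};

class CallSketch {
 public:
  // Production path: build the controller through the factory.
  static std::unique_ptr<CallSketch> Create(const TransportConfigSketch& cfg) {
    SendControllerFactorySketch factory;
    return Create(factory.Create(cfg));
  }
  // Injection path (useful for tests): accept a ready-made controller.
  static std::unique_ptr<CallSketch> Create(
      std::unique_ptr<SendControllerSketch> controller) {
    return std::unique_ptr<CallSketch>(new CallSketch(std::move(controller)));
  }
  void Start() { controller_->EnsureStarted(); }

 private:
  explicit CallSketch(std::unique_ptr<SendControllerSketch> controller)
      : controller_(std::move(controller)) {}
  std::unique_ptr<SendControllerSketch> controller_;
};

int main() {
  auto call =
      CallSketch::Create(TransportConfigSketch{"WebRTC-SomeTrial/Enabled/"});
  call->Start();
  return 0;
}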
+
class SharedModuleThread::Impl {
public:
Impl(std::unique_ptr<ProcessThread> process_thread,
@@ -628,6 +624,157 @@ VideoSendStream* Call::CreateVideoSendStream(
namespace internal {
+Call::ReceiveStats::ReceiveStats(Clock* clock)
+ : received_bytes_per_second_counter_(clock, nullptr, false),
+ received_audio_bytes_per_second_counter_(clock, nullptr, false),
+ received_video_bytes_per_second_counter_(clock, nullptr, false),
+ received_rtcp_bytes_per_second_counter_(clock, nullptr, false) {
+ sequence_checker_.Detach();
+}
+
+void Call::ReceiveStats::AddReceivedRtcpBytes(int bytes) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ if (received_bytes_per_second_counter_.HasSample()) {
+ // First RTP packet has been received.
+ received_bytes_per_second_counter_.Add(static_cast<int>(bytes));
+ received_rtcp_bytes_per_second_counter_.Add(static_cast<int>(bytes));
+ }
+}
+
+void Call::ReceiveStats::AddReceivedAudioBytes(int bytes,
+ webrtc::Timestamp arrival_time) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ received_bytes_per_second_counter_.Add(bytes);
+ received_audio_bytes_per_second_counter_.Add(bytes);
+ if (!first_received_rtp_audio_timestamp_)
+ first_received_rtp_audio_timestamp_ = arrival_time;
+ last_received_rtp_audio_timestamp_ = arrival_time;
+}
+
+void Call::ReceiveStats::AddReceivedVideoBytes(int bytes,
+ webrtc::Timestamp arrival_time) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ received_bytes_per_second_counter_.Add(bytes);
+ received_video_bytes_per_second_counter_.Add(bytes);
+ if (!first_received_rtp_video_timestamp_)
+ first_received_rtp_video_timestamp_ = arrival_time;
+ last_received_rtp_video_timestamp_ = arrival_time;
+}
+
+Call::ReceiveStats::~ReceiveStats() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ if (first_received_rtp_audio_timestamp_) {
+ RTC_HISTOGRAM_COUNTS_100000(
+ "WebRTC.Call.TimeReceivingAudioRtpPacketsInSeconds",
+ (*last_received_rtp_audio_timestamp_ -
+ *first_received_rtp_audio_timestamp_)
+ .seconds());
+ }
+ if (first_received_rtp_video_timestamp_) {
+ RTC_HISTOGRAM_COUNTS_100000(
+ "WebRTC.Call.TimeReceivingVideoRtpPacketsInSeconds",
+ (*last_received_rtp_video_timestamp_ -
+ *first_received_rtp_video_timestamp_)
+ .seconds());
+ }
+ const int kMinRequiredPeriodicSamples = 5;
+ AggregatedStats video_bytes_per_sec =
+ received_video_bytes_per_second_counter_.GetStats();
+ if (video_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.VideoBitrateReceivedInKbps",
+ video_bytes_per_sec.average * 8 / 1000);
+ RTC_LOG(LS_INFO) << "WebRTC.Call.VideoBitrateReceivedInBps, "
+ << video_bytes_per_sec.ToStringWithMultiplier(8);
+ }
+ AggregatedStats audio_bytes_per_sec =
+ received_audio_bytes_per_second_counter_.GetStats();
+ if (audio_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.AudioBitrateReceivedInKbps",
+ audio_bytes_per_sec.average * 8 / 1000);
+ RTC_LOG(LS_INFO) << "WebRTC.Call.AudioBitrateReceivedInBps, "
+ << audio_bytes_per_sec.ToStringWithMultiplier(8);
+ }
+ AggregatedStats rtcp_bytes_per_sec =
+ received_rtcp_bytes_per_second_counter_.GetStats();
+ if (rtcp_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.RtcpBitrateReceivedInBps",
+ rtcp_bytes_per_sec.average * 8);
+ RTC_LOG(LS_INFO) << "WebRTC.Call.RtcpBitrateReceivedInBps, "
+ << rtcp_bytes_per_sec.ToStringWithMultiplier(8);
+ }
+ AggregatedStats recv_bytes_per_sec =
+ received_bytes_per_second_counter_.GetStats();
+ if (recv_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.BitrateReceivedInKbps",
+ recv_bytes_per_sec.average * 8 / 1000);
+ RTC_LOG(LS_INFO) << "WebRTC.Call.BitrateReceivedInBps, "
+ << recv_bytes_per_sec.ToStringWithMultiplier(8);
+ }
+}
+
+Call::SendStats::SendStats(Clock* clock)
+ : clock_(clock),
+ estimated_send_bitrate_kbps_counter_(clock, nullptr, true),
+ pacer_bitrate_kbps_counter_(clock, nullptr, true) {
+ destructor_sequence_checker_.Detach();
+ sequence_checker_.Detach();
+}
+
+Call::SendStats::~SendStats() {
+ RTC_DCHECK_RUN_ON(&destructor_sequence_checker_);
+ if (!first_sent_packet_time_)
+ return;
+
+ TimeDelta elapsed = clock_->CurrentTime() - *first_sent_packet_time_;
+ if (elapsed.seconds() < metrics::kMinRunTimeInSeconds)
+ return;
+
+ const int kMinRequiredPeriodicSamples = 5;
+ AggregatedStats send_bitrate_stats =
+ estimated_send_bitrate_kbps_counter_.ProcessAndGetStats();
+ if (send_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) {
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.EstimatedSendBitrateInKbps",
+ send_bitrate_stats.average);
+ RTC_LOG(LS_INFO) << "WebRTC.Call.EstimatedSendBitrateInKbps, "
+ << send_bitrate_stats.ToString();
+ }
+ AggregatedStats pacer_bitrate_stats =
+ pacer_bitrate_kbps_counter_.ProcessAndGetStats();
+ if (pacer_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) {
+ RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.PacerBitrateInKbps",
+ pacer_bitrate_stats.average);
+ RTC_LOG(LS_INFO) << "WebRTC.Call.PacerBitrateInKbps, "
+ << pacer_bitrate_stats.ToString();
+ }
+}
+
+void Call::SendStats::SetFirstPacketTime(
+ absl::optional<Timestamp> first_sent_packet_time) {
+ RTC_DCHECK_RUN_ON(&destructor_sequence_checker_);
+ first_sent_packet_time_ = first_sent_packet_time;
+}
+
+void Call::SendStats::PauseSendAndPacerBitrateCounters() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ estimated_send_bitrate_kbps_counter_.ProcessAndPause();
+ pacer_bitrate_kbps_counter_.ProcessAndPause();
+}
+
+void Call::SendStats::AddTargetBitrateSample(uint32_t target_bitrate_bps) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000);
+ // Pacer bitrate may be higher than bitrate estimate if enforcing min
+ // bitrate.
+ uint32_t pacer_bitrate_bps =
+ std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_);
+ pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000);
+}
+
+void Call::SendStats::SetMinAllocatableRate(BitrateAllocationLimits limits) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps();
+}
+
Call::Call(Clock* clock,
const Call::Config& config,
std::unique_ptr<RtpTransportControllerSendInterface> transport_send,
@@ -645,23 +792,22 @@ Call::Call(Clock* clock,
call_stats_(new CallStats(clock_, worker_thread_)),
bitrate_allocator_(new BitrateAllocator(this)),
config_(config),
+ trials_(*config.trials),
audio_network_state_(kNetworkDown),
video_network_state_(kNetworkDown),
aggregate_network_up_(false),
event_log_(config.event_log),
- received_bytes_per_second_counter_(clock_, nullptr, true),
- received_audio_bytes_per_second_counter_(clock_, nullptr, true),
- received_video_bytes_per_second_counter_(clock_, nullptr, true),
- received_rtcp_bytes_per_second_counter_(clock_, nullptr, true),
- last_bandwidth_bps_(0),
- min_allocated_send_bitrate_bps_(0),
- configured_max_padding_bitrate_bps_(0),
- estimated_send_bitrate_kbps_counter_(clock_, nullptr, true),
- pacer_bitrate_kbps_counter_(clock_, nullptr, true),
- receive_side_cc_(clock_, transport_send->packet_router()),
+ receive_stats_(clock_),
+ send_stats_(clock_),
+ receive_side_cc_(clock,
+ absl::bind_front(&PacketRouter::SendCombinedRtcpPacket,
+ transport_send->packet_router()),
+ absl::bind_front(&PacketRouter::SendRemb,
+ transport_send->packet_router()),
+ /*network_state_estimator=*/nullptr),
receive_time_calculator_(ReceiveTimeCalculator::CreateFromFieldTrial()),
video_send_delay_stats_(new SendDelayStats(clock_)),
- start_ms_(clock_->TimeInMilliseconds()),
+ start_of_call_(clock_->CurrentTime()),
transport_send_ptr_(transport_send.get()),
transport_send_(std::move(transport_send)) {
RTC_DCHECK(config.event_log != nullptr);
@@ -669,6 +815,8 @@ Call::Call(Clock* clock,
RTC_DCHECK(network_thread_);
RTC_DCHECK(worker_thread_->IsCurrent());
+ send_transport_sequence_checker_.Detach();
+
// Do not remove this call; it is here to convince the compiler that the
// WebRTC source timestamp string needs to be in the final binary.
LoadWebRTCVersionInRegister();
@@ -694,24 +842,11 @@ Call::~Call() {
receive_side_cc_.GetRemoteBitrateEstimator(true));
module_process_thread_->process_thread()->DeRegisterModule(&receive_side_cc_);
call_stats_->DeregisterStatsObserver(&receive_side_cc_);
+ send_stats_.SetFirstPacketTime(transport_send_->GetFirstPacketTime());
- absl::optional<Timestamp> first_sent_packet_time =
- transport_send_->GetFirstPacketTime();
-
- Timestamp now = clock_->CurrentTime();
-
- // Only update histograms after process threads have been shut down, so that
- // they won't try to concurrently update stats.
- if (first_sent_packet_time) {
- UpdateSendHistograms(now, *first_sent_packet_time,
- estimated_send_bitrate_kbps_counter_,
- pacer_bitrate_kbps_counter_);
- }
-
- UpdateReceiveHistograms();
-
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.LifetimeInSeconds",
- (now.ms() - start_ms_) / 1000);
+ RTC_HISTOGRAM_COUNTS_100000(
+ "WebRTC.Call.LifetimeInSeconds",
+ (clock_->CurrentTime() - start_of_call_).seconds());
}
void Call::EnsureStarted() {
@@ -724,10 +859,10 @@ void Call::EnsureStarted() {
// This call seems to kick off a number of things, so probably better left
// off being kicked off on request rather than in the ctor.
- transport_send_ptr_->RegisterTargetTransferRateObserver(this);
+ transport_send_->RegisterTargetTransferRateObserver(this);
module_process_thread_->EnsureStarted();
- transport_send_ptr_->EnsureStarted();
+ transport_send_->EnsureStarted();
}
void Call::SetClientBitratePreferences(const BitrateSettings& preferences) {
@@ -735,52 +870,6 @@ void Call::SetClientBitratePreferences(const BitrateSettings& preferences) {
GetTransportControllerSend()->SetClientBitratePreferences(preferences);
}
-void Call::UpdateReceiveHistograms() {
- if (first_received_rtp_audio_ms_) {
- RTC_HISTOGRAM_COUNTS_100000(
- "WebRTC.Call.TimeReceivingAudioRtpPacketsInSeconds",
- (*last_received_rtp_audio_ms_ - *first_received_rtp_audio_ms_) / 1000);
- }
- if (first_received_rtp_video_ms_) {
- RTC_HISTOGRAM_COUNTS_100000(
- "WebRTC.Call.TimeReceivingVideoRtpPacketsInSeconds",
- (*last_received_rtp_video_ms_ - *first_received_rtp_video_ms_) / 1000);
- }
- const int kMinRequiredPeriodicSamples = 5;
- AggregatedStats video_bytes_per_sec =
- received_video_bytes_per_second_counter_.GetStats();
- if (video_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.VideoBitrateReceivedInKbps",
- video_bytes_per_sec.average * 8 / 1000);
- RTC_LOG(LS_INFO) << "WebRTC.Call.VideoBitrateReceivedInBps, "
- << video_bytes_per_sec.ToStringWithMultiplier(8);
- }
- AggregatedStats audio_bytes_per_sec =
- received_audio_bytes_per_second_counter_.GetStats();
- if (audio_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.AudioBitrateReceivedInKbps",
- audio_bytes_per_sec.average * 8 / 1000);
- RTC_LOG(LS_INFO) << "WebRTC.Call.AudioBitrateReceivedInBps, "
- << audio_bytes_per_sec.ToStringWithMultiplier(8);
- }
- AggregatedStats rtcp_bytes_per_sec =
- received_rtcp_bytes_per_second_counter_.GetStats();
- if (rtcp_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.RtcpBitrateReceivedInBps",
- rtcp_bytes_per_sec.average * 8);
- RTC_LOG(LS_INFO) << "WebRTC.Call.RtcpBitrateReceivedInBps, "
- << rtcp_bytes_per_sec.ToStringWithMultiplier(8);
- }
- AggregatedStats recv_bytes_per_sec =
- received_bytes_per_second_counter_.GetStats();
- if (recv_bytes_per_sec.num_samples > kMinRequiredPeriodicSamples) {
- RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.BitrateReceivedInKbps",
- recv_bytes_per_sec.average * 8 / 1000);
- RTC_LOG(LS_INFO) << "WebRTC.Call.BitrateReceivedInBps, "
- << recv_bytes_per_sec.ToStringWithMultiplier(8);
- }
-}
-
PacketReceiver* Call::Receiver() {
return this;
}
@@ -804,7 +893,7 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream(
AudioSendStream* send_stream = new AudioSendStream(
clock_, config, config_.audio_state, task_queue_factory_,
- module_process_thread_->process_thread(), transport_send_ptr_,
+ module_process_thread_->process_thread(), transport_send_.get(),
bitrate_allocator_.get(), event_log_, call_stats_->AsRtcpRttStats(),
suspended_rtp_state);
RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) ==
@@ -814,7 +903,7 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream(
// TODO(bugs.webrtc.org/11993): call AssociateSendStream and
// UpdateAggregateNetworkState asynchronously on the network thread.
for (AudioReceiveStream* stream : audio_receive_streams_) {
- if (stream->config().rtp.local_ssrc == config.rtp.ssrc) {
+ if (stream->local_ssrc() == config.rtp.ssrc) {
stream->AssociateSendStream(send_stream);
}
}
@@ -842,7 +931,7 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
// TODO(bugs.webrtc.org/11993): call AssociateSendStream and
// UpdateAggregateNetworkState asynchronously on the network thread.
for (AudioReceiveStream* stream : audio_receive_streams_) {
- if (stream->config().rtp.local_ssrc == ssrc) {
+ if (stream->local_ssrc() == ssrc) {
stream->AssociateSendStream(nullptr);
}
}
@@ -860,20 +949,21 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream(
event_log_->Log(std::make_unique<RtcEventAudioReceiveStreamConfig>(
CreateRtcLogStreamConfig(config)));
- // TODO(bugs.webrtc.org/11993): Move the registration between |receive_stream|
- // and |audio_receiver_controller_| out of AudioReceiveStream construction and
- // set it up asynchronously on the network thread (the registration and
- // |audio_receiver_controller_| need to live on the network thread).
AudioReceiveStream* receive_stream = new AudioReceiveStream(
- clock_, &audio_receiver_controller_, transport_send_ptr_->packet_router(),
+ clock_, transport_send_->packet_router(),
module_process_thread_->process_thread(), config_.neteq_factory, config,
config_.audio_state, event_log_);
+ audio_receive_streams_.insert(receive_stream);
+
+ // TODO(bugs.webrtc.org/11993): Perform the registration asynchronously on
+ // the network thread. The registration and `audio_receiver_controller_`
+ // need to live on the network thread.
+ receive_stream->RegisterWithTransport(&audio_receiver_controller_);
// TODO(bugs.webrtc.org/11993): Update the below on the network thread.
// We could possibly set up the audio_receiver_controller_ association up
// as part of the async setup.
- receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config));
- audio_receive_streams_.insert(receive_stream);
+ receive_rtp_config_.emplace(config.rtp.remote_ssrc, receive_stream);
ConfigureSync(config.sync_group);
@@ -894,20 +984,22 @@ void Call::DestroyAudioReceiveStream(
webrtc::internal::AudioReceiveStream* audio_receive_stream =
static_cast<webrtc::internal::AudioReceiveStream*>(receive_stream);
+ // TODO(bugs.webrtc.org/11993): Access the map, rtp config, call ConfigureSync
+ // and UpdateAggregateNetworkState on the network thread. The call to
+ // `UnregisterFromTransport` should also happen on the network thread.
+ audio_receive_stream->UnregisterFromTransport();
+
+ uint32_t ssrc = audio_receive_stream->remote_ssrc();
const AudioReceiveStream::Config& config = audio_receive_stream->config();
- uint32_t ssrc = config.rtp.remote_ssrc;
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config.rtp))
->RemoveStream(ssrc);
- // TODO(bugs.webrtc.org/11993): Access the map, rtp config, call ConfigureSync
- // and UpdateAggregateNetworkState on the network thread.
audio_receive_streams_.erase(audio_receive_stream);
- const std::string& sync_group = audio_receive_stream->config().sync_group;
- const auto it = sync_stream_mapping_.find(sync_group);
+ const auto it = sync_stream_mapping_.find(config.sync_group);
if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) {
sync_stream_mapping_.erase(it);
- ConfigureSync(sync_group);
+ ConfigureSync(config.sync_group);
}
receive_rtp_config_.erase(ssrc);
@@ -942,7 +1034,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
VideoSendStream* send_stream = new VideoSendStream(
clock_, num_cpu_cores_, module_process_thread_->process_thread(),
- task_queue_factory_, call_stats_->AsRtcpRttStats(), transport_send_ptr_,
+ task_queue_factory_, call_stats_->AsRtcpRttStats(), transport_send_.get(),
bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_,
std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_,
suspended_video_payload_states_, std::move(fec_controller));
@@ -952,6 +1044,8 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
video_send_ssrcs_[ssrc] = send_stream;
}
video_send_streams_.insert(send_stream);
+ video_send_streams_empty_.store(false, std::memory_order_relaxed);
+
// Forward resources that were previously added to the call to the new stream.
for (const auto& resource_forwarder : adaptation_resource_forwarders_) {
resource_forwarder->OnCreateVideoSendStream(send_stream);
@@ -965,6 +1059,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
webrtc::VideoSendStream* Call::CreateVideoSendStream(
webrtc::VideoSendStream::Config config,
VideoEncoderConfig encoder_config) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
if (config_.fec_controller_factory) {
RTC_LOG(LS_INFO) << "External FEC Controller will be used.";
}
@@ -981,9 +1076,12 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
RTC_DCHECK(send_stream != nullptr);
RTC_DCHECK_RUN_ON(worker_thread_);
- send_stream->Stop();
-
- VideoSendStream* send_stream_impl = nullptr;
+ VideoSendStream* send_stream_impl =
+ static_cast<VideoSendStream*>(send_stream);
+ VideoSendStream::RtpStateMap rtp_states;
+ VideoSendStream::RtpPayloadStateMap rtp_payload_states;
+ send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states,
+ &rtp_payload_states);
auto it = video_send_ssrcs_.begin();
while (it != video_send_ssrcs_.end()) {
@@ -994,18 +1092,15 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
++it;
}
}
+
// Stop forwarding resources to the stream being destroyed.
for (const auto& resource_forwarder : adaptation_resource_forwarders_) {
resource_forwarder->OnDestroyVideoSendStream(send_stream_impl);
}
video_send_streams_.erase(send_stream_impl);
+ if (video_send_streams_.empty())
+ video_send_streams_empty_.store(true, std::memory_order_relaxed);
- RTC_CHECK(send_stream_impl != nullptr);
-
- VideoSendStream::RtpStateMap rtp_states;
- VideoSendStream::RtpPayloadStateMap rtp_payload_states;
- send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states,
- &rtp_payload_states);
for (const auto& kv : rtp_states) {
suspended_video_send_ssrcs_[kv.first] = kv.second;
}
@@ -1014,6 +1109,8 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
}
UpdateAggregateNetworkState();
+ // TODO(tommi): Consider deleting on the same thread that runs
+ // StopPermanentlyAndGetRtpStates.
delete send_stream_impl;
}
@@ -1032,10 +1129,13 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
// and set it up asynchronously on the network thread (the registration and
// |video_receiver_controller_| need to live on the network thread).
VideoReceiveStream2* receive_stream = new VideoReceiveStream2(
- task_queue_factory_, worker_thread_, &video_receiver_controller_,
- num_cpu_cores_, transport_send_ptr_->packet_router(),
- std::move(configuration), module_process_thread_->process_thread(),
- call_stats_.get(), clock_, new VCMTiming(clock_));
+ task_queue_factory_, this, num_cpu_cores_,
+ transport_send_->packet_router(), std::move(configuration),
+ module_process_thread_->process_thread(), call_stats_.get(), clock_,
+ new VCMTiming(clock_));
+ // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network
+ // thread.
+ receive_stream->RegisterWithTransport(&video_receiver_controller_);
const webrtc::VideoReceiveStream::Config& config = receive_stream->config();
if (config.rtp.rtx_ssrc) {
@@ -1043,9 +1143,9 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
// stream. Since the transport_send_cc negotiation is per payload
// type, we may get an incorrect value for the rtx stream, but
// that is unlikely to matter in practice.
- receive_rtp_config_.emplace(config.rtp.rtx_ssrc, ReceiveRtpConfig(config));
+ receive_rtp_config_.emplace(config.rtp.rtx_ssrc, receive_stream);
}
- receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config));
+ receive_rtp_config_.emplace(config.rtp.remote_ssrc, receive_stream);
video_receive_streams_.insert(receive_stream);
ConfigureSync(config.sync_group);
@@ -1063,6 +1163,9 @@ void Call::DestroyVideoReceiveStream(
RTC_DCHECK(receive_stream != nullptr);
VideoReceiveStream2* receive_stream_impl =
static_cast<VideoReceiveStream2*>(receive_stream);
+ // TODO(bugs.webrtc.org/11993): Unregister on the network thread.
+ receive_stream_impl->UnregisterFromTransport();
+
const VideoReceiveStream::Config& config = receive_stream_impl->config();
// Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
@@ -1074,7 +1177,7 @@ void Call::DestroyVideoReceiveStream(
video_receive_streams_.erase(receive_stream_impl);
ConfigureSync(config.sync_group);
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config.rtp))
->RemoveStream(config.rtp.remote_ssrc);
UpdateAggregateNetworkState();
@@ -1097,12 +1200,16 @@ FlexfecReceiveStream* Call::CreateFlexfecReceiveStream(
// OnRtpPacket until the constructor is finished and the object is
// in a valid state, since OnRtpPacket runs on the same thread.
receive_stream = new FlexfecReceiveStreamImpl(
- clock_, &video_receiver_controller_, config, recovered_packet_receiver,
- call_stats_->AsRtcpRttStats(), module_process_thread_->process_thread());
+ clock_, config, recovered_packet_receiver, call_stats_->AsRtcpRttStats(),
+ module_process_thread_->process_thread());
- RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) ==
+ // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network
+ // thread.
+ receive_stream->RegisterWithTransport(&video_receiver_controller_);
+
+ RTC_DCHECK(receive_rtp_config_.find(config.rtp.remote_ssrc) ==
receive_rtp_config_.end());
- receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config));
+ receive_rtp_config_.emplace(config.rtp.remote_ssrc, receive_stream);
// TODO(brandtr): Store config in RtcEventLog here.
@@ -1113,15 +1220,19 @@ void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyFlexfecReceiveStream");
RTC_DCHECK_RUN_ON(worker_thread_);
+ FlexfecReceiveStreamImpl* receive_stream_impl =
+ static_cast<FlexfecReceiveStreamImpl*>(receive_stream);
+ // TODO(bugs.webrtc.org/11993): Unregister on the network thread.
+ receive_stream_impl->UnregisterFromTransport();
+
RTC_DCHECK(receive_stream != nullptr);
- const FlexfecReceiveStream::Config& config = receive_stream->GetConfig();
- uint32_t ssrc = config.remote_ssrc;
- receive_rtp_config_.erase(ssrc);
+ const FlexfecReceiveStream::RtpConfig& rtp = receive_stream->rtp_config();
+ receive_rtp_config_.erase(rtp.remote_ssrc);
// Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be
// destroyed.
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
- ->RemoveStream(ssrc);
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(rtp))
+ ->RemoveStream(rtp.remote_ssrc);
delete receive_stream;
}
@@ -1137,7 +1248,7 @@ void Call::AddAdaptationResource(rtc::scoped_refptr<Resource> resource) {
}
RtpTransportControllerSendInterface* Call::GetTransportControllerSend() {
- return transport_send_ptr_;
+ return transport_send_.get();
}
Call::Stats Call::GetStats() const {
@@ -1147,7 +1258,7 @@ Call::Stats Call::GetStats() const {
// TODO(srte): It is unclear if we only want to report queues if network is
// available.
stats.pacer_delay_ms =
- aggregate_network_up_ ? transport_send_ptr_->GetPacerQueuingDelayMs() : 0;
+ aggregate_network_up_ ? transport_send_->GetPacerQueuingDelayMs() : 0;
stats.rtt_ms = call_stats_->LastProcessedRtt();
@@ -1157,14 +1268,16 @@ Call::Stats Call::GetStats() const {
receive_side_cc_.GetRemoteBitrateEstimator(false)->LatestEstimate(
&ssrcs, &recv_bandwidth);
stats.recv_bandwidth_bps = recv_bandwidth;
- stats.send_bandwidth_bps = last_bandwidth_bps_;
- stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_;
+ stats.send_bandwidth_bps =
+ last_bandwidth_bps_.load(std::memory_order_relaxed);
+ stats.max_padding_bitrate_bps =
+ configured_max_padding_bitrate_bps_.load(std::memory_order_relaxed);
return stats;
}
const WebRtcKeyValueConfig& Call::trials() const {
- return *config_.trials;
+ return trials_;
}
TaskQueueBase* Call::network_thread() const {
@@ -1246,7 +1359,28 @@ void Call::UpdateAggregateNetworkState() {
}
aggregate_network_up_ = aggregate_network_up;
- transport_send_ptr_->OnNetworkAvailability(aggregate_network_up);
+ transport_send_->OnNetworkAvailability(aggregate_network_up);
+}
+
+void Call::OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream,
+ uint32_t local_ssrc) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ webrtc::internal::AudioReceiveStream& receive_stream =
+ static_cast<webrtc::internal::AudioReceiveStream&>(stream);
+
+ receive_stream.SetLocalSsrc(local_ssrc);
+ auto it = audio_send_ssrcs_.find(local_ssrc);
+ receive_stream.AssociateSendStream(it != audio_send_ssrcs_.end() ? it->second
+ : nullptr);
+}
+
+void Call::OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream,
+ const std::string& sync_group) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ webrtc::internal::AudioReceiveStream& receive_stream =
+ static_cast<webrtc::internal::AudioReceiveStream&>(stream);
+ receive_stream.SetSyncGroup(sync_group);
+ ConfigureSync(sync_group);
}
void Call::OnSentPacket(const rtc::SentPacket& sent_packet) {
@@ -1258,56 +1392,47 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) {
// implementations that either just do a PostTask or use locking.
video_send_delay_stats_->OnSentPacket(sent_packet.packet_id,
clock_->TimeInMilliseconds());
- transport_send_ptr_->OnSentPacket(sent_packet);
+ transport_send_->OnSentPacket(sent_packet);
}
void Call::OnStartRateUpdate(DataRate start_rate) {
- RTC_DCHECK_RUN_ON(send_transport_queue());
+ RTC_DCHECK_RUN_ON(&send_transport_sequence_checker_);
bitrate_allocator_->UpdateStartRate(start_rate.bps<uint32_t>());
}
void Call::OnTargetTransferRate(TargetTransferRate msg) {
- RTC_DCHECK_RUN_ON(send_transport_queue());
+ RTC_DCHECK_RUN_ON(&send_transport_sequence_checker_);
uint32_t target_bitrate_bps = msg.target_rate.bps();
// For controlling the rate of feedback messages.
receive_side_cc_.OnBitrateChanged(target_bitrate_bps);
bitrate_allocator_->OnNetworkEstimateChanged(msg);
- worker_thread_->PostTask(
- ToQueuedTask(task_safety_, [this, target_bitrate_bps]() {
- RTC_DCHECK_RUN_ON(worker_thread_);
- last_bandwidth_bps_ = target_bitrate_bps;
-
- // Ignore updates if bitrate is zero (the aggregate network state is
- // down) or if we're not sending video.
- if (target_bitrate_bps == 0 || video_send_streams_.empty()) {
- estimated_send_bitrate_kbps_counter_.ProcessAndPause();
- pacer_bitrate_kbps_counter_.ProcessAndPause();
- return;
- }
+ last_bandwidth_bps_.store(target_bitrate_bps, std::memory_order_relaxed);
- estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000);
- // Pacer bitrate may be higher than bitrate estimate if enforcing min
- // bitrate.
- uint32_t pacer_bitrate_bps =
- std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_);
- pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000);
- }));
+ // Ignore updates if bitrate is zero (the aggregate network state is
+ // down) or if we're not sending video.
+ // Using |video_send_streams_empty_| is racy, but as the caller can't
+ // reasonably expect to synchronize with changes in |video_send_streams_|
+ // (being on |send_transport_sequence_checker_|), we can avoid a PostTask
+ // this way.
+ if (target_bitrate_bps == 0 ||
+ video_send_streams_empty_.load(std::memory_order_relaxed)) {
+ send_stats_.PauseSendAndPacerBitrateCounters();
+ } else {
+ send_stats_.AddTargetBitrateSample(target_bitrate_bps);
+ }
}
void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) {
- RTC_DCHECK_RUN_ON(send_transport_queue());
+ RTC_DCHECK_RUN_ON(&send_transport_sequence_checker_);
transport_send_ptr_->SetAllocatedSendBitrateLimits(limits);
-
- worker_thread_->PostTask(ToQueuedTask(task_safety_, [this, limits]() {
- RTC_DCHECK_RUN_ON(worker_thread_);
- min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps();
- configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps();
- }));
+ send_stats_.SetMinAllocatableRate(limits);
+ configured_max_padding_bitrate_bps_.store(limits.max_padding_rate.bps(),
+ std::memory_order_relaxed);
}
+// RTC_RUN_ON(worker_thread_)
void Call::ConfigureSync(const std::string& sync_group) {
// TODO(bugs.webrtc.org/11993): Expect to be called on the network thread.
// Set sync only if there was no previous one.
@@ -1359,56 +1484,62 @@ void Call::ConfigureSync(const std::string& sync_group) {
}
}
-PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type,
- const uint8_t* packet,
- size_t length) {
+// RTC_RUN_ON(network_thread_)
+void Call::DeliverRtcp(MediaType media_type, rtc::CopyOnWriteBuffer packet) {
TRACE_EVENT0("webrtc", "Call::DeliverRtcp");
- // TODO(pbos): Make sure it's a valid packet.
- // Return DELIVERY_UNKNOWN_SSRC if it can be determined that
- // there's no receiver of the packet.
- if (received_bytes_per_second_counter_.HasSample()) {
- // First RTP packet has been received.
- received_bytes_per_second_counter_.Add(static_cast<int>(length));
- received_rtcp_bytes_per_second_counter_.Add(static_cast<int>(length));
- }
- bool rtcp_delivered = false;
- if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
- for (VideoReceiveStream2* stream : video_receive_streams_) {
- if (stream->DeliverRtcp(packet, length))
- rtcp_delivered = true;
- }
- }
- if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
- for (AudioReceiveStream* stream : audio_receive_streams_) {
- stream->DeliverRtcp(packet, length);
- rtcp_delivered = true;
- }
- }
- if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
- for (VideoSendStream* stream : video_send_streams_) {
- stream->DeliverRtcp(packet, length);
- rtcp_delivered = true;
- }
- }
- if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
- for (auto& kv : audio_send_ssrcs_) {
- kv.second->DeliverRtcp(packet, length);
- rtcp_delivered = true;
- }
- }
- if (rtcp_delivered) {
- event_log_->Log(std::make_unique<RtcEventRtcpPacketIncoming>(
- rtc::MakeArrayView(packet, length)));
- }
+ // TODO(bugs.webrtc.org/11993): This DCHECK is here just to maintain the
+ // invariant that currently the only call path to this function is via
+ // `PeerConnection::InitializeRtcpCallback()`. DeliverRtp on the other hand
+ // gets called via the channel classes and
+ // WebRtc[Audio|Video]Channel's `OnPacketReceived`. We'll remove the
+ // PeerConnection involvement as well as
+ // `JsepTransportController::OnRtcpPacketReceived_n` and `rtcp_handler`
+ // and make sure that the flow of packets is consistent from the
+ // `RtpTransport` class, via the *Channel and *Engine classes and into Call.
+ // This way we'll also know more about the context of the packet.
+ RTC_DCHECK_EQ(media_type, MediaType::ANY);
+
+ // TODO(bugs.webrtc.org/11993): This should execute directly on the network
+ // thread.
+ worker_thread_->PostTask(
+ ToQueuedTask(task_safety_, [this, packet = std::move(packet)]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+
+ receive_stats_.AddReceivedRtcpBytes(static_cast<int>(packet.size()));
+ bool rtcp_delivered = false;
+ for (VideoReceiveStream2* stream : video_receive_streams_) {
+ if (stream->DeliverRtcp(packet.cdata(), packet.size()))
+ rtcp_delivered = true;
+ }
+
+ for (AudioReceiveStream* stream : audio_receive_streams_) {
+ stream->DeliverRtcp(packet.cdata(), packet.size());
+ rtcp_delivered = true;
+ }
+
+ for (VideoSendStream* stream : video_send_streams_) {
+ stream->DeliverRtcp(packet.cdata(), packet.size());
+ rtcp_delivered = true;
+ }
+
+ for (auto& kv : audio_send_ssrcs_) {
+ kv.second->DeliverRtcp(packet.cdata(), packet.size());
+ rtcp_delivered = true;
+ }
- return rtcp_delivered ? DELIVERY_OK : DELIVERY_PACKET_ERROR;
+ if (rtcp_delivered) {
+ event_log_->Log(std::make_unique<RtcEventRtcpPacketIncoming>(
+ rtc::MakeArrayView(packet.cdata(), packet.size())));
+ }
+ }));
}
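
A stripped-down, standard-library-only sketch of the move-capture-and-post shape used above: the packet buffer is moved into the closure once on the posting thread and consumed on the worker. The single-slot WorkerSketch stands in for PostTask/ToQueuedTask, and there is no task-safety flag here, unlike the real code.

// Sketch of handing a packet buffer to a worker by move-capturing it in the
// posted closure; the single-slot "queue" stands in for a real task queue.
#include <condition_variable>
#include <cstdint>
#include <functional>
#include <iostream>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

class WorkerSketch {
 public:
  void PostTask(std::function<void()> task) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      task_ = std::move(task);
    }
    cv_.notify_one();
  }
  void RunOne() {
    std::unique_lock<std::mutex> lock(mutex_);
    cv_.wait(lock, [this] { return static_cast<bool>(task_); });
    auto task = std::move(task_);
    task_ = nullptr;
    lock.unlock();
    task();
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  std::function<void()> task_;
};

int main() {
  WorkerSketch worker;
  std::thread worker_thread([&] { worker.RunOne(); });

  std::vector<uint8_t> rtcp_packet = {0x80, 0xc8, 0x00, 0x06};  // Fake header.
  // "Network thread": move the payload into the closure exactly once.
  worker.PostTask([packet = std::move(rtcp_packet)] {
    std::cout << "delivering " << packet.size() << " RTCP bytes\n";
  });

  worker_thread.join();
  return 0;
}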
PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
TRACE_EVENT0("webrtc", "Call::DeliverRtp");
+ RTC_DCHECK_NE(media_type, MediaType::ANY);
RtpPacketReceived parsed_packet;
if (!parsed_packet.Parse(std::move(packet)))
@@ -1421,9 +1552,9 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
packet_time_us = receive_time_calculator_->ReconcileReceiveTimes(
packet_time_us, rtc::TimeUTCMicros(), clock_->TimeInMicroseconds());
}
- parsed_packet.set_arrival_time_ms((packet_time_us + 500) / 1000);
+ parsed_packet.set_arrival_time(Timestamp::Micros(packet_time_us));
} else {
- parsed_packet.set_arrival_time_ms(clock_->TimeInMilliseconds());
+ parsed_packet.set_arrival_time(clock_->CurrentTime());
}
// We might get RTP keep-alive packets in accordance with RFC6263 section 4.6.
@@ -1446,7 +1577,8 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
return DELIVERY_UNKNOWN_SSRC;
}
- parsed_packet.IdentifyExtensions(it->second.extensions);
+ parsed_packet.IdentifyExtensions(
+ RtpHeaderExtensionMap(it->second->rtp_config().extensions));
NotifyBweOfReceivedPacket(parsed_packet, media_type);
@@ -1455,29 +1587,19 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
int length = static_cast<int>(parsed_packet.size());
if (media_type == MediaType::AUDIO) {
if (audio_receiver_controller_.OnRtpPacket(parsed_packet)) {
- received_bytes_per_second_counter_.Add(length);
- received_audio_bytes_per_second_counter_.Add(length);
+ receive_stats_.AddReceivedAudioBytes(length,
+ parsed_packet.arrival_time());
event_log_->Log(
std::make_unique<RtcEventRtpPacketIncoming>(parsed_packet));
- const int64_t arrival_time_ms = parsed_packet.arrival_time_ms();
- if (!first_received_rtp_audio_ms_) {
- first_received_rtp_audio_ms_.emplace(arrival_time_ms);
- }
- last_received_rtp_audio_ms_.emplace(arrival_time_ms);
return DELIVERY_OK;
}
} else if (media_type == MediaType::VIDEO) {
parsed_packet.set_payload_type_frequency(kVideoPayloadTypeFrequency);
if (video_receiver_controller_.OnRtpPacket(parsed_packet)) {
- received_bytes_per_second_counter_.Add(length);
- received_video_bytes_per_second_counter_.Add(length);
+ receive_stats_.AddReceivedVideoBytes(length,
+ parsed_packet.arrival_time());
event_log_->Log(
std::make_unique<RtcEventRtpPacketIncoming>(parsed_packet));
- const int64_t arrival_time_ms = parsed_packet.arrival_time_ms();
- if (!first_received_rtp_video_ms_) {
- first_received_rtp_video_ms_.emplace(arrival_time_ms);
- }
- last_received_rtp_video_ms_.emplace(arrival_time_ms);
return DELIVERY_OK;
}
}
@@ -1488,38 +1610,16 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket(
MediaType media_type,
rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
- RTC_DCHECK_RUN_ON(worker_thread_);
-
- if (IsRtcp(packet.cdata(), packet.size()))
- return DeliverRtcp(media_type, packet.cdata(), packet.size());
+ if (IsRtcp(packet.cdata(), packet.size())) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ DeliverRtcp(media_type, std::move(packet));
+ return DELIVERY_OK;
+ }
+ RTC_DCHECK_RUN_ON(worker_thread_);
return DeliverRtp(media_type, std::move(packet), packet_time_us);
}
-void Call::DeliverPacketAsync(MediaType media_type,
- rtc::CopyOnWriteBuffer packet,
- int64_t packet_time_us,
- PacketCallback callback) {
- RTC_DCHECK_RUN_ON(network_thread_);
-
- TaskQueueBase* network_thread = rtc::Thread::Current();
- RTC_DCHECK(network_thread);
-
- worker_thread_->PostTask(ToQueuedTask(
- task_safety_, [this, network_thread, media_type, p = std::move(packet),
- packet_time_us, cb = std::move(callback)] {
- RTC_DCHECK_RUN_ON(worker_thread_);
- DeliveryStatus status = DeliverPacket(media_type, p, packet_time_us);
- if (cb) {
- network_thread->PostTask(
- ToQueuedTask([cb = std::move(cb), status, media_type,
- p = std::move(p), packet_time_us]() {
- cb(status, media_type, std::move(p), packet_time_us);
- }));
- }
- }));
-}
-
void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) {
// TODO(bugs.webrtc.org/11993): Expect to be called on the network thread.
// This method is called synchronously via |OnRtpPacket()| (see DeliverRtp)
@@ -1543,29 +1643,31 @@ void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) {
// which is being torn down.
return;
}
- parsed_packet.IdentifyExtensions(it->second.extensions);
+ parsed_packet.IdentifyExtensions(
+ RtpHeaderExtensionMap(it->second->rtp_config().extensions));
// TODO(brandtr): Update here when we support protecting audio packets too.
parsed_packet.set_payload_type_frequency(kVideoPayloadTypeFrequency);
video_receiver_controller_.OnRtpPacket(parsed_packet);
}
+// RTC_RUN_ON(worker_thread_)
void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet,
MediaType media_type) {
auto it = receive_rtp_config_.find(packet.Ssrc());
- bool use_send_side_bwe =
- (it != receive_rtp_config_.end()) && it->second.use_send_side_bwe;
+ bool use_send_side_bwe = (it != receive_rtp_config_.end()) &&
+ UseSendSideBwe(it->second->rtp_config());
RTPHeader header;
packet.GetHeader(&header);
ReceivedPacket packet_msg;
packet_msg.size = DataSize::Bytes(packet.payload_size());
- packet_msg.receive_time = Timestamp::Millis(packet.arrival_time_ms());
+ packet_msg.receive_time = packet.arrival_time();
if (header.extension.hasAbsoluteSendTime) {
packet_msg.send_time = header.extension.GetAbsoluteSendTimestamp();
}
- transport_send_ptr_->OnReceivedPacket(packet_msg);
+ transport_send_->OnReceivedPacket(packet_msg);
if (!use_send_side_bwe && header.extension.hasTransportSequenceNumber) {
// Inconsistent configuration of send side BWE. Do nothing.
@@ -1581,8 +1683,8 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet,
if (media_type == MediaType::VIDEO ||
(use_send_side_bwe && header.extension.hasTransportSequenceNumber)) {
receive_side_cc_.OnReceivedPacket(
- packet.arrival_time_ms(), packet.payload_size() + packet.padding_size(),
- header);
+ packet.arrival_time().ms(),
+ packet.payload_size() + packet.padding_size(), header);
}
}
diff --git a/call/call.h b/call/call.h
index 37d784f726..f6388c3c78 100644
--- a/call/call.h
+++ b/call/call.h
@@ -84,11 +84,14 @@ class Call {
static Call* Create(const Call::Config& config);
static Call* Create(const Call::Config& config,
- rtc::scoped_refptr<SharedModuleThread> call_thread);
- static Call* Create(const Call::Config& config,
Clock* clock,
rtc::scoped_refptr<SharedModuleThread> call_thread,
std::unique_ptr<ProcessThread> pacer_thread);
+ static Call* Create(const Call::Config& config,
+ Clock* clock,
+ rtc::scoped_refptr<SharedModuleThread> call_thread,
+ std::unique_ptr<RtpTransportControllerSendInterface>
+ transportControllerSend);
virtual AudioSendStream* CreateAudioSendStream(
const AudioSendStream::Config& config) = 0;
@@ -152,6 +155,14 @@ class Call {
virtual void OnAudioTransportOverheadChanged(
int transport_overhead_per_packet) = 0;
+ // Called when a receive stream's local ssrc has changed and association with
+ // send streams needs to be updated.
+ virtual void OnLocalSsrcUpdated(AudioReceiveStream& stream,
+ uint32_t local_ssrc) = 0;
+
+ virtual void OnUpdateSyncGroup(AudioReceiveStream& stream,
+ const std::string& sync_group) = 0;
+
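As a stand-in illustration of what these callbacks enable (all types below are hypothetical): when a receive stream's local SSRC changes, the owner re-binds it to the send stream using that SSRC instead of recreating the receive stream.

// Sketch of OnLocalSsrcUpdated-style re-association with stand-in types.
#include <cstdint>
#include <iostream>
#include <map>

struct SendStreamSketch {
  uint32_t ssrc;
};

class ReceiveStreamSketch {
 public:
  void SetLocalSsrc(uint32_t ssrc) { local_ssrc_ = ssrc; }
  void AssociateSendStream(SendStreamSketch* send) { associated_ = send; }
  SendStreamSketch* associated_send_stream() const { return associated_; }

 private:
  uint32_t local_ssrc_ = 0;
  SendStreamSketch* associated_ = nullptr;
};

class CallOwnerSketch {
 public:
  void AddSendStream(SendStreamSketch* send) {
    send_by_ssrc_[send->ssrc] = send;
  }

  // Mirrors the callback above: update the stream's local SSRC, then
  // associate it with the send stream that uses the same SSRC, if any.
  void OnLocalSsrcUpdated(ReceiveStreamSketch& stream, uint32_t local_ssrc) {
    stream.SetLocalSsrc(local_ssrc);
    auto it = send_by_ssrc_.find(local_ssrc);
    stream.AssociateSendStream(it != send_by_ssrc_.end() ? it->second
                                                         : nullptr);
  }

 private:
  std::map<uint32_t, SendStreamSketch*> send_by_ssrc_;
};

int main() {
  SendStreamSketch send{0x1234};
  CallOwnerSketch owner;
  owner.AddSendStream(&send);

  ReceiveStreamSketch receive;
  owner.OnLocalSsrcUpdated(receive, 0x1234);
  std::cout << (receive.associated_send_stream() == &send) << "\n";  // 1
  return 0;
}
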
virtual void OnSentPacket(const rtc::SentPacket& sent_packet) = 0;
virtual void SetClientBitratePreferences(
diff --git a/call/call_config.cc b/call/call_config.cc
index 8b3c91222e..23b60ce436 100644
--- a/call/call_config.cc
+++ b/call/call_config.cc
@@ -22,6 +22,19 @@ CallConfig::CallConfig(RtcEventLog* event_log,
CallConfig::CallConfig(const CallConfig& config) = default;
+RtpTransportConfig CallConfig::ExtractTransportConfig() const {
+ RtpTransportConfig transportConfig;
+ transportConfig.bitrate_config = bitrate_config;
+ transportConfig.event_log = event_log;
+ transportConfig.network_controller_factory = network_controller_factory;
+ transportConfig.network_state_predictor_factory =
+ network_state_predictor_factory;
+ transportConfig.task_queue_factory = task_queue_factory;
+ transportConfig.trials = trials;
+
+ return transportConfig;
+}
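
A generic illustration of ExtractTransportConfig()'s role, with hypothetical field names: copy only the transport-relevant subset of the wider config into a smaller struct so the transport controller factory never needs to see the whole CallConfig.

// Sketch of extracting a transport-only view from a larger config struct;
// the field names are illustrative, not the actual CallConfig members.
#include <iostream>
#include <string>

struct TransportConfigSketch {
  int start_bitrate_bps = 0;
  std::string field_trials;
};

struct CallConfigSketch {
  // Transport-related fields.
  int start_bitrate_bps = 300000;
  std::string field_trials;
  // Fields the transport controller should not need to know about.
  std::string audio_state_name = "default";

  TransportConfigSketch ExtractTransportConfig() const {
    TransportConfigSketch transport;
    transport.start_bitrate_bps = start_bitrate_bps;
    transport.field_trials = field_trials;
    return transport;
  }
};

int main() {
  CallConfigSketch config;
  config.field_trials = "WebRTC-SomeTrial/Enabled/";
  TransportConfigSketch transport = config.ExtractTransportConfig();
  std::cout << transport.start_bitrate_bps << " " << transport.field_trials
            << "\n";
  return 0;
}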
+
CallConfig::~CallConfig() = default;
} // namespace webrtc
diff --git a/call/call_config.h b/call/call_config.h
index 95dad36002..ba6dec3ad6 100644
--- a/call/call_config.h
+++ b/call/call_config.h
@@ -19,6 +19,8 @@
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "call/audio_state.h"
+#include "call/rtp_transport_config.h"
+#include "call/rtp_transport_controller_send_factory_interface.h"
namespace webrtc {
@@ -32,6 +34,7 @@ struct CallConfig {
explicit CallConfig(RtcEventLog* event_log,
TaskQueueBase* network_task_queue = nullptr);
CallConfig(const CallConfig&);
+ RtpTransportConfig ExtractTransportConfig() const;
~CallConfig();
// Bitrate config used until valid bitrate estimates are calculated. Also
@@ -69,6 +72,9 @@ struct CallConfig {
const WebRtcKeyValueConfig* trials = nullptr;
TaskQueueBase* const network_task_queue_ = nullptr;
+ // RtpTransportControllerSend to use for this call.
+ RtpTransportControllerSendFactoryInterface*
+ rtp_transport_controller_send_factory = nullptr;
};
} // namespace webrtc
diff --git a/call/call_factory.cc b/call/call_factory.cc
index cc02c02835..aeb3cbdaa7 100644
--- a/call/call_factory.cc
+++ b/call/call_factory.cc
@@ -14,11 +14,13 @@
#include <memory>
#include <string>
+#include <utility>
#include "absl/types/optional.h"
#include "api/test/simulated_network.h"
#include "call/call.h"
#include "call/degraded_call.h"
+#include "call/rtp_transport_config.h"
#include "rtc_base/checks.h"
#include "system_wrappers/include/field_trial.h"
@@ -81,10 +83,19 @@ Call* CallFactory::CreateCall(const Call::Config& config) {
absl::optional<webrtc::BuiltInNetworkBehaviorConfig>
receive_degradation_config = ParseDegradationConfig(false);
+ RtpTransportConfig transportConfig = config.ExtractTransportConfig();
+
if (send_degradation_config || receive_degradation_config) {
- return new DegradedCall(std::unique_ptr<Call>(Call::Create(config)),
- send_degradation_config, receive_degradation_config,
- config.task_queue_factory);
+ return new DegradedCall(
+ std::unique_ptr<Call>(Call::Create(
+ config, Clock::GetRealTimeClock(),
+ SharedModuleThread::Create(
+ ProcessThread::Create("ModuleProcessThread"), nullptr),
+ config.rtp_transport_controller_send_factory->Create(
+ transportConfig, Clock::GetRealTimeClock(),
+ ProcessThread::Create("PacerThread")))),
+ send_degradation_config, receive_degradation_config,
+ config.task_queue_factory);
}
if (!module_thread_) {
@@ -95,7 +106,10 @@ Call* CallFactory::CreateCall(const Call::Config& config) {
});
}
- return Call::Create(config, module_thread_);
+ return Call::Create(config, Clock::GetRealTimeClock(), module_thread_,
+ config.rtp_transport_controller_send_factory->Create(
+ transportConfig, Clock::GetRealTimeClock(),
+ ProcessThread::Create("PacerThread")));
}
std::unique_ptr<CallFactoryInterface> CreateCallFactory() {
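
CallFactory::CreateCall now dereferences config.rtp_transport_controller_send_factory unconditionally, so the config is expected to carry a factory before the call is created. A hedged sketch, assuming a caller-owned object implementing RtpTransportControllerSendFactoryInterface (the helper name below is hypothetical):

#include "call/call_config.h"
#include "call/rtp_transport_controller_send_factory_interface.h"

// Sketch only: attaches a caller-owned send-controller factory to a copy of
// the CallConfig. The factory must outlive any Call created from the
// returned config.
webrtc::CallConfig WithSendControllerFactory(
    webrtc::CallConfig config,
    webrtc::RtpTransportControllerSendFactoryInterface* factory) {
  config.rtp_transport_controller_send_factory = factory;
  return config;
}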
diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc
index 4cb9766c84..c163ab2fe7 100644
--- a/call/call_perf_tests.cc
+++ b/call/call_perf_tests.cc
@@ -654,7 +654,8 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
static const int kAcceptableBitrateErrorMargin = 15; // +- 7
class BitrateObserver : public test::EndToEndTest {
public:
- explicit BitrateObserver(bool using_min_transmit_bitrate)
+ explicit BitrateObserver(bool using_min_transmit_bitrate,
+ TaskQueueBase* task_queue)
: EndToEndTest(kLongTimeoutMs),
send_stream_(nullptr),
converged_(false),
@@ -667,27 +668,31 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
? kMaxAcceptableTransmitBitrate
: (kMaxEncodeBitrateKbps +
kAcceptableBitrateErrorMargin / 2)),
- num_bitrate_observations_in_range_(0) {}
+ num_bitrate_observations_in_range_(0),
+ task_queue_(task_queue) {}
private:
// TODO(holmer): Run this with a timer instead of once per packet.
Action OnSendRtp(const uint8_t* packet, size_t length) override {
- VideoSendStream::Stats stats = send_stream_->GetStats();
- if (!stats.substreams.empty()) {
- RTC_DCHECK_EQ(1, stats.substreams.size());
- int bitrate_kbps =
- stats.substreams.begin()->second.total_bitrate_bps / 1000;
- if (bitrate_kbps > min_acceptable_bitrate_ &&
- bitrate_kbps < max_acceptable_bitrate_) {
- converged_ = true;
- ++num_bitrate_observations_in_range_;
- if (num_bitrate_observations_in_range_ ==
- kNumBitrateObservationsInRange)
- observation_complete_.Set();
+ task_queue_->PostTask(ToQueuedTask([this]() {
+ VideoSendStream::Stats stats = send_stream_->GetStats();
+
+ if (!stats.substreams.empty()) {
+ RTC_DCHECK_EQ(1, stats.substreams.size());
+ int bitrate_kbps =
+ stats.substreams.begin()->second.total_bitrate_bps / 1000;
+ if (bitrate_kbps > min_acceptable_bitrate_ &&
+ bitrate_kbps < max_acceptable_bitrate_) {
+ converged_ = true;
+ ++num_bitrate_observations_in_range_;
+ if (num_bitrate_observations_in_range_ ==
+ kNumBitrateObservationsInRange)
+ observation_complete_.Set();
+ }
+ if (converged_)
+ bitrate_kbps_list_.push_back(bitrate_kbps);
}
- if (converged_)
- bitrate_kbps_list_.push_back(bitrate_kbps);
- }
+ }));
return SEND_PACKET;
}
@@ -724,7 +729,8 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
const int max_acceptable_bitrate_;
int num_bitrate_observations_in_range_;
std::vector<double> bitrate_kbps_list_;
- } test(pad_to_min_bitrate);
+ TaskQueueBase* task_queue_;
+ } test(pad_to_min_bitrate, task_queue());
fake_encoder_max_bitrate_ = kMaxEncodeBitrateKbps;
RunBaseTest(&test);
@@ -775,7 +781,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) {
class BitrateObserver : public test::EndToEndTest, public test::FakeEncoder {
public:
- BitrateObserver()
+ explicit BitrateObserver(TaskQueueBase* task_queue)
: EndToEndTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
encoder_inits_(0),
@@ -784,7 +790,8 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) {
frame_generator_(nullptr),
encoder_factory_(this),
bitrate_allocator_factory_(
- CreateBuiltinVideoBitrateAllocatorFactory()) {}
+ CreateBuiltinVideoBitrateAllocatorFactory()),
+ task_queue_(task_queue) {}
int32_t InitEncode(const VideoCodec* config,
const VideoEncoder::Settings& settings) override {
@@ -834,7 +841,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) {
bitrate_allocator_factory_.get();
encoder_config->max_bitrate_bps = 2 * kReconfigureThresholdKbps * 1000;
encoder_config->video_stream_factory =
- new rtc::RefCountedObject<VideoStreamFactory>();
+ rtc::make_ref_counted<VideoStreamFactory>();
encoder_config_ = encoder_config->Copy();
}
@@ -854,7 +861,9 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) {
ASSERT_TRUE(time_to_reconfigure_.Wait(kDefaultTimeoutMs))
<< "Timed out before receiving an initial high bitrate.";
frame_generator_->ChangeResolution(kDefaultWidth * 2, kDefaultHeight * 2);
- send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
+ SendTask(RTC_FROM_HERE, task_queue_, [&]() {
+ send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
+ });
EXPECT_TRUE(Wait())
<< "Timed out while waiting for a couple of high bitrate estimates "
"after reconfiguring the send stream.";
@@ -869,7 +878,8 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) {
test::VideoEncoderProxyFactory encoder_factory_;
std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
VideoEncoderConfig encoder_config_;
- } test;
+ TaskQueueBase* task_queue_;
+ } test(task_queue());
RunBaseTest(&test);
}
diff --git a/call/call_unittest.cc b/call/call_unittest.cc
index d836362416..92a037f157 100644
--- a/call/call_unittest.cc
+++ b/call/call_unittest.cc
@@ -50,14 +50,14 @@ struct CallHelper {
task_queue_factory_ = webrtc::CreateDefaultTaskQueueFactory();
webrtc::AudioState::Config audio_state_config;
audio_state_config.audio_mixer =
- new rtc::RefCountedObject<webrtc::test::MockAudioMixer>();
+ rtc::make_ref_counted<webrtc::test::MockAudioMixer>();
audio_state_config.audio_processing =
use_null_audio_processing
? nullptr
- : new rtc::RefCountedObject<
+ : rtc::make_ref_counted<
NiceMock<webrtc::test::MockAudioProcessing>>();
audio_state_config.audio_device_module =
- new rtc::RefCountedObject<webrtc::test::MockAudioDeviceModule>();
+ rtc::make_ref_counted<webrtc::test::MockAudioDeviceModule>();
webrtc::Call::Config config(&event_log_);
config.audio_state = webrtc::AudioState::Create(audio_state_config);
config.task_queue_factory = task_queue_factory_.get();
@@ -118,7 +118,7 @@ TEST(CallTest, CreateDestroy_AudioReceiveStream) {
config.rtp.remote_ssrc = 42;
config.rtcp_send_transport = &rtcp_send_transport;
config.decoder_factory =
- new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+ rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
AudioReceiveStream* stream = call->CreateAudioReceiveStream(config);
EXPECT_NE(stream, nullptr);
call->DestroyAudioReceiveStream(stream);
@@ -157,7 +157,7 @@ TEST(CallTest, CreateDestroy_AudioReceiveStreams) {
MockTransport rtcp_send_transport;
config.rtcp_send_transport = &rtcp_send_transport;
config.decoder_factory =
- new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+ rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
std::list<AudioReceiveStream*> streams;
for (int i = 0; i < 2; ++i) {
for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) {
@@ -187,7 +187,7 @@ TEST(CallTest, CreateDestroy_AssociateAudioSendReceiveStreams_RecvFirst) {
recv_config.rtp.local_ssrc = 777;
recv_config.rtcp_send_transport = &rtcp_send_transport;
recv_config.decoder_factory =
- new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+ rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
AudioReceiveStream* recv_stream =
call->CreateAudioReceiveStream(recv_config);
EXPECT_NE(recv_stream, nullptr);
@@ -226,7 +226,7 @@ TEST(CallTest, CreateDestroy_AssociateAudioSendReceiveStreams_SendFirst) {
recv_config.rtp.local_ssrc = 777;
recv_config.rtcp_send_transport = &rtcp_send_transport;
recv_config.decoder_factory =
- new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+ rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
AudioReceiveStream* recv_stream =
call->CreateAudioReceiveStream(recv_config);
EXPECT_NE(recv_stream, nullptr);
@@ -248,7 +248,7 @@ TEST(CallTest, CreateDestroy_FlexfecReceiveStream) {
MockTransport rtcp_send_transport;
FlexfecReceiveStream::Config config(&rtcp_send_transport);
config.payload_type = 118;
- config.remote_ssrc = 38837212;
+ config.rtp.remote_ssrc = 38837212;
config.protected_media_ssrcs = {27273};
FlexfecReceiveStream* stream = call->CreateFlexfecReceiveStream(config);
@@ -267,7 +267,7 @@ TEST(CallTest, CreateDestroy_FlexfecReceiveStreams) {
for (int i = 0; i < 2; ++i) {
for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) {
- config.remote_ssrc = ssrc;
+ config.rtp.remote_ssrc = ssrc;
config.protected_media_ssrcs = {ssrc + 1};
FlexfecReceiveStream* stream = call->CreateFlexfecReceiveStream(config);
EXPECT_NE(stream, nullptr);
@@ -295,22 +295,22 @@ TEST(CallTest, MultipleFlexfecReceiveStreamsProtectingSingleVideoStream) {
FlexfecReceiveStream* stream;
std::list<FlexfecReceiveStream*> streams;
- config.remote_ssrc = 838383;
+ config.rtp.remote_ssrc = 838383;
stream = call->CreateFlexfecReceiveStream(config);
EXPECT_NE(stream, nullptr);
streams.push_back(stream);
- config.remote_ssrc = 424993;
+ config.rtp.remote_ssrc = 424993;
stream = call->CreateFlexfecReceiveStream(config);
EXPECT_NE(stream, nullptr);
streams.push_back(stream);
- config.remote_ssrc = 99383;
+ config.rtp.remote_ssrc = 99383;
stream = call->CreateFlexfecReceiveStream(config);
EXPECT_NE(stream, nullptr);
streams.push_back(stream);
- config.remote_ssrc = 5548;
+ config.rtp.remote_ssrc = 5548;
stream = call->CreateFlexfecReceiveStream(config);
EXPECT_NE(stream, nullptr);
streams.push_back(stream);
diff --git a/call/degraded_call.cc b/call/degraded_call.cc
index 73c236bc0c..5462085490 100644
--- a/call/degraded_call.cc
+++ b/call/degraded_call.cc
@@ -288,6 +288,16 @@ void DegradedCall::OnAudioTransportOverheadChanged(
call_->OnAudioTransportOverheadChanged(transport_overhead_per_packet);
}
+void DegradedCall::OnLocalSsrcUpdated(AudioReceiveStream& stream,
+ uint32_t local_ssrc) {
+ call_->OnLocalSsrcUpdated(stream, local_ssrc);
+}
+
+void DegradedCall::OnUpdateSyncGroup(AudioReceiveStream& stream,
+ const std::string& sync_group) {
+ call_->OnUpdateSyncGroup(stream, sync_group);
+}
+
void DegradedCall::OnSentPacket(const rtc::SentPacket& sent_packet) {
if (send_config_) {
// If we have a degraded send-transport, we have already notified call
diff --git a/call/degraded_call.h b/call/degraded_call.h
index 03fc14f284..70dc126807 100644
--- a/call/degraded_call.h
+++ b/call/degraded_call.h
@@ -16,6 +16,7 @@
#include <map>
#include <memory>
+#include <string>
#include "absl/types/optional.h"
#include "api/call/transport.h"
@@ -93,6 +94,10 @@ class DegradedCall : public Call, private PacketReceiver {
void SignalChannelNetworkState(MediaType media, NetworkState state) override;
void OnAudioTransportOverheadChanged(
int transport_overhead_per_packet) override;
+ void OnLocalSsrcUpdated(AudioReceiveStream& stream,
+ uint32_t local_ssrc) override;
+ void OnUpdateSyncGroup(AudioReceiveStream& stream,
+ const std::string& sync_group) override;
void OnSentPacket(const rtc::SentPacket& sent_packet) override;
protected:
diff --git a/call/flexfec_receive_stream.h b/call/flexfec_receive_stream.h
index 2f7438f9a4..72e544e7ec 100644
--- a/call/flexfec_receive_stream.h
+++ b/call/flexfec_receive_stream.h
@@ -19,11 +19,13 @@
#include "api/call/transport.h"
#include "api/rtp_headers.h"
#include "api/rtp_parameters.h"
+#include "call/receive_stream.h"
#include "call/rtp_packet_sink_interface.h"
namespace webrtc {
-class FlexfecReceiveStream : public RtpPacketSinkInterface {
+class FlexfecReceiveStream : public RtpPacketSinkInterface,
+ public ReceiveStream {
public:
~FlexfecReceiveStream() override = default;
@@ -48,8 +50,7 @@ class FlexfecReceiveStream : public RtpPacketSinkInterface {
// Payload type for FlexFEC.
int payload_type = -1;
- // SSRC for FlexFEC stream to be received.
- uint32_t remote_ssrc = 0;
+ RtpConfig rtp;
// Vector containing a single element, corresponding to the SSRC of the
// media stream being protected by this FlexFEC stream. The vector MUST have
@@ -59,26 +60,14 @@ class FlexfecReceiveStream : public RtpPacketSinkInterface {
// protection.
std::vector<uint32_t> protected_media_ssrcs;
- // SSRC for RTCP reports to be sent.
- uint32_t local_ssrc = 0;
-
// What RTCP mode to use in the reports.
RtcpMode rtcp_mode = RtcpMode::kCompound;
// Transport for outgoing RTCP packets.
Transport* rtcp_send_transport = nullptr;
-
- // |transport_cc| is true whenever the send-side BWE RTCP feedback message
- // has been negotiated. This is a prerequisite for enabling send-side BWE.
- bool transport_cc = false;
-
- // RTP header extensions that have been negotiated for this track.
- std::vector<RtpExtension> rtp_header_extensions;
};
virtual Stats GetStats() const = 0;
-
- virtual const Config& GetConfig() const = 0;
};
} // namespace webrtc
diff --git a/call/flexfec_receive_stream_impl.cc b/call/flexfec_receive_stream_impl.cc
index e629bca347..ab82a6d71a 100644
--- a/call/flexfec_receive_stream_impl.cc
+++ b/call/flexfec_receive_stream_impl.cc
@@ -44,21 +44,21 @@ std::string FlexfecReceiveStream::Config::ToString() const {
char buf[1024];
rtc::SimpleStringBuilder ss(buf);
ss << "{payload_type: " << payload_type;
- ss << ", remote_ssrc: " << remote_ssrc;
- ss << ", local_ssrc: " << local_ssrc;
+ ss << ", remote_ssrc: " << rtp.remote_ssrc;
+ ss << ", local_ssrc: " << rtp.local_ssrc;
ss << ", protected_media_ssrcs: [";
size_t i = 0;
for (; i + 1 < protected_media_ssrcs.size(); ++i)
ss << protected_media_ssrcs[i] << ", ";
if (!protected_media_ssrcs.empty())
ss << protected_media_ssrcs[i];
- ss << "], transport_cc: " << (transport_cc ? "on" : "off");
- ss << ", rtp_header_extensions: [";
+ ss << "], transport_cc: " << (rtp.transport_cc ? "on" : "off");
+ ss << ", rtp.extensions: [";
i = 0;
- for (; i + 1 < rtp_header_extensions.size(); ++i)
- ss << rtp_header_extensions[i].ToString() << ", ";
- if (!rtp_header_extensions.empty())
- ss << rtp_header_extensions[i].ToString();
+ for (; i + 1 < rtp.extensions.size(); ++i)
+ ss << rtp.extensions[i].ToString() << ", ";
+ if (!rtp.extensions.empty())
+ ss << rtp.extensions[i].ToString();
ss << "]}";
return ss.str();
}
@@ -68,7 +68,7 @@ bool FlexfecReceiveStream::Config::IsCompleteAndEnabled() const {
if (payload_type < 0)
return false;
// Do we have the necessary SSRC information?
- if (remote_ssrc == 0)
+ if (rtp.remote_ssrc == 0)
return false;
// TODO(brandtr): Update this check when we support multistream protection.
if (protected_media_ssrcs.size() != 1u)
@@ -91,7 +91,7 @@ std::unique_ptr<FlexfecReceiver> MaybeCreateFlexfecReceiver(
}
RTC_DCHECK_GE(config.payload_type, 0);
RTC_DCHECK_LE(config.payload_type, 127);
- if (config.remote_ssrc == 0) {
+ if (config.rtp.remote_ssrc == 0) {
RTC_LOG(LS_WARNING)
<< "Invalid FlexFEC SSRC given. "
"This FlexfecReceiveStream will therefore be useless.";
@@ -114,7 +114,7 @@ std::unique_ptr<FlexfecReceiver> MaybeCreateFlexfecReceiver(
}
RTC_DCHECK_EQ(1U, config.protected_media_ssrcs.size());
return std::unique_ptr<FlexfecReceiver>(new FlexfecReceiver(
- clock, config.remote_ssrc, config.protected_media_ssrcs[0],
+ clock, config.rtp.remote_ssrc, config.protected_media_ssrcs[0],
recovered_packet_receiver));
}
@@ -130,7 +130,7 @@ std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpRtcpModule(
configuration.receive_statistics = receive_statistics;
configuration.outgoing_transport = config.rtcp_send_transport;
configuration.rtt_stats = rtt_stats;
- configuration.local_media_ssrc = config.local_ssrc;
+ configuration.local_media_ssrc = config.rtp.local_ssrc;
return ModuleRtpRtcpImpl2::Create(configuration);
}
@@ -138,7 +138,6 @@ std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpRtcpModule(
FlexfecReceiveStreamImpl::FlexfecReceiveStreamImpl(
Clock* clock,
- RtpStreamReceiverControllerInterface* receiver_controller,
const Config& config,
RecoveredPacketReceiver* recovered_packet_receiver,
RtcpRttStats* rtt_stats,
@@ -155,28 +154,37 @@ FlexfecReceiveStreamImpl::FlexfecReceiveStreamImpl(
process_thread_(process_thread) {
RTC_LOG(LS_INFO) << "FlexfecReceiveStreamImpl: " << config_.ToString();
+ packet_sequence_checker_.Detach();
+
// RTCP reporting.
rtp_rtcp_->SetRTCPStatus(config_.rtcp_mode);
process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);
+}
+
+FlexfecReceiveStreamImpl::~FlexfecReceiveStreamImpl() {
+ RTC_LOG(LS_INFO) << "~FlexfecReceiveStreamImpl: " << config_.ToString();
+ process_thread_->DeRegisterModule(rtp_rtcp_.get());
+}
+
+void FlexfecReceiveStreamImpl::RegisterWithTransport(
+ RtpStreamReceiverControllerInterface* receiver_controller) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ RTC_DCHECK(!rtp_stream_receiver_);
+
+ if (!receiver_)
+ return;
- // Register with transport.
// TODO(nisse): OnRtpPacket in this class delegates all real work to
- // |receiver_|. So maybe we don't need to implement RtpPacketSinkInterface
+ // `receiver_`. So maybe we don't need to implement RtpPacketSinkInterface
// here at all, we'd then delete the OnRtpPacket method and instead register
- // |receiver_| as the RtpPacketSinkInterface for this stream.
- // TODO(nisse): Passing |this| from the constructor to the RtpDemuxer, before
- // the object is fully initialized, is risky. But it works in this case
- // because locking in our caller, Call::CreateFlexfecReceiveStream, ensures
- // that the demuxer doesn't call OnRtpPacket before this object is fully
- // constructed. Registering |receiver_| instead of |this| would solve this
- // problem too.
+ // `receiver_` as the RtpPacketSinkInterface for this stream.
rtp_stream_receiver_ =
- receiver_controller->CreateReceiver(config_.remote_ssrc, this);
+ receiver_controller->CreateReceiver(config_.rtp.remote_ssrc, this);
}
-FlexfecReceiveStreamImpl::~FlexfecReceiveStreamImpl() {
- RTC_LOG(LS_INFO) << "~FlexfecReceiveStreamImpl: " << config_.ToString();
- process_thread_->DeRegisterModule(rtp_rtcp_.get());
+void FlexfecReceiveStreamImpl::UnregisterFromTransport() {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ rtp_stream_receiver_.reset();
}
void FlexfecReceiveStreamImpl::OnRtpPacket(const RtpPacketReceived& packet) {
@@ -186,7 +194,7 @@ void FlexfecReceiveStreamImpl::OnRtpPacket(const RtpPacketReceived& packet) {
receiver_->OnRtpPacket(packet);
// Do not report media packets in the RTCP RRs generated by |rtp_rtcp_|.
- if (packet.Ssrc() == config_.remote_ssrc) {
+ if (packet.Ssrc() == config_.rtp.remote_ssrc) {
rtp_receive_statistics_->OnRtpPacket(packet);
}
}
@@ -197,9 +205,4 @@ FlexfecReceiveStreamImpl::Stats FlexfecReceiveStreamImpl::GetStats() const {
return FlexfecReceiveStream::Stats();
}
-const FlexfecReceiveStream::Config& FlexfecReceiveStreamImpl::GetConfig()
- const {
- return config_;
-}
-
} // namespace webrtc
diff --git a/call/flexfec_receive_stream_impl.h b/call/flexfec_receive_stream_impl.h
index 888dae9ebd..12c4b04332 100644
--- a/call/flexfec_receive_stream_impl.h
+++ b/call/flexfec_receive_stream_impl.h
@@ -16,6 +16,7 @@
#include "call/flexfec_receive_stream.h"
#include "call/rtp_packet_sink_interface.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "rtc_base/system/no_unique_address.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -34,20 +35,37 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream {
public:
FlexfecReceiveStreamImpl(
Clock* clock,
- RtpStreamReceiverControllerInterface* receiver_controller,
const Config& config,
RecoveredPacketReceiver* recovered_packet_receiver,
RtcpRttStats* rtt_stats,
ProcessThread* process_thread);
+ // Destruction happens on the worker thread. Prior to destruction the caller
+ // must ensure that a registration with the transport has been cleared. See
+ // `RegisterWithTransport` for details.
+ // TODO(tommi): As a further improvement to this, performing the full
+ // destruction on the network thread could be made the default.
~FlexfecReceiveStreamImpl() override;
+ // Called on the network thread to register/unregister with the network
+ // transport.
+ void RegisterWithTransport(
+ RtpStreamReceiverControllerInterface* receiver_controller);
+ // If registration has previously been done (via `RegisterWithTransport`),
+ // then `UnregisterFromTransport` must be called prior to destruction, on
+ // the network thread.
+ void UnregisterFromTransport();
+
// RtpPacketSinkInterface.
void OnRtpPacket(const RtpPacketReceived& packet) override;
Stats GetStats() const override;
- const Config& GetConfig() const override;
+
+ // ReceiveStream impl.
+ const RtpConfig& rtp_config() const override { return config_.rtp; }
private:
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_;
+
// Config.
const Config config_;
@@ -57,9 +75,10 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream {
// RTCP reporting.
const std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
const std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
- ProcessThread* process_thread_;
+ ProcessThread* const process_thread_;
- std::unique_ptr<RtpStreamReceiverInterface> rtp_stream_receiver_;
+ std::unique_ptr<RtpStreamReceiverInterface> rtp_stream_receiver_
+ RTC_GUARDED_BY(packet_sequence_checker_);
};
} // namespace webrtc
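
The registration split above changes the FlexfecReceiveStreamImpl lifecycle: construct, register with the transport on the network thread, unregister on the network thread, then destroy on the worker thread. A sketch mirroring the updated unit tests below; thread hops are elided and all referenced objects are assumed to outlive the stream:

#include <memory>

#include "call/flexfec_receive_stream_impl.h"

// Sketch only: the register/unregister contract documented in the header.
void FlexfecStreamLifecycle(
    const webrtc::FlexfecReceiveStream::Config& config,
    webrtc::RecoveredPacketReceiver* recovered_packet_receiver,
    webrtc::RtcpRttStats* rtt_stats,
    webrtc::ProcessThread* process_thread,
    webrtc::RtpStreamReceiverControllerInterface* receiver_controller) {
  auto stream = std::make_unique<webrtc::FlexfecReceiveStreamImpl>(
      webrtc::Clock::GetRealTimeClock(), config, recovered_packet_receiver,
      rtt_stats, process_thread);
  stream->RegisterWithTransport(receiver_controller);  // Network thread.
  // ... packets are delivered via OnRtpPacket() ...
  stream->UnregisterFromTransport();  // Network thread, before destruction.
  stream.reset();                     // Worker thread.
}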
diff --git a/call/flexfec_receive_stream_unittest.cc b/call/flexfec_receive_stream_unittest.cc
index 5e8ee47433..f4944d054f 100644
--- a/call/flexfec_receive_stream_unittest.cc
+++ b/call/flexfec_receive_stream_unittest.cc
@@ -45,7 +45,7 @@ FlexfecReceiveStream::Config CreateDefaultConfig(
Transport* rtcp_send_transport) {
FlexfecReceiveStream::Config config(rtcp_send_transport);
config.payload_type = kFlexfecPlType;
- config.remote_ssrc = ByteReader<uint32_t>::ReadBigEndian(kFlexfecSsrc);
+ config.rtp.remote_ssrc = ByteReader<uint32_t>::ReadBigEndian(kFlexfecSsrc);
config.protected_media_ssrcs = {
ByteReader<uint32_t>::ReadBigEndian(kMediaSsrc)};
EXPECT_TRUE(config.IsCompleteAndEnabled());
@@ -64,16 +64,16 @@ TEST(FlexfecReceiveStreamConfigTest, IsCompleteAndEnabled) {
MockTransport rtcp_send_transport;
FlexfecReceiveStream::Config config(&rtcp_send_transport);
- config.local_ssrc = 18374743;
+ config.rtp.local_ssrc = 18374743;
config.rtcp_mode = RtcpMode::kCompound;
- config.transport_cc = true;
- config.rtp_header_extensions.emplace_back(TransportSequenceNumber::kUri, 7);
+ config.rtp.transport_cc = true;
+ config.rtp.extensions.emplace_back(TransportSequenceNumber::kUri, 7);
EXPECT_FALSE(config.IsCompleteAndEnabled());
config.payload_type = 123;
EXPECT_FALSE(config.IsCompleteAndEnabled());
- config.remote_ssrc = 238423838;
+ config.rtp.remote_ssrc = 238423838;
EXPECT_FALSE(config.IsCompleteAndEnabled());
config.protected_media_ssrcs.push_back(138989393);
@@ -89,12 +89,14 @@ class FlexfecReceiveStreamTest : public ::testing::Test {
: config_(CreateDefaultConfig(&rtcp_send_transport_)) {
EXPECT_CALL(process_thread_, RegisterModule(_, _)).Times(1);
receive_stream_ = std::make_unique<FlexfecReceiveStreamImpl>(
- Clock::GetRealTimeClock(), &rtp_stream_receiver_controller_, config_,
- &recovered_packet_receiver_, &rtt_stats_, &process_thread_);
+ Clock::GetRealTimeClock(), config_, &recovered_packet_receiver_,
+ &rtt_stats_, &process_thread_);
+ receive_stream_->RegisterWithTransport(&rtp_stream_receiver_controller_);
}
~FlexfecReceiveStreamTest() {
EXPECT_CALL(process_thread_, DeRegisterModule(_)).Times(1);
+ receive_stream_->UnregisterFromTransport();
}
MockTransport rtcp_send_transport_;
@@ -145,9 +147,10 @@ TEST_F(FlexfecReceiveStreamTest, RecoversPacket) {
::testing::StrictMock<MockRecoveredPacketReceiver> recovered_packet_receiver;
EXPECT_CALL(process_thread_, RegisterModule(_, _)).Times(1);
- FlexfecReceiveStreamImpl receive_stream(
- Clock::GetRealTimeClock(), &rtp_stream_receiver_controller_, config_,
- &recovered_packet_receiver, &rtt_stats_, &process_thread_);
+ FlexfecReceiveStreamImpl receive_stream(Clock::GetRealTimeClock(), config_,
+ &recovered_packet_receiver,
+ &rtt_stats_, &process_thread_);
+ receive_stream.RegisterWithTransport(&rtp_stream_receiver_controller_);
EXPECT_CALL(recovered_packet_receiver,
OnRecoveredPacket(_, kRtpHeaderSize + kPayloadLength[1]));
@@ -156,6 +159,8 @@ TEST_F(FlexfecReceiveStreamTest, RecoversPacket) {
// Tear-down
EXPECT_CALL(process_thread_, DeRegisterModule(_)).Times(1);
+
+ receive_stream.UnregisterFromTransport();
}
} // namespace webrtc
diff --git a/call/packet_receiver.h b/call/packet_receiver.h
index f18ee65c70..13d3b84c90 100644
--- a/call/packet_receiver.h
+++ b/call/packet_receiver.h
@@ -10,13 +10,6 @@
#ifndef CALL_PACKET_RECEIVER_H_
#define CALL_PACKET_RECEIVER_H_
-#include <algorithm>
-#include <functional>
-#include <memory>
-#include <string>
-#include <utility>
-#include <vector>
-
#include "api/media_types.h"
#include "rtc_base/copy_on_write_buffer.h"
@@ -30,32 +23,6 @@ class PacketReceiver {
DELIVERY_PACKET_ERROR,
};
- // Definition of the callback to execute when packet delivery is complete.
- // The callback will be issued on the same thread as called DeliverPacket.
- typedef std::function<
- void(DeliveryStatus, MediaType, rtc::CopyOnWriteBuffer, int64_t)>
- PacketCallback;
-
- // Asynchronously handle packet delivery and report back to the caller when
- // delivery of the packet has completed.
- // Note that if the packet is invalid or can be processed without the need of
- // asynchronous operations that the |callback| may have been called before
- // the function returns.
- // TODO(bugs.webrtc.org/11993): This function is meant to be called on the
- // network thread exclusively but while the code is being updated to align
- // with those goals, it may be called either on the worker or network threads.
- // Update docs etc when the work has been completed. Once we're done with the
- // updates, we might be able to go back to returning the status from this
- // function instead of having to report it via a callback.
- virtual void DeliverPacketAsync(MediaType media_type,
- rtc::CopyOnWriteBuffer packet,
- int64_t packet_time_us,
- PacketCallback callback) {
- DeliveryStatus status = DeliverPacket(media_type, packet, packet_time_us);
- if (callback)
- callback(status, media_type, std::move(packet), packet_time_us);
- }
-
virtual DeliveryStatus DeliverPacket(MediaType media_type,
rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) = 0;
diff --git a/call/rampup_tests.cc b/call/rampup_tests.cc
index 379f9dcf84..bf136a5df9 100644
--- a/call/rampup_tests.cc
+++ b/call/rampup_tests.cc
@@ -160,7 +160,7 @@ void RampUpTester::ModifyVideoConfigs(
encoder_config->number_of_streams = num_video_streams_;
encoder_config->max_bitrate_bps = 2000000;
encoder_config->video_stream_factory =
- new rtc::RefCountedObject<RampUpTester::VideoStreamFactory>();
+ rtc::make_ref_counted<RampUpTester::VideoStreamFactory>();
if (num_video_streams_ == 1) {
// For single stream rampup until 1mbps
expected_bitrate_bps_ = kSingleStreamTargetBps;
@@ -295,16 +295,16 @@ void RampUpTester::ModifyFlexfecConfigs(
return;
RTC_DCHECK_EQ(1, num_flexfec_streams_);
(*receive_configs)[0].payload_type = test::CallTest::kFlexfecPayloadType;
- (*receive_configs)[0].remote_ssrc = test::CallTest::kFlexfecSendSsrc;
+ (*receive_configs)[0].rtp.remote_ssrc = test::CallTest::kFlexfecSendSsrc;
(*receive_configs)[0].protected_media_ssrcs = {video_ssrcs_[0]};
- (*receive_configs)[0].local_ssrc = video_ssrcs_[0];
+ (*receive_configs)[0].rtp.local_ssrc = video_ssrcs_[0];
if (extension_type_ == RtpExtension::kAbsSendTimeUri) {
- (*receive_configs)[0].transport_cc = false;
- (*receive_configs)[0].rtp_header_extensions.push_back(
+ (*receive_configs)[0].rtp.transport_cc = false;
+ (*receive_configs)[0].rtp.extensions.push_back(
RtpExtension(extension_type_.c_str(), kAbsSendTimeExtensionId));
} else if (extension_type_ == RtpExtension::kTransportSequenceNumberUri) {
- (*receive_configs)[0].transport_cc = true;
- (*receive_configs)[0].rtp_header_extensions.push_back(RtpExtension(
+ (*receive_configs)[0].rtp.transport_cc = true;
+ (*receive_configs)[0].rtp.extensions.push_back(RtpExtension(
extension_type_.c_str(), kTransportSequenceNumberExtensionId));
}
}
@@ -370,7 +370,10 @@ void RampUpTester::TriggerTestDone() {
if (!send_stream_)
return;
- VideoSendStream::Stats send_stats = send_stream_->GetStats();
+ VideoSendStream::Stats send_stats;
+ SendTask(RTC_FROM_HERE, task_queue_,
+ [&] { send_stats = send_stream_->GetStats(); });
+
send_stream_ = nullptr; // To avoid dereferencing a bad pointer.
size_t total_packets_sent = 0;
diff --git a/call/receive_stream.h b/call/receive_stream.h
new file mode 100644
index 0000000000..0f59b37ae3
--- /dev/null
+++ b/call/receive_stream.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_RECEIVE_STREAM_H_
+#define CALL_RECEIVE_STREAM_H_
+
+#include <vector>
+
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/frame_transformer_interface.h"
+#include "api/media_types.h"
+#include "api/scoped_refptr.h"
+#include "api/transport/rtp/rtp_source.h"
+
+namespace webrtc {
+
+// Common base interface for MediaReceiveStream based classes and
+// FlexfecReceiveStream.
+class ReceiveStream {
+ public:
+ // Receive-stream specific RTP settings.
+ struct RtpConfig {
+ // Synchronization source (stream identifier) to be received.
+ // This member will not change mid-stream and can be assumed to be const
+ // post initialization.
+ uint32_t remote_ssrc = 0;
+
+ // Sender SSRC used for sending RTCP (such as receiver reports).
+ // This value may change mid-stream; any change must be made on the same
+ // thread that the value is read on (i.e. packet delivery).
+ uint32_t local_ssrc = 0;
+
+ // Enable feedback for send side bandwidth estimation.
+ // See
+ // https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions
+ // for details.
+ // This value may change mid-stream; any change must be made on the same
+ // thread that the value is read on (i.e. packet delivery).
+ bool transport_cc = false;
+
+ // RTP header extensions used for the received stream.
+ // This value may change mid-stream; any change must be made on the same
+ // thread that the value is read on (i.e. packet delivery).
+ std::vector<RtpExtension> extensions;
+ };
+
+ // Called on the packet delivery thread since some members of the config may
+ // change mid-stream (e.g. the local ssrc). All mutation must also happen on
+ // the packet delivery thread. The return value can be assumed to be used
+ // only in the calling context (i.e. on the stack).
+ virtual const RtpConfig& rtp_config() const = 0;
+
+ protected:
+ virtual ~ReceiveStream() {}
+};
+
+// Either an audio or video receive stream.
+class MediaReceiveStream : public ReceiveStream {
+ public:
+ // Starts stream activity.
+ // When a stream is active, it can receive, process and deliver packets.
+ virtual void Start() = 0;
+
+ // Stops stream activity. Must be called to match with a previous call to
+ // `Start()`. When a stream has been stopped, it won't receive, decode,
+ // process or deliver packets to downstream objects such as callback pointers
+ // set in the config struct.
+ virtual void Stop() = 0;
+
+ virtual void SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface>
+ frame_transformer) = 0;
+
+ virtual void SetFrameDecryptor(
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) = 0;
+
+ virtual std::vector<RtpSource> GetSources() const = 0;
+};
+
+} // namespace webrtc
+
+#endif // CALL_RECEIVE_STREAM_H_
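
To make the contract concrete, here is a minimal, hypothetical implementer of the new interface; it is not part of the patch and simply illustrates that rtp_config() is read, and its fields mutated, on the packet delivery thread:

#include <cstdint>

#include "call/receive_stream.h"

// Sketch only: a trivial ReceiveStream whose RtpConfig lives inline.
class SketchReceiveStream : public webrtc::ReceiveStream {
 public:
  explicit SketchReceiveStream(uint32_t remote_ssrc) {
    rtp_.remote_ssrc = remote_ssrc;
  }

  // Read on the packet delivery thread; the returned reference is only valid
  // in the calling context.
  const RtpConfig& rtp_config() const override { return rtp_; }

  // Mutations (e.g. a renegotiated local SSRC) must happen on the same
  // packet delivery thread that reads the config.
  void SetLocalSsrc(uint32_t local_ssrc) { rtp_.local_ssrc = local_ssrc; }

 private:
  RtpConfig rtp_;
};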
diff --git a/call/rtp_payload_params.cc b/call/rtp_payload_params.cc
index 18b113852e..c6a56a389e 100644
--- a/call/rtp_payload_params.cc
+++ b/call/rtp_payload_params.cc
@@ -131,6 +131,9 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc,
: ssrc_(ssrc),
generic_picture_id_experiment_(
absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"),
+ "Enabled")),
+ simulate_generic_vp9_(
+ absl::StartsWith(trials.Lookup("WebRTC-Vp9DependencyDescriptor"),
"Enabled")) {
for (auto& spatial_layer : last_shared_frame_id_)
spatial_layer.fill(-1);
@@ -277,8 +280,13 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info,
}
return;
case VideoCodecType::kVideoCodecVP9:
+ if (simulate_generic_vp9_ && codec_specific_info != nullptr) {
+ Vp9ToGeneric(codec_specific_info->codecSpecific.VP9, frame_id,
+ *rtp_video_header);
+ }
+ return;
case VideoCodecType::kVideoCodecAV1:
- // TODO(philipel): Implement VP9 and AV1 to generic descriptor.
+ // TODO(philipel): Implement AV1 to generic descriptor.
return;
case VideoCodecType::kVideoCodecH264:
if (codec_specific_info) {
@@ -399,6 +407,150 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info,
}
}
+FrameDependencyStructure RtpPayloadParams::MinimalisticVp9Structure(
+ const CodecSpecificInfoVP9& vp9) {
+ const int num_spatial_layers = vp9.num_spatial_layers;
+ const int num_temporal_layers = kMaxTemporalStreams;
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = num_spatial_layers * num_temporal_layers;
+ structure.num_chains = num_spatial_layers;
+ structure.templates.reserve(num_spatial_layers * num_temporal_layers);
+ for (int sid = 0; sid < num_spatial_layers; ++sid) {
+ for (int tid = 0; tid < num_temporal_layers; ++tid) {
+ FrameDependencyTemplate a_template;
+ a_template.spatial_id = sid;
+ a_template.temporal_id = tid;
+ for (int s = 0; s < num_spatial_layers; ++s) {
+ for (int t = 0; t < num_temporal_layers; ++t) {
+ // Prefer kSwitch for indicating that a frame is part of the decode target,
+ // because RtpPayloadParams::Vp9ToGeneric uses that indication more often
+ // than kRequired, increasing the chance that a custom dti does not need
+ // extra bits in the dependency descriptor on the wire.
+ a_template.decode_target_indications.push_back(
+ sid <= s && tid <= t ? DecodeTargetIndication::kSwitch
+ : DecodeTargetIndication::kNotPresent);
+ }
+ }
+ a_template.frame_diffs.push_back(tid == 0 ? num_spatial_layers *
+ num_temporal_layers
+ : num_spatial_layers);
+ a_template.chain_diffs.assign(structure.num_chains, 1);
+ structure.templates.push_back(a_template);
+
+ structure.decode_target_protected_by_chain.push_back(sid);
+ }
+ if (vp9.ss_data_available && vp9.spatial_layer_resolution_present) {
+ structure.resolutions.emplace_back(vp9.width[sid], vp9.height[sid]);
+ }
+ }
+ return structure;
+}
+
+void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
+ int64_t shared_frame_id,
+ RTPVideoHeader& rtp_video_header) {
+ const auto& vp9_header =
+ absl::get<RTPVideoHeaderVP9>(rtp_video_header.video_type_header);
+ const int num_spatial_layers = vp9_header.num_spatial_layers;
+ const int num_temporal_layers = kMaxTemporalStreams;
+
+ int spatial_index =
+ vp9_header.spatial_idx != kNoSpatialIdx ? vp9_header.spatial_idx : 0;
+ int temporal_index =
+ vp9_header.temporal_idx != kNoTemporalIdx ? vp9_header.temporal_idx : 0;
+
+ if (spatial_index >= num_spatial_layers ||
+ temporal_index >= num_temporal_layers ||
+ num_spatial_layers > RtpGenericFrameDescriptor::kMaxSpatialLayers) {
+ // Prefer to generate no generic layering than an inconsistent one.
+ return;
+ }
+
+ RTPVideoHeader::GenericDescriptorInfo& result =
+ rtp_video_header.generic.emplace();
+
+ result.frame_id = shared_frame_id;
+ result.spatial_index = spatial_index;
+ result.temporal_index = temporal_index;
+
+ result.decode_target_indications.reserve(num_spatial_layers *
+ num_temporal_layers);
+ for (int sid = 0; sid < num_spatial_layers; ++sid) {
+ for (int tid = 0; tid < num_temporal_layers; ++tid) {
+ DecodeTargetIndication dti;
+ if (sid < spatial_index || tid < temporal_index) {
+ dti = DecodeTargetIndication::kNotPresent;
+ } else if (spatial_index != sid &&
+ vp9_header.non_ref_for_inter_layer_pred) {
+ dti = DecodeTargetIndication::kNotPresent;
+ } else if (sid == spatial_index && tid == temporal_index) {
+ // Assume that if a frame is decodable, all of its own layer is decodable.
+ dti = DecodeTargetIndication::kSwitch;
+ } else if (sid == spatial_index && vp9_header.temporal_up_switch) {
+ dti = DecodeTargetIndication::kSwitch;
+ } else if (!vp9_header.inter_pic_predicted) {
+ // Key frame or spatial upswitch
+ dti = DecodeTargetIndication::kSwitch;
+ } else {
+ // Make no other assumptions. That should be safe, though suboptimal.
+ // To provide a more accurate dti, the encoder wrapper should fill in
+ // CodecSpecificInfo::generic_frame_info.
+ dti = DecodeTargetIndication::kRequired;
+ }
+ result.decode_target_indications.push_back(dti);
+ }
+ }
+
+ // Calculate frame dependencies.
+ static constexpr int kPictureDiffLimit = 128;
+ if (last_vp9_frame_id_.empty()) {
+ // Create the array only if it is ever used.
+ last_vp9_frame_id_.resize(kPictureDiffLimit);
+ }
+ if (vp9_header.inter_layer_predicted && spatial_index > 0) {
+ result.dependencies.push_back(
+ last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit]
+ [spatial_index - 1]);
+ }
+ if (vp9_header.inter_pic_predicted) {
+ for (size_t i = 0; i < vp9_header.num_ref_pics; ++i) {
+ // picture_id is a 15-bit number that wraps around. Underflow may produce
+ // a value that exceeds 2^15, but that is fine because this code block only
+ // uses the last 7 bits of the picture_id.
+ uint16_t depend_on = vp9_header.picture_id - vp9_header.pid_diff[i];
+ result.dependencies.push_back(
+ last_vp9_frame_id_[depend_on % kPictureDiffLimit][spatial_index]);
+ }
+ }
+ last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit][spatial_index] =
+ shared_frame_id;
+
+ // Calculate chains, assuming each chain includes all frames with temporal_id = 0.
+ if (!vp9_header.inter_pic_predicted && !vp9_header.inter_layer_predicted) {
+ // Assume frames without dependencies also reset chains.
+ for (int sid = spatial_index; sid < num_spatial_layers; ++sid) {
+ chain_last_frame_id_[sid] = -1;
+ }
+ }
+ result.chain_diffs.resize(num_spatial_layers);
+ for (int sid = 0; sid < num_spatial_layers; ++sid) {
+ if (chain_last_frame_id_[sid] == -1) {
+ result.chain_diffs[sid] = 0;
+ continue;
+ }
+ result.chain_diffs[sid] = shared_frame_id - chain_last_frame_id_[sid];
+ }
+
+ if (temporal_index == 0) {
+ chain_last_frame_id_[spatial_index] = shared_frame_id;
+ if (!vp9_header.non_ref_for_inter_layer_pred) {
+ for (int sid = spatial_index + 1; sid < num_spatial_layers; ++sid) {
+ chain_last_frame_id_[sid] = shared_frame_id;
+ }
+ }
+ }
+}
+
void RtpPayloadParams::SetDependenciesVp8Deprecated(
const CodecSpecificInfoVP8& vp8_info,
int64_t shared_frame_id,
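
For orientation, a worked instance of the new helper follows. This is a sketch, not part of the patch; it takes kMaxTemporalStreams to be 4 and assumes the listed include paths, both of which are assumptions about the surrounding tree:

#include "api/transport/rtp/dependency_descriptor.h"
#include "call/rtp_payload_params.h"
#include "modules/video_coding/include/video_codec_interface.h"

// Sketch only: for a 2-spatial-layer VP9 stream the minimalistic structure
// advertises num_spatial_layers * kMaxTemporalStreams decode targets
// (2 * 4 = 8 under the assumption above), one chain per spatial layer, one
// template per (sid, tid) pair, and protects decode target sid * 4 + tid
// with chain `sid`.
webrtc::FrameDependencyStructure MinimalVp9StructureExample() {
  webrtc::CodecSpecificInfoVP9 vp9 = {};
  vp9.num_spatial_layers = 2;
  vp9.ss_data_available = false;  // Skip per-layer resolutions in this sketch.
  return webrtc::RtpPayloadParams::MinimalisticVp9Structure(vp9);
}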
diff --git a/call/rtp_payload_params.h b/call/rtp_payload_params.h
index ebfdd4605a..da53cbc5c4 100644
--- a/call/rtp_payload_params.h
+++ b/call/rtp_payload_params.h
@@ -12,6 +12,7 @@
#define CALL_RTP_PAYLOAD_PARAMS_H_
#include <array>
+#include <vector>
#include "absl/types/optional.h"
#include "api/transport/webrtc_key_value_config.h"
@@ -41,6 +42,14 @@ class RtpPayloadParams final {
const CodecSpecificInfo* codec_specific_info,
int64_t shared_frame_id);
+ // Returns a structure that aligns with the simulated generic info for VP9.
+ // The templates allow producing a valid dependency descriptor for any VP9
+ // stream with up to 4 temporal layers. The set of templates is not tuned
+ // for any particular structure, so the dependency descriptor uses more
+ // bytes on the wire than it would with tuned templates.
+ static FrameDependencyStructure MinimalisticVp9Structure(
+ const CodecSpecificInfoVP9& vp9);
+
uint32_t ssrc() const;
RtpPayloadState state() const;
@@ -61,6 +70,10 @@ class RtpPayloadParams final {
bool is_keyframe,
RTPVideoHeader* rtp_video_header);
+ void Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
+ int64_t shared_frame_id,
+ RTPVideoHeader& rtp_video_header);
+
void H264ToGeneric(const CodecSpecificInfoH264& h264_info,
int64_t shared_frame_id,
bool is_keyframe,
@@ -94,6 +107,13 @@ class RtpPayloadParams final {
std::array<std::array<int64_t, RtpGenericFrameDescriptor::kMaxTemporalLayers>,
RtpGenericFrameDescriptor::kMaxSpatialLayers>
last_shared_frame_id_;
+ // Circular buffer of frame ids for the last 128 VP9 pictures.
+ // The id for a given `picture_id` is stored at index `picture_id % 128`.
+ std::vector<std::array<int64_t, RtpGenericFrameDescriptor::kMaxSpatialLayers>>
+ last_vp9_frame_id_;
+ // Last frame id for each chain.
+ std::array<int64_t, RtpGenericFrameDescriptor::kMaxSpatialLayers>
+ chain_last_frame_id_;
// TODO(eladalon): When additional codecs are supported,
// set kMaxCodecBuffersCount to the max() of these codecs' buffer count.
@@ -113,6 +133,7 @@ class RtpPayloadParams final {
RtpPayloadState state_;
const bool generic_picture_id_experiment_;
+ const bool simulate_generic_vp9_;
};
} // namespace webrtc
#endif // CALL_RTP_PAYLOAD_PARAMS_H_
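
One subtlety in the new Vp9ToGeneric bookkeeping is worth spelling out: the 128-entry buffer indexed by `picture_id % 128` stays correct across the 15-bit picture_id wrap, because both 2^16 (the width of the uint16_t subtraction) and 2^15 are multiples of 128. A small illustrative check, not part of the patch and with hypothetical names:

#include <cstdint>

// Sketch only: picture_id = 3 with pid_diff = 5 underflows to 65534 in
// uint16_t arithmetic, while the true 15-bit predecessor is 32766; both map
// to slot 126, so the circular-buffer index is unaffected by the wrap.
constexpr int kPictureDiffLimitSketch = 128;

constexpr int ReferenceSlot(uint16_t picture_id, uint16_t pid_diff) {
  const uint16_t depend_on = picture_id - pid_diff;
  return depend_on % kPictureDiffLimitSketch;
}

static_assert(ReferenceSlot(3, 5) == 126, "wrap-around keeps the slot index");
static_assert(32766 % kPictureDiffLimitSketch == 126,
              "same slot as the true 15-bit predecessor");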
diff --git a/call/rtp_payload_params_unittest.cc b/call/rtp_payload_params_unittest.cc
index 56ed2cdea6..7db38dbcb8 100644
--- a/call/rtp_payload_params_unittest.cc
+++ b/call/rtp_payload_params_unittest.cc
@@ -26,10 +26,12 @@
#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "modules/video_coding/include/video_codec_interface.h"
+#include "test/explicit_key_value_config.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
+using ::testing::Each;
using ::testing::ElementsAre;
using ::testing::IsEmpty;
using ::testing::SizeIs;
@@ -461,6 +463,410 @@ TEST_F(RtpPayloadParamsVp8ToGenericTest, FrameIdGaps) {
ConvertAndCheck(1, 20, VideoFrameType::kVideoFrameDelta, kNoSync, {10, 15});
}
+class RtpPayloadParamsVp9ToGenericTest : public ::testing::Test {
+ protected:
+ RtpPayloadParamsVp9ToGenericTest()
+ : field_trials_("WebRTC-Vp9DependencyDescriptor/Enabled/") {}
+
+ test::ExplicitKeyValueConfig field_trials_;
+ RtpPayloadState state_;
+};
+
+TEST_F(RtpPayloadParamsVp9ToGenericTest, NoScalability) {
+ RtpPayloadParams params(/*ssrc=*/123, &state_, field_trials_);
+
+ EncodedImage encoded_image;
+ CodecSpecificInfo codec_info;
+ codec_info.codecType = kVideoCodecVP9;
+ codec_info.codecSpecific.VP9.num_spatial_layers = 1;
+ codec_info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
+ codec_info.codecSpecific.VP9.first_frame_in_picture = true;
+ codec_info.end_of_picture = true;
+
+ // Key frame.
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ codec_info.codecSpecific.VP9.inter_pic_predicted = false;
+ codec_info.codecSpecific.VP9.num_ref_pics = 0;
+ RTPVideoHeader header = params.GetRtpVideoHeader(encoded_image, &codec_info,
+ /*shared_frame_id=*/1);
+
+ ASSERT_TRUE(header.generic);
+ EXPECT_EQ(header.generic->spatial_index, 0);
+ EXPECT_EQ(header.generic->temporal_index, 0);
+ EXPECT_EQ(header.generic->frame_id, 1);
+ ASSERT_THAT(header.generic->decode_target_indications, Not(IsEmpty()));
+ EXPECT_EQ(header.generic->decode_target_indications[0],
+ DecodeTargetIndication::kSwitch);
+ EXPECT_THAT(header.generic->dependencies, IsEmpty());
+ EXPECT_THAT(header.generic->chain_diffs, ElementsAre(0));
+
+ // Delta frame.
+ encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
+ codec_info.codecSpecific.VP9.inter_pic_predicted = true;
+ codec_info.codecSpecific.VP9.num_ref_pics = 1;
+ codec_info.codecSpecific.VP9.p_diff[0] = 1;
+ header = params.GetRtpVideoHeader(encoded_image, &codec_info,
+ /*shared_frame_id=*/3);
+
+ ASSERT_TRUE(header.generic);
+ EXPECT_EQ(header.generic->spatial_index, 0);
+ EXPECT_EQ(header.generic->temporal_index, 0);
+ EXPECT_EQ(header.generic->frame_id, 3);
+ ASSERT_THAT(header.generic->decode_target_indications, Not(IsEmpty()));
+ EXPECT_EQ(header.generic->decode_target_indications[0],
+ DecodeTargetIndication::kSwitch);
+ EXPECT_THAT(header.generic->dependencies, ElementsAre(1));
+ // The previous frame in the chain was frame#1.
+ EXPECT_THAT(header.generic->chain_diffs, ElementsAre(3 - 1));
+}
+
+TEST_F(RtpPayloadParamsVp9ToGenericTest, TemporalScalabilityWith2Layers) {
+ // Test with 2 temporal layers structure that is not used by webrtc:
+ // 1---3 5
+ // / / / ...
+ // 0---2---4---
+ RtpPayloadParams params(/*ssrc=*/123, &state_, field_trials_);
+
+ EncodedImage image;
+ CodecSpecificInfo info;
+ info.codecType = kVideoCodecVP9;
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.first_frame_in_picture = true;
+ info.end_of_picture = true;
+
+ RTPVideoHeader headers[6];
+ // Key frame.
+ image._frameType = VideoFrameType::kVideoFrameKey;
+ info.codecSpecific.VP9.inter_pic_predicted = false;
+ info.codecSpecific.VP9.num_ref_pics = 0;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 0;
+ headers[0] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/1);
+
+ // Delta frames.
+ info.codecSpecific.VP9.inter_pic_predicted = true;
+ image._frameType = VideoFrameType::kVideoFrameDelta;
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 1;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+ headers[1] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/3);
+
+ info.codecSpecific.VP9.temporal_up_switch = false;
+ info.codecSpecific.VP9.temporal_idx = 0;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 2;
+ headers[2] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/5);
+
+ info.codecSpecific.VP9.temporal_up_switch = false;
+ info.codecSpecific.VP9.temporal_idx = 1;
+ info.codecSpecific.VP9.num_ref_pics = 2;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+ info.codecSpecific.VP9.p_diff[1] = 2;
+ headers[3] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/7);
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 0;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 2;
+ headers[4] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/9);
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 1;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+ headers[5] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/11);
+
+ ASSERT_TRUE(headers[0].generic);
+ int num_decode_targets = headers[0].generic->decode_target_indications.size();
+ ASSERT_GE(num_decode_targets, 2);
+
+ for (int frame_idx = 0; frame_idx < 6; ++frame_idx) {
+ const RTPVideoHeader& header = headers[frame_idx];
+ ASSERT_TRUE(header.generic);
+ EXPECT_EQ(header.generic->spatial_index, 0);
+ EXPECT_EQ(header.generic->temporal_index, frame_idx % 2);
+ EXPECT_EQ(header.generic->frame_id, 1 + 2 * frame_idx);
+ ASSERT_THAT(header.generic->decode_target_indications,
+ SizeIs(num_decode_targets));
+ // Expect only T0 frames are needed for the 1st decode target.
+ if (header.generic->temporal_index == 0) {
+ EXPECT_NE(header.generic->decode_target_indications[0],
+ DecodeTargetIndication::kNotPresent);
+ } else {
+ EXPECT_EQ(header.generic->decode_target_indications[0],
+ DecodeTargetIndication::kNotPresent);
+ }
+ // Expect all frames are needed for the 2nd decode target.
+ EXPECT_NE(header.generic->decode_target_indications[1],
+ DecodeTargetIndication::kNotPresent);
+ }
+
+ // Expect switch at every beginning of the pattern.
+ EXPECT_THAT(headers[0].generic->decode_target_indications,
+ Each(DecodeTargetIndication::kSwitch));
+ EXPECT_THAT(headers[4].generic->decode_target_indications,
+ Each(DecodeTargetIndication::kSwitch));
+
+ EXPECT_THAT(headers[0].generic->dependencies, IsEmpty()); // T0, 1
+ EXPECT_THAT(headers[1].generic->dependencies, ElementsAre(1)); // T1, 3
+ EXPECT_THAT(headers[2].generic->dependencies, ElementsAre(1)); // T0, 5
+ EXPECT_THAT(headers[3].generic->dependencies, ElementsAre(5, 3)); // T1, 7
+ EXPECT_THAT(headers[4].generic->dependencies, ElementsAre(5)); // T0, 9
+ EXPECT_THAT(headers[5].generic->dependencies, ElementsAre(9)); // T1, 11
+
+ EXPECT_THAT(headers[0].generic->chain_diffs, ElementsAre(0));
+ EXPECT_THAT(headers[1].generic->chain_diffs, ElementsAre(2));
+ EXPECT_THAT(headers[2].generic->chain_diffs, ElementsAre(4));
+ EXPECT_THAT(headers[3].generic->chain_diffs, ElementsAre(2));
+ EXPECT_THAT(headers[4].generic->chain_diffs, ElementsAre(4));
+ EXPECT_THAT(headers[5].generic->chain_diffs, ElementsAre(2));
+}
+
+TEST_F(RtpPayloadParamsVp9ToGenericTest, TemporalScalabilityWith3Layers) {
+ // Test with 3 temporal layers structure that is not used by webrtc, but used
+ // by chromium: https://imgur.com/pURAGvp
+ RtpPayloadParams params(/*ssrc=*/123, &state_, field_trials_);
+
+ EncodedImage image;
+ CodecSpecificInfo info;
+ info.codecType = kVideoCodecVP9;
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.first_frame_in_picture = true;
+ info.end_of_picture = true;
+
+ RTPVideoHeader headers[9];
+ // Key frame.
+ image._frameType = VideoFrameType::kVideoFrameKey;
+ info.codecSpecific.VP9.inter_pic_predicted = false;
+ info.codecSpecific.VP9.num_ref_pics = 0;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 0;
+ headers[0] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/1);
+
+ // Delta frames.
+ info.codecSpecific.VP9.inter_pic_predicted = true;
+ image._frameType = VideoFrameType::kVideoFrameDelta;
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 2;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+ headers[1] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/3);
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 1;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 2;
+ headers[2] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/5);
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 2;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+ headers[3] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/7);
+
+ info.codecSpecific.VP9.temporal_up_switch = false;
+ info.codecSpecific.VP9.temporal_idx = 0;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 4;
+ headers[4] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/9);
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 2;
+ info.codecSpecific.VP9.num_ref_pics = 2;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+ info.codecSpecific.VP9.p_diff[1] = 3;
+ headers[5] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/11);
+
+ info.codecSpecific.VP9.temporal_up_switch = false;
+ info.codecSpecific.VP9.temporal_idx = 1;
+ info.codecSpecific.VP9.num_ref_pics = 2;
+ info.codecSpecific.VP9.p_diff[0] = 2;
+ info.codecSpecific.VP9.p_diff[1] = 4;
+ headers[6] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/13);
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 2;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+ headers[7] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/15);
+
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.temporal_idx = 0;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 4;
+ headers[8] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/17);
+
+ ASSERT_TRUE(headers[0].generic);
+ int num_decode_targets = headers[0].generic->decode_target_indications.size();
+ ASSERT_GE(num_decode_targets, 3);
+
+ for (int frame_idx = 0; frame_idx < 9; ++frame_idx) {
+ const RTPVideoHeader& header = headers[frame_idx];
+ ASSERT_TRUE(header.generic);
+ EXPECT_EQ(header.generic->spatial_index, 0);
+ EXPECT_EQ(header.generic->frame_id, 1 + 2 * frame_idx);
+ ASSERT_THAT(header.generic->decode_target_indications,
+ SizeIs(num_decode_targets));
+ // Expect only T0 frames are needed for the 1st decode target.
+ if (header.generic->temporal_index == 0) {
+ EXPECT_NE(header.generic->decode_target_indications[0],
+ DecodeTargetIndication::kNotPresent);
+ } else {
+ EXPECT_EQ(header.generic->decode_target_indications[0],
+ DecodeTargetIndication::kNotPresent);
+ }
+ // Expect only T0 and T1 frames are needed for the 2nd decode target.
+ if (header.generic->temporal_index <= 1) {
+ EXPECT_NE(header.generic->decode_target_indications[1],
+ DecodeTargetIndication::kNotPresent);
+ } else {
+ EXPECT_EQ(header.generic->decode_target_indications[1],
+ DecodeTargetIndication::kNotPresent);
+ }
+ // Expect all frames are needed for the 3rd decode target.
+ EXPECT_NE(header.generic->decode_target_indications[2],
+ DecodeTargetIndication::kNotPresent);
+ }
+
+ EXPECT_EQ(headers[0].generic->temporal_index, 0);
+ EXPECT_EQ(headers[1].generic->temporal_index, 2);
+ EXPECT_EQ(headers[2].generic->temporal_index, 1);
+ EXPECT_EQ(headers[3].generic->temporal_index, 2);
+ EXPECT_EQ(headers[4].generic->temporal_index, 0);
+ EXPECT_EQ(headers[5].generic->temporal_index, 2);
+ EXPECT_EQ(headers[6].generic->temporal_index, 1);
+ EXPECT_EQ(headers[7].generic->temporal_index, 2);
+ EXPECT_EQ(headers[8].generic->temporal_index, 0);
+
+ // Expect switch at every beginning of the pattern.
+ EXPECT_THAT(headers[0].generic->decode_target_indications,
+ Each(DecodeTargetIndication::kSwitch));
+ EXPECT_THAT(headers[8].generic->decode_target_indications,
+ Each(DecodeTargetIndication::kSwitch));
+
+ EXPECT_THAT(headers[0].generic->dependencies, IsEmpty()); // T0, 1
+ EXPECT_THAT(headers[1].generic->dependencies, ElementsAre(1)); // T2, 3
+ EXPECT_THAT(headers[2].generic->dependencies, ElementsAre(1)); // T1, 5
+ EXPECT_THAT(headers[3].generic->dependencies, ElementsAre(5)); // T2, 7
+ EXPECT_THAT(headers[4].generic->dependencies, ElementsAre(1)); // T0, 9
+ EXPECT_THAT(headers[5].generic->dependencies, ElementsAre(9, 5)); // T2, 11
+ EXPECT_THAT(headers[6].generic->dependencies, ElementsAre(9, 5)); // T1, 13
+ EXPECT_THAT(headers[7].generic->dependencies, ElementsAre(13)); // T2, 15
+ EXPECT_THAT(headers[8].generic->dependencies, ElementsAre(9)); // T0, 17
+
+ EXPECT_THAT(headers[0].generic->chain_diffs, ElementsAre(0));
+ EXPECT_THAT(headers[1].generic->chain_diffs, ElementsAre(2));
+ EXPECT_THAT(headers[2].generic->chain_diffs, ElementsAre(4));
+ EXPECT_THAT(headers[3].generic->chain_diffs, ElementsAre(6));
+ EXPECT_THAT(headers[4].generic->chain_diffs, ElementsAre(8));
+ EXPECT_THAT(headers[5].generic->chain_diffs, ElementsAre(2));
+ EXPECT_THAT(headers[6].generic->chain_diffs, ElementsAre(4));
+ EXPECT_THAT(headers[7].generic->chain_diffs, ElementsAre(6));
+ EXPECT_THAT(headers[8].generic->chain_diffs, ElementsAre(8));
+}
+
+TEST_F(RtpPayloadParamsVp9ToGenericTest, SpatialScalabilityKSvc) {
+ // 1---3--
+ // | ...
+ // 0---2--
+ RtpPayloadParams params(/*ssrc=*/123, &state_, field_trials_);
+
+ EncodedImage image;
+ CodecSpecificInfo info;
+ info.codecType = kVideoCodecVP9;
+ info.codecSpecific.VP9.num_spatial_layers = 2;
+ info.codecSpecific.VP9.first_frame_in_picture = true;
+
+ RTPVideoHeader headers[4];
+ // Key frame.
+ image._frameType = VideoFrameType::kVideoFrameKey;
+ image.SetSpatialIndex(0);
+ info.codecSpecific.VP9.inter_pic_predicted = false;
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.non_ref_for_inter_layer_pred = false;
+ info.codecSpecific.VP9.num_ref_pics = 0;
+ info.codecSpecific.VP9.first_frame_in_picture = true;
+ info.end_of_picture = false;
+ headers[0] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/1);
+
+ image.SetSpatialIndex(1);
+ info.codecSpecific.VP9.inter_layer_predicted = true;
+ info.codecSpecific.VP9.non_ref_for_inter_layer_pred = true;
+ info.codecSpecific.VP9.first_frame_in_picture = false;
+ info.end_of_picture = true;
+ headers[1] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/3);
+
+ // Delta frames.
+ info.codecSpecific.VP9.inter_pic_predicted = true;
+ image._frameType = VideoFrameType::kVideoFrameDelta;
+ info.codecSpecific.VP9.num_ref_pics = 1;
+ info.codecSpecific.VP9.p_diff[0] = 1;
+
+ image.SetSpatialIndex(0);
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.non_ref_for_inter_layer_pred = true;
+ info.codecSpecific.VP9.first_frame_in_picture = true;
+ info.end_of_picture = false;
+ headers[2] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/5);
+
+ image.SetSpatialIndex(1);
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.non_ref_for_inter_layer_pred = true;
+ info.codecSpecific.VP9.first_frame_in_picture = false;
+ info.end_of_picture = true;
+ headers[3] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/7);
+
+ ASSERT_TRUE(headers[0].generic);
+ int num_decode_targets = headers[0].generic->decode_target_indications.size();
+ // Rely on the implementation detail that kMaxTemporalStreams temporal layers
+ // are always assumed; in particular, assume Decode Target #0 matches layer
+ // S0T0 and Decode Target #kMaxTemporalStreams matches layer S1T0.
+ ASSERT_EQ(num_decode_targets, kMaxTemporalStreams * 2);
+
+ for (int frame_idx = 0; frame_idx < 4; ++frame_idx) {
+ const RTPVideoHeader& header = headers[frame_idx];
+ ASSERT_TRUE(header.generic);
+ EXPECT_EQ(header.generic->spatial_index, frame_idx % 2);
+ EXPECT_EQ(header.generic->temporal_index, 0);
+ EXPECT_EQ(header.generic->frame_id, 1 + 2 * frame_idx);
+ ASSERT_THAT(header.generic->decode_target_indications,
+ SizeIs(num_decode_targets));
+ }
+
+ // Expect the S0 key frame to be a switch for both Decode Targets.
+ EXPECT_EQ(headers[0].generic->decode_target_indications[0],
+ DecodeTargetIndication::kSwitch);
+ EXPECT_EQ(headers[0].generic->decode_target_indications[kMaxTemporalStreams],
+ DecodeTargetIndication::kSwitch);
+ // The S1 key frame is only needed for the 2nd Decode Target.
+ EXPECT_EQ(headers[1].generic->decode_target_indications[0],
+ DecodeTargetIndication::kNotPresent);
+ EXPECT_NE(headers[1].generic->decode_target_indications[kMaxTemporalStreams],
+ DecodeTargetIndication::kNotPresent);
+ // Delta frames are only needed for their own Decode Targets.
+ EXPECT_NE(headers[2].generic->decode_target_indications[0],
+ DecodeTargetIndication::kNotPresent);
+ EXPECT_EQ(headers[2].generic->decode_target_indications[kMaxTemporalStreams],
+ DecodeTargetIndication::kNotPresent);
+ EXPECT_EQ(headers[3].generic->decode_target_indications[0],
+ DecodeTargetIndication::kNotPresent);
+ EXPECT_NE(headers[3].generic->decode_target_indications[kMaxTemporalStreams],
+ DecodeTargetIndication::kNotPresent);
+
+ EXPECT_THAT(headers[0].generic->dependencies, IsEmpty()); // S0, 1
+ EXPECT_THAT(headers[1].generic->dependencies, ElementsAre(1)); // S1, 3
+ EXPECT_THAT(headers[2].generic->dependencies, ElementsAre(1)); // S0, 5
+ EXPECT_THAT(headers[3].generic->dependencies, ElementsAre(3)); // S1, 7
+
+ EXPECT_THAT(headers[0].generic->chain_diffs, ElementsAre(0, 0));
+ EXPECT_THAT(headers[1].generic->chain_diffs, ElementsAre(2, 2));
+ EXPECT_THAT(headers[2].generic->chain_diffs, ElementsAre(4, 2));
+ EXPECT_THAT(headers[3].generic->chain_diffs, ElementsAre(2, 4));
+}
+
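Editorial note on the index arithmetic used in the K-SVC expectations above, stated as a tiny hedged helper (illustration only; kMaxTemporalStreams is assumed to be 4 here, matching the WebRTC constant at the time of this change):

// Decode targets are assumed to be laid out in blocks of kMaxTemporalStreams
// per spatial layer, so the target for layer SxTy sits at index
// x * kMaxTemporalStreams + y.
constexpr int kAssumedMaxTemporalStreams = 4;

constexpr int DecodeTargetIndex(int spatial_id, int temporal_id) {
  return spatial_id * kAssumedMaxTemporalStreams + temporal_id;
}

static_assert(DecodeTargetIndex(0, 0) == 0, "S0T0 -> Decode Target #0");
static_assert(DecodeTargetIndex(1, 0) == kAssumedMaxTemporalStreams,
              "S1T0 -> Decode Target #kMaxTemporalStreams");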
class RtpPayloadParamsH264ToGenericTest : public ::testing::Test {
public:
enum LayerSync { kNoSync, kSync };
diff --git a/call/rtp_transport_config.h b/call/rtp_transport_config.h
new file mode 100644
index 0000000000..9aa9f14c16
--- /dev/null
+++ b/call/rtp_transport_config.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_RTP_TRANSPORT_CONFIG_H_
+#define CALL_RTP_TRANSPORT_CONFIG_H_
+
+#include <memory>
+
+#include "api/network_state_predictor.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/transport/bitrate_settings.h"
+#include "api/transport/network_control.h"
+#include "api/transport/webrtc_key_value_config.h"
+#include "modules/utility/include/process_thread.h"
+#include "rtc_base/task_queue.h"
+
+namespace webrtc {
+
+struct RtpTransportConfig {
+ // Bitrate config used until valid bitrate estimates are calculated. Also
+ // used to cap total bitrate used. This comes from the remote connection.
+ BitrateConstraints bitrate_config;
+
+ // RtcEventLog to use for this call. Required.
+ // Use webrtc::RtcEventLog::CreateNull() for a null implementation.
+ RtcEventLog* event_log = nullptr;
+
+ // Task Queue Factory to be used in this call. Required.
+ TaskQueueFactory* task_queue_factory = nullptr;
+
+ // NetworkStatePredictor to use for this call.
+ NetworkStatePredictorFactoryInterface* network_state_predictor_factory =
+ nullptr;
+
+ // Network controller factory to use for this call.
+ NetworkControllerFactoryInterface* network_controller_factory = nullptr;
+
+ // Key-value mapping of internal configurations to apply,
+ // e.g. field trials.
+ const WebRtcKeyValueConfig* trials = nullptr;
+};
+} // namespace webrtc
+
+#endif // CALL_RTP_TRANSPORT_CONFIG_H_
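A rough usage sketch for the new struct (hypothetical call site, not code from this change): the caller supplies the required event log and task queue factory, plus whatever optional factories it wants injected, and hands the config to a transport controller factory.

// Hypothetical illustration of populating RtpTransportConfig.
webrtc::RtpTransportConfig BuildTransportConfig(
    webrtc::RtcEventLog* event_log,
    webrtc::TaskQueueFactory* task_queue_factory,
    const webrtc::WebRtcKeyValueConfig* trials) {
  webrtc::RtpTransportConfig config;
  config.bitrate_config.start_bitrate_bps = 300000;  // Example value.
  config.event_log = event_log;                      // Required.
  config.task_queue_factory = task_queue_factory;    // Required.
  config.trials = trials;                            // e.g. field trials.
  return config;
}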
diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc
index d743a0bf43..f7b6b11fd7 100644
--- a/call/rtp_transport_controller_send.cc
+++ b/call/rtp_transport_controller_send.cc
@@ -142,6 +142,7 @@ RtpTransportControllerSend::RtpTransportControllerSend(
}
RtpTransportControllerSend::~RtpTransportControllerSend() {
+ RTC_DCHECK(video_rtp_senders_.empty());
process_thread_->Stop();
}
@@ -156,6 +157,7 @@ RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender(
std::unique_ptr<FecController> fec_controller,
const RtpSenderFrameEncryptionConfig& frame_encryption_config,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
+ RTC_DCHECK_RUN_ON(&main_thread_);
video_rtp_senders_.push_back(std::make_unique<RtpVideoSender>(
clock_, suspended_ssrcs, states, rtp_config, rtcp_report_interval_ms,
send_transport, observers,
@@ -169,6 +171,7 @@ RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender(
void RtpTransportControllerSend::DestroyRtpVideoSender(
RtpVideoSenderInterface* rtp_video_sender) {
+ RTC_DCHECK_RUN_ON(&main_thread_);
std::vector<std::unique_ptr<RtpVideoSenderInterface>>::iterator it =
video_rtp_senders_.end();
for (it = video_rtp_senders_.begin(); it != video_rtp_senders_.end(); ++it) {
@@ -354,6 +357,7 @@ void RtpTransportControllerSend::OnNetworkRouteChanged(
}
}
void RtpTransportControllerSend::OnNetworkAvailability(bool network_available) {
+ RTC_DCHECK_RUN_ON(&main_thread_);
RTC_LOG(LS_VERBOSE) << "SignalNetworkState "
<< (network_available ? "Up" : "Down");
NetworkAvailability msg;
@@ -470,6 +474,7 @@ RtpTransportControllerSend::ApplyOrLiftRelayCap(bool is_relayed) {
void RtpTransportControllerSend::OnTransportOverheadChanged(
size_t transport_overhead_bytes_per_packet) {
+ RTC_DCHECK_RUN_ON(&main_thread_);
if (transport_overhead_bytes_per_packet >= kMaxOverheadBytes) {
RTC_LOG(LS_ERROR) << "Transport overhead exceeds " << kMaxOverheadBytes;
return;
diff --git a/call/rtp_transport_controller_send.h b/call/rtp_transport_controller_send.h
index f0f74c9f2a..7455060945 100644
--- a/call/rtp_transport_controller_send.h
+++ b/call/rtp_transport_controller_send.h
@@ -18,6 +18,7 @@
#include <vector>
#include "api/network_state_predictor.h"
+#include "api/sequence_checker.h"
#include "api/transport/network_control.h"
#include "api/units/data_rate.h"
#include "call/rtp_bitrate_configurator.h"
@@ -62,6 +63,7 @@ class RtpTransportControllerSend final
const WebRtcKeyValueConfig* trials);
~RtpTransportControllerSend() override;
+ // TODO(tommi): Change to std::unique_ptr<>.
RtpVideoSenderInterface* CreateRtpVideoSender(
std::map<uint32_t, RtpState> suspended_ssrcs,
const std::map<uint32_t, RtpPayloadState>&
@@ -148,8 +150,10 @@ class RtpTransportControllerSend final
Clock* const clock_;
RtcEventLog* const event_log_;
+ SequenceChecker main_thread_;
PacketRouter packet_router_;
- std::vector<std::unique_ptr<RtpVideoSenderInterface>> video_rtp_senders_;
+ std::vector<std::unique_ptr<RtpVideoSenderInterface>> video_rtp_senders_
+ RTC_GUARDED_BY(&main_thread_);
RtpBitrateConfigurator bitrate_configurator_;
std::map<std::string, rtc::NetworkRoute> network_routes_;
bool pacer_started_;
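The SequenceChecker / RTC_GUARDED_BY / RTC_DCHECK_RUN_ON combination added above is the usual WebRTC idiom for documenting and enforcing single-sequence access to a member. A minimal standalone sketch of the same idiom (hypothetical class, not this file):

#include <vector>

#include "api/sequence_checker.h"
#include "rtc_base/thread_annotations.h"

// Hypothetical class showing the same annotation pattern.
class SenderRegistry {
 public:
  void Add(int id) {
    // DCHECKs (in debug builds) that the caller runs on the sequence the
    // checker is attached to.
    RTC_DCHECK_RUN_ON(&main_sequence_);
    ids_.push_back(id);
  }

 private:
  webrtc::SequenceChecker main_sequence_;
  std::vector<int> ids_ RTC_GUARDED_BY(&main_sequence_);
};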
diff --git a/call/rtp_transport_controller_send_factory.h b/call/rtp_transport_controller_send_factory.h
new file mode 100644
index 0000000000..a857ca7e6f
--- /dev/null
+++ b/call/rtp_transport_controller_send_factory.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_H_
+#define CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_H_
+
+#include <memory>
+#include <utility>
+
+#include "call/rtp_transport_controller_send.h"
+#include "call/rtp_transport_controller_send_factory_interface.h"
+
+namespace webrtc {
+class RtpTransportControllerSendFactory
+ : public RtpTransportControllerSendFactoryInterface {
+ public:
+ std::unique_ptr<RtpTransportControllerSendInterface> Create(
+ const RtpTransportConfig& config,
+ Clock* clock,
+ std::unique_ptr<ProcessThread> process_thread) override {
+ return std::make_unique<RtpTransportControllerSend>(
+ clock, config.event_log, config.network_state_predictor_factory,
+ config.network_controller_factory, config.bitrate_config,
+ std::move(process_thread), config.task_queue_factory, config.trials);
+ }
+
+ virtual ~RtpTransportControllerSendFactory() {}
+};
+} // namespace webrtc
+#endif // CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_H_
diff --git a/call/rtp_transport_controller_send_factory_interface.h b/call/rtp_transport_controller_send_factory_interface.h
new file mode 100644
index 0000000000..a0218532a1
--- /dev/null
+++ b/call/rtp_transport_controller_send_factory_interface.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_INTERFACE_H_
+#define CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_INTERFACE_H_
+
+#include <memory>
+
+#include "call/rtp_transport_config.h"
+#include "call/rtp_transport_controller_send_interface.h"
+#include "modules/utility/include/process_thread.h"
+
+namespace webrtc {
+// A factory used for dependency injection on the send side of the transport
+// controller.
+class RtpTransportControllerSendFactoryInterface {
+ public:
+ virtual std::unique_ptr<RtpTransportControllerSendInterface> Create(
+ const RtpTransportConfig& config,
+ Clock* clock,
+ std::unique_ptr<ProcessThread> process_thread) = 0;
+
+ virtual ~RtpTransportControllerSendFactoryInterface() {}
+};
+} // namespace webrtc
+#endif // CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_INTERFACE_H_
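A hedged sketch of how the two headers above are meant to compose (hypothetical caller, not part of this change): callers depend on the interface, tests can inject a fake, and production code falls back to the concrete factory.

#include <memory>
#include <utility>

#include "call/rtp_transport_controller_send_factory.h"
#include "call/rtp_transport_controller_send_factory_interface.h"

// Hypothetical helper showing injection through the interface.
std::unique_ptr<webrtc::RtpTransportControllerSendInterface>
CreateTransportController(
    webrtc::RtpTransportControllerSendFactoryInterface* injected_factory,
    const webrtc::RtpTransportConfig& config,
    webrtc::Clock* clock,
    std::unique_ptr<webrtc::ProcessThread> process_thread) {
  webrtc::RtpTransportControllerSendFactory default_factory;
  webrtc::RtpTransportControllerSendFactoryInterface* factory =
      injected_factory ? injected_factory : &default_factory;
  return factory->Create(config, clock, std::move(process_thread));
}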
diff --git a/call/rtp_transport_controller_send_interface.h b/call/rtp_transport_controller_send_interface.h
index 59263447a4..2aa6d739da 100644
--- a/call/rtp_transport_controller_send_interface.h
+++ b/call/rtp_transport_controller_send_interface.h
@@ -52,7 +52,6 @@ struct RtpSenderObservers {
RtcpRttStats* rtcp_rtt_stats;
RtcpIntraFrameObserver* intra_frame_callback;
RtcpLossNotificationObserver* rtcp_loss_notification_observer;
- RtcpStatisticsCallback* rtcp_stats;
ReportBlockDataObserver* report_block_data_observer;
StreamDataCountersCallback* rtp_stats;
BitrateStatisticsObserver* bitrate_observer;
diff --git a/call/rtp_video_sender.cc b/call/rtp_video_sender.cc
index 1f17c7e989..e20ba321c9 100644
--- a/call/rtp_video_sender.cc
+++ b/call/rtp_video_sender.cc
@@ -31,6 +31,7 @@
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/task_queue.h"
+#include "rtc_base/trace_event.h"
namespace webrtc {
@@ -216,7 +217,6 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
configuration.rtt_stats = observers.rtcp_rtt_stats;
configuration.rtcp_packet_type_counter_observer =
observers.rtcp_type_observer;
- configuration.rtcp_statistics_callback = observers.rtcp_stats;
configuration.report_block_data_observer =
observers.report_block_data_observer;
configuration.paced_sender = transport->packet_sender();
@@ -367,6 +367,9 @@ RtpVideoSender::RtpVideoSender(
field_trials_.Lookup("WebRTC-Video-UseFrameRateForOverhead"),
"Enabled")),
has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)),
+ simulate_vp9_structure_(absl::StartsWith(
+ field_trials_.Lookup("WebRTC-Vp9DependencyDescriptor"),
+ "Enabled")),
active_(false),
module_process_thread_(nullptr),
suspended_ssrcs_(std::move(suspended_ssrcs)),
@@ -459,6 +462,7 @@ RtpVideoSender::~RtpVideoSender() {
void RtpVideoSender::RegisterProcessThread(
ProcessThread* module_process_thread) {
+ TRACE_EVENT0("webrtc", "RtpVideoSender::RegisterProcessThread");
RTC_DCHECK_RUN_ON(&module_process_thread_checker_);
RTC_DCHECK(!module_process_thread_);
module_process_thread_ = module_process_thread;
@@ -578,10 +582,18 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
// If the encoder adapter produces a FrameDependencyStructure, pass it so that
// the dependency descriptor RTP header extension can be used.
// If not supported, disable the dependency descriptor by passing nullptr.
- rtp_streams_[stream_index].sender_video->SetVideoStructure(
- (codec_specific_info && codec_specific_info->template_structure)
- ? &*codec_specific_info->template_structure
- : nullptr);
+ RTPSenderVideo& sender_video = *rtp_streams_[stream_index].sender_video;
+ if (codec_specific_info && codec_specific_info->template_structure) {
+ sender_video.SetVideoStructure(&*codec_specific_info->template_structure);
+ } else if (simulate_vp9_structure_ && codec_specific_info &&
+ codec_specific_info->codecType == kVideoCodecVP9) {
+ FrameDependencyStructure structure =
+ RtpPayloadParams::MinimalisticVp9Structure(
+ codec_specific_info->codecSpecific.VP9);
+ sender_video.SetVideoStructure(&structure);
+ } else {
+ sender_video.SetVideoStructure(nullptr);
+ }
}
bool send_result = rtp_streams_[stream_index].sender_video->SendEncodedImage(
@@ -920,43 +932,45 @@ void RtpVideoSender::OnPacketFeedbackVector(
// Map from SSRC to all acked packets for that RTP module.
std::map<uint32_t, std::vector<uint16_t>> acked_packets_per_ssrc;
for (const StreamPacketInfo& packet : packet_feedback_vector) {
- if (packet.received) {
- acked_packets_per_ssrc[packet.ssrc].push_back(packet.rtp_sequence_number);
+ if (packet.received && packet.ssrc) {
+ acked_packets_per_ssrc[*packet.ssrc].push_back(
+ packet.rtp_sequence_number);
}
}
- // Map from SSRC to vector of RTP sequence numbers that are indicated as
- // lost by feedback, without being trailed by any received packets.
- std::map<uint32_t, std::vector<uint16_t>> early_loss_detected_per_ssrc;
+ // Map from SSRC to vector of RTP sequence numbers that are indicated as
+ // lost by feedback, without being trailed by any received packets.
+ std::map<uint32_t, std::vector<uint16_t>> early_loss_detected_per_ssrc;
- for (const StreamPacketInfo& packet : packet_feedback_vector) {
- if (!packet.received) {
- // Last known lost packet, might not be detectable as lost by remote
- // jitter buffer.
- early_loss_detected_per_ssrc[packet.ssrc].push_back(
- packet.rtp_sequence_number);
- } else {
- // Packet received, so any loss prior to this is already detectable.
- early_loss_detected_per_ssrc.erase(packet.ssrc);
- }
+ for (const StreamPacketInfo& packet : packet_feedback_vector) {
+ // Only include new media packets, not retransmissions/padding/fec.
+ if (!packet.received && packet.ssrc && !packet.is_retransmission) {
+ // Last known lost packet, might not be detectable as lost by remote
+ // jitter buffer.
+ early_loss_detected_per_ssrc[*packet.ssrc].push_back(
+ packet.rtp_sequence_number);
+ } else {
+ // Packet received, so any loss prior to this is already detectable.
+ early_loss_detected_per_ssrc.erase(*packet.ssrc);
}
+ }
- for (const auto& kv : early_loss_detected_per_ssrc) {
- const uint32_t ssrc = kv.first;
- auto it = ssrc_to_rtp_module_.find(ssrc);
- RTC_DCHECK(it != ssrc_to_rtp_module_.end());
- RTPSender* rtp_sender = it->second->RtpSender();
- for (uint16_t sequence_number : kv.second) {
- rtp_sender->ReSendPacket(sequence_number);
- }
+ for (const auto& kv : early_loss_detected_per_ssrc) {
+ const uint32_t ssrc = kv.first;
+ auto it = ssrc_to_rtp_module_.find(ssrc);
+ RTC_CHECK(it != ssrc_to_rtp_module_.end());
+ RTPSender* rtp_sender = it->second->RtpSender();
+ for (uint16_t sequence_number : kv.second) {
+ rtp_sender->ReSendPacket(sequence_number);
}
+ }
for (const auto& kv : acked_packets_per_ssrc) {
const uint32_t ssrc = kv.first;
auto it = ssrc_to_rtp_module_.find(ssrc);
if (it == ssrc_to_rtp_module_.end()) {
- // Packets not for a media SSRC, so likely RTX or FEC. If so, ignore
- // since there's no RTP history to clean up anyway.
+ // No media, likely FEC or padding. Ignore since there's no RTP history to
+ // clean up anyway.
continue;
}
rtc::ArrayView<const uint16_t> rtp_sequence_numbers(kv.second);
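The SetVideoStructure block added in the hunk above reduces to a three-way choice; a condensed restatement of that logic (illustration only, names as in the diff), which may help when reading the SupportsDependencyDescriptorForVp9NotProvidedByEncoder test further down:

// Condensed restatement of the structure selection above (not new behaviour).
void SelectVideoStructure(webrtc::RTPSenderVideo& sender_video,
                          const webrtc::CodecSpecificInfo* info,
                          bool simulate_vp9_structure) {
  if (info && info->template_structure) {
    // The encoder provided a FrameDependencyStructure: use it directly.
    sender_video.SetVideoStructure(&*info->template_structure);
  } else if (simulate_vp9_structure && info &&
             info->codecType == webrtc::kVideoCodecVP9) {
    // WebRTC-Vp9DependencyDescriptor field trial: synthesize a minimal VP9
    // structure so the dependency descriptor extension can still be written.
    webrtc::FrameDependencyStructure structure =
        webrtc::RtpPayloadParams::MinimalisticVp9Structure(
            info->codecSpecific.VP9);
    sender_video.SetVideoStructure(&structure);
  } else {
    // No structure available: fall back to not using the extension.
    sender_video.SetVideoStructure(nullptr);
  }
}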
diff --git a/call/rtp_video_sender.h b/call/rtp_video_sender.h
index cf71f1eff6..611edc6b27 100644
--- a/call/rtp_video_sender.h
+++ b/call/rtp_video_sender.h
@@ -178,6 +178,7 @@ class RtpVideoSender : public RtpVideoSenderInterface,
const bool send_side_bwe_with_overhead_;
const bool use_frame_rate_for_overhead_;
const bool has_packet_feedback_;
+ const bool simulate_vp9_structure_;
// TODO(holmer): Remove mutex_ once RtpVideoSender runs on the
// transport task queue.
diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc
index e8689e7fc6..334d97ccfa 100644
--- a/call/rtp_video_sender_unittest.cc
+++ b/call/rtp_video_sender_unittest.cc
@@ -61,7 +61,6 @@ class MockRtcpIntraFrameObserver : public RtcpIntraFrameObserver {
RtpSenderObservers CreateObservers(
RtcpRttStats* rtcp_rtt_stats,
RtcpIntraFrameObserver* intra_frame_callback,
- RtcpStatisticsCallback* rtcp_stats,
ReportBlockDataObserver* report_block_data_observer,
StreamDataCountersCallback* rtp_stats,
BitrateStatisticsObserver* bitrate_observer,
@@ -73,7 +72,6 @@ RtpSenderObservers CreateObservers(
observers.rtcp_rtt_stats = rtcp_rtt_stats;
observers.intra_frame_callback = intra_frame_callback;
observers.rtcp_loss_notification_observer = nullptr;
- observers.rtcp_stats = rtcp_stats;
observers.report_block_data_observer = report_block_data_observer;
observers.rtp_stats = rtp_stats;
observers.bitrate_observer = bitrate_observer;
@@ -107,6 +105,7 @@ VideoSendStream::Config CreateVideoSendStreamConfig(
kTransportsSequenceExtensionId);
config.rtp.extensions.emplace_back(RtpDependencyDescriptorExtension::kUri,
kDependencyDescriptorExtensionId);
+ config.rtp.extmap_allow_mixed = true;
return config;
}
@@ -146,9 +145,8 @@ class RtpVideoSenderTestFixture {
time_controller_.GetClock(), suspended_ssrcs, suspended_payload_states,
config_.rtp, config_.rtcp_report_interval_ms, &transport_,
CreateObservers(nullptr, &encoder_feedback_, &stats_proxy_,
- &stats_proxy_, &stats_proxy_, &stats_proxy_,
- frame_count_observer, &stats_proxy_, &stats_proxy_,
- &send_delay_stats_),
+ &stats_proxy_, &stats_proxy_, frame_count_observer,
+ &stats_proxy_, &stats_proxy_, &send_delay_stats_),
&transport_controller_, &event_log_, &retransmission_rate_limiter_,
std::make_unique<FecControllerDefault>(time_controller_.GetClock()),
nullptr, CryptoOptions{}, frame_transformer);
@@ -464,11 +462,13 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) {
lost_packet_feedback.rtp_sequence_number = rtp_sequence_numbers[0];
lost_packet_feedback.ssrc = kSsrc1;
lost_packet_feedback.received = false;
+ lost_packet_feedback.is_retransmission = false;
StreamFeedbackObserver::StreamPacketInfo received_packet_feedback;
received_packet_feedback.rtp_sequence_number = rtp_sequence_numbers[1];
received_packet_feedback.ssrc = kSsrc1;
received_packet_feedback.received = true;
+ received_packet_feedback.is_retransmission = false;
test.router()->OnPacketFeedbackVector(
{lost_packet_feedback, received_packet_feedback});
@@ -640,11 +640,13 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) {
first_packet_feedback.rtp_sequence_number = frame1_rtp_sequence_number;
first_packet_feedback.ssrc = kSsrc1;
first_packet_feedback.received = false;
+ first_packet_feedback.is_retransmission = false;
StreamFeedbackObserver::StreamPacketInfo second_packet_feedback;
second_packet_feedback.rtp_sequence_number = frame2_rtp_sequence_number;
second_packet_feedback.ssrc = kSsrc2;
second_packet_feedback.received = true;
+ second_packet_feedback.is_retransmission = false;
test.router()->OnPacketFeedbackVector(
{first_packet_feedback, second_packet_feedback});
@@ -768,6 +770,62 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) {
EXPECT_TRUE(sent_packets[1].HasExtension<RtpDependencyDescriptorExtension>());
}
+TEST(RtpVideoSenderTest,
+ SupportsDependencyDescriptorForVp9NotProvidedByEncoder) {
+ test::ScopedFieldTrials field_trials(
+ "WebRTC-Vp9DependencyDescriptor/Enabled/");
+ RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {});
+ test.router()->SetActive(true);
+
+ RtpHeaderExtensionMap extensions;
+ extensions.Register<RtpDependencyDescriptorExtension>(
+ kDependencyDescriptorExtensionId);
+ std::vector<RtpPacket> sent_packets;
+ ON_CALL(test.transport(), SendRtp)
+ .WillByDefault([&](const uint8_t* packet, size_t length,
+ const PacketOptions& options) {
+ sent_packets.emplace_back(&extensions);
+ EXPECT_TRUE(sent_packets.back().Parse(packet, length));
+ return true;
+ });
+
+ const uint8_t kPayload[1] = {'a'};
+ EncodedImage encoded_image;
+ encoded_image.SetTimestamp(1);
+ encoded_image.capture_time_ms_ = 2;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ encoded_image._encodedWidth = 320;
+ encoded_image._encodedHeight = 180;
+ encoded_image.SetEncodedData(
+ EncodedImageBuffer::Create(kPayload, sizeof(kPayload)));
+
+ CodecSpecificInfo codec_specific;
+ codec_specific.codecType = VideoCodecType::kVideoCodecVP9;
+ codec_specific.codecSpecific.VP9.num_spatial_layers = 1;
+ codec_specific.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
+ codec_specific.codecSpecific.VP9.first_frame_in_picture = true;
+ codec_specific.end_of_picture = true;
+ codec_specific.codecSpecific.VP9.inter_pic_predicted = false;
+
+ // Send two tiny images, each mapping to a single RTP packet.
+ EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error,
+ EncodedImageCallback::Result::OK);
+
+ // Send the 2nd picture.
+ encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
+ encoded_image.SetTimestamp(3000);
+ codec_specific.codecSpecific.VP9.inter_pic_predicted = true;
+ codec_specific.codecSpecific.VP9.num_ref_pics = 1;
+ codec_specific.codecSpecific.VP9.p_diff[0] = 1;
+ EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error,
+ EncodedImageCallback::Result::OK);
+
+ test.AdvanceTime(TimeDelta::Millis(33));
+ ASSERT_THAT(sent_packets, SizeIs(2));
+ EXPECT_TRUE(sent_packets[0].HasExtension<RtpDependencyDescriptorExtension>());
+ EXPECT_TRUE(sent_packets[1].HasExtension<RtpDependencyDescriptorExtension>());
+}
+
TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) {
RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {});
test.router()->SetActive(true);
@@ -891,7 +949,7 @@ TEST(RtpVideoSenderTest, CanSetZeroBitrate) {
TEST(RtpVideoSenderTest, SimulcastSenderRegistersFrameTransformers) {
rtc::scoped_refptr<MockFrameTransformer> transformer =
- new rtc::RefCountedObject<MockFrameTransformer>();
+ rtc::make_ref_counted<MockFrameTransformer>();
EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc1));
EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc2));
diff --git a/call/rtx_receive_stream.cc b/call/rtx_receive_stream.cc
index 9e4a41bc8f..c0b138b416 100644
--- a/call/rtx_receive_stream.cc
+++ b/call/rtx_receive_stream.cc
@@ -64,7 +64,7 @@ void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) {
media_packet.SetSequenceNumber((payload[0] << 8) + payload[1]);
media_packet.SetPayloadType(it->second);
media_packet.set_recovered(true);
- media_packet.set_arrival_time_ms(rtx_packet.arrival_time_ms());
+ media_packet.set_arrival_time(rtx_packet.arrival_time());
// Skip the RTX header.
rtc::ArrayView<const uint8_t> rtx_payload = payload.subview(kRtxHeaderSize);
diff --git a/call/rtx_receive_stream_unittest.cc b/call/rtx_receive_stream_unittest.cc
index 75086fef9c..b06990820f 100644
--- a/call/rtx_receive_stream_unittest.cc
+++ b/call/rtx_receive_stream_unittest.cc
@@ -194,9 +194,9 @@ TEST(RtxReceiveStreamTest, PropagatesArrivalTime) {
RtxReceiveStream rtx_sink(&media_sink, PayloadTypeMapping(), kMediaSSRC);
RtpPacketReceived rtx_packet(nullptr);
EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView<const uint8_t>(kRtxPacket)));
- rtx_packet.set_arrival_time_ms(123);
- EXPECT_CALL(media_sink,
- OnRtpPacket(Property(&RtpPacketReceived::arrival_time_ms, 123)));
+ rtx_packet.set_arrival_time(Timestamp::Millis(123));
+ EXPECT_CALL(media_sink, OnRtpPacket(Property(&RtpPacketReceived::arrival_time,
+ Timestamp::Millis(123))));
rtx_sink.OnRtpPacket(rtx_packet);
}
diff --git a/call/version.cc b/call/version.cc
index 0d4a8bad21..6ee1dab3fa 100644
--- a/call/version.cc
+++ b/call/version.cc
@@ -13,7 +13,7 @@
namespace webrtc {
// The timestamp is always in UTC.
-const char* const kSourceTimestamp = "WebRTC source stamp 2021-04-19T04:03:03";
+const char* const kSourceTimestamp = "WebRTC source stamp 2021-06-22T04:05:30";
void LoadWebRTCVersionInRegister() {
// Using volatile to instruct the compiler to not optimize `p` away even
diff --git a/call/video_receive_stream.cc b/call/video_receive_stream.cc
index e0f3de366b..0b95d66767 100644
--- a/call/video_receive_stream.cc
+++ b/call/video_receive_stream.cc
@@ -74,8 +74,10 @@ std::string VideoReceiveStream::Stats::ToString(int64_t time_ms) const {
VideoReceiveStream::Config::Config(const Config&) = default;
VideoReceiveStream::Config::Config(Config&&) = default;
-VideoReceiveStream::Config::Config(Transport* rtcp_send_transport)
- : rtcp_send_transport(rtcp_send_transport) {}
+VideoReceiveStream::Config::Config(Transport* rtcp_send_transport,
+ VideoDecoderFactory* decoder_factory)
+ : decoder_factory(decoder_factory),
+ rtcp_send_transport(rtcp_send_transport) {}
VideoReceiveStream::Config& VideoReceiveStream::Config::operator=(Config&&) =
default;
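A small hedged sketch of the widened constructor in use (hypothetical call site): the decoder factory can now be passed at construction time instead of being assigned to the decoder_factory field afterwards.

// Hypothetical illustration of the new two-argument Config constructor.
webrtc::VideoReceiveStream::Config BuildReceiveConfig(
    webrtc::Transport* rtcp_send_transport,
    webrtc::VideoDecoderFactory* decoder_factory) {
  // Equivalent to Config(rtcp_send_transport) followed by
  // config.decoder_factory = decoder_factory;
  webrtc::VideoReceiveStream::Config config(rtcp_send_transport,
                                            decoder_factory);
  config.rtp.nack.rtp_history_ms = 1000;  // Example: enable NACK.
  return config;
}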
diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h
index cc9020b5ed..61edc886cb 100644
--- a/call/video_receive_stream.h
+++ b/call/video_receive_stream.h
@@ -20,17 +20,15 @@
#include "api/call/transport.h"
#include "api/crypto/crypto_options.h"
-#include "api/crypto/frame_decryptor_interface.h"
-#include "api/frame_transformer_interface.h"
#include "api/rtp_headers.h"
#include "api/rtp_parameters.h"
-#include "api/transport/rtp/rtp_source.h"
#include "api/video/recordable_encoded_frame.h"
#include "api/video/video_content_type.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "api/video/video_timing.h"
#include "api/video_codecs/sdp_video_format.h"
+#include "call/receive_stream.h"
#include "call/rtp_config.h"
#include "common_video/frame_counts.h"
#include "modules/rtp_rtcp/include/rtcp_statistics.h"
@@ -41,7 +39,7 @@ namespace webrtc {
class RtpPacketSinkInterface;
class VideoDecoderFactory;
-class VideoReceiveStream {
+class VideoReceiveStream : public MediaReceiveStream {
public:
// Class for handling moving in/out recording state.
struct RecordingState {
@@ -53,11 +51,6 @@ class VideoReceiveStream {
// Callback stored from the VideoReceiveStream. The VideoReceiveStream
// client should not interpret the attribute.
std::function<void(const RecordableEncodedFrame&)> callback;
- // Memento of internal state in VideoReceiveStream, recording wether
- // we're currently causing generation of a keyframe from the sender. Needed
- // to avoid sending double keyframe requests. The VideoReceiveStream client
- // should not interpret the attribute.
- bool keyframe_needed = false;
// Memento of when a keyframe request was last sent. The VideoReceiveStream
// client should not interpret the attribute.
absl::optional<int64_t> last_keyframe_request_ms;
@@ -157,7 +150,8 @@ class VideoReceiveStream {
public:
Config() = delete;
Config(Config&&);
- explicit Config(Transport* rtcp_send_transport);
+ Config(Transport* rtcp_send_transport,
+ VideoDecoderFactory* decoder_factory = nullptr);
Config& operator=(Config&&);
Config& operator=(const Config&) = delete;
~Config();
@@ -174,17 +168,14 @@ class VideoReceiveStream {
VideoDecoderFactory* decoder_factory = nullptr;
// Receive-stream specific RTP settings.
- struct Rtp {
+ struct Rtp : public RtpConfig {
Rtp();
Rtp(const Rtp&);
~Rtp();
std::string ToString() const;
- // Synchronization source (stream identifier) to be received.
- uint32_t remote_ssrc = 0;
-
- // Sender SSRC used for sending RTCP (such as receiver reports).
- uint32_t local_ssrc = 0;
+ // See NackConfig for description.
+ NackConfig nack;
// See RtcpMode for description.
RtcpMode rtcp_mode = RtcpMode::kCompound;
@@ -196,15 +187,9 @@ class VideoReceiveStream {
bool receiver_reference_time_report = false;
} rtcp_xr;
- // See draft-holmer-rmcat-transport-wide-cc-extensions for details.
- bool transport_cc = false;
-
// See LntfConfig for description.
LntfConfig lntf;
- // See NackConfig for description.
- NackConfig nack;
-
// Payload types for ULPFEC and RED, respectively.
int ulpfec_payload_type = -1;
int red_payload_type = -1;
@@ -228,9 +213,6 @@ class VideoReceiveStream {
// meta data is expected to be present in generic frame descriptor
// RTP header extension).
std::set<int> raw_payload_types;
-
- // RTP header extensions used for the received stream.
- std::vector<RtpExtension> extensions;
} rtp;
// Transport for outgoing packets (RTCP).
@@ -256,10 +238,6 @@ class VideoReceiveStream {
// used for streaming instead of a real-time call.
int target_delay_ms = 0;
- // TODO(nisse): Used with VideoDecoderFactory::LegacyCreateVideoDecoder.
- // Delete when that method is retired.
- std::string stream_id;
-
// An optional custom frame decryptor that allows the entire frame to be
// decrypted in whatever way the caller chooses. This is not required by
// default.
@@ -271,18 +249,9 @@ class VideoReceiveStream {
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer;
};
- // Starts stream activity.
- // When a stream is active, it can receive, process and deliver packets.
- virtual void Start() = 0;
- // Stops stream activity.
- // When a stream is stopped, it can't receive, process or deliver packets.
- virtual void Stop() = 0;
-
// TODO(pbos): Add info on currently-received codec to Stats.
virtual Stats GetStats() const = 0;
- virtual std::vector<RtpSource> GetSources() const = 0;
-
// Sets a base minimum for the playout delay. Base minimum delay sets lower
// bound on minimum delay value determining lower bound on playout delay.
//
@@ -292,16 +261,6 @@ class VideoReceiveStream {
// Returns current value of base minimum delay in milliseconds.
virtual int GetBaseMinimumPlayoutDelayMs() const = 0;
- // Allows a FrameDecryptor to be attached to a VideoReceiveStream after
- // creation without resetting the decoder state.
- virtual void SetFrameDecryptor(
- rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) = 0;
-
- // Allows a frame transformer to be attached to a VideoReceiveStream after
- // creation without resetting the decoder state.
- virtual void SetDepacketizerToDecoderFrameTransformer(
- rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) = 0;
-
// Sets and returns recording state. The old state is moved out
// of the video receive stream and returned to the caller, and |state|
// is moved in. If the state's callback is set, it will be called with
diff --git a/call/video_send_stream.cc b/call/video_send_stream.cc
index 244d78089c..25513e4e4c 100644
--- a/call/video_send_stream.cc
+++ b/call/video_send_stream.cc
@@ -51,8 +51,13 @@ std::string VideoSendStream::StreamStats::ToString() const {
ss << "retransmit_bps: " << retransmit_bitrate_bps << ", ";
ss << "avg_delay_ms: " << avg_delay_ms << ", ";
ss << "max_delay_ms: " << max_delay_ms << ", ";
- ss << "cum_loss: " << rtcp_stats.packets_lost << ", ";
- ss << "max_ext_seq: " << rtcp_stats.extended_highest_sequence_number << ", ";
+ if (report_block_data) {
+ ss << "cum_loss: " << report_block_data->report_block().packets_lost
+ << ", ";
+ ss << "max_ext_seq: "
+ << report_block_data->report_block().extended_highest_sequence_number
+ << ", ";
+ }
ss << "nack: " << rtcp_packet_type_counts.nack_packets << ", ";
ss << "fir: " << rtcp_packet_type_counts.fir_packets << ", ";
ss << "pli: " << rtcp_packet_type_counts.pli_packets;
diff --git a/call/video_send_stream.h b/call/video_send_stream.h
index 5c8906fbaf..fd7a101b0a 100644
--- a/call/video_send_stream.h
+++ b/call/video_send_stream.h
@@ -82,7 +82,6 @@ class VideoSendStream {
uint64_t total_packet_send_delay_ms = 0;
StreamDataCounters rtp_stats;
RtcpPacketTypeCounter rtcp_packet_type_counts;
- RtcpStatistics rtcp_stats;
// A snapshot of the most recent Report Block with additional data of
// interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats.
absl::optional<ReportBlockData> report_block_data;
diff --git a/common_audio/signal_processing/division_operations.c b/common_audio/signal_processing/division_operations.c
index c6195e7999..4764ddfccd 100644
--- a/common_audio/signal_processing/division_operations.c
+++ b/common_audio/signal_processing/division_operations.c
@@ -98,8 +98,7 @@ int32_t WebRtcSpl_DivResultInQ31(int32_t num, int32_t den)
return div;
}
-int32_t RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5486
-WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low)
+int32_t WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low)
{
int16_t approx, tmp_hi, tmp_low, num_hi, num_low;
int32_t tmpW32;
@@ -111,8 +110,8 @@ WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low)
tmpW32 = (den_hi * approx << 1) + ((den_low * approx >> 15) << 1);
// tmpW32 = den * approx
- tmpW32 = (int32_t)0x7fffffffL - tmpW32; // result in Q30 (tmpW32 = 2.0-(den*approx))
- // UBSan: 2147483647 - -2 cannot be represented in type 'int'
+ // result in Q30 (tmpW32 = 2.0-(den*approx))
+ tmpW32 = (int32_t)((int64_t)0x7fffffffL - tmpW32);
// Store tmpW32 in hi and low format
tmp_hi = (int16_t)(tmpW32 >> 16);
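For context on the sanitizer fix above: with tmpW32 == -2 (the case cited in the removed UBSan comment), 0x7fffffff - (-2) is one past INT32_MAX, so the old 32-bit subtraction overflowed. Doing the subtraction in 64 bits and narrowing afterwards avoids the undefined behaviour while producing the same wrapped value on the two's-complement targets WebRTC builds for. A tiny standalone check of that arithmetic (illustration, not library code):

#include <cstdint>
#include <cstdio>

int main() {
  const int32_t tmpW32 = -2;  // The value noted in bugs.webrtc.org/5486.
  // 0x7fffffff - (-2) == 0x80000001 does not fit in int32_t, so doing the
  // subtraction in 32 bits was undefined; in 64 bits it is well defined.
  const int64_t wide = static_cast<int64_t>(0x7fffffffL) - tmpW32;
  // The narrowing cast wraps to the value the old code happened to produce.
  const int32_t narrow = static_cast<int32_t>(wide);
  std::printf("wide=%lld narrow=%d\n", static_cast<long long>(wide), narrow);
  return 0;
}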
diff --git a/common_video/h264/h264_bitstream_parser.cc b/common_video/h264/h264_bitstream_parser.cc
index b0ada92d74..3b41599fa0 100644
--- a/common_video/h264/h264_bitstream_parser.cc
+++ b/common_video/h264/h264_bitstream_parser.cc
@@ -28,11 +28,13 @@ const int kMaxQpValue = 51;
namespace webrtc {
-#define RETURN_ON_FAIL(x, res) \
- if (!(x)) { \
- RTC_LOG_F(LS_ERROR) << "FAILED: " #x; \
- return res; \
- }
+#define RETURN_ON_FAIL(x, res) \
+ do { \
+ if (!(x)) { \
+ RTC_LOG_F(LS_ERROR) << "FAILED: " #x; \
+ return res; \
+ } \
+ } while (0)
#define RETURN_INV_ON_FAIL(x) RETURN_ON_FAIL(x, kInvalidStream)
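The do { ... } while (0) wrapper added to RETURN_ON_FAIL above (and to RETURN_EMPTY_ON_FAIL in pps_parser.cc further down) is the usual fix for multi-statement macros: it makes the expansion a single statement that composes safely with if/else and requires a trailing semicolon. A minimal illustration of the failure mode it avoids (hypothetical macro names, not the WebRTC ones):

#define NAKED_RETURN_ON_FAIL(x, res) \
  if (!(x)) {                        \
    return res;                      \
  }

#define SAFE_RETURN_ON_FAIL(x, res) \
  do {                              \
    if (!(x)) {                     \
      return res;                   \
    }                               \
  } while (0)

int Parse(bool have_data, bool strict) {
  // With NAKED_RETURN_ON_FAIL(have_data, -1); the trailing ';' would leave the
  // following `else` without an `if` (a compile error), and dropping the ';'
  // would silently bind the `else` to the macro's internal `if`. The do/while
  // form expands to a single statement, so it behaves like a normal call.
  if (strict)
    SAFE_RETURN_ON_FAIL(have_data, -1);
  else
    return 0;
  return 1;
}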
@@ -62,64 +64,63 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
uint32_t bits_tmp;
// first_mb_in_slice: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
// slice_type: ue(v)
uint32_t slice_type;
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&slice_type));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(slice_type));
// slice_type's 5..9 range is used to indicate that all slices of a picture
// have the same value of slice_type % 5, we don't care about that, so we map
// to the corresponding 0..4 range.
slice_type %= 5;
// pic_parameter_set_id: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
if (sps_->separate_colour_plane_flag == 1) {
// colour_plane_id
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2));
+ RETURN_INV_ON_FAIL(slice_reader.ReadBits(2, bits_tmp));
}
// frame_num: u(v)
// Represented by log2_max_frame_num bits.
- RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&bits_tmp, sps_->log2_max_frame_num));
+ RETURN_INV_ON_FAIL(slice_reader.ReadBits(sps_->log2_max_frame_num, bits_tmp));
uint32_t field_pic_flag = 0;
if (sps_->frame_mbs_only_flag == 0) {
// field_pic_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&field_pic_flag, 1));
+ RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, field_pic_flag));
if (field_pic_flag != 0) {
// bottom_field_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1));
+ RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp));
}
}
if (is_idr) {
// idr_pic_id: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
// pic_order_cnt_lsb: u(v)
// Represented by sps_.log2_max_pic_order_cnt_lsb bits.
if (sps_->pic_order_cnt_type == 0) {
RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&bits_tmp, sps_->log2_max_pic_order_cnt_lsb));
+ slice_reader.ReadBits(sps_->log2_max_pic_order_cnt_lsb, bits_tmp));
if (pps_->bottom_field_pic_order_in_frame_present_flag &&
field_pic_flag == 0) {
// delta_pic_order_cnt_bottom: se(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
}
if (sps_->pic_order_cnt_type == 1 &&
!sps_->delta_pic_order_always_zero_flag) {
// delta_pic_order_cnt[0]: se(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
if (pps_->bottom_field_pic_order_in_frame_present_flag && !field_pic_flag) {
// delta_pic_order_cnt[1]: se(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
}
if (pps_->redundant_pic_cnt_present_flag) {
// redundant_pic_cnt: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
if (slice_type == H264::SliceType::kB) {
// direct_spatial_mv_pred_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1));
+ RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp));
}
switch (slice_type) {
case H264::SliceType::kP:
@@ -128,13 +129,13 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
uint32_t num_ref_idx_active_override_flag;
// num_ref_idx_active_override_flag: u(1)
RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&num_ref_idx_active_override_flag, 1));
+ slice_reader.ReadBits(1, num_ref_idx_active_override_flag));
if (num_ref_idx_active_override_flag != 0) {
// num_ref_idx_l0_active_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
if (slice_type == H264::SliceType::kB) {
// num_ref_idx_l1_active_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
}
break;
@@ -158,20 +159,20 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
// ref_pic_list_modification_flag_l0: u(1)
uint32_t ref_pic_list_modification_flag_l0;
RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&ref_pic_list_modification_flag_l0, 1));
+ slice_reader.ReadBits(1, ref_pic_list_modification_flag_l0));
if (ref_pic_list_modification_flag_l0) {
uint32_t modification_of_pic_nums_idc;
do {
// modification_of_pic_nums_idc: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(
- &modification_of_pic_nums_idc));
+ RETURN_INV_ON_FAIL(
+ slice_reader.ReadExponentialGolomb(modification_of_pic_nums_idc));
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
// abs_diff_pic_num_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
} else if (modification_of_pic_nums_idc == 2) {
// long_term_pic_num: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
} while (modification_of_pic_nums_idc != 3);
}
@@ -180,20 +181,20 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
// ref_pic_list_modification_flag_l1: u(1)
uint32_t ref_pic_list_modification_flag_l1;
RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&ref_pic_list_modification_flag_l1, 1));
+ slice_reader.ReadBits(1, ref_pic_list_modification_flag_l1));
if (ref_pic_list_modification_flag_l1) {
uint32_t modification_of_pic_nums_idc;
do {
// modification_of_pic_nums_idc: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(
- &modification_of_pic_nums_idc));
+ RETURN_INV_ON_FAIL(
+ slice_reader.ReadExponentialGolomb(modification_of_pic_nums_idc));
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
// abs_diff_pic_num_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
} else if (modification_of_pic_nums_idc == 2) {
// long_term_pic_num: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
} while (modification_of_pic_nums_idc != 3);
}
@@ -215,35 +216,35 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
if (is_idr) {
// no_output_of_prior_pics_flag: u(1)
// long_term_reference_flag: u(1)
- RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2));
+ RETURN_INV_ON_FAIL(slice_reader.ReadBits(2, bits_tmp));
} else {
// adaptive_ref_pic_marking_mode_flag: u(1)
uint32_t adaptive_ref_pic_marking_mode_flag;
RETURN_INV_ON_FAIL(
- slice_reader.ReadBits(&adaptive_ref_pic_marking_mode_flag, 1));
+ slice_reader.ReadBits(1, adaptive_ref_pic_marking_mode_flag));
if (adaptive_ref_pic_marking_mode_flag) {
uint32_t memory_management_control_operation;
do {
// memory_management_control_operation: ue(v)
RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(
- &memory_management_control_operation));
+ memory_management_control_operation));
if (memory_management_control_operation == 1 ||
memory_management_control_operation == 3) {
// difference_of_pic_nums_minus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
if (memory_management_control_operation == 2) {
// long_term_pic_num: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
if (memory_management_control_operation == 3 ||
memory_management_control_operation == 6) {
// long_term_frame_idx: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
if (memory_management_control_operation == 4) {
// max_long_term_frame_idx_plus1: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
} while (memory_management_control_operation != 0);
}
@@ -252,12 +253,12 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
if (pps_->entropy_coding_mode_flag && slice_type != H264::SliceType::kI &&
slice_type != H264::SliceType::kSi) {
// cabac_init_idc: ue(v)
- RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp));
+ RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp));
}
int32_t last_slice_qp_delta;
RETURN_INV_ON_FAIL(
- slice_reader.ReadSignedExponentialGolomb(&last_slice_qp_delta));
+ slice_reader.ReadSignedExponentialGolomb(last_slice_qp_delta));
if (abs(last_slice_qp_delta) > kMaxAbsQpDeltaValue) {
// Something has gone wrong, and the parsed value is invalid.
RTC_LOG(LS_WARNING) << "Parsed QP value out of range.";
@@ -275,14 +276,14 @@ void H264BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) {
sps_ = SpsParser::ParseSps(slice + H264::kNaluTypeSize,
length - H264::kNaluTypeSize);
if (!sps_)
- RTC_LOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream.";
+ RTC_DLOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream.";
break;
}
case H264::NaluType::kPps: {
pps_ = PpsParser::ParsePps(slice + H264::kNaluTypeSize,
length - H264::kNaluTypeSize);
if (!pps_)
- RTC_LOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream.";
+ RTC_DLOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream.";
break;
}
case H264::NaluType::kAud:
@@ -291,7 +292,7 @@ void H264BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) {
default:
Result res = ParseNonParameterSetNalu(slice, length, nalu_type);
if (res != kOk)
- RTC_LOG(LS_INFO) << "Failed to parse bitstream. Error: " << res;
+ RTC_DLOG(LS_INFO) << "Failed to parse bitstream. Error: " << res;
break;
}
}
diff --git a/common_video/h264/pps_parser.cc b/common_video/h264/pps_parser.cc
index ae01652189..3d3725f95a 100644
--- a/common_video/h264/pps_parser.cc
+++ b/common_video/h264/pps_parser.cc
@@ -18,9 +18,11 @@
#include "rtc_base/checks.h"
#define RETURN_EMPTY_ON_FAIL(x) \
- if (!(x)) { \
- return absl::nullopt; \
- }
+ do { \
+ if (!(x)) { \
+ return absl::nullopt; \
+ } \
+ } while (0)
namespace {
const int kMaxPicInitQpDeltaValue = 25;
@@ -64,14 +66,14 @@ absl::optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data,
uint32_t golomb_tmp;
// first_mb_in_slice: ue(v)
- if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
+ if (!slice_reader.ReadExponentialGolomb(golomb_tmp))
return absl::nullopt;
// slice_type: ue(v)
- if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
+ if (!slice_reader.ReadExponentialGolomb(golomb_tmp))
return absl::nullopt;
// pic_parameter_set_id: ue(v)
uint32_t slice_pps_id;
- if (!slice_reader.ReadExponentialGolomb(&slice_pps_id))
+ if (!slice_reader.ReadExponentialGolomb(slice_pps_id))
return absl::nullopt;
return slice_pps_id;
}
@@ -86,30 +88,29 @@ absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
uint32_t golomb_ignored;
// entropy_coding_mode_flag: u(1)
uint32_t entropy_coding_mode_flag;
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&entropy_coding_mode_flag, 1));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, entropy_coding_mode_flag));
pps.entropy_coding_mode_flag = entropy_coding_mode_flag != 0;
// bottom_field_pic_order_in_frame_present_flag: u(1)
uint32_t bottom_field_pic_order_in_frame_present_flag;
RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadBits(&bottom_field_pic_order_in_frame_present_flag, 1));
+ bit_buffer->ReadBits(1, bottom_field_pic_order_in_frame_present_flag));
pps.bottom_field_pic_order_in_frame_present_flag =
bottom_field_pic_order_in_frame_present_flag != 0;
// num_slice_groups_minus1: ue(v)
uint32_t num_slice_groups_minus1;
RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&num_slice_groups_minus1));
+ bit_buffer->ReadExponentialGolomb(num_slice_groups_minus1));
if (num_slice_groups_minus1 > 0) {
uint32_t slice_group_map_type;
// slice_group_map_type: ue(v)
RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&slice_group_map_type));
+ bit_buffer->ReadExponentialGolomb(slice_group_map_type));
if (slice_group_map_type == 0) {
for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1;
++i_group) {
// run_length_minus1[iGroup]: ue(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
}
} else if (slice_group_map_type == 1) {
// TODO(sprang): Implement support for dispersed slice group map type.
@@ -118,23 +119,21 @@ absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1;
++i_group) {
// top_left[iGroup]: ue(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
// bottom_right[iGroup]: ue(v)
- RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
}
} else if (slice_group_map_type == 3 || slice_group_map_type == 4 ||
slice_group_map_type == 5) {
// slice_group_change_direction_flag: u(1)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp));
// slice_group_change_rate_minus1: ue(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
} else if (slice_group_map_type == 6) {
// pic_size_in_map_units_minus1: ue(v)
uint32_t pic_size_in_map_units_minus1;
RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadExponentialGolomb(&pic_size_in_map_units_minus1));
+ bit_buffer->ReadExponentialGolomb(pic_size_in_map_units_minus1));
uint32_t slice_group_id_bits = 0;
uint32_t num_slice_groups = num_slice_groups_minus1 + 1;
// If num_slice_groups is not a power of two an additional bit is required
@@ -149,39 +148,39 @@ absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
// slice_group_id[i]: u(v)
// Represented by ceil(log2(num_slice_groups_minus1 + 1)) bits.
RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadBits(&bits_tmp, slice_group_id_bits));
+ bit_buffer->ReadBits(slice_group_id_bits, bits_tmp));
}
}
}
// num_ref_idx_l0_default_active_minus1: ue(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
// num_ref_idx_l1_default_active_minus1: ue(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
// weighted_pred_flag: u(1)
uint32_t weighted_pred_flag;
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&weighted_pred_flag, 1));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, weighted_pred_flag));
pps.weighted_pred_flag = weighted_pred_flag != 0;
// weighted_bipred_idc: u(2)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.weighted_bipred_idc, 2));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(2, pps.weighted_bipred_idc));
// pic_init_qp_minus26: se(v)
RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadSignedExponentialGolomb(&pps.pic_init_qp_minus26));
+ bit_buffer->ReadSignedExponentialGolomb(pps.pic_init_qp_minus26));
// Sanity-check parsed value
if (pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue ||
pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) {
RETURN_EMPTY_ON_FAIL(false);
}
// pic_init_qs_minus26: se(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
// chroma_qp_index_offset: se(v)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored));
// deblocking_filter_control_present_flag: u(1)
// constrained_intra_pred_flag: u(1)
- RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 2));
+ RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(2, bits_tmp));
// redundant_pic_cnt_present_flag: u(1)
RETURN_EMPTY_ON_FAIL(
- bit_buffer->ReadBits(&pps.redundant_pic_cnt_present_flag, 1));
+ bit_buffer->ReadBits(1, pps.redundant_pic_cnt_present_flag));
return pps;
}
@@ -189,11 +188,15 @@ absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
bool PpsParser::ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer,
uint32_t* pps_id,
uint32_t* sps_id) {
+ if (pps_id == nullptr)
+ return false;
// pic_parameter_set_id: ue(v)
- if (!bit_buffer->ReadExponentialGolomb(pps_id))
+ if (!bit_buffer->ReadExponentialGolomb(*pps_id))
+ return false;
+ if (sps_id == nullptr)
return false;
// seq_parameter_set_id: ue(v)
- if (!bit_buffer->ReadExponentialGolomb(sps_id))
+ if (!bit_buffer->ReadExponentialGolomb(*sps_id))
return false;
return true;
}
diff --git a/common_video/h264/pps_parser.h b/common_video/h264/pps_parser.h
index d6c31b0688..324a432ef3 100644
--- a/common_video/h264/pps_parser.h
+++ b/common_video/h264/pps_parser.h
@@ -11,6 +11,7 @@
#ifndef COMMON_VIDEO_H264_PPS_PARSER_H_
#define COMMON_VIDEO_H264_PPS_PARSER_H_
+#include <cstdint>
#include "absl/types/optional.h"
namespace rtc {
diff --git a/common_video/h264/sps_parser.cc b/common_video/h264/sps_parser.cc
index 3d78184e7a..f505928f29 100644
--- a/common_video/h264/sps_parser.cc
+++ b/common_video/h264/sps_parser.cc
@@ -71,14 +71,14 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
// profile_idc: u(8). We need it to determine if we need to read/skip chroma
// formats.
uint8_t profile_idc;
- RETURN_EMPTY_ON_FAIL(buffer->ReadUInt8(&profile_idc));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadUInt8(profile_idc));
// constraint_set0_flag through constraint_set5_flag + reserved_zero_2bits
// 1 bit each for the flags + 2 bits = 8 bits = 1 byte.
RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1));
// level_idc: u(8)
RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1));
// seq_parameter_set_id: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.id));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.id));
sps.separate_colour_plane_flag = 0;
// See if profile_idc has chroma format information.
if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 ||
@@ -86,21 +86,20 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
profile_idc == 86 || profile_idc == 118 || profile_idc == 128 ||
profile_idc == 138 || profile_idc == 139 || profile_idc == 134) {
// chroma_format_idc: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&chroma_format_idc));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(chroma_format_idc));
if (chroma_format_idc == 3) {
// separate_colour_plane_flag: u(1)
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadBits(&sps.separate_colour_plane_flag, 1));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.separate_colour_plane_flag));
}
// bit_depth_luma_minus8: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored));
// bit_depth_chroma_minus8: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored));
// qpprime_y_zero_transform_bypass_flag: u(1)
RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1));
// seq_scaling_matrix_present_flag: u(1)
uint32_t seq_scaling_matrix_present_flag;
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&seq_scaling_matrix_present_flag, 1));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, seq_scaling_matrix_present_flag));
if (seq_scaling_matrix_present_flag) {
// Process the scaling lists just enough to be able to properly
// skip over them, so we can still read the resolution on streams
@@ -110,7 +109,7 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
// seq_scaling_list_present_flag[i] : u(1)
uint32_t seq_scaling_list_present_flags;
RETURN_EMPTY_ON_FAIL(
- buffer->ReadBits(&seq_scaling_list_present_flags, 1));
+ buffer->ReadBits(1, seq_scaling_list_present_flags));
if (seq_scaling_list_present_flags != 0) {
int last_scale = 8;
int next_scale = 8;
@@ -120,7 +119,7 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
int32_t delta_scale;
// delta_scale: se(v)
RETURN_EMPTY_ON_FAIL(
- buffer->ReadSignedExponentialGolomb(&delta_scale));
+ buffer->ReadSignedExponentialGolomb(delta_scale));
RETURN_EMPTY_ON_FAIL(delta_scale >= kScalingDeltaMin &&
delta_scale <= kScaldingDeltaMax);
next_scale = (last_scale + delta_scale + 256) % 256;
@@ -140,18 +139,18 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
// log2_max_frame_num_minus4: ue(v)
uint32_t log2_max_frame_num_minus4;
- if (!buffer->ReadExponentialGolomb(&log2_max_frame_num_minus4) ||
+ if (!buffer->ReadExponentialGolomb(log2_max_frame_num_minus4) ||
log2_max_frame_num_minus4 > kMaxLog2Minus4) {
return OptionalSps();
}
sps.log2_max_frame_num = log2_max_frame_num_minus4 + 4;
// pic_order_cnt_type: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.pic_order_cnt_type));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.pic_order_cnt_type));
if (sps.pic_order_cnt_type == 0) {
// log2_max_pic_order_cnt_lsb_minus4: ue(v)
uint32_t log2_max_pic_order_cnt_lsb_minus4;
- if (!buffer->ReadExponentialGolomb(&log2_max_pic_order_cnt_lsb_minus4) ||
+ if (!buffer->ReadExponentialGolomb(log2_max_pic_order_cnt_lsb_minus4) ||
log2_max_pic_order_cnt_lsb_minus4 > kMaxLog2Minus4) {
return OptionalSps();
}
@@ -159,22 +158,22 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
} else if (sps.pic_order_cnt_type == 1) {
// delta_pic_order_always_zero_flag: u(1)
RETURN_EMPTY_ON_FAIL(
- buffer->ReadBits(&sps.delta_pic_order_always_zero_flag, 1));
+ buffer->ReadBits(1, sps.delta_pic_order_always_zero_flag));
// offset_for_non_ref_pic: se(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored));
// offset_for_top_to_bottom_field: se(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored));
// num_ref_frames_in_pic_order_cnt_cycle: ue(v)
uint32_t num_ref_frames_in_pic_order_cnt_cycle;
RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&num_ref_frames_in_pic_order_cnt_cycle));
+ buffer->ReadExponentialGolomb(num_ref_frames_in_pic_order_cnt_cycle));
for (size_t i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) {
// offset_for_ref_frame[i]: se(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored));
}
}
// max_num_ref_frames: ue(v)
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.max_num_ref_frames));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.max_num_ref_frames));
// gaps_in_frame_num_value_allowed_flag: u(1)
RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1));
//
@@ -185,13 +184,13 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
//
// pic_width_in_mbs_minus1: ue(v)
uint32_t pic_width_in_mbs_minus1;
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&pic_width_in_mbs_minus1));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(pic_width_in_mbs_minus1));
// pic_height_in_map_units_minus1: ue(v)
uint32_t pic_height_in_map_units_minus1;
RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&pic_height_in_map_units_minus1));
+ buffer->ReadExponentialGolomb(pic_height_in_map_units_minus1));
// frame_mbs_only_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.frame_mbs_only_flag, 1));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.frame_mbs_only_flag));
if (!sps.frame_mbs_only_flag) {
// mb_adaptive_frame_field_flag: u(1)
RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1));
@@ -207,19 +206,18 @@ absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
uint32_t frame_crop_right_offset = 0;
uint32_t frame_crop_top_offset = 0;
uint32_t frame_crop_bottom_offset = 0;
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&frame_cropping_flag, 1));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, frame_cropping_flag));
if (frame_cropping_flag) {
// frame_crop_{left, right, top, bottom}_offset: ue(v)
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(frame_crop_left_offset));
RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&frame_crop_left_offset));
+ buffer->ReadExponentialGolomb(frame_crop_right_offset));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(frame_crop_top_offset));
RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&frame_crop_right_offset));
- RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&frame_crop_top_offset));
- RETURN_EMPTY_ON_FAIL(
- buffer->ReadExponentialGolomb(&frame_crop_bottom_offset));
+ buffer->ReadExponentialGolomb(frame_crop_bottom_offset));
}
// vui_parameters_present_flag: u(1)
- RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.vui_params_present, 1));
+ RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.vui_params_present));
// Far enough! We don't use the rest of the SPS.
diff --git a/common_video/h264/sps_parser.h b/common_video/h264/sps_parser.h
index 76e627d27a..35bd6471a6 100644
--- a/common_video/h264/sps_parser.h
+++ b/common_video/h264/sps_parser.h
@@ -11,6 +11,7 @@
#ifndef COMMON_VIDEO_H264_SPS_PARSER_H_
#define COMMON_VIDEO_H264_SPS_PARSER_H_
+#include <cstdint>
#include "absl/types/optional.h"
namespace rtc {
diff --git a/common_video/h264/sps_vui_rewriter.cc b/common_video/h264/sps_vui_rewriter.cc
index 0d16be8254..856b012b32 100644
--- a/common_video/h264/sps_vui_rewriter.cc
+++ b/common_video/h264/sps_vui_rewriter.cc
@@ -45,29 +45,31 @@ enum SpsValidEvent {
kSpsRewrittenMax = 8
};
-#define RETURN_FALSE_ON_FAIL(x) \
- if (!(x)) { \
- RTC_LOG_F(LS_ERROR) << " (line:" << __LINE__ << ") FAILED: " #x; \
- return false; \
- }
+#define RETURN_FALSE_ON_FAIL(x) \
+ do { \
+ if (!(x)) { \
+ RTC_LOG_F(LS_ERROR) << " (line:" << __LINE__ << ") FAILED: " #x; \
+ return false; \
+ } \
+ } while (0)
#define COPY_UINT8(src, dest, tmp) \
do { \
- RETURN_FALSE_ON_FAIL((src)->ReadUInt8(&tmp)); \
+ RETURN_FALSE_ON_FAIL((src)->ReadUInt8(tmp)); \
if (dest) \
RETURN_FALSE_ON_FAIL((dest)->WriteUInt8(tmp)); \
} while (0)
#define COPY_EXP_GOLOMB(src, dest, tmp) \
do { \
- RETURN_FALSE_ON_FAIL((src)->ReadExponentialGolomb(&tmp)); \
+ RETURN_FALSE_ON_FAIL((src)->ReadExponentialGolomb(tmp)); \
if (dest) \
RETURN_FALSE_ON_FAIL((dest)->WriteExponentialGolomb(tmp)); \
} while (0)
#define COPY_BITS(src, dest, tmp, bits) \
do { \
- RETURN_FALSE_ON_FAIL((src)->ReadBits(&tmp, bits)); \
+ RETURN_FALSE_ON_FAIL((src)->ReadBits(bits, tmp)); \
if (dest) \
RETURN_FALSE_ON_FAIL((dest)->WriteBits(tmp, bits)); \
} while (0)
@@ -369,7 +371,7 @@ bool CopyAndRewriteVui(const SpsParser::SpsState& sps,
// bitstream_restriction_flag: u(1)
uint32_t bitstream_restriction_flag;
- RETURN_FALSE_ON_FAIL(source->ReadBits(&bitstream_restriction_flag, 1));
+ RETURN_FALSE_ON_FAIL(source->ReadBits(1, bitstream_restriction_flag));
RETURN_FALSE_ON_FAIL(destination->WriteBits(1, 1));
if (bitstream_restriction_flag == 0) {
// We're adding one from scratch.
@@ -396,9 +398,9 @@ bool CopyAndRewriteVui(const SpsParser::SpsState& sps,
// want, then we don't need to be rewriting.
uint32_t max_num_reorder_frames, max_dec_frame_buffering;
RETURN_FALSE_ON_FAIL(
- source->ReadExponentialGolomb(&max_num_reorder_frames));
+ source->ReadExponentialGolomb(max_num_reorder_frames));
RETURN_FALSE_ON_FAIL(
- source->ReadExponentialGolomb(&max_dec_frame_buffering));
+ source->ReadExponentialGolomb(max_dec_frame_buffering));
RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(0));
RETURN_FALSE_ON_FAIL(
destination->WriteExponentialGolomb(sps.max_num_ref_frames));
@@ -511,15 +513,15 @@ bool CopyOrRewriteVideoSignalTypeInfo(
uint8_t colour_primaries = 3; // H264 default: unspecified
uint8_t transfer_characteristics = 3; // H264 default: unspecified
uint8_t matrix_coefficients = 3; // H264 default: unspecified
- RETURN_FALSE_ON_FAIL(source->ReadBits(&video_signal_type_present_flag, 1));
+ RETURN_FALSE_ON_FAIL(source->ReadBits(1, video_signal_type_present_flag));
if (video_signal_type_present_flag) {
- RETURN_FALSE_ON_FAIL(source->ReadBits(&video_format, 3));
- RETURN_FALSE_ON_FAIL(source->ReadBits(&video_full_range_flag, 1));
- RETURN_FALSE_ON_FAIL(source->ReadBits(&colour_description_present_flag, 1));
+ RETURN_FALSE_ON_FAIL(source->ReadBits(3, video_format));
+ RETURN_FALSE_ON_FAIL(source->ReadBits(1, video_full_range_flag));
+ RETURN_FALSE_ON_FAIL(source->ReadBits(1, colour_description_present_flag));
if (colour_description_present_flag) {
- RETURN_FALSE_ON_FAIL(source->ReadUInt8(&colour_primaries));
- RETURN_FALSE_ON_FAIL(source->ReadUInt8(&transfer_characteristics));
- RETURN_FALSE_ON_FAIL(source->ReadUInt8(&matrix_coefficients));
+ RETURN_FALSE_ON_FAIL(source->ReadUInt8(colour_primaries));
+ RETURN_FALSE_ON_FAIL(source->ReadUInt8(transfer_characteristics));
+ RETURN_FALSE_ON_FAIL(source->ReadUInt8(matrix_coefficients));
}
}
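Illustrative sketch (not part of the patch): the RETURN_FALSE_ON_FAIL change above wraps the macro body in do { ... } while (0) so that an invocation followed by a semicolon expands to exactly one statement. The toy function below, using a hypothetical CHECK_OR_RETURN macro of the same shape, shows the case the wrapper protects: with a bare if-block macro, a following else either binds to the macro's hidden if or is broken by the stray semicolon.

    // Hypothetical macro in the same shape as the patched RETURN_FALSE_ON_FAIL.
    #define CHECK_OR_RETURN(x) \
      do {                     \
        if (!(x)) {            \
          return false;        \
        }                      \
      } while (0)

    bool Example(bool primary, bool fallback) {
      if (primary)
        CHECK_OR_RETURN(fallback);  // Expands to a single statement, so the else
      else                          // below still binds to `if (primary)`.
        return true;
      return true;
    }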
diff --git a/common_video/video_frame_buffer.cc b/common_video/video_frame_buffer.cc
index e2b77f6f77..703dec775b 100644
--- a/common_video/video_frame_buffer.cc
+++ b/common_video/video_frame_buffer.cc
@@ -212,7 +212,7 @@ rtc::scoped_refptr<I420BufferInterface> WrapI420Buffer(
int v_stride,
std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I420BufferInterface>(
- new rtc::RefCountedObject<WrappedYuvBuffer<I420BufferInterface>>(
+ rtc::make_ref_counted<WrappedYuvBuffer<I420BufferInterface>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, no_longer_used));
}
@@ -230,7 +230,7 @@ rtc::scoped_refptr<I420ABufferInterface> WrapI420ABuffer(
int a_stride,
std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I420ABufferInterface>(
- new rtc::RefCountedObject<WrappedYuvaBuffer<I420ABufferInterface>>(
+ rtc::make_ref_counted<WrappedYuvaBuffer<I420ABufferInterface>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, a_plane, a_stride, no_longer_used));
}
@@ -246,7 +246,7 @@ rtc::scoped_refptr<I444BufferInterface> WrapI444Buffer(
int v_stride,
std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I444BufferInterface>(
- new rtc::RefCountedObject<WrappedYuvBuffer<I444BufferBase>>(
+ rtc::make_ref_counted<WrappedYuvBuffer<I444BufferBase>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, no_longer_used));
}
@@ -285,7 +285,7 @@ rtc::scoped_refptr<I010BufferInterface> WrapI010Buffer(
int v_stride,
std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I010BufferInterface>(
- new rtc::RefCountedObject<WrappedYuv16BBuffer<I010BufferBase>>(
+ rtc::make_ref_counted<WrappedYuv16BBuffer<I010BufferBase>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, no_longer_used));
}
diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc
index 6df240d9fe..d225370a4d 100644
--- a/common_video/video_frame_buffer_pool.cc
+++ b/common_video/video_frame_buffer_pool.cc
@@ -107,7 +107,7 @@ rtc::scoped_refptr<I420Buffer> VideoFrameBufferPool::CreateI420Buffer(
return nullptr;
// Allocate new buffer.
rtc::scoped_refptr<I420Buffer> buffer =
- new rtc::RefCountedObject<I420Buffer>(width, height);
+ rtc::make_ref_counted<I420Buffer>(width, height);
if (zero_initialize_)
buffer->InitializeData();
@@ -138,7 +138,7 @@ rtc::scoped_refptr<NV12Buffer> VideoFrameBufferPool::CreateNV12Buffer(
return nullptr;
// Allocate new buffer.
rtc::scoped_refptr<NV12Buffer> buffer =
- new rtc::RefCountedObject<NV12Buffer>(width, height);
+ rtc::make_ref_counted<NV12Buffer>(width, height);
if (zero_initialize_)
buffer->InitializeData();
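Illustrative sketch (not part of the patch): the wrapper and buffer-pool hunks above replace direct `new rtc::RefCountedObject<T>(...)` allocations with rtc::make_ref_counted<T>(...), which hands back a scoped_refptr. A minimal call site under that assumption; the include for make_ref_counted is a guess (its location has varied between rtc_base/ref_counted_object.h and api/make_ref_counted.h across revisions).

    #include "api/scoped_refptr.h"
    #include "api/video/i420_buffer.h"
    #include "rtc_base/ref_counted_object.h"  // assumed home of rtc::make_ref_counted

    rtc::scoped_refptr<webrtc::I420Buffer> MakeBuffer(int width, int height) {
      // Old: new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
      return rtc::make_ref_counted<webrtc::I420Buffer>(width, height);
    }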
diff --git a/darwin_aarch64.cmake b/darwin_aarch64.cmake
index 4b629998a4..e88776ced0 100644
--- a/darwin_aarch64.cmake
+++ b/darwin_aarch64.cmake
@@ -1,4 +1,4 @@
-# Generated on 04/21/21 for target: Darwin-aarch64
+# Generated on 06/23/21 for target: Darwin-aarch64
# This is an autogenerated file by calling:
# ./import-webrtc.py --target webrtc_api_video_codecs_builtin_video_decoder_factory --target webrtc_api_video_codecs_builtin_video_encoder_factory --target webrtc_api_libjingle_peerconnection_api --target webrtc_pc_peerconnection --target webrtc_api_create_peerconnection_factory --target webrtc_api_audio_codecs_builtin_audio_decoder_factory --target webrtc_api_audio_codecs_builtin_audio_encoder_factory --target webrtc_common_audio_common_audio_unittests --target webrtc_common_video_common_video_unittests --target webrtc_media_rtc_media_unittests --target webrtc_modules_audio_coding_audio_decoder_unittests --target webrtc_pc_peerconnection_unittests --target webrtc_pc_rtc_pc_unittests --root /usr/local/google/home/jansene/src/webrtc_g3/ --platform Darwin-aarch64 BUILD .
@@ -6,11 +6,11 @@
# Re-running this script will require you to merge in the latest upstream-master for webrtc
# Expecting jsoncpp at 9059f5cad030ba11d37818847443a53918c327b1
-# Expecting libaom at 6c93db7ff63430d2e5dcdfc42e84e3a0514da608
-# Expecting libsrtp2 at 7990ca64c616b150a9cb4714601c4a3b0c84fe91
+# Expecting libaom at 12287adee94fc3b1f5349d3f4bd85cea4e57f62b
+# Expecting libsrtp2 at 5b7c744eb8310250ccc534f3f86a2015b3887a0a
# Expecting libvpx at 61edec1efbea1c02d71857e2aff9426d9cd2df4e
-# Expecting libyuv at 64994843e652443df2d5201c6ae3fb725097360f
-# Expecting usrsctp at 70d42ae95a1de83bd317c8cc9503f894671d1392
+# Expecting libyuv at 49ebc996aa8c4bdf89c1b5ea461eb677234c61cc
+# Expecting usrsctp at 22ba62ffe79c3881581ab430368bf3764d9533eb
@@ -294,12 +294,12 @@ target_include_directories(webrtc_api_call_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_
# api:callfactory_api
add_library(webrtc_api_callfactory_api INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_call_rtp_interfaces webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_callfactory_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:callfactory_api.headers
add_library(webrtc_api_callfactory_api.headers INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_call_rtp_interfaces.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_callfactory_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:create_frame_generator
@@ -328,7 +328,7 @@ add_library(webrtc_api_create_time_controller ${WEBRTC_ROOT}/api/test/create_tim
target_include_directories(webrtc_api_create_time_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_create_time_controller PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_api_create_time_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_test_time_controller_time_controller )
# api/crypto:frame_decryptor_interface
add_library(webrtc_api_crypto_frame_decryptor_interface INTERFACE)
@@ -429,15 +429,15 @@ add_library(webrtc_api_libjingle_logging_api.headers INTERFACE)
target_include_directories(webrtc_api_libjingle_logging_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:libjingle_peerconnection_api
-add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/proxy.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
+add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
target_include_directories(webrtc_api_libjingle_peerconnection_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_libjingle_peerconnection_api PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_api_libjingle_peerconnection_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_call_rtp_interfaces webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# api:libjingle_peerconnection_api.headers
add_library(webrtc_api_libjingle_peerconnection_api.headers INTERFACE)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_call_rtp_interfaces.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_libjingle_peerconnection_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:media_stream_interface
@@ -573,12 +573,12 @@ target_include_directories(webrtc_api_priority.headers INTERFACE ${WEBRTC_ROOT}
# api:refcountedbase
add_library(webrtc_api_refcountedbase INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_macromagic webrtc_rtc_base_refcount )
target_include_directories(webrtc_api_refcountedbase INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:refcountedbase.headers
add_library(webrtc_api_refcountedbase.headers INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_macromagic.headers webrtc_rtc_base_refcount.headers )
target_include_directories(webrtc_api_refcountedbase.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_error
@@ -621,12 +621,12 @@ target_link_libraries(webrtc_api_rtc_event_log_rtc_event_log_factory PUBLIC webr
# api:rtc_stats_api
add_library(webrtc_api_rtc_stats_api INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_rtc_stats_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_stats_api.headers
add_library(webrtc_api_rtc_stats_api.headers INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtc_stats_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_headers
@@ -646,11 +646,11 @@ add_library(webrtc_api_rtp_packet_info ${WEBRTC_ROOT}/api/rtp_packet_info.cc)
target_include_directories(webrtc_api_rtp_packet_info PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_rtp_packet_info PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_api_rtp_packet_info PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api:rtp_packet_info.headers
add_library(webrtc_api_rtp_packet_info.headers INTERFACE)
-target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtp_packet_info.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_parameters
@@ -767,6 +767,11 @@ add_library(webrtc_api_transport_datagram_transport_interface INTERFACE)
target_link_libraries(webrtc_api_transport_datagram_transport_interface INTERFACE absl::optional webrtc_api_array_view webrtc_api_rtc_error webrtc_rtc_base_rtc_base_approved )
target_include_directories(webrtc_api_transport_datagram_transport_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# api/transport:datagram_transport_interface.headers
+add_library(webrtc_api_transport_datagram_transport_interface.headers INTERFACE)
+target_link_libraries(webrtc_api_transport_datagram_transport_interface.headers INTERFACE webrtc_api_array_view.headers webrtc_api_rtc_error.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_api_transport_datagram_transport_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# api/transport:enums
add_library(webrtc_api_transport_enums INTERFACE)
target_include_directories(webrtc_api_transport_enums INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -957,15 +962,15 @@ target_compile_options(webrtc_api_video_codecs_rtc_software_fallback_wrappers PR
target_link_libraries(webrtc_api_video_codecs_rtc_software_fallback_wrappers PUBLIC absl::core_headers absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_media_rtc_media_base webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# api/video_codecs:video_codecs_api
-add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder_factory.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
+add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
target_include_directories(webrtc_api_video_codecs_video_codecs_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_video_codecs_video_codecs_api PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_api_video_codecs_video_codecs_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api/video_codecs:video_codecs_api.headers
add_library(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api/video_codecs:vp8_temporal_layers_factory
@@ -1144,7 +1149,7 @@ add_library(webrtc_audio_audio ${WEBRTC_ROOT}/audio/audio_level.cc ${WEBRTC_ROOT
target_include_directories(webrtc_audio_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_audio_audio PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_audio_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# audio/utility:audio_frame_operations
add_library(webrtc_audio_utility_audio_frame_operations ${WEBRTC_ROOT}/audio/utility/audio_frame_operations.cc ${WEBRTC_ROOT}/audio/utility/channel_mixer.cc ${WEBRTC_ROOT}/audio/utility/channel_mixing_matrix.cc)
@@ -1201,18 +1206,18 @@ add_library(webrtc_call_call ${WEBRTC_ROOT}/call/call.cc ${WEBRTC_ROOT}/call/cal
target_include_directories(webrtc_call_call PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_call_call PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call PUBLIC absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
+target_link_libraries(webrtc_call_call PUBLIC absl::bind_front absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
# call:call_interfaces
add_library(webrtc_call_call_interfaces ${WEBRTC_ROOT}/call/audio_receive_stream.cc ${WEBRTC_ROOT}/call/audio_send_stream.cc ${WEBRTC_ROOT}/call/audio_state.cc ${WEBRTC_ROOT}/call/call_config.cc ${WEBRTC_ROOT}/call/flexfec_receive_stream.cc ${WEBRTC_ROOT}/call/syncable.cc)
target_include_directories(webrtc_call_call_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call_interfaces PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_call_call_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_call_audio_sender_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
+target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::bind_front absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_call_audio_sender_interface webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
# call:call_interfaces.headers
add_library(webrtc_call_call_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_call_audio_sender_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
+target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_call_audio_sender_interface.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
target_include_directories(webrtc_call_call_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:fake_network
@@ -1227,16 +1232,26 @@ add_library(webrtc_call_mock_rtp_interfaces INTERFACE)
target_link_libraries(webrtc_call_mock_rtp_interfaces INTERFACE webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_bitrate_settings webrtc_call_rtp_interfaces webrtc_modules_pacing_pacing webrtc_rtc_base_rtc_base webrtc_rtc_base_rate_limiter webrtc_rtc_base_network_sent_packet webrtc_test_test_support )
target_include_directories(webrtc_call_mock_rtp_interfaces INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# call:receive_stream_interface
+add_library(webrtc_call_receive_stream_interface INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface INTERFACE webrtc_api_frame_transformer_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source )
+target_include_directories(webrtc_call_receive_stream_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# call:receive_stream_interface.headers
+add_library(webrtc_call_receive_stream_interface.headers INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_transport_rtp_rtp_source.headers )
+target_include_directories(webrtc_call_receive_stream_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# call:rtp_interfaces
add_library(webrtc_call_rtp_interfaces ${WEBRTC_ROOT}/call/rtp_config.cc)
target_include_directories(webrtc_call_rtp_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_rtp_interfaces PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_call_rtp_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue )
# call:rtp_interfaces.headers
add_library(webrtc_call_rtp_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers )
target_include_directories(webrtc_call_rtp_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:rtp_receiver
@@ -1276,11 +1291,11 @@ add_library(webrtc_call_video_stream_api ${WEBRTC_ROOT}/call/video_receive_strea
target_include_directories(webrtc_call_video_stream_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_video_stream_api PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_call_video_stream_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
# call:video_stream_api.headers
add_library(webrtc_call_video_stream_api.headers INTERFACE)
-target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_call_video_stream_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# common_audio
@@ -1300,7 +1315,7 @@ add_library(webrtc_common_audio_common_audio_c ${WEBRTC_ROOT}/common_audio/ring_
target_include_directories(webrtc_common_audio_common_audio_c PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_common_audio_common_audio_c PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_common_audio_common_audio_c PRIVATE -Wstrict-prototypes)
-target_link_libraries(webrtc_common_audio_common_audio_c PUBLIC webrtc_fft_size_256 webrtc_spl_sqrt_floor webrtc_common_audio_common_audio_c_arm_asm webrtc_common_audio_common_audio_cc webrtc_rtc_base_checks webrtc_rtc_base_compile_assert_c webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_sanitizer webrtc_rtc_base_system_arch webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_common_audio_common_audio_c PUBLIC webrtc_fft_size_256 webrtc_spl_sqrt_floor webrtc_common_audio_common_audio_c_arm_asm webrtc_common_audio_common_audio_cc webrtc_rtc_base_checks webrtc_rtc_base_compile_assert_c webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_sanitizer webrtc_rtc_base_system_arch webrtc_system_wrappers_system_wrappers webrtc_common_audio_common_audio_neon )
# common_audio:common_audio_c.headers
add_library(webrtc_common_audio_common_audio_c.headers INTERFACE)
@@ -1439,11 +1454,11 @@ add_library(webrtc_logging_ice_log ${WEBRTC_ROOT}/logging/rtc_event_log/events/r
target_include_directories(webrtc_logging_ice_log PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_ice_log PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_ice_log PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved )
# logging:ice_log.headers
add_library(webrtc_logging_ice_log.headers INTERFACE)
-target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_logging_ice_log.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# logging:rtc_event_audio
@@ -1451,41 +1466,41 @@ add_library(webrtc_logging_rtc_event_audio ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_audio PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_rtc_event_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
# logging:rtc_event_bwe
add_library(webrtc_logging_rtc_event_bwe ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_success.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_route_change.cc)
target_include_directories(webrtc_logging_rtc_event_bwe PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_bwe PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_rtc_event_bwe PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate )
+target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate webrtc_api_units_timestamp )
# logging:rtc_event_frame_events
add_library(webrtc_logging_rtc_event_frame_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_frame_decoded.cc)
target_include_directories(webrtc_logging_rtc_event_frame_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_frame_events PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_rtc_event_frame_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
# logging:rtc_event_generic_packet_events
add_library(webrtc_logging_rtc_event_generic_packet_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc)
target_include_directories(webrtc_logging_rtc_event_generic_packet_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_generic_packet_events PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_rtc_event_generic_packet_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_timeutils )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log2_proto
-add_library(webrtc_logging_rtc_event_log2_proto_bridge)
+add_library(webrtc_logging_rtc_event_log2_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log2_proto_bridge
+ TARGET webrtc_logging_rtc_event_log2_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log2.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log2_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto)
+target_link_libraries(webrtc_logging_rtc_event_log2_proto PUBLIC libprotobuf)
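# Illustrative sketch, not part of this change: the *_proto targets drop the
# old *_proto_bridge suffix, and the *_proto_lib ALIAS above forwards to the
# renamed target, so existing references keep working. Either name yields the
# same generated .pb.h include directory and libprotobuf dependency
# (consumer target and source below are hypothetical):
add_executable(event_log_dump_tool dump_tool.cc)
target_link_libraries(event_log_dump_tool
    PRIVATE webrtc_logging_rtc_event_log2_proto_lib)  # or ..._rtc_event_log2_proto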
# logging:rtc_event_log_api
add_library(webrtc_logging_rtc_event_log_api INTERFACE)
@@ -1504,27 +1519,27 @@ add_library(webrtc_logging_rtc_event_log_impl_encoder ${WEBRTC_ROOT}/logging/rtc
target_include_directories(webrtc_logging_rtc_event_log_impl_encoder PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_log_impl_encoder PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_rtc_event_log_impl_encoder PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto_bridge webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto_bridge webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log_proto
-add_library(webrtc_logging_rtc_event_log_proto_bridge)
+add_library(webrtc_logging_rtc_event_log_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log_proto_bridge
+ TARGET webrtc_logging_rtc_event_log_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto)
+target_link_libraries(webrtc_logging_rtc_event_log_proto PUBLIC libprotobuf)
# logging:rtc_event_pacing
add_library(webrtc_logging_rtc_event_pacing ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_alr_state.cc)
target_include_directories(webrtc_logging_rtc_event_pacing PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_pacing PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_rtc_event_pacing PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log )
+target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp )
# logging:rtc_event_rtp_rtcp
add_library(webrtc_logging_rtc_event_rtp_rtcp ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc)
@@ -1538,7 +1553,7 @@ add_library(webrtc_logging_rtc_event_video ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_video PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_logging_rtc_event_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
# logging:rtc_stream_config
add_library(webrtc_logging_rtc_stream_config ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_stream_config.cc)
@@ -1552,18 +1567,25 @@ add_library(webrtc_media_rtc_audio_video ${WEBRTC_ROOT}/media/engine/adm_helpers
target_include_directories(webrtc_media_rtc_audio_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_audio_video PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_media_rtc_audio_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+
+# media:rtc_data_dcsctp_transport
+add_library(webrtc_media_rtc_data_dcsctp_transport ${WEBRTC_ROOT}/media/sctp/dcsctp_transport.cc)
+target_include_directories(webrtc_media_rtc_data_dcsctp_transport PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_media_rtc_data_dcsctp_transport PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_media_rtc_data_dcsctp_transport PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_media_rtc_data_dcsctp_transport PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_net_dcsctp_public_factory webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_task_queue_timeout webrtc_p2p_rtc_p2p webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers )
# media:rtc_data_sctp_transport_factory
add_library(webrtc_media_rtc_data_sctp_transport_factory ${WEBRTC_ROOT}/media/sctp/sctp_transport_factory.cc)
target_include_directories(webrtc_media_rtc_data_sctp_transport_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_DCSCTP WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_media_rtc_data_sctp_transport_factory PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_system_unused )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_dcsctp_transport webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_system_unused webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
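# Illustrative sketch, not part of this change: the generated factory target
# above hardcodes both WEBRTC_HAVE_DCSCTP and WEBRTC_HAVE_USRSCTP and links
# both SCTP backends. A hand-maintained adaptation of this file that wanted
# dcSCTP to be opt-in could guard the extra define and dependency behind a
# cache option (option name below is hypothetical):
option(MY_WEBRTC_USE_DCSCTP "Compile in the dcSCTP data-channel backend" ON)
if(MY_WEBRTC_USE_DCSCTP)
  target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory
      PRIVATE WEBRTC_HAVE_DCSCTP)
  target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory
      PUBLIC webrtc_media_rtc_data_dcsctp_transport)
endif()
# With both backends compiled in, the choice between them is made at runtime
# inside sctp_transport_factory.cc (note the field-trial helpers linked
# above), not by CMake.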
# media:rtc_data_sctp_transport_internal
add_library(webrtc_media_rtc_data_sctp_transport_internal INTERFACE)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_api_transport_datagram_transport_interface webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
target_include_directories(webrtc_media_rtc_data_sctp_transport_internal INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_data_usrsctp_transport
@@ -1604,11 +1626,11 @@ add_library(webrtc_media_rtc_media_base ${WEBRTC_ROOT}/media/base/adapted_video_
target_include_directories(webrtc_media_rtc_media_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_base PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_media_rtc_media_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_datagram_transport_interface webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# media:rtc_media_base.headers
add_library(webrtc_media_rtc_media_base.headers INTERFACE)
-target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_datagram_transport_interface.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_media_rtc_media_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_media_config
@@ -1638,7 +1660,7 @@ target_link_libraries(webrtc_media_rtc_media_tests_utils PUBLIC gmock gtest absl
target_include_directories(webrtc_media_rtc_media_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_unittests PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_UNIT_TEST WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_media_rtc_media_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
# media:rtc_sdp_video_format_utils
add_library(webrtc_media_rtc_sdp_video_format_utils ${WEBRTC_ROOT}/media/base/sdp_video_format_utils.cc)
@@ -1667,31 +1689,31 @@ target_link_libraries(webrtc_modules_async_audio_processing_async_audio_processi
target_include_directories(webrtc_modules_async_audio_processing_async_audio_processing.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_config_proto
-add_library(webrtc_modules_audio_coding_ana_config_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_config_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_config_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_config_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/config.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_coding_ana_config_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_config_proto PUBLIC libprotobuf)
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_debug_dump_proto
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_debug_dump_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_debug_dump_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/debug_dump.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC libprotobuf)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
+target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC libprotobuf)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
# modules/audio_coding
add_library(webrtc_modules_audio_coding_audio_coding ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_receiver.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_remixing.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_resampler.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/audio_coding_module.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/call_statistics.cc)
@@ -1730,7 +1752,7 @@ add_library(webrtc_modules_audio_coding_audio_network_adaptor ${WEBRTC_ROOT}/mod
target_include_directories(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE WEBRTC_MAC WEBRTC_POSIX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional libprotobuf webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_coding_ana_debug_dump_proto_bridge webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_coding_ana_debug_dump_proto webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
# modules/audio_coding:audio_network_adaptor_config
add_library(webrtc_modules_audio_coding_audio_network_adaptor_config ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc)
@@ -1888,7 +1910,7 @@ add_library(webrtc_modules_audio_coding_neteq_tools_minimal ${WEBRTC_ROOT}/modul
target_include_directories(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_ARCH_ARM64 WEBRTC_CODEC_ISAC WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_array_view webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/audio_coding:pcm16b
add_library(webrtc_modules_audio_coding_pcm16b ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/pcm16b_common.cc)
@@ -1908,7 +1930,7 @@ add_library(webrtc_modules_audio_coding_red ${WEBRTC_ROOT}/modules/audio_coding/
target_include_directories(webrtc_modules_audio_coding_red PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_red PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_audio_coding_red PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_cng
add_library(webrtc_modules_audio_coding_webrtc_cng ${WEBRTC_ROOT}/modules/audio_coding/codecs/cng/webrtc_cng.cc)
@@ -1929,7 +1951,7 @@ add_library(webrtc_modules_audio_coding_webrtc_opus ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_coding_webrtc_opus PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_webrtc_opus PRIVATE WEBRTC_MAC WEBRTC_POSIX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_ARCH_ARM64 WEBRTC_CODEC_ISAC WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_audio_coding_webrtc_opus PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional libprotobuf webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_opus_wrapper
add_library(webrtc_modules_audio_coding_webrtc_opus_wrapper ${WEBRTC_ROOT}/modules/audio_coding/codecs/opus/opus_interface.cc)
@@ -2031,7 +2053,7 @@ add_library(webrtc_modules_audio_mixer_audio_mixer_impl ${WEBRTC_ROOT}/modules/a
target_include_directories(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_rtp_packet_info webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_conversions webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/audio_processing/aec3:adaptive_fir_filter
add_library(webrtc_modules_audio_processing_aec3_adaptive_fir_filter INTERFACE)
@@ -2138,11 +2160,11 @@ add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl ${WEBRTC_ROOT
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE WEBRTC_MAC WEBRTC_POSIX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC libprotobuf webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto_bridge webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
# modules/audio_processing/aec_dump:aec_dump_impl.headers
add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_protobuf_utils.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/audio_processing:aec_dump_interface
@@ -2388,13 +2410,49 @@ add_library(webrtc_modules_audio_processing_agc_agc ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_processing_agc_agc PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_agc_agc PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_audio_processing_agc_agc PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor webrtc_modules_audio_processing_agc_clipping_predictor_evaluator webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/audio_processing/agc:agc.headers
add_library(webrtc_modules_audio_processing_agc_agc.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor.headers webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
target_include_directories(webrtc_modules_audio_processing_agc_agc.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# modules/audio_processing/agc:clipping_predictor
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer webrtc_modules_audio_processing_agc_gain_map webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_safe_minmax )
+
+# modules/audio_processing/agc:clipping_predictor.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_safe_minmax.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_evaluator
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_evaluator.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging )
+
+# modules/audio_processing/agc:clipping_predictor_evaluator.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_level_buffer.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved )
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# modules/audio_processing/agc:gain_control_interface
add_library(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE)
target_include_directories(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2518,17 +2576,17 @@ target_link_libraries(webrtc_modules_audio_processing_audio_processing_statistic
target_include_directories(webrtc_modules_audio_processing_audio_processing_statistics.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_processing:audioproc_debug_proto
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_processing_audioproc_debug_proto_bridge
+ TARGET webrtc_modules_audio_processing_audioproc_debug_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_processing/debug.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_processing
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
-target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto)
+target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC libprotobuf)
# modules/audio_processing:audioproc_test_utils
add_library(webrtc_modules_audio_processing_audioproc_test_utils ${WEBRTC_ROOT}/modules/audio_processing/test/audio_buffer_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/audio_processing_builder_for_testing.cc ${WEBRTC_ROOT}/modules/audio_processing/test/bitexactness_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/performance_timer.cc ${WEBRTC_ROOT}/modules/audio_processing/test/simulator_buffers.cc ${WEBRTC_ROOT}/modules/audio_processing/test/test_utils.cc)
@@ -2695,11 +2753,11 @@ target_link_libraries(webrtc_modules_audio_processing_voice_detection.headers IN
target_include_directories(webrtc_modules_audio_processing_voice_detection.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/congestion_controller
-add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc)
+add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc ${WEBRTC_ROOT}/modules/congestion_controller/remb_throttler.cc)
target_include_directories(webrtc_modules_congestion_controller_congestion_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_congestion_controller_congestion_controller PRIVATE WEBRTC_MAC WEBRTC_POSIX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_congestion_controller_congestion_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
+target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
# modules/congestion_controller/goog_cc:alr_detector
add_library(webrtc_modules_congestion_controller_goog_cc_alr_detector ${WEBRTC_ROOT}/modules/congestion_controller/goog_cc/alr_detector.cc)
@@ -2818,11 +2876,11 @@ target_compile_options(webrtc_modules_pacing_pacing PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_pacing_pacing PUBLIC absl::memory absl::strings absl::optional webrtc_api_function_view webrtc_api_sequence_checker webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_pacing webrtc_modules_module_api webrtc_modules_pacing_interval_budget webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/remote_bitrate_estimator
-add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
+add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/packet_arrival_map.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
target_include_directories(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE WEBRTC_MAC WEBRTC_POSIX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/rtp_rtcp:mock_rtp_rtcp
add_library(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE)
@@ -2830,7 +2888,7 @@ target_link_libraries(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE absl::opti
target_include_directories(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp
-add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
+add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/capture_clock_offset_updater.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE WEBRTC_MAC WEBRTC_POSIX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE -fno-exceptions)
@@ -2841,11 +2899,11 @@ add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format ${WEBRTC_ROOT}/modules/rtp_r
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/rtp_rtcp:rtp_rtcp_format.headers
add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp:rtp_video_header
@@ -3076,7 +3134,7 @@ add_library(webrtc_modules_video_coding_webrtc_vp9 ${WEBRTC_ROOT}/modules/video_
target_include_directories(webrtc_modules_video_coding_webrtc_vp9 PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_video_coding_webrtc_vp9 PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_modules_video_coding_webrtc_vp9 PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
# modules/video_coding:webrtc_vp9_helpers
add_library(webrtc_modules_video_coding_webrtc_vp9_helpers ${WEBRTC_ROOT}/modules/video_coding/codecs/vp9/svc_config.cc)
@@ -3103,6 +3161,215 @@ target_compile_definitions(webrtc_modules_video_processing_video_processing_neon
target_compile_options(webrtc_modules_video_processing_video_processing_neon PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_video_processing_video_processing_neon PUBLIC webrtc_modules_video_processing_denoiser_filter )
+# net/dcsctp/common:internal_types
+add_library(webrtc_net_dcsctp_common_internal_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_internal_types INTERFACE webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_common_internal_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:math
+add_library(webrtc_net_dcsctp_common_math INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_math INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:pair_hash
+add_library(webrtc_net_dcsctp_common_pair_hash INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_pair_hash INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:sequence_numbers
+add_library(webrtc_net_dcsctp_common_sequence_numbers INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_sequence_numbers INTERFACE webrtc_net_dcsctp_common_internal_types )
+target_include_directories(webrtc_net_dcsctp_common_sequence_numbers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:str_join
+add_library(webrtc_net_dcsctp_common_str_join INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_str_join INTERFACE absl::strings webrtc_rtc_base_stringutils )
+target_include_directories(webrtc_net_dcsctp_common_str_join INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:bounded_io
+add_library(webrtc_net_dcsctp_packet_bounded_io INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_bounded_io INTERFACE webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_bounded_io INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:chunk
+add_library(webrtc_net_dcsctp_packet_chunk ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/abort_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_echo_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/data_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/error_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/forward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/idata_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/reconfig_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/sack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_packet_chunk PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:chunk_validators
+add_library(webrtc_net_dcsctp_packet_chunk_validators ${WEBRTC_ROOT}/net/dcsctp/packet/chunk_validators.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk_validators PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk_validators PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_packet_chunk_validators PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk_validators PUBLIC webrtc_net_dcsctp_packet_chunk webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:crc32c
+add_library(webrtc_net_dcsctp_packet_crc32c ${WEBRTC_ROOT}/net/dcsctp/packet/crc32c.cc)
+target_include_directories(webrtc_net_dcsctp_packet_crc32c PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_crc32c PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_packet_crc32c PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_crc32c PUBLIC crc32c webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:data
+add_library(webrtc_net_dcsctp_packet_data INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_data INTERFACE webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_data INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:error_cause
+add_library(webrtc_net_dcsctp_packet_error_cause ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/no_user_data_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/protocol_violation_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc)
+target_include_directories(webrtc_net_dcsctp_packet_error_cause PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_error_cause PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_packet_error_cause PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_error_cause PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:parameter
+add_library(webrtc_net_dcsctp_packet_parameter ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/state_cookie_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/supported_extensions_parameter.cc)
+target_include_directories(webrtc_net_dcsctp_packet_parameter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_parameter PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_packet_parameter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_parameter PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:sctp_packet
+add_library(webrtc_net_dcsctp_packet_sctp_packet ${WEBRTC_ROOT}/net/dcsctp/packet/sctp_packet.cc)
+target_include_directories(webrtc_net_dcsctp_packet_sctp_packet PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_sctp_packet PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_packet_sctp_packet PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_sctp_packet PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_crc32c webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:tlv_trait
+add_library(webrtc_net_dcsctp_packet_tlv_trait ${WEBRTC_ROOT}/net/dcsctp/packet/tlv_trait.cc)
+target_include_directories(webrtc_net_dcsctp_packet_tlv_trait PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_tlv_trait PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_packet_tlv_trait PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_tlv_trait PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/public:factory
+add_library(webrtc_net_dcsctp_public_factory ${WEBRTC_ROOT}/net/dcsctp/public/dcsctp_socket_factory.cc)
+target_include_directories(webrtc_net_dcsctp_public_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_public_factory PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_public_factory PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_public_factory PUBLIC absl::strings webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_dcsctp_socket )
+
+# net/dcsctp/public:socket
+add_library(webrtc_net_dcsctp_public_socket INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_socket INTERFACE absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_public_socket INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:strong_alias
+add_library(webrtc_net_dcsctp_public_strong_alias INTERFACE)
+target_include_directories(webrtc_net_dcsctp_public_strong_alias INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:types
+add_library(webrtc_net_dcsctp_public_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_types INTERFACE webrtc_api_array_view webrtc_net_dcsctp_public_strong_alias )
+target_include_directories(webrtc_net_dcsctp_public_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:data_tracker
+add_library(webrtc_net_dcsctp_rx_data_tracker ${WEBRTC_ROOT}/net/dcsctp/rx/data_tracker.cc)
+target_include_directories(webrtc_net_dcsctp_rx_data_tracker PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_data_tracker PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_rx_data_tracker PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_data_tracker PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_queue
+add_library(webrtc_net_dcsctp_rx_reassembly_queue ${WEBRTC_ROOT}/net/dcsctp/rx/reassembly_queue.cc)
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_queue PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_net_dcsctp_rx_traditional_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_streams
+add_library(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE absl::strings webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:traditional_reassembly_streams
+add_library(webrtc_net_dcsctp_rx_traditional_reassembly_streams ${WEBRTC_ROOT}/net/dcsctp/rx/traditional_reassembly_streams.cc)
+target_include_directories(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_traditional_reassembly_streams PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:context
+add_library(webrtc_net_dcsctp_socket_context INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_socket_context INTERFACE absl::strings webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_socket_context INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/socket:dcsctp_socket
+add_library(webrtc_net_dcsctp_socket_dcsctp_socket ${WEBRTC_ROOT}/net/dcsctp/socket/dcsctp_socket.cc ${WEBRTC_ROOT}/net/dcsctp/socket/state_cookie.cc)
+target_include_directories(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_dcsctp_socket PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_chunk_validators webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_socket_transmission_control_block webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_rr_send_queue webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:heartbeat_handler
+add_library(webrtc_net_dcsctp_socket_heartbeat_handler ${WEBRTC_ROOT}/net/dcsctp/socket/heartbeat_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_heartbeat_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:stream_reset_handler
+add_library(webrtc_net_dcsctp_socket_stream_reset_handler ${WEBRTC_ROOT}/net/dcsctp/socket/stream_reset_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_stream_reset_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:transmission_control_block
+add_library(webrtc_net_dcsctp_socket_transmission_control_block ${WEBRTC_ROOT}/net/dcsctp/socket/transmission_control_block.cc)
+target_include_directories(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_transmission_control_block PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/timer:task_queue_timeout
+add_library(webrtc_net_dcsctp_timer_task_queue_timeout ${WEBRTC_ROOT}/net/dcsctp/timer/task_queue_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_task_queue_timeout PUBLIC webrtc_api_array_view webrtc_api_task_queue_task_queue webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
+
+# net/dcsctp/timer
+add_library(webrtc_net_dcsctp_timer_timer ${WEBRTC_ROOT}/net/dcsctp/timer/timer.cc)
+target_include_directories(webrtc_net_dcsctp_timer_timer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_timer PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_timer_timer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_timer PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_error_counter
+add_library(webrtc_net_dcsctp_tx_retransmission_error_counter ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_error_counter.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_error_counter PUBLIC absl::strings webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_queue
+add_library(webrtc_net_dcsctp_tx_retransmission_queue ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_timeout
+add_library(webrtc_net_dcsctp_tx_retransmission_timeout ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_timeout PUBLIC webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:rr_send_queue
+add_library(webrtc_net_dcsctp_tx_rr_send_queue ${WEBRTC_ROOT}/net/dcsctp/tx/rr_send_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_rr_send_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:send_queue
+add_library(webrtc_net_dcsctp_tx_send_queue INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_tx_send_queue INTERFACE absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_tx_send_queue INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# p2p:fake_ice_transport
add_library(webrtc_p2p_fake_ice_transport INTERFACE)
target_link_libraries(webrtc_p2p_fake_ice_transport INTERFACE absl::algorithm_container absl::optional webrtc_api_libjingle_peerconnection_api webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
@@ -3144,7 +3411,7 @@ add_library(webrtc_pc_audio_rtp_receiver ${WEBRTC_ROOT}/pc/audio_rtp_receiver.cc
target_include_directories(webrtc_pc_audio_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_audio_rtp_receiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_audio_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
# pc:audio_track
add_library(webrtc_pc_audio_track ${WEBRTC_ROOT}/pc/audio_track.cc)
@@ -3158,38 +3425,28 @@ add_library(webrtc_pc_connection_context ${WEBRTC_ROOT}/pc/connection_context.cc
target_include_directories(webrtc_pc_connection_context PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_connection_context PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_connection_context PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
+target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
# pc:dtmf_sender
add_library(webrtc_pc_dtmf_sender ${WEBRTC_ROOT}/pc/dtmf_sender.cc)
target_include_directories(webrtc_pc_dtmf_sender PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_dtmf_sender PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_dtmf_sender PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_pc_proxy webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:integration_test_helpers
add_library(webrtc_pc_integration_test_helpers ${WEBRTC_ROOT}/pc/test/integration_test_helpers.cc)
target_include_directories(webrtc_pc_integration_test_helpers PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_integration_test_helpers PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_integration_test_helpers PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
# pc:jitter_buffer_delay
add_library(webrtc_pc_jitter_buffer_delay ${WEBRTC_ROOT}/pc/jitter_buffer_delay.cc)
target_include_directories(webrtc_pc_jitter_buffer_delay PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_jitter_buffer_delay PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_jitter_buffer_delay PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_sequence_checker webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading )
-
-# pc:jitter_buffer_delay_interface
-add_library(webrtc_pc_jitter_buffer_delay_interface INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_interface INTERFACE absl::algorithm_container absl::strings absl::optional webrtc_media_rtc_media_base webrtc_rtc_base_refcount )
-target_include_directories(webrtc_pc_jitter_buffer_delay_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# pc:jitter_buffer_delay_proxy
-add_library(webrtc_pc_jitter_buffer_delay_proxy INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_proxy INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface )
-target_include_directories(webrtc_pc_jitter_buffer_delay_proxy INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::optional webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_safe_conversions webrtc_rtc_base_safe_minmax webrtc_rtc_base_system_no_unique_address )
# pc:libjingle_peerconnection
add_library(webrtc_pc_libjingle_peerconnection INTERFACE)
@@ -3214,7 +3471,7 @@ add_library(webrtc_pc_pc_test_utils ${WEBRTC_ROOT}/pc/test/fake_audio_capture_mo
target_include_directories(webrtc_pc_pc_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_pc_test_utils PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_pc_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
# pc:peer_connection_message_handler
add_library(webrtc_pc_peer_connection_message_handler ${WEBRTC_ROOT}/pc/peer_connection_message_handler.cc)
@@ -3228,14 +3485,21 @@ add_library(webrtc_pc_peerconnection ${WEBRTC_ROOT}/pc/data_channel_controller.c
target_include_directories(webrtc_pc_peerconnection PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_peerconnection PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_sender webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:peerconnection_unittests
android_add_executable(TARGET webrtc_pc_peerconnection_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/data_channel_integrationtest.cc ${WEBRTC_ROOT}/pc/data_channel_unittest.cc ${WEBRTC_ROOT}/pc/dtmf_sender_unittest.cc ${WEBRTC_ROOT}/pc/ice_server_parsing_unittest.cc ${WEBRTC_ROOT}/pc/jitter_buffer_delay_unittest.cc ${WEBRTC_ROOT}/pc/jsep_session_description_unittest.cc ${WEBRTC_ROOT}/pc/local_audio_source_unittest.cc ${WEBRTC_ROOT}/pc/media_stream_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_adaptation_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_bundle_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_crypto_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_data_channel_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_end_to_end_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_factory_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_header_extension_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_histogram_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_ice_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_interface_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_jsep_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_media_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_rtp_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_signaling_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_simulcast_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.h ${WEBRTC_ROOT}/pc/proxy_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_integrationtest.cc ${WEBRTC_ROOT}/pc/rtc_stats_traversal_unittest.cc ${WEBRTC_ROOT}/pc/rtp_media_utils_unittest.cc ${WEBRTC_ROOT}/pc/rtp_parameters_conversion_unittest.cc ${WEBRTC_ROOT}/pc/rtp_sender_receiver_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transceiver_unittest.cc ${WEBRTC_ROOT}/pc/sctp_utils_unittest.cc ${WEBRTC_ROOT}/pc/sdp_serializer_unittest.cc ${WEBRTC_ROOT}/pc/stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/test/fake_audio_capture_module_unittest.cc ${WEBRTC_ROOT}/pc/test/test_sdp_strings.h ${WEBRTC_ROOT}/pc/track_media_info_map_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_track_source_unittest.cc ${WEBRTC_ROOT}/pc/video_track_unittest.cc ${WEBRTC_ROOT}/pc/webrtc_sdp_unittest.cc)
target_include_directories(webrtc_pc_peerconnection_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection_unittests PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_peerconnection_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 
-webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 
+webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+
+# pc:proxy
+add_library(webrtc_pc_proxy ${WEBRTC_ROOT}/pc/proxy.cc)
+target_include_directories(webrtc_pc_proxy PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_proxy PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_pc_proxy PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_proxy PUBLIC webrtc_api_scoped_refptr webrtc_api_task_queue_task_queue webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# pc:remote_audio_source
add_library(webrtc_pc_remote_audio_source ${WEBRTC_ROOT}/pc/remote_audio_source.cc)
@@ -3250,32 +3514,32 @@ target_link_libraries(webrtc_pc_rtc_pc INTERFACE libsrtp webrtc_media_rtc_audio_
target_include_directories(webrtc_pc_rtc_pc INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# pc:rtc_pc_base
-add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/session_description.cc ${WEBRTC_ROOT}/pc/simulcast_description.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc)
+add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_collection.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc ${WEBRTC_ROOT}/pc/video_track_source_proxy.cc)
target_include_directories(webrtc_pc_rtc_pc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_base PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_rtc_pc_base PRIVATE -fno-exceptions -Ithird_party/libsrtp/include)
-target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_proxy webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:rtc_pc_unittests
android_add_executable(TARGET webrtc_pc_rtc_pc_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/channel_manager_unittest.cc ${WEBRTC_ROOT}/pc/channel_unittest.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/dtls_transport_unittest.cc ${WEBRTC_ROOT}/pc/ice_transport_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_unittest.cc ${WEBRTC_ROOT}/pc/media_session_unittest.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/sctp_transport_unittest.cc ${WEBRTC_ROOT}/pc/session_description_unittest.cc ${WEBRTC_ROOT}/pc/srtp_filter_unittest.cc ${WEBRTC_ROOT}/pc/srtp_session_unittest.cc ${WEBRTC_ROOT}/pc/srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/test/rtp_transport_test_util.h ${WEBRTC_ROOT}/pc/test/srtp_test_util.h ${WEBRTC_ROOT}/pc/used_ids_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_receiver_unittest.cc)
target_include_directories(webrtc_pc_rtc_pc_unittests PRIVATE ${WEBRTC_ROOT}/pc/../third_party/libsrtp/srtp ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_unittests PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_rtc_pc_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support )
+target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support )
# pc:rtp_parameters_conversion
add_library(webrtc_pc_rtp_parameters_conversion ${WEBRTC_ROOT}/pc/rtp_parameters_conversion.cc)
target_include_directories(webrtc_pc_rtp_parameters_conversion PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_parameters_conversion PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_rtp_parameters_conversion PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
+target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
# pc:rtp_receiver
add_library(webrtc_pc_rtp_receiver ${WEBRTC_ROOT}/pc/rtp_receiver.cc)
target_include_directories(webrtc_pc_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_receiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
# pc:rtp_sender
add_library(webrtc_pc_rtp_sender ${WEBRTC_ROOT}/pc/rtp_sender.cc)
@@ -3289,7 +3553,7 @@ add_library(webrtc_pc_rtp_transceiver ${WEBRTC_ROOT}/pc/rtp_transceiver.cc)
target_include_directories(webrtc_pc_rtp_transceiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_transceiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_rtp_transceiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_proxy webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_session_description webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:rtp_transmission_manager
add_library(webrtc_pc_rtp_transmission_manager ${WEBRTC_ROOT}/pc/rtp_transmission_manager.cc)
@@ -3310,6 +3574,20 @@ add_library(webrtc_pc_sdp_state_provider INTERFACE)
target_link_libraries(webrtc_pc_sdp_state_provider INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_pc_rtc_pc_base )
target_include_directories(webrtc_pc_sdp_state_provider INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# pc:session_description
+add_library(webrtc_pc_session_description ${WEBRTC_ROOT}/pc/session_description.cc)
+target_include_directories(webrtc_pc_session_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_session_description PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_pc_session_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_session_description PUBLIC absl::algorithm_container absl::memory webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_simulcast_description webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
+# pc:simulcast_description
+add_library(webrtc_pc_simulcast_description ${WEBRTC_ROOT}/pc/simulcast_description.cc)
+target_include_directories(webrtc_pc_simulcast_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_simulcast_description PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
+target_compile_options(webrtc_pc_simulcast_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_simulcast_description PUBLIC webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
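# The two targets added above correspond to the GN labels pc:session_description and
# pc:simulcast_description, which are now generated as standalone libraries in this file.
# For orientation only, a minimal sketch of an out-of-tree consumer; the executable name
# and source file are illustrative assumptions, not part of the generated build:
#   add_executable(sdp_inspector sdp_inspector.cc)
#   target_link_libraries(sdp_inspector PRIVATE webrtc_pc_session_description webrtc_pc_simulcast_description)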
# pc:stats_collector_interface
add_library(webrtc_pc_stats_collector_interface INTERFACE)
target_link_libraries(webrtc_pc_stats_collector_interface INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface )
@@ -3334,7 +3612,7 @@ add_library(webrtc_pc_video_rtp_receiver ${WEBRTC_ROOT}/pc/video_rtp_receiver.cc
target_include_directories(webrtc_pc_video_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_video_rtp_receiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_pc_video_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address )
# pc:video_rtp_track_source
add_library(webrtc_pc_video_rtp_track_source ${WEBRTC_ROOT}/pc/video_rtp_track_source.cc)
@@ -3702,7 +3980,7 @@ add_library(webrtc_rtc_base_platform_thread ${WEBRTC_ROOT}/rtc_base/platform_thr
target_include_directories(webrtc_rtc_base_platform_thread PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_platform_thread PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_rtc_base_platform_thread PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::strings webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::memory absl::strings absl::optional webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
# rtc_base:platform_thread.headers
add_library(webrtc_rtc_base_platform_thread.headers INTERFACE)
@@ -3721,6 +3999,15 @@ add_library(webrtc_rtc_base_platform_thread_types.headers INTERFACE)
target_link_libraries(webrtc_rtc_base_platform_thread_types.headers INTERFACE webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_platform_thread_types.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# rtc_base:protobuf_utils
+add_library(webrtc_rtc_base_protobuf_utils INTERFACE)
+target_link_libraries(webrtc_rtc_base_protobuf_utils INTERFACE libprotobuf )
+target_include_directories(webrtc_rtc_base_protobuf_utils INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# rtc_base:protobuf_utils.headers
+add_library(webrtc_rtc_base_protobuf_utils.headers INTERFACE)
+target_include_directories(webrtc_rtc_base_protobuf_utils.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
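# webrtc_rtc_base_protobuf_utils carries no compiled sources here, so it is emitted as an
# INTERFACE library: linking it only propagates its usage requirements (the libprotobuf
# dependency and the include directories) to the consumer. A hypothetical consumer target
# (name is illustrative) would pick up protobuf transitively:
#   target_link_libraries(my_event_log_parser PRIVATE webrtc_rtc_base_protobuf_utils)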
# rtc_base:rate_limiter
add_library(webrtc_rtc_base_rate_limiter ${WEBRTC_ROOT}/rtc_base/rate_limiter.cc)
target_include_directories(webrtc_rtc_base_rate_limiter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3730,12 +4017,12 @@ target_link_libraries(webrtc_rtc_base_rate_limiter PUBLIC absl::optional webrtc_
# rtc_base:refcount
add_library(webrtc_rtc_base_refcount INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_rtc_base_macromagic )
+target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_macromagic )
target_include_directories(webrtc_rtc_base_refcount INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:refcount.headers
add_library(webrtc_rtc_base_refcount.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_rtc_base_macromagic.headers )
+target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_refcount.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base
@@ -3743,11 +4030,11 @@ add_library(webrtc_rtc_base_rtc_base ${WEBRTC_ROOT}/rtc_base/mac_ifaddrs_convert
target_include_directories(webrtc_rtc_base_rtc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_rtc_base PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_rtc_base_rtc_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# rtc_base:rtc_base.headers
add_library(webrtc_rtc_base_rtc_base.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_rtc_base_rtc_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:rtc_base_approved
@@ -4088,11 +4375,11 @@ add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag ${WEBRTC_ROOT}/r
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_system_no_unique_address )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_system_no_unique_address )
# rtc_base/task_utils:pending_task_safety_flag.headers
add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_refcount.headers webrtc_rtc_base_system_no_unique_address.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_system_no_unique_address.headers )
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:repeating_task
@@ -4100,11 +4387,11 @@ add_library(webrtc_rtc_base_task_utils_repeating_task ${WEBRTC_ROOT}/rtc_base/ta
target_include_directories(webrtc_rtc_base_task_utils_repeating_task PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_repeating_task PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_rtc_base_task_utils_repeating_task PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
# rtc_base/task_utils:repeating_task.headers
add_library(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:to_queued_task
@@ -4144,11 +4431,11 @@ add_library(webrtc_rtc_base_threading ${WEBRTC_ROOT}/rtc_base/async_resolver.cc
target_include_directories(webrtc_rtc_base_threading PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_threading PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_rtc_base_threading PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_threading PUBLIC webrtc_rtc_base_system_cocoa_threading absl::algorithm_container webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_rtc_base_threading PUBLIC webrtc_rtc_base_system_cocoa_threading absl::algorithm_container webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# rtc_base:threading.headers
add_library(webrtc_rtc_base_threading.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_rtc_base_system_cocoa_threading.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
+target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_rtc_base_system_cocoa_threading.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
target_include_directories(webrtc_rtc_base_threading.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/time:timestamp_extrapolator
@@ -4332,7 +4619,7 @@ add_library(webrtc_sdk_videosource_objc ${WEBRTC_ROOT}/sdk/objc/api/peerconnecti
target_include_directories(webrtc_sdk_videosource_objc PRIVATE ${WEBRTC_ROOT}/sdk/objc ${WEBRTC_ROOT}/sdk/objc/base ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_sdk_videosource_objc PRIVATE WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_MAC WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_POSIX RTC_SUPPORTS_METAL)
target_compile_options(webrtc_sdk_videosource_objc PRIVATE -Wimplicit-retain-self -Wmissing-field-initializers -Wstrict-overflow -fno-exceptions -fobjc-weak)
-target_link_libraries(webrtc_sdk_videosource_objc PUBLIC webrtc-yuv webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_sdk_base_objc webrtc_sdk_mediasource_objc webrtc_sdk_native_video webrtc_sdk_videoframebuffer_objc )
+target_link_libraries(webrtc_sdk_videosource_objc PUBLIC webrtc-yuv webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_sdk_base_objc webrtc_sdk_mediasource_objc webrtc_sdk_native_video webrtc_sdk_videoframebuffer_objc )
target_link_libraries(webrtc_sdk_videosource_objc PRIVATE "-framework Foundation")
# sdk:videotoolbox_objc
@@ -4494,7 +4781,7 @@ add_library(webrtc_test_network_emulated_network ${WEBRTC_ROOT}/test/network/cro
target_include_directories(webrtc_test_network_emulated_network PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_network_emulated_network PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_test_network_emulated_network PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
# test/pc/sctp:fake_sctp_transport
add_library(webrtc_test_pc_sctp_fake_sctp_transport INTERFACE)
@@ -4506,7 +4793,7 @@ add_library(webrtc_test_peer_scenario_peer_scenario ${WEBRTC_ROOT}/test/peer_sce
target_include_directories(webrtc_test_peer_scenario_peer_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_peer_scenario_peer_scenario PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_test_peer_scenario_peer_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
# test:perf_test
add_library(webrtc_test_perf_test ${WEBRTC_ROOT}/test/testsupport/perf_result_reporter.cc ${WEBRTC_ROOT}/test/testsupport/perf_test.cc ${WEBRTC_ROOT}/test/testsupport/perf_test_histogram_writer.cc)
@@ -4532,7 +4819,7 @@ add_library(webrtc_test_rtp_test_utils ${WEBRTC_ROOT}/test/rtcp_packet_parser.cc
target_include_directories(webrtc_test_rtp_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_rtp_test_utils PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_test_rtp_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_rtp_test_utils PUBLIC webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
+target_link_libraries(webrtc_test_rtp_test_utils PUBLIC absl::optional webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
# test/scenario:column_printer
add_library(webrtc_test_scenario_column_printer ${WEBRTC_ROOT}/test/scenario/column_printer.cc)
@@ -4546,7 +4833,7 @@ add_library(webrtc_test_scenario_scenario ${WEBRTC_ROOT}/test/scenario/audio_str
target_include_directories(webrtc_test_scenario_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_scenario_scenario PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_test_scenario_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_scenario_scenario PUBLIC webrtc_modules_video_coding_objc_codec_factory_helper absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
+target_link_libraries(webrtc_test_scenario_scenario PUBLIC webrtc_modules_video_coding_objc_codec_factory_helper absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
# test:test_common
add_library(webrtc_test_test_common ${WEBRTC_ROOT}/test/call_test.cc ${WEBRTC_ROOT}/test/drifting_clock.cc ${WEBRTC_ROOT}/test/layer_filtering_transport.cc ${WEBRTC_ROOT}/test/run_loop.cc)
@@ -4607,7 +4894,7 @@ add_library(webrtc_video_video ${WEBRTC_ROOT}/video/buffered_frame_decryptor.cc
target_include_directories(webrtc_video_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_video_video PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON)
target_compile_options(webrtc_video_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
+target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
# video:video_stream_encoder_impl
add_library(webrtc_video_video_stream_encoder_impl ${WEBRTC_ROOT}/video/alignment_adjuster.cc ${WEBRTC_ROOT}/video/encoder_bitrate_adjuster.cc ${WEBRTC_ROOT}/video/encoder_overshoot_detector.cc ${WEBRTC_ROOT}/video/frame_encode_metadata_writer.cc ${WEBRTC_ROOT}/video/video_source_sink_controller.cc ${WEBRTC_ROOT}/video/video_stream_encoder.cc)
diff --git a/darwin_x86_64.cmake b/darwin_x86_64.cmake
index 4670cd58be..2777aa47ff 100644
--- a/darwin_x86_64.cmake
+++ b/darwin_x86_64.cmake
@@ -1,4 +1,4 @@
-# Generated on 04/21/21 for target: Darwin
+# Generated on 06/23/21 for target: Darwin
# This is an autogenerated file by calling:
# ./import-webrtc.py --target webrtc_api_video_codecs_builtin_video_decoder_factory --target webrtc_api_video_codecs_builtin_video_encoder_factory --target webrtc_api_libjingle_peerconnection_api --target webrtc_pc_peerconnection --target webrtc_api_create_peerconnection_factory --target webrtc_api_audio_codecs_builtin_audio_decoder_factory --target webrtc_api_audio_codecs_builtin_audio_encoder_factory --target webrtc_common_audio_common_audio_unittests --target webrtc_common_video_common_video_unittests --target webrtc_media_rtc_media_unittests --target webrtc_modules_audio_coding_audio_decoder_unittests --target webrtc_pc_peerconnection_unittests --target webrtc_pc_rtc_pc_unittests --root /usr/local/google/home/jansene/src/webrtc_g3/ --platform Darwin BUILD .
@@ -6,11 +6,11 @@
# Re-running this script will require you to merge in the latest upstream-master for webrtc
# Expecting jsoncpp at 9059f5cad030ba11d37818847443a53918c327b1
-# Expecting libaom at 6c93db7ff63430d2e5dcdfc42e84e3a0514da608
-# Expecting libsrtp2 at 7990ca64c616b150a9cb4714601c4a3b0c84fe91
+# Expecting libaom at 12287adee94fc3b1f5349d3f4bd85cea4e57f62b
+# Expecting libsrtp2 at 5b7c744eb8310250ccc534f3f86a2015b3887a0a
# Expecting libvpx at 61edec1efbea1c02d71857e2aff9426d9cd2df4e
-# Expecting libyuv at 64994843e652443df2d5201c6ae3fb725097360f
-# Expecting usrsctp at 70d42ae95a1de83bd317c8cc9503f894671d1392
+# Expecting libyuv at 49ebc996aa8c4bdf89c1b5ea461eb677234c61cc
+# Expecting usrsctp at 22ba62ffe79c3881581ab430368bf3764d9533eb
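# Should this file need to be regenerated against a newer WebRTC checkout, the invocation
# recorded at the top of the file is the template. An abbreviated, illustrative form
# (the --root path is a placeholder and the --target list must match the full list above):
#   ./import-webrtc.py --target webrtc_pc_peerconnection_unittests \
#       --root /path/to/webrtc/ --platform Darwin BUILD .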
@@ -294,12 +294,12 @@ target_include_directories(webrtc_api_call_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_
# api:callfactory_api
add_library(webrtc_api_callfactory_api INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_call_rtp_interfaces webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_callfactory_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:callfactory_api.headers
add_library(webrtc_api_callfactory_api.headers INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_call_rtp_interfaces.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_callfactory_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:create_frame_generator
@@ -328,7 +328,7 @@ add_library(webrtc_api_create_time_controller ${WEBRTC_ROOT}/api/test/create_tim
target_include_directories(webrtc_api_create_time_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_create_time_controller PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_create_time_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_test_time_controller_time_controller )
# api/crypto:frame_decryptor_interface
add_library(webrtc_api_crypto_frame_decryptor_interface INTERFACE)
@@ -429,15 +429,15 @@ add_library(webrtc_api_libjingle_logging_api.headers INTERFACE)
target_include_directories(webrtc_api_libjingle_logging_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:libjingle_peerconnection_api
-add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/proxy.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
+add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
target_include_directories(webrtc_api_libjingle_peerconnection_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_libjingle_peerconnection_api PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_libjingle_peerconnection_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_call_rtp_interfaces webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# api:libjingle_peerconnection_api.headers
add_library(webrtc_api_libjingle_peerconnection_api.headers INTERFACE)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_call_rtp_interfaces.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_libjingle_peerconnection_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:media_stream_interface
@@ -573,12 +573,12 @@ target_include_directories(webrtc_api_priority.headers INTERFACE ${WEBRTC_ROOT}
# api:refcountedbase
add_library(webrtc_api_refcountedbase INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_macromagic webrtc_rtc_base_refcount )
target_include_directories(webrtc_api_refcountedbase INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:refcountedbase.headers
add_library(webrtc_api_refcountedbase.headers INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_macromagic.headers webrtc_rtc_base_refcount.headers )
target_include_directories(webrtc_api_refcountedbase.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_error
@@ -621,12 +621,12 @@ target_link_libraries(webrtc_api_rtc_event_log_rtc_event_log_factory PUBLIC webr
# api:rtc_stats_api
add_library(webrtc_api_rtc_stats_api INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_rtc_stats_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_stats_api.headers
add_library(webrtc_api_rtc_stats_api.headers INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtc_stats_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_headers
@@ -646,11 +646,11 @@ add_library(webrtc_api_rtp_packet_info ${WEBRTC_ROOT}/api/rtp_packet_info.cc)
target_include_directories(webrtc_api_rtp_packet_info PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_rtp_packet_info PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_rtp_packet_info PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api:rtp_packet_info.headers
add_library(webrtc_api_rtp_packet_info.headers INTERFACE)
-target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtp_packet_info.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_parameters
@@ -767,6 +767,11 @@ add_library(webrtc_api_transport_datagram_transport_interface INTERFACE)
target_link_libraries(webrtc_api_transport_datagram_transport_interface INTERFACE absl::optional webrtc_api_array_view webrtc_api_rtc_error webrtc_rtc_base_rtc_base_approved )
target_include_directories(webrtc_api_transport_datagram_transport_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# api/transport:datagram_transport_interface.headers
+add_library(webrtc_api_transport_datagram_transport_interface.headers INTERFACE)
+target_link_libraries(webrtc_api_transport_datagram_transport_interface.headers INTERFACE webrtc_api_array_view.headers webrtc_api_rtc_error.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_api_transport_datagram_transport_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# api/transport:enums
add_library(webrtc_api_transport_enums INTERFACE)
target_include_directories(webrtc_api_transport_enums INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
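Note on the target layout visible throughout this file: each GN target is translated into a compiled (or INTERFACE) CMake library plus a parallel <name>.headers INTERFACE library that only carries include directories and the .headers libraries of its dependencies. A minimal consumer sketch, assuming a hypothetical external target named my_peerconnection_tool that is not part of this generated file:

# Hypothetical consumer (illustration only, not part of the generated file).
add_executable(my_peerconnection_tool main.cc)
# Linking the compiled target pulls in the objects plus its transitive deps...
target_link_libraries(my_peerconnection_tool PRIVATE webrtc_api_libjingle_peerconnection_api)
# ...while the paired .headers INTERFACE target propagates only include paths,
# e.g. for code that merely includes the API headers.
target_link_libraries(my_peerconnection_tool PRIVATE webrtc_api_libjingle_peerconnection_api.headers)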
@@ -957,15 +962,15 @@ target_compile_options(webrtc_api_video_codecs_rtc_software_fallback_wrappers PR
target_link_libraries(webrtc_api_video_codecs_rtc_software_fallback_wrappers PUBLIC absl::core_headers absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_media_rtc_media_base webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# api/video_codecs:video_codecs_api
-add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder_factory.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
+add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
target_include_directories(webrtc_api_video_codecs_video_codecs_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_video_codecs_video_codecs_api PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_video_codecs_video_codecs_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api/video_codecs:video_codecs_api.headers
add_library(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api/video_codecs:vp8_temporal_layers_factory
@@ -1144,7 +1149,7 @@ add_library(webrtc_audio_audio ${WEBRTC_ROOT}/audio/audio_level.cc ${WEBRTC_ROOT
target_include_directories(webrtc_audio_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_audio_audio PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_audio_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# audio/utility:audio_frame_operations
add_library(webrtc_audio_utility_audio_frame_operations ${WEBRTC_ROOT}/audio/utility/audio_frame_operations.cc ${WEBRTC_ROOT}/audio/utility/channel_mixer.cc ${WEBRTC_ROOT}/audio/utility/channel_mixing_matrix.cc)
@@ -1201,18 +1206,18 @@ add_library(webrtc_call_call ${WEBRTC_ROOT}/call/call.cc ${WEBRTC_ROOT}/call/cal
target_include_directories(webrtc_call_call PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call PUBLIC absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
+target_link_libraries(webrtc_call_call PUBLIC absl::bind_front absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
# call:call_interfaces
add_library(webrtc_call_call_interfaces ${WEBRTC_ROOT}/call/audio_receive_stream.cc ${WEBRTC_ROOT}/call/audio_send_stream.cc ${WEBRTC_ROOT}/call/audio_state.cc ${WEBRTC_ROOT}/call/call_config.cc ${WEBRTC_ROOT}/call/flexfec_receive_stream.cc ${WEBRTC_ROOT}/call/syncable.cc)
target_include_directories(webrtc_call_call_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call_interfaces PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_call_audio_sender_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
+target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::bind_front absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_call_audio_sender_interface webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
# call:call_interfaces.headers
add_library(webrtc_call_call_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_call_audio_sender_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
+target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_call_audio_sender_interface.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
target_include_directories(webrtc_call_call_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:fake_network
@@ -1227,16 +1232,26 @@ add_library(webrtc_call_mock_rtp_interfaces INTERFACE)
target_link_libraries(webrtc_call_mock_rtp_interfaces INTERFACE webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_bitrate_settings webrtc_call_rtp_interfaces webrtc_modules_pacing_pacing webrtc_rtc_base_rtc_base webrtc_rtc_base_rate_limiter webrtc_rtc_base_network_sent_packet webrtc_test_test_support )
target_include_directories(webrtc_call_mock_rtp_interfaces INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# call:receive_stream_interface
+add_library(webrtc_call_receive_stream_interface INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface INTERFACE webrtc_api_frame_transformer_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source )
+target_include_directories(webrtc_call_receive_stream_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# call:receive_stream_interface.headers
+add_library(webrtc_call_receive_stream_interface.headers INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_transport_rtp_rtp_source.headers )
+target_include_directories(webrtc_call_receive_stream_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# call:rtp_interfaces
add_library(webrtc_call_rtp_interfaces ${WEBRTC_ROOT}/call/rtp_config.cc)
target_include_directories(webrtc_call_rtp_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_rtp_interfaces PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_rtp_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue )
# call:rtp_interfaces.headers
add_library(webrtc_call_rtp_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers )
target_include_directories(webrtc_call_rtp_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:rtp_receiver
@@ -1276,11 +1291,11 @@ add_library(webrtc_call_video_stream_api ${WEBRTC_ROOT}/call/video_receive_strea
target_include_directories(webrtc_call_video_stream_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_video_stream_api PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_video_stream_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
# call:video_stream_api.headers
add_library(webrtc_call_video_stream_api.headers INTERFACE)
-target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_call_video_stream_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# common_audio
@@ -1439,11 +1454,11 @@ add_library(webrtc_logging_ice_log ${WEBRTC_ROOT}/logging/rtc_event_log/events/r
target_include_directories(webrtc_logging_ice_log PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_ice_log PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_ice_log PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved )
# logging:ice_log.headers
add_library(webrtc_logging_ice_log.headers INTERFACE)
-target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_logging_ice_log.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# logging:rtc_event_audio
@@ -1451,41 +1466,41 @@ add_library(webrtc_logging_rtc_event_audio ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_audio PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
# logging:rtc_event_bwe
add_library(webrtc_logging_rtc_event_bwe ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_success.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_route_change.cc)
target_include_directories(webrtc_logging_rtc_event_bwe PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_bwe PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_bwe PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate )
+target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate webrtc_api_units_timestamp )
# logging:rtc_event_frame_events
add_library(webrtc_logging_rtc_event_frame_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_frame_decoded.cc)
target_include_directories(webrtc_logging_rtc_event_frame_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_frame_events PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_frame_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
# logging:rtc_event_generic_packet_events
add_library(webrtc_logging_rtc_event_generic_packet_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc)
target_include_directories(webrtc_logging_rtc_event_generic_packet_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_generic_packet_events PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_generic_packet_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_timeutils )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log2_proto
-add_library(webrtc_logging_rtc_event_log2_proto_bridge)
+add_library(webrtc_logging_rtc_event_log2_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log2_proto_bridge
+ TARGET webrtc_logging_rtc_event_log2_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log2.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log2_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto)
+target_link_libraries(webrtc_logging_rtc_event_log2_proto PUBLIC libprotobuf)
# logging:rtc_event_log_api
add_library(webrtc_logging_rtc_event_log_api INTERFACE)
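The _bridge suffix is dropped from the generated protobuf targets in this hunk and the next, but the webrtc_logging_rtc_event_log2_proto_lib ALIAS is retained, so CMake code written against the old alias still resolves to the renamed target. A minimal sketch, assuming a hypothetical consumer target my_event_log_parser:

# Hypothetical consumer (illustration only): linking through the retained
# ALIAS resolves to the renamed webrtc_logging_rtc_event_log2_proto target.
add_executable(my_event_log_parser parse_event_log.cc)
target_link_libraries(my_event_log_parser PRIVATE webrtc_logging_rtc_event_log2_proto_lib)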
@@ -1504,27 +1519,27 @@ add_library(webrtc_logging_rtc_event_log_impl_encoder ${WEBRTC_ROOT}/logging/rtc
target_include_directories(webrtc_logging_rtc_event_log_impl_encoder PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_log_impl_encoder PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_log_impl_encoder PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto_bridge webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto_bridge webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log_proto
-add_library(webrtc_logging_rtc_event_log_proto_bridge)
+add_library(webrtc_logging_rtc_event_log_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log_proto_bridge
+ TARGET webrtc_logging_rtc_event_log_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto)
+target_link_libraries(webrtc_logging_rtc_event_log_proto PUBLIC libprotobuf)
# logging:rtc_event_pacing
add_library(webrtc_logging_rtc_event_pacing ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_alr_state.cc)
target_include_directories(webrtc_logging_rtc_event_pacing PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_pacing PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_pacing PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log )
+target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp )
# logging:rtc_event_rtp_rtcp
add_library(webrtc_logging_rtc_event_rtp_rtcp ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc)
@@ -1538,7 +1553,7 @@ add_library(webrtc_logging_rtc_event_video ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_video PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
# logging:rtc_stream_config
add_library(webrtc_logging_rtc_stream_config ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_stream_config.cc)
@@ -1552,18 +1567,25 @@ add_library(webrtc_media_rtc_audio_video ${WEBRTC_ROOT}/media/engine/adm_helpers
target_include_directories(webrtc_media_rtc_audio_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_audio_video PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_audio_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+
+# media:rtc_data_dcsctp_transport
+add_library(webrtc_media_rtc_data_dcsctp_transport ${WEBRTC_ROOT}/media/sctp/dcsctp_transport.cc)
+target_include_directories(webrtc_media_rtc_data_dcsctp_transport PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_media_rtc_data_dcsctp_transport PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_media_rtc_data_dcsctp_transport PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_media_rtc_data_dcsctp_transport PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_net_dcsctp_public_factory webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_task_queue_timeout webrtc_p2p_rtc_p2p webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers )
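# A hypothetical consumer of the dcsctp transport library defined above; the
# executable and source file names are illustrative only, and the compile
# definitions are assumed to mirror the ones used for
# webrtc_media_rtc_data_dcsctp_transport. Because that target links its
# dependencies PUBLIC, transitive libraries and include paths propagate.
add_executable(example_dcsctp_consumer example_dcsctp_consumer.cc)
target_include_directories(example_dcsctp_consumer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(example_dcsctp_consumer PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_HAVE_SCTP)
target_link_libraries(example_dcsctp_consumer PRIVATE webrtc_media_rtc_data_dcsctp_transport)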
# media:rtc_data_sctp_transport_factory
add_library(webrtc_media_rtc_data_sctp_transport_factory ${WEBRTC_ROOT}/media/sctp/sctp_transport_factory.cc)
target_include_directories(webrtc_media_rtc_data_sctp_transport_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_DCSCTP WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_data_sctp_transport_factory PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_system_unused )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_dcsctp_transport webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_system_unused webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
# media:rtc_data_sctp_transport_internal
add_library(webrtc_media_rtc_data_sctp_transport_internal INTERFACE)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_api_transport_datagram_transport_interface webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
target_include_directories(webrtc_media_rtc_data_sctp_transport_internal INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_data_usrsctp_transport
@@ -1604,11 +1626,11 @@ add_library(webrtc_media_rtc_media_base ${WEBRTC_ROOT}/media/base/adapted_video_
target_include_directories(webrtc_media_rtc_media_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_base PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_media_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_datagram_transport_interface webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# media:rtc_media_base.headers
add_library(webrtc_media_rtc_media_base.headers INTERFACE)
-target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_datagram_transport_interface.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_media_rtc_media_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_media_config
@@ -1638,7 +1660,7 @@ target_link_libraries(webrtc_media_rtc_media_tests_utils PUBLIC gmock gtest absl
target_include_directories(webrtc_media_rtc_media_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_unittests PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_media_rtc_media_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
# media:rtc_sdp_video_format_utils
add_library(webrtc_media_rtc_sdp_video_format_utils ${WEBRTC_ROOT}/media/base/sdp_video_format_utils.cc)
@@ -1667,31 +1689,31 @@ target_link_libraries(webrtc_modules_async_audio_processing_async_audio_processi
target_include_directories(webrtc_modules_async_audio_processing_async_audio_processing.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_config_proto
-add_library(webrtc_modules_audio_coding_ana_config_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_config_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_config_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_config_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/config.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_coding_ana_config_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_config_proto PUBLIC libprotobuf)
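# A hypothetical tool that reads the generated audio network adaptor config
# proto; the target and source names are illustrative only. Linking the alias
# declared above also pulls in libprotobuf and the generated header directory,
# since both are attached to the proto target with PUBLIC visibility.
add_executable(example_ana_config_dump example_ana_config_dump.cc)
target_link_libraries(example_ana_config_dump PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)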
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_debug_dump_proto
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_debug_dump_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_debug_dump_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/debug_dump.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC libprotobuf)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
+target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC libprotobuf)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
# modules/audio_coding
add_library(webrtc_modules_audio_coding_audio_coding ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_receiver.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_remixing.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_resampler.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/audio_coding_module.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/call_statistics.cc)
@@ -1730,7 +1752,7 @@ add_library(webrtc_modules_audio_coding_audio_network_adaptor ${WEBRTC_ROOT}/mod
target_include_directories(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE WEBRTC_MAC WEBRTC_POSIX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional libprotobuf webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_coding_ana_debug_dump_proto_bridge webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_coding_ana_debug_dump_proto webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
# modules/audio_coding:audio_network_adaptor_config
add_library(webrtc_modules_audio_coding_audio_network_adaptor_config ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc)
@@ -1881,7 +1903,7 @@ add_library(webrtc_modules_audio_coding_neteq_tools_minimal ${WEBRTC_ROOT}/modul
target_include_directories(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_CODEC_ISAC)
target_compile_options(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_array_view webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/audio_coding:pcm16b
add_library(webrtc_modules_audio_coding_pcm16b ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/pcm16b_common.cc)
@@ -1901,7 +1923,7 @@ add_library(webrtc_modules_audio_coding_red ${WEBRTC_ROOT}/modules/audio_coding/
target_include_directories(webrtc_modules_audio_coding_red PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_red PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_red PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_cng
add_library(webrtc_modules_audio_coding_webrtc_cng ${WEBRTC_ROOT}/modules/audio_coding/codecs/cng/webrtc_cng.cc)
@@ -1922,7 +1944,7 @@ add_library(webrtc_modules_audio_coding_webrtc_opus ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_coding_webrtc_opus PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_webrtc_opus PRIVATE WEBRTC_MAC WEBRTC_POSIX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_CODEC_ISAC)
target_compile_options(webrtc_modules_audio_coding_webrtc_opus PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional libprotobuf webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_opus_wrapper
add_library(webrtc_modules_audio_coding_webrtc_opus_wrapper ${WEBRTC_ROOT}/modules/audio_coding/codecs/opus/opus_interface.cc)
@@ -2024,7 +2046,7 @@ add_library(webrtc_modules_audio_mixer_audio_mixer_impl ${WEBRTC_ROOT}/modules/a
target_include_directories(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_rtp_packet_info webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_conversions webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/audio_processing/aec3:adaptive_fir_filter
add_library(webrtc_modules_audio_processing_aec3_adaptive_fir_filter INTERFACE)
@@ -2143,11 +2165,11 @@ add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl ${WEBRTC_ROOT
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE WEBRTC_MAC WEBRTC_POSIX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC libprotobuf webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto_bridge webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
# modules/audio_processing/aec_dump:aec_dump_impl.headers
add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_protobuf_utils.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/audio_processing:aec_dump_interface
@@ -2405,13 +2427,49 @@ add_library(webrtc_modules_audio_processing_agc_agc ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_processing_agc_agc PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_agc_agc PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_agc_agc PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor webrtc_modules_audio_processing_agc_clipping_predictor_evaluator webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/audio_processing/agc:agc.headers
add_library(webrtc_modules_audio_processing_agc_agc.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor.headers webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
target_include_directories(webrtc_modules_audio_processing_agc_agc.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# modules/audio_processing/agc:clipping_predictor
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer webrtc_modules_audio_processing_agc_gain_map webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_safe_minmax )
+
+# modules/audio_processing/agc:clipping_predictor.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_safe_minmax.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_evaluator
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_evaluator.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging )
+
+# modules/audio_processing/agc:clipping_predictor_evaluator.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_level_buffer.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved )
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
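# A hypothetical check binary linking the clipping predictor libraries added
# above; the executable and source names are illustrative only and follow the
# same pattern as the generated rules in this file.
add_executable(example_clipping_predictor_check example_clipping_predictor_check.cc)
target_include_directories(example_clipping_predictor_check PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_options(example_clipping_predictor_check PRIVATE -fno-exceptions)
target_link_libraries(example_clipping_predictor_check PRIVATE webrtc_modules_audio_processing_agc_clipping_predictor webrtc_modules_audio_processing_agc_clipping_predictor_evaluator webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer )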
# modules/audio_processing/agc:gain_control_interface
add_library(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE)
target_include_directories(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2535,17 +2593,17 @@ target_link_libraries(webrtc_modules_audio_processing_audio_processing_statistic
target_include_directories(webrtc_modules_audio_processing_audio_processing_statistics.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_processing:audioproc_debug_proto
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_processing_audioproc_debug_proto_bridge
+ TARGET webrtc_modules_audio_processing_audioproc_debug_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_processing/debug.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_processing
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
-target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto)
+target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC libprotobuf)
# modules/audio_processing:audioproc_test_utils
add_library(webrtc_modules_audio_processing_audioproc_test_utils ${WEBRTC_ROOT}/modules/audio_processing/test/audio_buffer_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/audio_processing_builder_for_testing.cc ${WEBRTC_ROOT}/modules/audio_processing/test/bitexactness_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/performance_timer.cc ${WEBRTC_ROOT}/modules/audio_processing/test/simulator_buffers.cc ${WEBRTC_ROOT}/modules/audio_processing/test/test_utils.cc)
@@ -2712,11 +2770,11 @@ target_link_libraries(webrtc_modules_audio_processing_voice_detection.headers IN
target_include_directories(webrtc_modules_audio_processing_voice_detection.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/congestion_controller
-add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc)
+add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc ${WEBRTC_ROOT}/modules/congestion_controller/remb_throttler.cc)
target_include_directories(webrtc_modules_congestion_controller_congestion_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_congestion_controller_congestion_controller PRIVATE WEBRTC_MAC WEBRTC_POSIX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_congestion_controller_congestion_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
+target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
# modules/congestion_controller/goog_cc:alr_detector
add_library(webrtc_modules_congestion_controller_goog_cc_alr_detector ${WEBRTC_ROOT}/modules/congestion_controller/goog_cc/alr_detector.cc)
@@ -2835,11 +2893,11 @@ target_compile_options(webrtc_modules_pacing_pacing PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_pacing_pacing PUBLIC absl::memory absl::strings absl::optional webrtc_api_function_view webrtc_api_sequence_checker webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_pacing webrtc_modules_module_api webrtc_modules_pacing_interval_budget webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/remote_bitrate_estimator
-add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
+add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/packet_arrival_map.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
target_include_directories(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE WEBRTC_MAC WEBRTC_POSIX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/rtp_rtcp:mock_rtp_rtcp
add_library(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE)
@@ -2847,7 +2905,7 @@ target_link_libraries(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE absl::opti
target_include_directories(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp
-add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
+add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/capture_clock_offset_updater.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE WEBRTC_MAC WEBRTC_POSIX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE -fno-exceptions)
@@ -2858,11 +2916,11 @@ add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format ${WEBRTC_ROOT}/modules/rtp_r
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/rtp_rtcp:rtp_rtcp_format.headers
add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp:rtp_video_header
@@ -3093,7 +3151,7 @@ add_library(webrtc_modules_video_coding_webrtc_vp9 ${WEBRTC_ROOT}/modules/video_
target_include_directories(webrtc_modules_video_coding_webrtc_vp9 PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_video_coding_webrtc_vp9 PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_video_coding_webrtc_vp9 PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
# modules/video_coding:webrtc_vp9_helpers
add_library(webrtc_modules_video_coding_webrtc_vp9_helpers ${WEBRTC_ROOT}/modules/video_coding/codecs/vp9/svc_config.cc)
@@ -3120,6 +3178,215 @@ target_compile_definitions(webrtc_modules_video_processing_video_processing_sse2
target_compile_options(webrtc_modules_video_processing_video_processing_sse2 PRIVATE -fno-exceptions -msse2)
target_link_libraries(webrtc_modules_video_processing_video_processing_sse2 PUBLIC webrtc_modules_video_processing_denoiser_filter webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+# net/dcsctp/common:internal_types
+add_library(webrtc_net_dcsctp_common_internal_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_internal_types INTERFACE webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_common_internal_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:math
+add_library(webrtc_net_dcsctp_common_math INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_math INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:pair_hash
+add_library(webrtc_net_dcsctp_common_pair_hash INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_pair_hash INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:sequence_numbers
+add_library(webrtc_net_dcsctp_common_sequence_numbers INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_sequence_numbers INTERFACE webrtc_net_dcsctp_common_internal_types )
+target_include_directories(webrtc_net_dcsctp_common_sequence_numbers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:str_join
+add_library(webrtc_net_dcsctp_common_str_join INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_str_join INTERFACE absl::strings webrtc_rtc_base_stringutils )
+target_include_directories(webrtc_net_dcsctp_common_str_join INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:bounded_io
+add_library(webrtc_net_dcsctp_packet_bounded_io INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_bounded_io INTERFACE webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_bounded_io INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:chunk
+add_library(webrtc_net_dcsctp_packet_chunk ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/abort_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_echo_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/data_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/error_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/forward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/idata_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/reconfig_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/sack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:chunk_validators
+add_library(webrtc_net_dcsctp_packet_chunk_validators ${WEBRTC_ROOT}/net/dcsctp/packet/chunk_validators.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk_validators PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk_validators PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk_validators PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk_validators PUBLIC webrtc_net_dcsctp_packet_chunk webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:crc32c
+add_library(webrtc_net_dcsctp_packet_crc32c ${WEBRTC_ROOT}/net/dcsctp/packet/crc32c.cc)
+target_include_directories(webrtc_net_dcsctp_packet_crc32c PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_crc32c PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_crc32c PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_crc32c PUBLIC crc32c webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
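The bare `crc32c` target linked just above is the new third-party library called out in the commit message; the hunks that actually import it are not visible here. Purely as a hedged sketch (the vendored path and the CRC32C_* option names are assumptions about google/crc32c, not taken from this diff), a port like this one could make that target available along these lines:

    # Illustrative only: wire up a vendored google/crc32c checkout so that the
    # plain `crc32c` target referenced by webrtc_net_dcsctp_packet_crc32c exists.
    # The path and option names are assumptions, not part of this commit.
    set(CRC32C_BUILD_TESTS OFF CACHE BOOL "" FORCE)
    set(CRC32C_BUILD_BENCHMARKS OFF CACHE BOOL "" FORCE)
    set(CRC32C_USE_GLOG OFF CACHE BOOL "" FORCE)
    add_subdirectory(${WEBRTC_ROOT}/third_party/crc32c/src crc32c EXCLUDE_FROM_ALL)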
+# net/dcsctp/packet:data
+add_library(webrtc_net_dcsctp_packet_data INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_data INTERFACE webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_data INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:error_cause
+add_library(webrtc_net_dcsctp_packet_error_cause ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/no_user_data_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/protocol_violation_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc)
+target_include_directories(webrtc_net_dcsctp_packet_error_cause PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_error_cause PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_error_cause PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_error_cause PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:parameter
+add_library(webrtc_net_dcsctp_packet_parameter ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/state_cookie_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/supported_extensions_parameter.cc)
+target_include_directories(webrtc_net_dcsctp_packet_parameter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_parameter PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_parameter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_parameter PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:sctp_packet
+add_library(webrtc_net_dcsctp_packet_sctp_packet ${WEBRTC_ROOT}/net/dcsctp/packet/sctp_packet.cc)
+target_include_directories(webrtc_net_dcsctp_packet_sctp_packet PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_sctp_packet PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_sctp_packet PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_sctp_packet PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_crc32c webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:tlv_trait
+add_library(webrtc_net_dcsctp_packet_tlv_trait ${WEBRTC_ROOT}/net/dcsctp/packet/tlv_trait.cc)
+target_include_directories(webrtc_net_dcsctp_packet_tlv_trait PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_tlv_trait PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_tlv_trait PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_tlv_trait PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/public:factory
+add_library(webrtc_net_dcsctp_public_factory ${WEBRTC_ROOT}/net/dcsctp/public/dcsctp_socket_factory.cc)
+target_include_directories(webrtc_net_dcsctp_public_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_public_factory PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_public_factory PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_public_factory PUBLIC absl::strings webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_dcsctp_socket )
+
+# net/dcsctp/public:socket
+add_library(webrtc_net_dcsctp_public_socket INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_socket INTERFACE absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_public_socket INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:strong_alias
+add_library(webrtc_net_dcsctp_public_strong_alias INTERFACE)
+target_include_directories(webrtc_net_dcsctp_public_strong_alias INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:types
+add_library(webrtc_net_dcsctp_public_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_types INTERFACE webrtc_api_array_view webrtc_net_dcsctp_public_strong_alias )
+target_include_directories(webrtc_net_dcsctp_public_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:data_tracker
+add_library(webrtc_net_dcsctp_rx_data_tracker ${WEBRTC_ROOT}/net/dcsctp/rx/data_tracker.cc)
+target_include_directories(webrtc_net_dcsctp_rx_data_tracker PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_data_tracker PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_data_tracker PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_data_tracker PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_queue
+add_library(webrtc_net_dcsctp_rx_reassembly_queue ${WEBRTC_ROOT}/net/dcsctp/rx/reassembly_queue.cc)
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_queue PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_net_dcsctp_rx_traditional_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_streams
+add_library(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE absl::strings webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:traditional_reassembly_streams
+add_library(webrtc_net_dcsctp_rx_traditional_reassembly_streams ${WEBRTC_ROOT}/net/dcsctp/rx/traditional_reassembly_streams.cc)
+target_include_directories(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_traditional_reassembly_streams PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:context
+add_library(webrtc_net_dcsctp_socket_context INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_socket_context INTERFACE absl::strings webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_socket_context INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/socket:dcsctp_socket
+add_library(webrtc_net_dcsctp_socket_dcsctp_socket ${WEBRTC_ROOT}/net/dcsctp/socket/dcsctp_socket.cc ${WEBRTC_ROOT}/net/dcsctp/socket/state_cookie.cc)
+target_include_directories(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_dcsctp_socket PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_chunk_validators webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_socket_transmission_control_block webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_rr_send_queue webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:heartbeat_handler
+add_library(webrtc_net_dcsctp_socket_heartbeat_handler ${WEBRTC_ROOT}/net/dcsctp/socket/heartbeat_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_heartbeat_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:stream_reset_handler
+add_library(webrtc_net_dcsctp_socket_stream_reset_handler ${WEBRTC_ROOT}/net/dcsctp/socket/stream_reset_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_stream_reset_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:transmission_control_block
+add_library(webrtc_net_dcsctp_socket_transmission_control_block ${WEBRTC_ROOT}/net/dcsctp/socket/transmission_control_block.cc)
+target_include_directories(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_transmission_control_block PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/timer:task_queue_timeout
+add_library(webrtc_net_dcsctp_timer_task_queue_timeout ${WEBRTC_ROOT}/net/dcsctp/timer/task_queue_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_task_queue_timeout PUBLIC webrtc_api_array_view webrtc_api_task_queue_task_queue webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
+
+# net/dcsctp/timer
+add_library(webrtc_net_dcsctp_timer_timer ${WEBRTC_ROOT}/net/dcsctp/timer/timer.cc)
+target_include_directories(webrtc_net_dcsctp_timer_timer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_timer PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_timer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_timer PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_error_counter
+add_library(webrtc_net_dcsctp_tx_retransmission_error_counter ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_error_counter.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_error_counter PUBLIC absl::strings webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_queue
+add_library(webrtc_net_dcsctp_tx_retransmission_queue ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_timeout
+add_library(webrtc_net_dcsctp_tx_retransmission_timeout ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_timeout PUBLIC webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:rr_send_queue
+add_library(webrtc_net_dcsctp_tx_rr_send_queue ${WEBRTC_ROOT}/net/dcsctp/tx/rr_send_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE WEBRTC_MAC WEBRTC_POSIX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_rr_send_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:send_queue
+add_library(webrtc_net_dcsctp_tx_send_queue INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_tx_send_queue INTERFACE absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_tx_send_queue INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
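That closes out the newly added dcSCTP targets; the net/dcsctp/public targets defined above (factory, socket, types) are the natural linking surface for anything else in the build. As a minimal sketch only (the executable name, source file, and compile settings below are placeholders mirroring the targets above, not part of this commit), a consumer of this CMake port would link them like any other target in this file:

    # Hypothetical consumer of the dcSCTP targets added above; the names here are
    # illustrative and do not appear anywhere in this commit.
    add_executable(dcsctp_demo demo/dcsctp_demo.cc)
    target_compile_options(dcsctp_demo PRIVATE -fno-exceptions)
    target_compile_definitions(dcsctp_demo PRIVATE WEBRTC_MAC WEBRTC_POSIX)
    target_link_libraries(dcsctp_demo PRIVATE
        webrtc_net_dcsctp_public_factory
        webrtc_net_dcsctp_public_socket
        webrtc_net_dcsctp_public_types)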
# p2p:fake_ice_transport
add_library(webrtc_p2p_fake_ice_transport INTERFACE)
target_link_libraries(webrtc_p2p_fake_ice_transport INTERFACE absl::algorithm_container absl::optional webrtc_api_libjingle_peerconnection_api webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
@@ -3161,7 +3428,7 @@ add_library(webrtc_pc_audio_rtp_receiver ${WEBRTC_ROOT}/pc/audio_rtp_receiver.cc
target_include_directories(webrtc_pc_audio_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_audio_rtp_receiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_audio_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
# pc:audio_track
add_library(webrtc_pc_audio_track ${WEBRTC_ROOT}/pc/audio_track.cc)
@@ -3175,38 +3442,28 @@ add_library(webrtc_pc_connection_context ${WEBRTC_ROOT}/pc/connection_context.cc
target_include_directories(webrtc_pc_connection_context PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_connection_context PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_connection_context PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
+target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
# pc:dtmf_sender
add_library(webrtc_pc_dtmf_sender ${WEBRTC_ROOT}/pc/dtmf_sender.cc)
target_include_directories(webrtc_pc_dtmf_sender PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_dtmf_sender PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_dtmf_sender PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_pc_proxy webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:integration_test_helpers
add_library(webrtc_pc_integration_test_helpers ${WEBRTC_ROOT}/pc/test/integration_test_helpers.cc)
target_include_directories(webrtc_pc_integration_test_helpers PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_integration_test_helpers PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_integration_test_helpers PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
# pc:jitter_buffer_delay
add_library(webrtc_pc_jitter_buffer_delay ${WEBRTC_ROOT}/pc/jitter_buffer_delay.cc)
target_include_directories(webrtc_pc_jitter_buffer_delay PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_jitter_buffer_delay PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_jitter_buffer_delay PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_sequence_checker webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading )
-
-# pc:jitter_buffer_delay_interface
-add_library(webrtc_pc_jitter_buffer_delay_interface INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_interface INTERFACE absl::algorithm_container absl::strings absl::optional webrtc_media_rtc_media_base webrtc_rtc_base_refcount )
-target_include_directories(webrtc_pc_jitter_buffer_delay_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# pc:jitter_buffer_delay_proxy
-add_library(webrtc_pc_jitter_buffer_delay_proxy INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_proxy INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface )
-target_include_directories(webrtc_pc_jitter_buffer_delay_proxy INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::optional webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_safe_conversions webrtc_rtc_base_safe_minmax webrtc_rtc_base_system_no_unique_address )
# pc:libjingle_peerconnection
add_library(webrtc_pc_libjingle_peerconnection INTERFACE)
@@ -3231,7 +3488,7 @@ add_library(webrtc_pc_pc_test_utils ${WEBRTC_ROOT}/pc/test/fake_audio_capture_mo
target_include_directories(webrtc_pc_pc_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_pc_test_utils PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_pc_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
# pc:peer_connection_message_handler
add_library(webrtc_pc_peer_connection_message_handler ${WEBRTC_ROOT}/pc/peer_connection_message_handler.cc)
@@ -3245,14 +3502,21 @@ add_library(webrtc_pc_peerconnection ${WEBRTC_ROOT}/pc/data_channel_controller.c
target_include_directories(webrtc_pc_peerconnection PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_peerconnection PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_sender webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:peerconnection_unittests
android_add_executable(TARGET webrtc_pc_peerconnection_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/data_channel_integrationtest.cc ${WEBRTC_ROOT}/pc/data_channel_unittest.cc ${WEBRTC_ROOT}/pc/dtmf_sender_unittest.cc ${WEBRTC_ROOT}/pc/ice_server_parsing_unittest.cc ${WEBRTC_ROOT}/pc/jitter_buffer_delay_unittest.cc ${WEBRTC_ROOT}/pc/jsep_session_description_unittest.cc ${WEBRTC_ROOT}/pc/local_audio_source_unittest.cc ${WEBRTC_ROOT}/pc/media_stream_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_adaptation_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_bundle_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_crypto_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_data_channel_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_end_to_end_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_factory_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_header_extension_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_histogram_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_ice_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_interface_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_jsep_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_media_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_rtp_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_signaling_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_simulcast_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.h ${WEBRTC_ROOT}/pc/proxy_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_integrationtest.cc ${WEBRTC_ROOT}/pc/rtc_stats_traversal_unittest.cc ${WEBRTC_ROOT}/pc/rtp_media_utils_unittest.cc ${WEBRTC_ROOT}/pc/rtp_parameters_conversion_unittest.cc ${WEBRTC_ROOT}/pc/rtp_sender_receiver_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transceiver_unittest.cc ${WEBRTC_ROOT}/pc/sctp_utils_unittest.cc ${WEBRTC_ROOT}/pc/sdp_serializer_unittest.cc ${WEBRTC_ROOT}/pc/stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/test/fake_audio_capture_module_unittest.cc ${WEBRTC_ROOT}/pc/test/test_sdp_strings.h ${WEBRTC_ROOT}/pc/track_media_info_map_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_track_source_unittest.cc ${WEBRTC_ROOT}/pc/video_track_unittest.cc ${WEBRTC_ROOT}/pc/webrtc_sdp_unittest.cc)
target_include_directories(webrtc_pc_peerconnection_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection_unittests PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_peerconnection_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 
-webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 
+webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+
+# pc:proxy
+add_library(webrtc_pc_proxy ${WEBRTC_ROOT}/pc/proxy.cc)
+target_include_directories(webrtc_pc_proxy PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_proxy PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_proxy PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_proxy PUBLIC webrtc_api_scoped_refptr webrtc_api_task_queue_task_queue webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# pc:remote_audio_source
add_library(webrtc_pc_remote_audio_source ${WEBRTC_ROOT}/pc/remote_audio_source.cc)
@@ -3267,32 +3531,32 @@ target_link_libraries(webrtc_pc_rtc_pc INTERFACE libsrtp webrtc_media_rtc_audio_
target_include_directories(webrtc_pc_rtc_pc INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# pc:rtc_pc_base
-add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/session_description.cc ${WEBRTC_ROOT}/pc/simulcast_description.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc)
+add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_collection.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc ${WEBRTC_ROOT}/pc/video_track_source_proxy.cc)
target_include_directories(webrtc_pc_rtc_pc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_base PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtc_pc_base PRIVATE -fno-exceptions -Ithird_party/libsrtp/include)
-target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_proxy webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:rtc_pc_unittests
android_add_executable(TARGET webrtc_pc_rtc_pc_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/channel_manager_unittest.cc ${WEBRTC_ROOT}/pc/channel_unittest.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/dtls_transport_unittest.cc ${WEBRTC_ROOT}/pc/ice_transport_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_unittest.cc ${WEBRTC_ROOT}/pc/media_session_unittest.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/sctp_transport_unittest.cc ${WEBRTC_ROOT}/pc/session_description_unittest.cc ${WEBRTC_ROOT}/pc/srtp_filter_unittest.cc ${WEBRTC_ROOT}/pc/srtp_session_unittest.cc ${WEBRTC_ROOT}/pc/srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/test/rtp_transport_test_util.h ${WEBRTC_ROOT}/pc/test/srtp_test_util.h ${WEBRTC_ROOT}/pc/used_ids_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_receiver_unittest.cc)
target_include_directories(webrtc_pc_rtc_pc_unittests PRIVATE ${WEBRTC_ROOT}/pc/../third_party/libsrtp/srtp ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_unittests PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_rtc_pc_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support )
+target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support )
# pc:rtp_parameters_conversion
add_library(webrtc_pc_rtp_parameters_conversion ${WEBRTC_ROOT}/pc/rtp_parameters_conversion.cc)
target_include_directories(webrtc_pc_rtp_parameters_conversion PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_parameters_conversion PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_parameters_conversion PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
+target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
# pc:rtp_receiver
add_library(webrtc_pc_rtp_receiver ${WEBRTC_ROOT}/pc/rtp_receiver.cc)
target_include_directories(webrtc_pc_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_receiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
# pc:rtp_sender
add_library(webrtc_pc_rtp_sender ${WEBRTC_ROOT}/pc/rtp_sender.cc)
@@ -3306,7 +3570,7 @@ add_library(webrtc_pc_rtp_transceiver ${WEBRTC_ROOT}/pc/rtp_transceiver.cc)
target_include_directories(webrtc_pc_rtp_transceiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_transceiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_transceiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_proxy webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_session_description webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:rtp_transmission_manager
add_library(webrtc_pc_rtp_transmission_manager ${WEBRTC_ROOT}/pc/rtp_transmission_manager.cc)
@@ -3327,6 +3591,20 @@ add_library(webrtc_pc_sdp_state_provider INTERFACE)
target_link_libraries(webrtc_pc_sdp_state_provider INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_pc_rtc_pc_base )
target_include_directories(webrtc_pc_sdp_state_provider INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# pc:session_description
+add_library(webrtc_pc_session_description ${WEBRTC_ROOT}/pc/session_description.cc)
+target_include_directories(webrtc_pc_session_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_session_description PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_session_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_session_description PUBLIC absl::algorithm_container absl::memory webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_simulcast_description webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
+# pc:simulcast_description
+add_library(webrtc_pc_simulcast_description ${WEBRTC_ROOT}/pc/simulcast_description.cc)
+target_include_directories(webrtc_pc_simulcast_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_simulcast_description PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_simulcast_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_simulcast_description PUBLIC webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
# pc:stats_collector_interface
add_library(webrtc_pc_stats_collector_interface INTERFACE)
target_link_libraries(webrtc_pc_stats_collector_interface INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface )
@@ -3351,7 +3629,7 @@ add_library(webrtc_pc_video_rtp_receiver ${WEBRTC_ROOT}/pc/video_rtp_receiver.cc
target_include_directories(webrtc_pc_video_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_video_rtp_receiver PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_video_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address )
# pc:video_rtp_track_source
add_library(webrtc_pc_video_rtp_track_source ${WEBRTC_ROOT}/pc/video_rtp_track_source.cc)
@@ -3719,7 +3997,7 @@ add_library(webrtc_rtc_base_platform_thread ${WEBRTC_ROOT}/rtc_base/platform_thr
target_include_directories(webrtc_rtc_base_platform_thread PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_platform_thread PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_platform_thread PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::strings webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::memory absl::strings absl::optional webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
# rtc_base:platform_thread.headers
add_library(webrtc_rtc_base_platform_thread.headers INTERFACE)
@@ -3738,6 +4016,15 @@ add_library(webrtc_rtc_base_platform_thread_types.headers INTERFACE)
target_link_libraries(webrtc_rtc_base_platform_thread_types.headers INTERFACE webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_platform_thread_types.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# rtc_base:protobuf_utils
+add_library(webrtc_rtc_base_protobuf_utils INTERFACE)
+target_link_libraries(webrtc_rtc_base_protobuf_utils INTERFACE libprotobuf )
+target_include_directories(webrtc_rtc_base_protobuf_utils INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# rtc_base:protobuf_utils.headers
+add_library(webrtc_rtc_base_protobuf_utils.headers INTERFACE)
+target_include_directories(webrtc_rtc_base_protobuf_utils.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# rtc_base:rate_limiter
add_library(webrtc_rtc_base_rate_limiter ${WEBRTC_ROOT}/rtc_base/rate_limiter.cc)
target_include_directories(webrtc_rtc_base_rate_limiter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3747,12 +4034,12 @@ target_link_libraries(webrtc_rtc_base_rate_limiter PUBLIC absl::optional webrtc_
# rtc_base:refcount
add_library(webrtc_rtc_base_refcount INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_rtc_base_macromagic )
+target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_macromagic )
target_include_directories(webrtc_rtc_base_refcount INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:refcount.headers
add_library(webrtc_rtc_base_refcount.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_rtc_base_macromagic.headers )
+target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_refcount.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base
@@ -3760,11 +4047,11 @@ add_library(webrtc_rtc_base_rtc_base ${WEBRTC_ROOT}/rtc_base/mac_ifaddrs_convert
target_include_directories(webrtc_rtc_base_rtc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_rtc_base PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_rtc_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# rtc_base:rtc_base.headers
add_library(webrtc_rtc_base_rtc_base.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_rtc_base_rtc_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:rtc_base_approved
@@ -4105,11 +4392,11 @@ add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag ${WEBRTC_ROOT}/r
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_system_no_unique_address )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_system_no_unique_address )
# rtc_base/task_utils:pending_task_safety_flag.headers
add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_refcount.headers webrtc_rtc_base_system_no_unique_address.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_system_no_unique_address.headers )
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:repeating_task
@@ -4117,11 +4404,11 @@ add_library(webrtc_rtc_base_task_utils_repeating_task ${WEBRTC_ROOT}/rtc_base/ta
target_include_directories(webrtc_rtc_base_task_utils_repeating_task PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_repeating_task PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_repeating_task PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
# rtc_base/task_utils:repeating_task.headers
add_library(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:to_queued_task
@@ -4161,11 +4448,11 @@ add_library(webrtc_rtc_base_threading ${WEBRTC_ROOT}/rtc_base/async_resolver.cc
target_include_directories(webrtc_rtc_base_threading PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_threading PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_threading PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_threading PUBLIC webrtc_rtc_base_system_cocoa_threading absl::algorithm_container webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_rtc_base_threading PUBLIC webrtc_rtc_base_system_cocoa_threading absl::algorithm_container webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# rtc_base:threading.headers
add_library(webrtc_rtc_base_threading.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_rtc_base_system_cocoa_threading.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
+target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_rtc_base_system_cocoa_threading.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
target_include_directories(webrtc_rtc_base_threading.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/time:timestamp_extrapolator
@@ -4349,7 +4636,7 @@ add_library(webrtc_sdk_videosource_objc ${WEBRTC_ROOT}/sdk/objc/api/peerconnecti
target_include_directories(webrtc_sdk_videosource_objc PRIVATE ${WEBRTC_ROOT}/sdk/objc ${WEBRTC_ROOT}/sdk/objc/base ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_sdk_videosource_objc PRIVATE WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_MAC WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_POSIX RTC_SUPPORTS_METAL)
target_compile_options(webrtc_sdk_videosource_objc PRIVATE -Wimplicit-retain-self -Wmissing-field-initializers -Wstrict-overflow -fno-exceptions -fobjc-weak)
-target_link_libraries(webrtc_sdk_videosource_objc PUBLIC webrtc-yuv webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_sdk_base_objc webrtc_sdk_mediasource_objc webrtc_sdk_native_video webrtc_sdk_videoframebuffer_objc )
+target_link_libraries(webrtc_sdk_videosource_objc PUBLIC webrtc-yuv webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_sdk_base_objc webrtc_sdk_mediasource_objc webrtc_sdk_native_video webrtc_sdk_videoframebuffer_objc )
target_link_libraries(webrtc_sdk_videosource_objc PRIVATE "-framework Foundation")
# sdk:videotoolbox_objc
@@ -4511,7 +4798,7 @@ add_library(webrtc_test_network_emulated_network ${WEBRTC_ROOT}/test/network/cro
target_include_directories(webrtc_test_network_emulated_network PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_network_emulated_network PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_test_network_emulated_network PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
# test/pc/sctp:fake_sctp_transport
add_library(webrtc_test_pc_sctp_fake_sctp_transport INTERFACE)
@@ -4523,7 +4810,7 @@ add_library(webrtc_test_peer_scenario_peer_scenario ${WEBRTC_ROOT}/test/peer_sce
target_include_directories(webrtc_test_peer_scenario_peer_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_peer_scenario_peer_scenario PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_test_peer_scenario_peer_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
# test:perf_test
add_library(webrtc_test_perf_test ${WEBRTC_ROOT}/test/testsupport/perf_result_reporter.cc ${WEBRTC_ROOT}/test/testsupport/perf_test.cc ${WEBRTC_ROOT}/test/testsupport/perf_test_histogram_writer.cc)
@@ -4549,7 +4836,7 @@ add_library(webrtc_test_rtp_test_utils ${WEBRTC_ROOT}/test/rtcp_packet_parser.cc
target_include_directories(webrtc_test_rtp_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_rtp_test_utils PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_test_rtp_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_rtp_test_utils PUBLIC webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
+target_link_libraries(webrtc_test_rtp_test_utils PUBLIC absl::optional webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
# test/scenario:column_printer
add_library(webrtc_test_scenario_column_printer ${WEBRTC_ROOT}/test/scenario/column_printer.cc)
@@ -4563,7 +4850,7 @@ add_library(webrtc_test_scenario_scenario ${WEBRTC_ROOT}/test/scenario/audio_str
target_include_directories(webrtc_test_scenario_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_scenario_scenario PRIVATE WEBRTC_MAC WEBRTC_POSIX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_test_scenario_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_scenario_scenario PUBLIC webrtc_modules_video_coding_objc_codec_factory_helper absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
+target_link_libraries(webrtc_test_scenario_scenario PUBLIC webrtc_modules_video_coding_objc_codec_factory_helper absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
# test:test_common
add_library(webrtc_test_test_common ${WEBRTC_ROOT}/test/call_test.cc ${WEBRTC_ROOT}/test/drifting_clock.cc ${WEBRTC_ROOT}/test/layer_filtering_transport.cc ${WEBRTC_ROOT}/test/run_loop.cc)
@@ -4624,7 +4911,7 @@ add_library(webrtc_video_video ${WEBRTC_ROOT}/video/buffered_frame_decryptor.cc
target_include_directories(webrtc_video_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_video_video PRIVATE WEBRTC_MAC WEBRTC_POSIX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_video_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
+target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
# video:video_stream_encoder_impl
add_library(webrtc_video_video_stream_encoder_impl ${WEBRTC_ROOT}/video/alignment_adjuster.cc ${WEBRTC_ROOT}/video/encoder_bitrate_adjuster.cc ${WEBRTC_ROOT}/video/encoder_overshoot_detector.cc ${WEBRTC_ROOT}/video/frame_encode_metadata_writer.cc ${WEBRTC_ROOT}/video/video_source_sink_controller.cc ${WEBRTC_ROOT}/video/video_stream_encoder.cc)
diff --git a/examples/BUILD.gn b/examples/BUILD.gn
index 704afc5467..2f1277c744 100644
--- a/examples/BUILD.gn
+++ b/examples/BUILD.gn
@@ -253,8 +253,6 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"objc/AppRTCMobile/ARDAppClient.m",
"objc/AppRTCMobile/ARDAppEngineClient.h",
"objc/AppRTCMobile/ARDAppEngineClient.m",
- "objc/AppRTCMobile/ARDBitrateTracker.h",
- "objc/AppRTCMobile/ARDBitrateTracker.m",
"objc/AppRTCMobile/ARDCaptureController.h",
"objc/AppRTCMobile/ARDCaptureController.m",
"objc/AppRTCMobile/ARDExternalSampleCapturer.h",
diff --git a/examples/androidapp/res/values/arrays.xml b/examples/androidapp/res/values/arrays.xml
index e0e6ccbdc2..4a2948c875 100644
--- a/examples/androidapp/res/values/arrays.xml
+++ b/examples/androidapp/res/values/arrays.xml
@@ -34,6 +34,7 @@
<item>VP9</item>
<item>H264 Baseline</item>
<item>H264 High</item>
+ <item>AV1</item>
</string-array>
<string-array name="audioCodecs">
diff --git a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
index 0776ccba60..b3282a6955 100644
--- a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -95,6 +95,8 @@ public class PeerConnectionClient {
private static final String VIDEO_CODEC_H264 = "H264";
private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline";
private static final String VIDEO_CODEC_H264_HIGH = "H264 High";
+ private static final String VIDEO_CODEC_AV1 = "AV1";
+ private static final String VIDEO_CODEC_AV1_SDP_CODEC_NAME = "AV1X";
private static final String AUDIO_CODEC_OPUS = "opus";
private static final String AUDIO_CODEC_ISAC = "ISAC";
private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
@@ -986,6 +988,8 @@ public class PeerConnectionClient {
return VIDEO_CODEC_VP8;
case VIDEO_CODEC_VP9:
return VIDEO_CODEC_VP9;
+ case VIDEO_CODEC_AV1:
+ return VIDEO_CODEC_AV1_SDP_CODEC_NAME;
case VIDEO_CODEC_H264_HIGH:
case VIDEO_CODEC_H264_BASELINE:
return VIDEO_CODEC_H264;
@@ -1312,6 +1316,9 @@ public class PeerConnectionClient {
@Override
public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {}
+
+ @Override
+ public void onRemoveTrack(final RtpReceiver receiver) {}
}
// Implementation detail: handle offer creation/signaling and answer setting,
diff --git a/examples/androidnativeapi/jni/android_call_client.cc b/examples/androidnativeapi/jni/android_call_client.cc
index f0b060632d..f38de24a3f 100644
--- a/examples/androidnativeapi/jni/android_call_client.cc
+++ b/examples/androidnativeapi/jni/android_call_client.cc
@@ -179,9 +179,9 @@ void AndroidCallClient::CreatePeerConnection() {
config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
// DTLS SRTP has to be disabled for loopback to work.
config.enable_dtls_srtp = false;
- pc_ = pcf_->CreatePeerConnection(config, nullptr /* port_allocator */,
- nullptr /* cert_generator */,
- pc_observer_.get());
+ webrtc::PeerConnectionDependencies deps(pc_observer_.get());
+ pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(deps)).MoveValue();
+
RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_;
rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
diff --git a/examples/objc/AppRTCMobile/ARDAppClient.h b/examples/objc/AppRTCMobile/ARDAppClient.h
index 1fed247060..8e124ed925 100644
--- a/examples/objc/AppRTCMobile/ARDAppClient.h
+++ b/examples/objc/AppRTCMobile/ARDAppClient.h
@@ -48,7 +48,7 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error;
-- (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats;
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
@optional
- (void)appClient:(ARDAppClient *)client
diff --git a/examples/objc/AppRTCMobile/ARDAppClient.m b/examples/objc/AppRTCMobile/ARDAppClient.m
index ccd5bb0662..fa6a960a54 100644
--- a/examples/objc/AppRTCMobile/ARDAppClient.m
+++ b/examples/objc/AppRTCMobile/ARDAppClient.m
@@ -191,9 +191,8 @@ static int const kKbpsMultiplier = 1000;
repeats:YES
timerHandler:^{
ARDAppClient *strongSelf = weakSelf;
- [strongSelf.peerConnection statsForTrack:nil
- statsOutputLevel:RTCStatsOutputLevelDebug
- completionHandler:^(NSArray *stats) {
+ [strongSelf.peerConnection statisticsWithCompletionHandler:^(
+ RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
dispatch_async(dispatch_get_main_queue(), ^{
ARDAppClient *strongSelf = weakSelf;
[strongSelf.delegate appClient:strongSelf didGetStats:stats];
diff --git a/examples/objc/AppRTCMobile/ARDBitrateTracker.h b/examples/objc/AppRTCMobile/ARDBitrateTracker.h
deleted file mode 100644
index 81ac4b4bd5..0000000000
--- a/examples/objc/AppRTCMobile/ARDBitrateTracker.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2015 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-
-/** Class used to estimate bitrate based on byte count. It is expected that
- * byte count is monotonocially increasing. This class tracks the times that
- * byte count is updated, and measures the bitrate based on the byte difference
- * over the interval between updates.
- */
-@interface ARDBitrateTracker : NSObject
-
-/** The bitrate in bits per second. */
-@property(nonatomic, readonly) double bitrate;
-/** The bitrate as a formatted string in bps, Kbps or Mbps. */
-@property(nonatomic, readonly) NSString *bitrateString;
-
-/** Converts the bitrate to a readable format in bps, Kbps or Mbps. */
-+ (NSString *)bitrateStringForBitrate:(double)bitrate;
-/** Updates the tracked bitrate with the new byte count. */
-- (void)updateBitrateWithCurrentByteCount:(NSInteger)byteCount;
-
-@end
diff --git a/examples/objc/AppRTCMobile/ARDBitrateTracker.m b/examples/objc/AppRTCMobile/ARDBitrateTracker.m
deleted file mode 100644
index 8158229187..0000000000
--- a/examples/objc/AppRTCMobile/ARDBitrateTracker.m
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright 2015 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "ARDBitrateTracker.h"
-
-#import <QuartzCore/QuartzCore.h>
-
-@implementation ARDBitrateTracker {
- CFTimeInterval _prevTime;
- NSInteger _prevByteCount;
-}
-
-@synthesize bitrate = _bitrate;
-
-+ (NSString *)bitrateStringForBitrate:(double)bitrate {
- if (bitrate > 1e6) {
- return [NSString stringWithFormat:@"%.2fMbps", bitrate * 1e-6];
- } else if (bitrate > 1e3) {
- return [NSString stringWithFormat:@"%.0fKbps", bitrate * 1e-3];
- } else {
- return [NSString stringWithFormat:@"%.0fbps", bitrate];
- }
-}
-
-- (NSString *)bitrateString {
- return [[self class] bitrateStringForBitrate:_bitrate];
-}
-
-- (void)updateBitrateWithCurrentByteCount:(NSInteger)byteCount {
- CFTimeInterval currentTime = CACurrentMediaTime();
- if (_prevTime && (byteCount > _prevByteCount)) {
- _bitrate = (byteCount - _prevByteCount) * 8 / (currentTime - _prevTime);
- }
- _prevByteCount = byteCount;
- _prevTime = currentTime;
-}
-
-@end
diff --git a/examples/objc/AppRTCMobile/ARDStatsBuilder.h b/examples/objc/AppRTCMobile/ARDStatsBuilder.h
index e8224dd707..eaffa67049 100644
--- a/examples/objc/AppRTCMobile/ARDStatsBuilder.h
+++ b/examples/objc/AppRTCMobile/ARDStatsBuilder.h
@@ -10,10 +10,9 @@
#import <Foundation/Foundation.h>
+#import "sdk/objc/api/peerconnection/RTCStatisticsReport.h"
#import "sdk/objc/base/RTCMacros.h"
-@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
-
/** Class used to accumulate stats information into a single displayable string.
*/
@interface ARDStatsBuilder : NSObject
@@ -22,10 +21,6 @@
* class.
*/
@property(nonatomic, readonly) NSString *statsString;
-
-/** Parses the information in the stats report into an appropriate internal
- * format used to generate the stats string.
- */
-- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport;
+@property(nonatomic) RTC_OBJC_TYPE(RTCStatisticsReport) * stats;
@end
diff --git a/examples/objc/AppRTCMobile/ARDStatsBuilder.m b/examples/objc/AppRTCMobile/ARDStatsBuilder.m
index a74e351d51..7ebf9fb1c7 100644
--- a/examples/objc/AppRTCMobile/ARDStatsBuilder.m
+++ b/examples/objc/AppRTCMobile/ARDStatsBuilder.m
@@ -13,333 +13,23 @@
#import "sdk/objc/api/peerconnection/RTCLegacyStatsReport.h"
#import "sdk/objc/base/RTCMacros.h"
-#import "ARDBitrateTracker.h"
#import "ARDUtilities.h"
-@implementation ARDStatsBuilder {
- // Connection stats.
- NSString *_connRecvBitrate;
- NSString *_connRtt;
- NSString *_connSendBitrate;
- NSString *_localCandType;
- NSString *_remoteCandType;
- NSString *_transportType;
+@implementation ARDStatsBuilder
- // BWE stats.
- NSString *_actualEncBitrate;
- NSString *_availableRecvBw;
- NSString *_availableSendBw;
- NSString *_targetEncBitrate;
-
- // Video send stats.
- NSString *_videoEncodeMs;
- NSString *_videoInputFps;
- NSString *_videoInputHeight;
- NSString *_videoInputWidth;
- NSString *_videoSendCodec;
- NSString *_videoSendBitrate;
- NSString *_videoSendFps;
- NSString *_videoSendHeight;
- NSString *_videoSendWidth;
-
- // QP stats.
- int _videoQPSum;
- int _framesEncoded;
- int _oldVideoQPSum;
- int _oldFramesEncoded;
-
- // Video receive stats.
- NSString *_videoDecodeMs;
- NSString *_videoDecodedFps;
- NSString *_videoOutputFps;
- NSString *_videoRecvBitrate;
- NSString *_videoRecvFps;
- NSString *_videoRecvHeight;
- NSString *_videoRecvWidth;
-
- // Audio send stats.
- NSString *_audioSendBitrate;
- NSString *_audioSendCodec;
-
- // Audio receive stats.
- NSString *_audioCurrentDelay;
- NSString *_audioExpandRate;
- NSString *_audioRecvBitrate;
- NSString *_audioRecvCodec;
-
- // Bitrate trackers.
- ARDBitrateTracker *_audioRecvBitrateTracker;
- ARDBitrateTracker *_audioSendBitrateTracker;
- ARDBitrateTracker *_connRecvBitrateTracker;
- ARDBitrateTracker *_connSendBitrateTracker;
- ARDBitrateTracker *_videoRecvBitrateTracker;
- ARDBitrateTracker *_videoSendBitrateTracker;
-}
-
-- (instancetype)init {
- if (self = [super init]) {
- _audioSendBitrateTracker = [[ARDBitrateTracker alloc] init];
- _audioRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
- _connSendBitrateTracker = [[ARDBitrateTracker alloc] init];
- _connRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
- _videoSendBitrateTracker = [[ARDBitrateTracker alloc] init];
- _videoRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
- _videoQPSum = 0;
- _framesEncoded = 0;
- }
- return self;
-}
+@synthesize stats = _stats;
- (NSString *)statsString {
NSMutableString *result = [NSMutableString string];
- NSString *systemStatsFormat = @"(cpu)%ld%%\n";
- [result appendString:[NSString stringWithFormat:systemStatsFormat,
- (long)ARDGetCpuUsagePercentage()]];
-
- // Connection stats.
- NSString *connStatsFormat = @"CN %@ms | %@->%@/%@ | (s)%@ | (r)%@\n";
- [result appendString:[NSString stringWithFormat:connStatsFormat,
- _connRtt,
- _localCandType, _remoteCandType, _transportType,
- _connSendBitrate, _connRecvBitrate]];
-
- // Video send stats.
- NSString *videoSendFormat = @"VS (input) %@x%@@%@fps | (sent) %@x%@@%@fps\n"
- "VS (enc) %@/%@ | (sent) %@/%@ | %@ms | %@\n"
- "AvgQP (past %d encoded frames) = %d\n ";
- int avgqp = [self calculateAvgQP];
-
- [result appendString:[NSString stringWithFormat:videoSendFormat,
- _videoInputWidth, _videoInputHeight, _videoInputFps,
- _videoSendWidth, _videoSendHeight, _videoSendFps,
- _actualEncBitrate, _targetEncBitrate,
- _videoSendBitrate, _availableSendBw,
- _videoEncodeMs,
- _videoSendCodec,
- _framesEncoded - _oldFramesEncoded, avgqp]];
-
- // Video receive stats.
- NSString *videoReceiveFormat =
- @"VR (recv) %@x%@@%@fps | (decoded)%@ | (output)%@fps | %@/%@ | %@ms\n";
- [result appendString:[NSString stringWithFormat:videoReceiveFormat,
- _videoRecvWidth, _videoRecvHeight, _videoRecvFps,
- _videoDecodedFps,
- _videoOutputFps,
- _videoRecvBitrate, _availableRecvBw,
- _videoDecodeMs]];
-
- // Audio send stats.
- NSString *audioSendFormat = @"AS %@ | %@\n";
- [result appendString:[NSString stringWithFormat:audioSendFormat,
- _audioSendBitrate, _audioSendCodec]];
- // Audio receive stats.
- NSString *audioReceiveFormat = @"AR %@ | %@ | %@ms | (expandrate)%@";
- [result appendString:[NSString stringWithFormat:audioReceiveFormat,
- _audioRecvBitrate, _audioRecvCodec, _audioCurrentDelay,
- _audioExpandRate]];
+ [result appendFormat:@"(cpu)%ld%%\n", (long)ARDGetCpuUsagePercentage()];
- return result;
-}
-
-- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- NSString *reportType = statsReport.type;
- if ([reportType isEqualToString:@"ssrc"] &&
- [statsReport.reportId rangeOfString:@"ssrc"].location != NSNotFound) {
- if ([statsReport.reportId rangeOfString:@"send"].location != NSNotFound) {
- [self parseSendSsrcStatsReport:statsReport];
- }
- if ([statsReport.reportId rangeOfString:@"recv"].location != NSNotFound) {
- [self parseRecvSsrcStatsReport:statsReport];
- }
- } else if ([reportType isEqualToString:@"VideoBwe"]) {
- [self parseBweStatsReport:statsReport];
- } else if ([reportType isEqualToString:@"googCandidatePair"]) {
- [self parseConnectionStatsReport:statsReport];
+ for (NSString *key in _stats.statistics) {
+ RTC_OBJC_TYPE(RTCStatistics) *stat = _stats.statistics[key];
+ [result appendFormat:@"%@\n", stat.description];
}
-}
-
-#pragma mark - Private
-
-- (int)calculateAvgQP {
- int deltaFramesEncoded = _framesEncoded - _oldFramesEncoded;
- int deltaQPSum = _videoQPSum - _oldVideoQPSum;
-
- return deltaFramesEncoded != 0 ? deltaQPSum / deltaFramesEncoded : 0;
-}
-- (void)updateBweStatOfKey:(NSString *)key value:(NSString *)value {
- if ([key isEqualToString:@"googAvailableSendBandwidth"]) {
- _availableSendBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
- } else if ([key isEqualToString:@"googAvailableReceiveBandwidth"]) {
- _availableRecvBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
- } else if ([key isEqualToString:@"googActualEncBitrate"]) {
- _actualEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
- } else if ([key isEqualToString:@"googTargetEncBitrate"]) {
- _targetEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
- }
-}
-
-- (void)parseBweStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- [statsReport.values
- enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
- [self updateBweStatOfKey:key value:value];
- }];
-}
-
-- (void)updateConnectionStatOfKey:(NSString *)key value:(NSString *)value {
- if ([key isEqualToString:@"googRtt"]) {
- _connRtt = value;
- } else if ([key isEqualToString:@"googLocalCandidateType"]) {
- _localCandType = value;
- } else if ([key isEqualToString:@"googRemoteCandidateType"]) {
- _remoteCandType = value;
- } else if ([key isEqualToString:@"googTransportType"]) {
- _transportType = value;
- } else if ([key isEqualToString:@"bytesReceived"]) {
- NSInteger byteCount = value.integerValue;
- [_connRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
- _connRecvBitrate = _connRecvBitrateTracker.bitrateString;
- } else if ([key isEqualToString:@"bytesSent"]) {
- NSInteger byteCount = value.integerValue;
- [_connSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
- _connSendBitrate = _connSendBitrateTracker.bitrateString;
- }
-}
-
-- (void)parseConnectionStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- NSString *activeConnection = statsReport.values[@"googActiveConnection"];
- if (![activeConnection isEqualToString:@"true"]) {
- return;
- }
- [statsReport.values
- enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
- [self updateConnectionStatOfKey:key value:value];
- }];
-}
-
-- (void)parseSendSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- NSDictionary *values = statsReport.values;
- if ([values objectForKey:@"googFrameRateSent"]) {
- // Video track.
- [self parseVideoSendStatsReport:statsReport];
- } else if ([values objectForKey:@"audioInputLevel"]) {
- // Audio track.
- [self parseAudioSendStatsReport:statsReport];
- }
-}
-
-- (void)updateAudioSendStatOfKey:(NSString *)key value:(NSString *)value {
- if ([key isEqualToString:@"googCodecName"]) {
- _audioSendCodec = value;
- } else if ([key isEqualToString:@"bytesSent"]) {
- NSInteger byteCount = value.integerValue;
- [_audioSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
- _audioSendBitrate = _audioSendBitrateTracker.bitrateString;
- }
-}
-
-- (void)parseAudioSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- [statsReport.values
- enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
- [self updateAudioSendStatOfKey:key value:value];
- }];
-}
-
-- (void)updateVideoSendStatOfKey:(NSString *)key value:(NSString *)value {
- if ([key isEqualToString:@"googCodecName"]) {
- _videoSendCodec = value;
- } else if ([key isEqualToString:@"googFrameHeightInput"]) {
- _videoInputHeight = value;
- } else if ([key isEqualToString:@"googFrameWidthInput"]) {
- _videoInputWidth = value;
- } else if ([key isEqualToString:@"googFrameRateInput"]) {
- _videoInputFps = value;
- } else if ([key isEqualToString:@"googFrameHeightSent"]) {
- _videoSendHeight = value;
- } else if ([key isEqualToString:@"googFrameWidthSent"]) {
- _videoSendWidth = value;
- } else if ([key isEqualToString:@"googFrameRateSent"]) {
- _videoSendFps = value;
- } else if ([key isEqualToString:@"googAvgEncodeMs"]) {
- _videoEncodeMs = value;
- } else if ([key isEqualToString:@"bytesSent"]) {
- NSInteger byteCount = value.integerValue;
- [_videoSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
- _videoSendBitrate = _videoSendBitrateTracker.bitrateString;
- } else if ([key isEqualToString:@"qpSum"]) {
- _oldVideoQPSum = _videoQPSum;
- _videoQPSum = value.integerValue;
- } else if ([key isEqualToString:@"framesEncoded"]) {
- _oldFramesEncoded = _framesEncoded;
- _framesEncoded = value.integerValue;
- }
-}
-
-- (void)parseVideoSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- [statsReport.values
- enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
- [self updateVideoSendStatOfKey:key value:value];
- }];
-}
-
-- (void)parseRecvSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- NSDictionary *values = statsReport.values;
- if ([values objectForKey:@"googFrameWidthReceived"]) {
- // Video track.
- [self parseVideoRecvStatsReport:statsReport];
- } else if ([values objectForKey:@"audioOutputLevel"]) {
- // Audio track.
- [self parseAudioRecvStatsReport:statsReport];
- }
-}
-
-- (void)updateAudioRecvStatOfKey:(NSString *)key value:(NSString *)value {
- if ([key isEqualToString:@"googCodecName"]) {
- _audioRecvCodec = value;
- } else if ([key isEqualToString:@"bytesReceived"]) {
- NSInteger byteCount = value.integerValue;
- [_audioRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
- _audioRecvBitrate = _audioRecvBitrateTracker.bitrateString;
- } else if ([key isEqualToString:@"googSpeechExpandRate"]) {
- _audioExpandRate = value;
- } else if ([key isEqualToString:@"googCurrentDelayMs"]) {
- _audioCurrentDelay = value;
- }
-}
-
-- (void)parseAudioRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- [statsReport.values
- enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
- [self updateAudioRecvStatOfKey:key value:value];
- }];
-}
-
-- (void)updateVideoRecvStatOfKey:(NSString *)key value:(NSString *)value {
- if ([key isEqualToString:@"googFrameHeightReceived"]) {
- _videoRecvHeight = value;
- } else if ([key isEqualToString:@"googFrameWidthReceived"]) {
- _videoRecvWidth = value;
- } else if ([key isEqualToString:@"googFrameRateReceived"]) {
- _videoRecvFps = value;
- } else if ([key isEqualToString:@"googFrameRateDecoded"]) {
- _videoDecodedFps = value;
- } else if ([key isEqualToString:@"googFrameRateOutput"]) {
- _videoOutputFps = value;
- } else if ([key isEqualToString:@"googDecodeMs"]) {
- _videoDecodeMs = value;
- } else if ([key isEqualToString:@"bytesReceived"]) {
- NSInteger byteCount = value.integerValue;
- [_videoRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
- _videoRecvBitrate = _videoRecvBitrateTracker.bitrateString;
- }
-}
-
-- (void)parseVideoRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
- [statsReport.values
- enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
- [self updateVideoRecvStatOfKey:key value:value];
- }];
+ return result;
}
@end
diff --git a/examples/objc/AppRTCMobile/ios/ARDStatsView.h b/examples/objc/AppRTCMobile/ios/ARDStatsView.h
index 9c8636476c..72207de64e 100644
--- a/examples/objc/AppRTCMobile/ios/ARDStatsView.h
+++ b/examples/objc/AppRTCMobile/ios/ARDStatsView.h
@@ -10,8 +10,12 @@
#import <UIKit/UIKit.h>
+#import "sdk/objc/base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCStatisticsReport);
+
@interface ARDStatsView : UIView
-- (void)setStats:(NSArray *)stats;
+- (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
@end
diff --git a/examples/objc/AppRTCMobile/ios/ARDStatsView.m b/examples/objc/AppRTCMobile/ios/ARDStatsView.m
index bd97d30fbe..867ba5b09e 100644
--- a/examples/objc/AppRTCMobile/ios/ARDStatsView.m
+++ b/examples/objc/AppRTCMobile/ios/ARDStatsView.m
@@ -34,10 +34,8 @@
return self;
}
-- (void)setStats:(NSArray *)stats {
- for (RTC_OBJC_TYPE(RTCLegacyStatsReport) * report in stats) {
- [_statsBuilder parseStatsReport:report];
- }
+- (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+ _statsBuilder.stats = stats;
_statsLabel.text = _statsBuilder.statsString;
}
diff --git a/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m b/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m
index cd26829713..a82d90b290 100644
--- a/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m
+++ b/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m
@@ -132,8 +132,7 @@
});
}
-- (void)appClient:(ARDAppClient *)client
- didGetStats:(NSArray *)stats {
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
_videoCallView.statsView.stats = stats;
[_videoCallView setNeedsLayout];
}
diff --git a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m
index d9c816d573..1c276d965f 100644
--- a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m
+++ b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m
@@ -120,7 +120,7 @@
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
}
-- (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats {
+- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
}
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error {
diff --git a/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m b/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m
index 60f670544a..ad7b99a4b2 100644
--- a/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m
+++ b/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m
@@ -1487,7 +1487,7 @@ static const size_t SRFrameHeaderOverhead = 32;
if (bytes_read > 0) {
[self->_readBuffer appendBytes:buffer length:bytes_read];
} else if (bytes_read < 0) {
- [self _failWithError:_inputStream.streamError];
+ [self _failWithError:self->_inputStream.streamError];
}
if (bytes_read != bufferSize) {
diff --git a/examples/objcnativeapi/objc/objc_call_client.mm b/examples/objcnativeapi/objc/objc_call_client.mm
index 5ce7eb7804..419203eb62 100644
--- a/examples/objcnativeapi/objc/objc_call_client.mm
+++ b/examples/objcnativeapi/objc/objc_call_client.mm
@@ -144,7 +144,7 @@ void ObjCCallClient::CreatePeerConnection() {
// DTLS SRTP has to be disabled for loopback to work.
config.enable_dtls_srtp = false;
webrtc::PeerConnectionDependencies pc_dependencies(pc_observer_.get());
- pc_ = pcf_->CreatePeerConnection(config, std::move(pc_dependencies));
+ pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)).MoveValue();
RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_;
rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
diff --git a/examples/unityplugin/simple_peer_connection.cc b/examples/unityplugin/simple_peer_connection.cc
index 23e4d7b869..c7e5185bdc 100644
--- a/examples/unityplugin/simple_peer_connection.cc
+++ b/examples/unityplugin/simple_peer_connection.cc
@@ -192,10 +192,14 @@ bool SimplePeerConnection::CreatePeerConnection(const char** turn_urls,
config_.servers.push_back(stun_server);
config_.enable_dtls_srtp = false;
- peer_connection_ = g_peer_connection_factory->CreatePeerConnection(
- config_, nullptr, nullptr, this);
-
- return peer_connection_.get() != nullptr;
+ auto result = g_peer_connection_factory->CreatePeerConnectionOrError(
+ config_, webrtc::PeerConnectionDependencies(this));
+ if (!result.ok()) {
+ peer_connection_ = nullptr;
+ return false;
+ }
+ peer_connection_ = result.MoveValue();
+ return true;
}
void SimplePeerConnection::DeletePeerConnection() {
@@ -493,8 +497,9 @@ bool SimplePeerConnection::CreateDataChannel() {
struct webrtc::DataChannelInit init;
init.ordered = true;
init.reliable = true;
- data_channel_ = peer_connection_->CreateDataChannel("Hello", &init);
- if (data_channel_.get()) {
+ auto result = peer_connection_->CreateDataChannelOrError("Hello", &init);
+ if (result.ok()) {
+ data_channel_ = result.MoveValue();
data_channel_->RegisterObserver(this);
RTC_LOG(LS_INFO) << "Succeeds to create data channel";
return true;
diff --git a/abseil-in-webrtc.md b/g3doc/abseil-in-webrtc.md
index 79b1031ffd..692ebe2b0b 100644
--- a/abseil-in-webrtc.md
+++ b/g3doc/abseil-in-webrtc.md
@@ -1,5 +1,8 @@
# Using Abseil in WebRTC
+<?% config.freshness.owner = 'danilchap' %?>
+<?% config.freshness.reviewed = '2021-05-12' %?>
+
You may use a subset of the utilities provided by the [Abseil][abseil]
library when writing WebRTC C++ code. Below, we list the explicitly
*allowed* and the explicitly *disallowed* subsets of Abseil; if you
@@ -22,6 +25,7 @@ will generate a shared library.
## **Allowed**
+* `absl::bind_front`
* `absl::InlinedVector`
* `absl::WrapUnique`
* `absl::optional` and related stuff from `absl/types/optional.h`.
diff --git a/g3doc/implementation_basics.md b/g3doc/implementation_basics.md
new file mode 100644
index 0000000000..933941a0d1
--- /dev/null
+++ b/g3doc/implementation_basics.md
@@ -0,0 +1,92 @@
+<?% config.freshness.owner = 'hta' %?>
+<?% config.freshness.reviewed = '2021-05-31' %?>
+
+# Basic concepts and primitives
+
+## Time
+
+Internally, time is represented using the [webrtc::Timestamp][1] class. This
+represents time with a resolution of one microsecond, using a 64-bit integer,
+and provides converters to milliseconds or seconds as needed.
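+
+As an illustrative sketch, constructing a `Timestamp` and reading it back in
+other units might look as follows (the factory and accessor names are taken
+from `api/units/timestamp.h`; treat them as an assumption of this sketch):
+
+```cpp
+#include "api/units/timestamp.h"
+
+void TimestampSketch() {
+  // Construct from a millisecond count, then read it back in other units.
+  webrtc::Timestamp t = webrtc::Timestamp::Millis(1500);
+  int64_t us = t.us();      // 1500000 microseconds.
+  int64_t ms = t.ms();      // 1500 milliseconds.
+  int64_t s = t.seconds();  // 1 second (rounded down).
+  (void)us; (void)ms; (void)s;
+}
+```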
+
+All timestamps need to be measured from the system monotonic time.
+
+The epoch is not specified (because we can't always know if the system clock is
+correct), but whenever an absolute epoch is needed, the Unix time
+epoch (Jan 1, 1970 at 0:00 GMT) is used.
+
+Conversion from/to other formats (for example milliseconds, NTP times,
+timestamp strings) should happen as close to the interface requiring that
+format as possible.
+
+NOTE: There are parts of the codebase that don't use Timestamp, parts of the
+codebase that use the NTP epoch, and parts of the codebase that don't use the
+monotonic clock. They need to
+be updated.
+
+## Threads
+
+All execution happens on a TaskQueue instance. How a TaskQueue is implemented
+varies by platform, but all implementations have the [webrtc::TaskQueueBase][3]
+API.
+
+This API offers primitives for posting tasks, with or without delay.
+
+Some core parts use the [rtc::Thread][2], which is a subclass of TaskQueueBase.
+This may contain a SocketServer for processing I/O, and is used for policing
+certain calling patterns between a few core threads (the NetworkThread cannot
+do Invoke on the Worker thread, for instance).
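+
+As an illustrative sketch of posting tasks (it assumes the `ToQueuedTask`
+helper from `rtc_base/task_utils/to_queued_task.h`; exact signatures are an
+assumption of this sketch):
+
+```cpp
+#include "api/task_queue/task_queue_base.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+
+void PostSketch(webrtc::TaskQueueBase* queue) {
+  // Post a closure; it runs later on |queue|, never synchronously here.
+  queue->PostTask(webrtc::ToQueuedTask([] { /* do work */ }));
+  // Post a closure that runs after a delay of 100 milliseconds.
+  queue->PostDelayedTask(webrtc::ToQueuedTask([] { /* delayed work */ }), 100);
+}
+```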
+
+## Synchronization primitives
+
+### PostTask and thread-guarded variables
+
+The preferred method for synchronization is to post tasks between threads,
+and to let each thread take care of its own variables (lock-free programming).
+All variables in
+classes intended to be used with multiple threads should therefore be
+annotated with RTC_GUARDED_BY(thread).
+
+For classes used with only one thread, the recommended pattern is to let
+them own a webrtc::SequenceChecker (conventionally named sequence_checker_)
+and let all variables be RTC_GUARDED_BY(sequence_checker_).
+
+Member variables marked const do not need to be guarded, since they never
+change. (But note that they may point to objects that can change!)
+
+When posting tasks with callbacks, it is the duty of the caller to check
+that the object one is calling back into still exists when the callback
+is made. A helper for this task is the [webrtc::ScopedTaskSafety][5]
+flag and its associated classes, which can automatically drop callbacks
+in this situation.
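+
+As an illustrative sketch of the single-sequence pattern described above
+(include paths and the `RTC_DCHECK_RUN_ON` macro name are assumptions of this
+sketch, not prescriptions):
+
+```cpp
+#include "api/sequence_checker.h"
+#include "rtc_base/thread_annotations.h"
+
+class Counter {
+ public:
+  void Increment() {
+    // Check (in debug builds) that we are running on the sequence this
+    // object is attached to.
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    ++value_;
+  }
+
+ private:
+  webrtc::SequenceChecker sequence_checker_;
+  int value_ RTC_GUARDED_BY(sequence_checker_) = 0;
+};
+```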
+
+### Synchronization primitives to be used when needed
+
+When it is absolutely necessary to let one thread wait for another thread
+to do something, Thread::Invoke can be used. This function is DISCOURAGED,
+since it leads to performance issues, but is currently still widespread.
+
+When it is absolutely necessary to access one variable from multiple threads,
+the webrtc::Mutex can be used. Such variables MUST be marked up with
+RTC_GUARDED_BY(mutex), to allow static analysis that lessens the chance of
+deadlocks or unintended consequences.
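+
+An illustrative sketch of a mutex-guarded member (assuming `webrtc::Mutex` and
+`webrtc::MutexLock` from `rtc_base/synchronization/mutex.h`):
+
+```cpp
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+class SharedCounter {
+ public:
+  void Increment() {
+    // RAII lock: released when |lock| goes out of scope.
+    webrtc::MutexLock lock(&mutex_);
+    ++value_;
+  }
+
+ private:
+  webrtc::Mutex mutex_;
+  int value_ RTC_GUARDED_BY(mutex_) = 0;
+};
+```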
+
+### Synchronization primitives that are being removed
+
+The following non-exhaustive list of synchronization primitives is in the
+(slow) process of being removed from the codebase.
+
+* sigslot. Use [webrtc::CallbackList][4] instead, or, when there's only one
+ signal consumer, a single std::function.
+
+* AsyncInvoker.
+
+* RecursiveCriticalSection. Try to use [webrtc::Mutex][6] instead, and don't recurse.
+
+
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/units/timestamp.h;drc=b95d90b78a3491ef8e8aa0640dd521515ec881ca;l=29
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/rtc_base/thread.h;drc=1107751b6f11c35259a1c5c8a0f716e227b7e3b4;l=194
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/task_queue/task_queue_base.h;drc=1107751b6f11c35259a1c5c8a0f716e227b7e3b4;l=25
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/rtc_base/callback_list.h;drc=54b91412de3f579a2d5ccdead6e04cc2cc5ca3a1;l=162
+[5]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/rtc_base/task_utils/pending_task_safety_flag.h;drc=86ee89f73e4f4799b3ebcc0b5c65837c9601fe6d;l=117
+[6]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/rtc_base/synchronization/mutex.h;drc=0d3c09a8fe5f12dfbc9f1bcd5790fda8830624ec;l=40
diff --git a/g3doc/sitemap.md b/g3doc/sitemap.md
index f3f9cd7547..f54bf4d21a 100644
--- a/g3doc/sitemap.md
+++ b/g3doc/sitemap.md
@@ -1,15 +1,19 @@
* [Home](/g3doc/index.md)
* How to contribute
* Code
+ * [Style guide](/g3doc/style-guide.md)
* [Documentation](/g3doc/how_to_write_documentation.md)
* [Public C++ API](/api/g3doc/index.md)
* [Threading](/api/g3doc/threading_design.md)
* Implementation
+ * [Basic concepts](/g3doc/implementation_basics.md)
* Network
* [ICE](/p2p/g3doc/ice.md)
* STUN
* TURN
- * DTLS
+ * [DTLS](/pc/g3doc/dtls_transport.md)
+ * [RTP](/pc/g3doc/rtp.md)
+ * [SRTP](/pc/g3doc/srtp.md)
* [SCTP](/pc/g3doc/sctp_transport.md)
* [Pacing buffer](/modules/pacing/g3doc/index.md)
* Congestion control and bandwidth estimation
@@ -18,6 +22,7 @@
* AudioEngine
* [ADM](/modules/audio_device/g3doc/audio_device_module.md)
* [Audio Coding](/modules/audio_coding/g3doc/index.md)
+ * [Audio Mixer](/modules/audio_mixer/g3doc/index.md)
* AudioProcessingModule
* [APM](/modules/audio_processing/g3doc/audio_processing_module.md)
* Video
@@ -25,9 +30,10 @@
* [Video coding](/modules/video_coding/g3doc/index.md)
* [Stats](/video/g3doc/stats.md)
* DataChannel
- * PeerConnection
+ * [PeerConnection](/pc/g3doc/peer_connection.md)
* Desktop capture
* Stats
+ * [Logging](/logging/g3doc/rtc_event_log.md)
* Testing
* Media Quality and performance
* [PeerConnection Framework](/test/pc/e2e/g3doc/index.md)
diff --git a/g3doc/style-guide.md b/g3doc/style-guide.md
new file mode 100644
index 0000000000..f3b0e8869d
--- /dev/null
+++ b/g3doc/style-guide.md
@@ -0,0 +1,279 @@
+# WebRTC coding style guide
+
+<?% config.freshness.owner = 'danilchap' %?>
+<?% config.freshness.reviewed = '2021-05-12' %?>
+
+## General advice
+
+Some older parts of the code violate the style guide in various ways.
+
+* If making small changes to such code, follow the style guide when it's
+ reasonable to do so, but in matters of formatting etc., it is often better to
+ be consistent with the surrounding code.
+* If making large changes to such code, consider first cleaning it up in a
+ separate CL.
+
+## C++
+
+WebRTC follows the [Chromium C++ style guide][chr-style] and the
+[Google C++ style guide][goog-style]. In cases where they conflict, the Chromium
+style guide trumps the Google style guide, and the rules in this file trump them
+both.
+
+[chr-style]: https://chromium.googlesource.com/chromium/src/+/HEAD/styleguide/c++/c++.md
+[goog-style]: https://google.github.io/styleguide/cppguide.html
+
+### C++ version
+
+WebRTC is written in C++14, but with some restrictions:
+
+* We only allow the subset of C++14 (language and library) that is not banned by
+ Chromium; see the [list of banned C++ features in Chromium][chromium-cpp].
+* We only allow the subset of C++14 that is also valid C++17; otherwise, users
+ would not be able to compile WebRTC in C++17 mode.
+
+[chromium-cpp]: https://chromium-cpp.appspot.com/
+
+Unlike the Chromium and Google C++ style guides, we do not allow C++20-style
+designated initializers, because we want to stay compatible with compilers that
+do not yet support them.
+
+### Abseil
+
+You may use a subset of the utilities provided by the [Abseil][abseil] library
+when writing WebRTC C++ code; see the
+[instructions on how to use Abseil in WebRTC](abseil-in-webrtc.md).
+
+[abseil]: https://abseil.io/about/
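+
+For instance, a helper using two commonly-allowed Abseil types might look
+like this sketch (the function itself is made up; check the instructions
+above for the authoritative list of allowed utilities):
+
+```cpp
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+
+// Returns the parsed port, or nullopt if `s` is not a valid port number.
+absl::optional<int> ParsePort(absl::string_view s) {
+  if (s.empty() || s.size() > 5) {
+    return absl::nullopt;
+  }
+  int port = 0;
+  for (char c : s) {
+    if (c < '0' || c > '9') {
+      return absl::nullopt;
+    }
+    port = port * 10 + (c - '0');
+  }
+  if (port > 65535) {
+    return absl::nullopt;
+  }
+  return port;
+}
+```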
+
+### <a name="h-cc-pairs"></a>`.h` and `.cc` files come in pairs
+
+`.h` and `.cc` files should come in pairs, with the same name (except for the
+file type suffix), in the same directory, in the same build target.
+
+* If a declaration in `path/to/foo.h` has a definition in some `.cc` file, it
+ should be in `path/to/foo.cc`.
+* If a definition in `path/to/foo.cc` has a declaration in some `.h` file,
+ it should be in `path/to/foo.h`.
+* Omit the `.cc` file if it would have been empty, but still list the `.h` file
+ in a build target.
+* Omit the `.h` file if it would have been empty. (This can happen with unit
+ test `.cc` files, and with `.cc` files that define `main`.)
+
+See also the
+[examples and exceptions on how to treat `.h` and `.cc` files](style-guide/h-cc-pairs.md).
+
+This makes the source code easier to navigate and organize, and precludes some
+questionable build system practices such as having build targets that don't pull
+in definitions for everything they declare.
+
+### `TODO` comments
+
+Follow the [Google styleguide for `TODO` comments][goog-style-todo]. When
+referencing a WebRTC bug, prefer the url form, e.g.
+
+```cpp
+// TODO(bugs.webrtc.org/12345): Delete the hack when blocking bugs are resolved.
+```
+
+[goog-style-todo]: https://google.github.io/styleguide/cppguide.html#TODO_Comments
+
+### Deprecation
+
+Annotate the declarations of deprecated functions and classes with the
+[`ABSL_DEPRECATED` macro][ABSL_DEPRECATED] to cause an error when they're used
+inside WebRTC and a compiler warning when they're used by dependent projects.
+Like so:
+
+```cpp
+ABSL_DEPRECATED("bugs.webrtc.org/12345")
+std::pony PonyPlz(const std::pony_spec& ps);
+```
+
+NOTE 1: The annotation goes on the declaration in the `.h` file, not the
+definition in the `.cc` file!
+
+NOTE 2: In order to have unit tests that use the deprecated function without
+getting errors, do something like this:
+
+```cpp
+std::pony DEPRECATED_PonyPlz(const std::pony_spec& ps);
+ABSL_DEPRECATED("bugs.webrtc.org/12345")
+inline std::pony PonyPlz(const std::pony_spec& ps) {
+ return DEPRECATED_PonyPlz(ps);
+}
+```
+
+In other words, rename the existing function, and provide an inline wrapper
+using the original name that calls it. That way, callers who are willing to
+call it using the `DEPRECATED_`-prefixed name don't get the warning.
+
+[ABSL_DEPRECATED]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/abseil-cpp/absl/base/attributes.h?q=ABSL_DEPRECATED
+
+### ArrayView
+
+When passing an array of values to a function, use `rtc::ArrayView`
+whenever possible—that is, whenever you're not passing ownership of
+the array, and don't allow the callee to change the array size.
+
+For example,
+
+| instead of | use |
+|-------------------------------------|----------------------|
+| `const std::vector<T>&` | `ArrayView<const T>` |
+| `const T* ptr, size_t num_elements` | `ArrayView<const T>` |
+| `T* ptr, size_t num_elements` | `ArrayView<T>` |
+
+See the [source code for `rtc::ArrayView`](api/array_view.h) for more detailed
+docs.
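+
+As a small sketch (the `Sum` function is made up), a callee that only reads
+the elements can be written once and called with vectors, plain arrays, or
+explicit pointer/size pairs:
+
+```cpp
+#include "api/array_view.h"
+
+int Sum(rtc::ArrayView<const int> values) {
+  int sum = 0;
+  for (int v : values) {
+    sum += v;
+  }
+  return sum;
+}
+
+// std::vector<int> v = {1, 2, 3};
+// int all = Sum(v);  // From a std::vector.
+// int first_two = Sum(rtc::ArrayView<const int>(v.data(), 2));
+```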
+
+### sigslot
+
+SIGSLOT IS DEPRECATED.
+
+Prefer `webrtc::CallbackList`, and manage thread safety yourself.
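+
+A minimal sketch of the replacement pattern, assuming the
+`AddReceiver`/`Send` interface of `rtc_base/callback_list.h` (the
+`BitrateSource` class is made up, and thread safety is left to the caller):
+
+```cpp
+#include <functional>
+#include <utility>
+
+#include "rtc_base/callback_list.h"
+
+class BitrateSource {
+ public:
+  // Receivers are invoked synchronously from SetBitrate, on its thread.
+  void AddBitrateObserver(std::function<void(int)> callback) {
+    callbacks_.AddReceiver(std::move(callback));
+  }
+
+  void SetBitrate(int bitrate_bps) { callbacks_.Send(bitrate_bps); }
+
+ private:
+  webrtc::CallbackList<int> callbacks_;
+};
+```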
+
+### Smart pointers
+
+The following smart pointer types are recommended:
+
+ * `std::unique_ptr` for all singly-owned objects
+ * `rtc::scoped_refptr` for all objects with shared ownership
+
+Use of `std::shared_ptr` is *not permitted*. It is banned in the Chromium style
+guide (overriding the Google style guide), and offers no compelling advantage
+over `rtc::scoped_refptr` (which is cloned from the corresponding Chromium
+type). See the
+[list of banned C++ library features in Chromium][chr-std-shared-ptr] for more
+information.
+
+In most cases, one will want to explicitly control lifetimes, and therefore
+use `std::unique_ptr`. In some cases, for instance where references have to
+exist both from the API users and internally, with no way to invalidate
+pointers held by the API user, `rtc::scoped_refptr` can be appropriate.
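+
+As a sketch of the two recommended types (the `Decoder` and `SharedBuffer`
+classes are made up; `rtc::RefCountedObject` is one common way to instantiate
+a ref-counted implementation):
+
+```cpp
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/ref_counted_object.h"
+
+class Decoder {};  // Singly owned.
+class SharedBuffer : public rtc::RefCountInterface {};  // Shared ownership.
+
+void Example() {
+  std::unique_ptr<Decoder> decoder = std::make_unique<Decoder>();
+  rtc::scoped_refptr<SharedBuffer> buffer(
+      new rtc::RefCountedObject<SharedBuffer>());
+}
+```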
+
+[chr-std-shared-ptr]: https://chromium-cpp.appspot.com/#library-blocklist
+
+### `std::bind`
+
+Don't use `std::bind`—there are pitfalls, and lambdas are almost as succinct and
+already familiar to modern C++ programmers.
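+
+For instance, for a made-up `Widget::OnClick` callback, prefer the lambda
+form:
+
+```cpp
+#include <functional>
+
+class Widget {
+ public:
+  void OnClick(int button) {}
+
+  std::function<void(int)> MakeClickHandler() {
+    // Instead of std::bind(&Widget::OnClick, this, std::placeholders::_1):
+    return [this](int button) { OnClick(button); };
+  }
+};
+```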
+
+### `std::function`
+
+`std::function` is allowed, but remember that it's not the right tool for every
+occasion. Prefer to use interfaces when that makes sense, and consider
+`rtc::FunctionView` for cases where the callee will not save the function
+object.
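+
+A sketch of the `rtc::FunctionView` case (the `TransformedSum` function is
+made up; the callee only calls `transform` and never stores it):
+
+```cpp
+#include "api/array_view.h"
+#include "api/function_view.h"
+
+int TransformedSum(rtc::ArrayView<const int> values,
+                   rtc::FunctionView<int(int)> transform) {
+  int sum = 0;
+  for (int v : values) {
+    sum += transform(v);
+  }
+  return sum;
+}
+
+// int doubled_sum = TransformedSum(values, [](int v) { return 2 * v; });
+```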
+
+### Forward declarations
+
+WebRTC follows the
+[Google C++ style guide on forward declarations][goog-forward-declarations].
+In summary: avoid using forward declarations where possible; just `#include` the
+headers you need.
+
+[goog-forward-declarations]: https://google.github.io/styleguide/cppguide.html#Forward_Declarations
+
+## C
+
+There's a substantial chunk of legacy C code in WebRTC, and a lot of it is old
+enough that it violates the parts of the C++ style guide that also apply to C
+(naming etc.), for the simple reason that it pre-dates the use of the current
+C++ style guide for this code base.
+
+* If making small changes to C code, mimic the style of the surrounding code.
+* If making large changes to C code, consider converting the whole thing to C++
+ first.
+
+## Java
+
+WebRTC follows the [Google Java style guide][goog-java-style].
+
+[goog-java-style]: https://google.github.io/styleguide/javaguide.html
+
+## Objective-C and Objective-C++
+
+WebRTC follows the
+[Chromium Objective-C and Objective-C++ style guide][chr-objc-style].
+
+[chr-objc-style]: https://chromium.googlesource.com/chromium/src/+/HEAD/styleguide/objective-c/objective-c.md
+
+## Python
+
+WebRTC follows [Chromium's Python style][chr-py-style].
+
+[chr-py-style]: https://chromium.googlesource.com/chromium/src/+/HEAD/styleguide/python/python.md
+
+## Build files
+
+The WebRTC build files are written in [GN][gn], and we follow the
+[GN style guide][gn-style]. Additionally, there are some
+WebRTC-specific rules below; in case of conflict, they trump the GN style
+guide.
+
+[gn]: https://gn.googlesource.com/gn/
+[gn-style]: https://gn.googlesource.com/gn/+/HEAD/docs/style_guide.md
+
+### <a name="webrtc-gn-templates"></a>WebRTC-specific GN templates
+
+Use the following [GN templates][gn-templ] to ensure that all our
+[GN targets][gn-target] are built with the same configuration:
+
+| instead of | use |
+|------------------|----------------------|
+| `executable` | `rtc_executable` |
+| `shared_library` | `rtc_shared_library` |
+| `source_set` | `rtc_source_set` |
+| `static_library` | `rtc_static_library` |
+| `test` | `rtc_test` |
+
+
+[gn-templ]: https://gn.googlesource.com/gn/+/HEAD/docs/language.md#Templates
+[gn-target]: https://gn.googlesource.com/gn/+/HEAD/docs/language.md#Targets
+
+### Target visibility and the native API
+
+The [WebRTC-specific GN templates](#webrtc-gn-templates) declare build targets
+whose default `visibility` allows all other targets in the WebRTC tree (and no
+targets outside the tree) to depend on them.
+
+Prefer to restrict the `visibility` if possible:
+
+* If a target is used by only one or a tiny number of other targets, prefer to
+ list them explicitly: `visibility = [ ":foo", ":bar" ]`
+* If a target is used only by targets in the same `BUILD.gn` file:
+ `visibility = [ ":*" ]`.
+
+Setting `visibility = [ "*" ]` means that targets outside the WebRTC tree can
+depend on this target; use this only for build targets whose headers are part of
+the [native WebRTC API](native-api.md).
+
+### Conditional compilation with the C preprocessor
+
+Avoid using the C preprocessor to conditionally enable or disable pieces of
+code. But if you can't avoid it, introduce a GN variable, and then set a
+preprocessor constant to either 0 or 1 in the build targets that need it:
+
+```gn
+if (apm_debug_dump) {
+ defines = [ "WEBRTC_APM_DEBUG_DUMP=1" ]
+} else {
+ defines = [ "WEBRTC_APM_DEBUG_DUMP=0" ]
+}
+```
+
+In the C, C++, or Objective-C files, use `#if` when testing the flag,
+not `#ifdef` or `#if defined()`:
+
+```c
+#if WEBRTC_APM_DEBUG_DUMP
+// One way.
+#else
+// Or another.
+#endif
+```
+
+When combined with the `-Wundef` compiler option, this produces compile time
+warnings if preprocessor symbols are misspelled, or used without corresponding
+build rules to set them.
diff --git a/style-guide/h-cc-pairs.md b/g3doc/style-guide/h-cc-pairs.md
index 1a24e49d09..bb85871260 100644
--- a/style-guide/h-cc-pairs.md
+++ b/g3doc/style-guide/h-cc-pairs.md
@@ -1,5 +1,8 @@
# `.h` and `.cc` files come in pairs
+<?% config.freshness.owner = 'danilchap' %?>
+<?% config.freshness.reviewed = '2021-05-12' %?>
+
This is an overflow page for [this](../style-guide.md#h-cc-pairs)
style rule.
diff --git a/import-webrtc.py b/import-webrtc.py
index 9c7dde732b..e3516af3e2 100644
--- a/import-webrtc.py
+++ b/import-webrtc.py
@@ -46,7 +46,15 @@ PLATFORMS = {
"Darwin": ["apple", "posix", "mac", "_all", "intel_cpu", ":cpu_x86_64"],
"Darwin-aarch64": ["apple", "posix", "mac", "_all", "arm64", ":cpu_arm64"],
"Linux-aarch64": ["linux", "posix", "linux_kernel", "arm64", "_all", ":cpu_arm64"],
- "Linux": ["linux", "posix", "linux_kernel", "_all", "intel_cpu", ":cpu_x86_64"],
+ "Linux": [
+ "linux_x64",
+ "linux",
+ "posix",
+ "linux_kernel",
+ "_all",
+ "intel_cpu",
+ ":cpu_x86_64",
+ ],
"Windows": ["windows", "windows_x64", "intel_cpu", "_all", ":cpu_x86_64"],
}
@@ -67,8 +75,8 @@ PLATFORM_NAME = {
DEP_MAP = {
":AppRTCMobile_lib": "unknown",
"//testing/base/public:gunit_for_library_testonly": "gmock gtest",
- "//third_party/absl/algorithm:container": "absl::algorithm_container",
"//third_party/absl/algorithm": "absl::algorithm",
+ "//third_party/absl/algorithm:container": "absl::algorithm_container",
"//third_party/absl/base:config": "absl::config",
"//third_party/absl/base:core_headers": "absl::core_headers",
"//third_party/absl/container:flat_hash_map": "absl::flat_hash_map",
@@ -79,18 +87,22 @@ DEP_MAP = {
"//third_party/absl/flags:flag": "absl::flags",
"//third_party/absl/flags:parse": "absl::flags_parse",
"//third_party/absl/flags:usage": "absl::flags_usage",
+ "//third_party/absl/functional:bind_front": "absl::bind_front",
"//third_party/absl/memory": "absl::memory",
"//third_party/absl/meta:type_traits": "absl::type_traits",
"//third_party/absl/strings": "absl::strings",
"//third_party/absl/synchronization": "absl::synchronization",
"//third_party/absl/types:optional": "absl::optional",
"//third_party/absl/types:variant": "absl::variant",
+ "//third_party/catapult/tracing/tracing/proto:histogram_proto_bridge": "webrtc_histogram_proto_bridge",
"//third_party/catapult/tracing:histogram": "webrtc_catapult_histogram",
"//third_party/catapult/tracing:reserved_infos": "webrtc_catapult_reserved_infos",
- "//third_party/catapult/tracing/tracing/proto:histogram_proto_bridge": "webrtc_histogram_proto_bridge",
+ "//third_party/crc32c": "crc32c",
"//third_party/isac_fft:fft": "webrtc_fft", # "unknown_fft",
"//third_party/jsoncpp:json": "jsoncpp",
+ "//third_party/libaom:aom_highbd": "webrtc_libaom", #
"//third_party/libevent": "libevent",
+ "//third_party/libjpeg_turbo/src:jpeg": "emulator-libjpeg",
"//third_party/libopus": "opus",
"//third_party/libsrtp:srtp": "libsrtp",
"//third_party/libyuv": "webrtc-yuv",
@@ -100,15 +112,14 @@ DEP_MAP = {
"//third_party/openssl:ssl": "ssl",
"//third_party/pffft": "webrtc_pffft",
"//third_party/protobuf:protobuf-lite_legacy": "libprotobuf",
+ "//net/proto2/public:proto2_lite": "libprotobuf",
"//third_party/protobuf:py_proto_runtime": "unknown_py_proto_runtime",
"//third_party/rnnoise:rnn_vad": "webrtc_rnnoise",
"//third_party/spl_sqrt_floor": "webrtc_spl_sqrt_floor",
"//third_party/usrsctp": "usrsctp",
- "//third_party/webrtc:webrtc_libvpx": "libvpx",
"//third_party/webrtc/files/override/webrtc/rtc_base:protobuf_utils": "libprotobuf",
"//third_party/webrtc/files/testing:fileutils_override_google3": "emulator_test_overrides",
- "//third_party/libjpeg_turbo/src:jpeg": "emulator-libjpeg",
- "//third_party/libaom:aom_highbd": "webrtc_libaom", #
+ "//third_party/webrtc:webrtc_libvpx": "libvpx",
# We rely on Qt5 to provide all these..
"//third_party/GL:GLX_headers": "Qt5::Core",
"//third_party/Xorg:Xorg_static": "Qt5::Core",
@@ -226,6 +237,7 @@ class Target(object):
def _add_deps(self, name, deps, sort="PUBLIC"):
common = [bazel_target_to_cmake_target(x) for x in deps]
+
return "target_link_libraries({} {} {} {})\n".format(
name, sort, " ".join(common), " ".join(self.lopts)
)
@@ -310,8 +322,10 @@ class ProtobufTarget(object):
def __init__(self, path, name, srcs, deps):
self.path = path
self.label = name
- self.internal_name = "{}{}_{}".format(WEBRTC_PREFIX, path, name).replace("/", "_")
- self.name = self.internal_name + "_bridge"
+ self.internal_name = "{}{}_{}".format(WEBRTC_PREFIX, path, name).replace(
+ "/", "_"
+ )
+ self.name = self.internal_name
self.srcs = srcs
self.deps = deps
@@ -341,7 +355,9 @@ class ProtobufTarget(object):
snippet += "target_include_directories({} PUBLIC ${{CMAKE_CURRENT_BINARY_DIR}}/{})\n".format(
self.name, dest_path
)
- snippet += "add_library({}_lib ALIAS {})\n".format(self.internal_name, self.name)
+ snippet += "add_library({}_lib ALIAS {})\n".format(
+ self.internal_name, self.name
+ )
snippet += "target_link_libraries({} PUBLIC libprotobuf)\n".format(self.name)
if self.deps:
common = [bazel_target_to_cmake_target(x) for x in self.deps]
@@ -417,6 +433,21 @@ class BuildFileFunctions(object):
self.platforms = platforms
def _add_target(self, typ, name, srcs, hdrs, defs, deps, copts, lopts, includes):
+
+ # HACK ATTACK b/191745658, the neon build has a dependency at the wrong
+ # level, so we correct it here, this should be removed once the fix is
+ # in.
+ if name == "common_audio_c" and "arm64" in self.platforms:
+ logging.error("Fixing up common_audio_c dependency b/191745658!")
+ if (
+ "//third_party/webrtc/files/stable/webrtc/common_audio:common_audio_neon"
+ in deps
+ ):
+ logging.fatal("The workaround is no longer needed! Please remove it!")
+ deps.append(
+ "//third_party/webrtc/files/stable/webrtc/common_audio:common_audio_neon"
+ )
+
self.targets.add(
Target(self.rel, name, typ, srcs, hdrs, defs, deps, copts, includes, lopts)
)
diff --git a/import_all.sh b/import_all.sh
index 0c06bdc83f..1074d9f603 100755
--- a/import_all.sh
+++ b/import_all.sh
@@ -7,7 +7,7 @@
# Next we rsync the dependencies. We expect you to have a matching gclient
function generate_from_build {
- for platform in Darwin Darwin-aarch64 Linux-aarch64 Linux Windows; do
+ for platform in Darwin Darwin-aarch64 Linux Windows; do
echo "Generating cmake for ${platform}"
python ./import-webrtc.py \
--target webrtc_api_video_codecs_builtin_video_decoder_factory \
@@ -32,11 +32,28 @@ function generate_from_build {
# --target webrtc__webrtc_perf_tests \
# --target webrtc__webrtc_nonparallel_tests \
# --target webrtc__voip_unittests \
- done
- }
+ done
+
+ # ARM64 is using gcc; not all the tests compile.
+ for platform in Linux-aarch64; do
+ echo "Generating cmake for ${platform}"
+ python ./import-webrtc.py \
+ --target webrtc_api_video_codecs_builtin_video_decoder_factory \
+ --target webrtc_api_video_codecs_builtin_video_encoder_factory \
+ --target webrtc_api_libjingle_peerconnection_api \
+ --target webrtc_pc_peerconnection \
+ --target webrtc_api_create_peerconnection_factory \
+ --target webrtc_api_audio_codecs_builtin_audio_decoder_factory \
+ --target webrtc_api_audio_codecs_builtin_audio_encoder_factory \
+ --target webrtc_common_audio_common_audio_unittests \
+ --target webrtc_modules_audio_coding_audio_decoder_unittests \
+ --root $1 \
+ --platform $platform BUILD .
+ done
+}
function sync_deps {
- for dep in libaom libvpx jsoncpp pffft opus rnnoise usrsctp libsrtp libyuv
+ for dep in libaom libvpx jsoncpp pffft opus rnnoise usrsctp libsrtp libyuv crc32c
do
rsync -rav \
--exclude 'third_party' \
diff --git a/linux_aarch64.cmake b/linux_aarch64.cmake
index 29e0621ae2..f563ccfd89 100644
--- a/linux_aarch64.cmake
+++ b/linux_aarch64.cmake
@@ -1,16 +1,16 @@
-# Generated on 04/21/21 for target: Linux-aarch64
+# Generated on 06/23/21 for target: Linux-aarch64
# This is an autogenerated file by calling:
-# ./import-webrtc.py --target webrtc_api_video_codecs_builtin_video_decoder_factory --target webrtc_api_video_codecs_builtin_video_encoder_factory --target webrtc_api_libjingle_peerconnection_api --target webrtc_pc_peerconnection --target webrtc_api_create_peerconnection_factory --target webrtc_api_audio_codecs_builtin_audio_decoder_factory --target webrtc_api_audio_codecs_builtin_audio_encoder_factory --target webrtc_common_audio_common_audio_unittests --target webrtc_common_video_common_video_unittests --target webrtc_media_rtc_media_unittests --target webrtc_modules_audio_coding_audio_decoder_unittests --target webrtc_pc_peerconnection_unittests --target webrtc_pc_rtc_pc_unittests --root /usr/local/google/home/jansene/src/webrtc_g3/ --platform Linux-aarch64 BUILD .
+# ./import-webrtc.py --target webrtc_api_video_codecs_builtin_video_decoder_factory --target webrtc_api_video_codecs_builtin_video_encoder_factory --target webrtc_api_libjingle_peerconnection_api --target webrtc_pc_peerconnection --target webrtc_api_create_peerconnection_factory --target webrtc_api_audio_codecs_builtin_audio_decoder_factory --target webrtc_api_audio_codecs_builtin_audio_encoder_factory --target webrtc_common_audio_common_audio_unittests --target webrtc_modules_audio_coding_audio_decoder_unittests --root /usr/local/google/home/jansene/src/webrtc_g3/ --platform Linux-aarch64 BUILD .
# Re-running this script will require you to merge in the latest upstream-master for webrtc
# Expecting jsoncpp at 9059f5cad030ba11d37818847443a53918c327b1
-# Expecting libaom at 6c93db7ff63430d2e5dcdfc42e84e3a0514da608
-# Expecting libsrtp2 at 7990ca64c616b150a9cb4714601c4a3b0c84fe91
+# Expecting libaom at 12287adee94fc3b1f5349d3f4bd85cea4e57f62b
+# Expecting libsrtp2 at 5b7c744eb8310250ccc534f3f86a2015b3887a0a
# Expecting libvpx at 61edec1efbea1c02d71857e2aff9426d9cd2df4e
-# Expecting libyuv at 64994843e652443df2d5201c6ae3fb725097360f
-# Expecting usrsctp at 70d42ae95a1de83bd317c8cc9503f894671d1392
+# Expecting libyuv at 49ebc996aa8c4bdf89c1b5ea461eb677234c61cc
+# Expecting usrsctp at 22ba62ffe79c3881581ab430368bf3764d9533eb
@@ -203,13 +203,6 @@ target_compile_definitions(webrtc_api_audio_codecs_isac_audio_encoder_isac_float
target_compile_options(webrtc_api_audio_codecs_isac_audio_encoder_isac_float PRIVATE -fno-exceptions)
target_link_libraries(webrtc_api_audio_codecs_isac_audio_encoder_isac_float PUBLIC absl::strings absl::optional webrtc_api_audio_codecs_audio_codecs_api webrtc_modules_audio_coding_isac webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
-# api/audio_codecs:opus_audio_decoder_factory
-add_library(webrtc_api_audio_codecs_opus_audio_decoder_factory ${WEBRTC_ROOT}/api/audio_codecs/opus_audio_decoder_factory.cc)
-target_include_directories(webrtc_api_audio_codecs_opus_audio_decoder_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_audio_codecs_opus_audio_decoder_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_audio_codecs_opus_audio_decoder_factory PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_audio_codecs_opus_audio_decoder_factory PUBLIC webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_decoder_multiopus webrtc_api_audio_codecs_opus_audio_decoder_opus webrtc_rtc_base_rtc_base_approved )
-
# api/audio_codecs/opus:audio_decoder_multiopus
add_library(webrtc_api_audio_codecs_opus_audio_decoder_multiopus ${WEBRTC_ROOT}/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc)
target_include_directories(webrtc_api_audio_codecs_opus_audio_decoder_multiopus PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -228,13 +221,6 @@ target_link_libraries(webrtc_api_audio_codecs_opus_audio_decoder_opus PUBLIC abs
add_library(webrtc_api_audio_codecs_opus_audio_decoder_opus_config INTERFACE)
target_include_directories(webrtc_api_audio_codecs_opus_audio_decoder_opus_config INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api/audio_codecs:opus_audio_encoder_factory
-add_library(webrtc_api_audio_codecs_opus_audio_encoder_factory ${WEBRTC_ROOT}/api/audio_codecs/opus_audio_encoder_factory.cc)
-target_include_directories(webrtc_api_audio_codecs_opus_audio_encoder_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_audio_codecs_opus_audio_encoder_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_audio_codecs_opus_audio_encoder_factory PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_audio_codecs_opus_audio_encoder_factory PUBLIC webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_multiopus webrtc_api_audio_codecs_opus_audio_encoder_opus webrtc_rtc_base_rtc_base_approved )
-
# api/audio_codecs/opus:audio_encoder_multiopus
add_library(webrtc_api_audio_codecs_opus_audio_encoder_multiopus ${WEBRTC_ROOT}/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc)
target_include_directories(webrtc_api_audio_codecs_opus_audio_encoder_multiopus PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -294,21 +280,14 @@ target_include_directories(webrtc_api_call_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_
# api:callfactory_api
add_library(webrtc_api_callfactory_api INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_call_rtp_interfaces webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_callfactory_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:callfactory_api.headers
add_library(webrtc_api_callfactory_api.headers INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_call_rtp_interfaces.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_callfactory_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api:create_frame_generator
-add_library(webrtc_api_create_frame_generator ${WEBRTC_ROOT}/api/test/create_frame_generator.cc)
-target_include_directories(webrtc_api_create_frame_generator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_create_frame_generator PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_create_frame_generator PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_create_frame_generator PUBLIC absl::optional webrtc_api_frame_generator_api webrtc_rtc_base_checks webrtc_system_wrappers_system_wrappers webrtc_test_frame_generator_impl )
-
# api:create_peerconnection_factory
add_library(webrtc_api_create_peerconnection_factory ${WEBRTC_ROOT}/api/create_peerconnection_factory.cc)
target_include_directories(webrtc_api_create_peerconnection_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -316,20 +295,6 @@ target_compile_definitions(webrtc_api_create_peerconnection_factory PRIVATE RTC_
target_compile_options(webrtc_api_create_peerconnection_factory PRIVATE -fno-exceptions)
target_link_libraries(webrtc_api_create_peerconnection_factory PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_video_codecs_api webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_pc_peerconnection webrtc_rtc_base_rtc_base webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
-# api:create_simulcast_test_fixture_api
-add_library(webrtc_api_create_simulcast_test_fixture_api ${WEBRTC_ROOT}/api/test/create_simulcast_test_fixture.cc)
-target_include_directories(webrtc_api_create_simulcast_test_fixture_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_create_simulcast_test_fixture_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_create_simulcast_test_fixture_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_create_simulcast_test_fixture_api PUBLIC webrtc_api_simulcast_test_fixture_api webrtc_api_video_codecs_video_codecs_api webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_rtc_base_rtc_base_approved )
-
-# api:create_time_controller
-add_library(webrtc_api_create_time_controller ${WEBRTC_ROOT}/api/test/create_time_controller.cc)
-target_include_directories(webrtc_api_create_time_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_create_time_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_create_time_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_test_time_controller_time_controller )
-
# api/crypto:frame_decryptor_interface
add_library(webrtc_api_crypto_frame_decryptor_interface INTERFACE)
target_link_libraries(webrtc_api_crypto_frame_decryptor_interface INTERFACE webrtc_api_array_view webrtc_api_rtp_parameters webrtc_rtc_base_refcount )
@@ -362,20 +327,6 @@ add_library(webrtc_api_crypto_options.headers INTERFACE)
target_link_libraries(webrtc_api_crypto_options.headers INTERFACE webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_crypto_options.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api:fake_frame_decryptor
-add_library(webrtc_api_fake_frame_decryptor ${WEBRTC_ROOT}/api/test/fake_frame_decryptor.cc)
-target_include_directories(webrtc_api_fake_frame_decryptor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_fake_frame_decryptor PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_fake_frame_decryptor PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_fake_frame_decryptor PUBLIC webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_crypto_frame_decryptor_interface webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
-
-# api:fake_frame_encryptor
-add_library(webrtc_api_fake_frame_encryptor ${WEBRTC_ROOT}/api/test/fake_frame_encryptor.cc)
-target_include_directories(webrtc_api_fake_frame_encryptor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_fake_frame_encryptor PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_fake_frame_encryptor PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_fake_frame_encryptor PUBLIC webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_crypto_frame_encryptor_interface webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
-
# api:fec_controller_api
add_library(webrtc_api_fec_controller_api INTERFACE)
target_link_libraries(webrtc_api_fec_controller_api INTERFACE webrtc_api_video_video_frame_type webrtc_modules_module_fec_api )
@@ -386,13 +337,6 @@ add_library(webrtc_api_fec_controller_api.headers INTERFACE)
target_link_libraries(webrtc_api_fec_controller_api.headers INTERFACE webrtc_api_video_video_frame_type.headers webrtc_modules_module_fec_api.headers )
target_include_directories(webrtc_api_fec_controller_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api:frame_generator_api
-add_library(webrtc_api_frame_generator_api ${WEBRTC_ROOT}/api/test/frame_generator_interface.cc)
-target_include_directories(webrtc_api_frame_generator_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_frame_generator_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_frame_generator_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_frame_generator_api PUBLIC absl::optional webrtc_api_scoped_refptr webrtc_api_video_video_frame )
-
# api:frame_transformer_interface
add_library(webrtc_api_frame_transformer_interface INTERFACE)
target_link_libraries(webrtc_api_frame_transformer_interface INTERFACE webrtc_api_scoped_refptr webrtc_api_video_encoded_frame webrtc_api_video_video_frame_metadata webrtc_rtc_base_refcount )
@@ -429,15 +373,15 @@ add_library(webrtc_api_libjingle_logging_api.headers INTERFACE)
target_include_directories(webrtc_api_libjingle_logging_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:libjingle_peerconnection_api
-add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/proxy.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
+add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
target_include_directories(webrtc_api_libjingle_peerconnection_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_libjingle_peerconnection_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_libjingle_peerconnection_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_call_rtp_interfaces webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# api:libjingle_peerconnection_api.headers
add_library(webrtc_api_libjingle_peerconnection_api.headers INTERFACE)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_call_rtp_interfaces.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_libjingle_peerconnection_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:media_stream_interface
@@ -452,36 +396,6 @@ add_library(webrtc_api_media_stream_interface.headers INTERFACE)
target_link_libraries(webrtc_api_media_stream_interface.headers INTERFACE webrtc_api_audio_options_api.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_refcount.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_media_stream_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api:mock_rtp
-add_library(webrtc_api_mock_rtp INTERFACE)
-target_link_libraries(webrtc_api_mock_rtp INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_test_test_support )
-target_include_directories(webrtc_api_mock_rtp INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# api:mock_video_bitrate_allocator
-add_library(webrtc_api_mock_video_bitrate_allocator INTERFACE)
-target_link_libraries(webrtc_api_mock_video_bitrate_allocator INTERFACE webrtc_api_video_video_bitrate_allocator webrtc_test_test_support )
-target_include_directories(webrtc_api_mock_video_bitrate_allocator INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# api:mock_video_bitrate_allocator_factory
-add_library(webrtc_api_mock_video_bitrate_allocator_factory INTERFACE)
-target_link_libraries(webrtc_api_mock_video_bitrate_allocator_factory INTERFACE webrtc_api_video_video_bitrate_allocator_factory webrtc_test_test_support )
-target_include_directories(webrtc_api_mock_video_bitrate_allocator_factory INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# api:mock_video_codec_factory
-add_library(webrtc_api_mock_video_codec_factory INTERFACE)
-target_link_libraries(webrtc_api_mock_video_codec_factory INTERFACE webrtc_api_video_codecs_video_codecs_api webrtc_test_test_support )
-target_include_directories(webrtc_api_mock_video_codec_factory INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# api:mock_video_decoder
-add_library(webrtc_api_mock_video_decoder INTERFACE)
-target_link_libraries(webrtc_api_mock_video_decoder INTERFACE webrtc_api_video_codecs_video_codecs_api webrtc_test_test_support )
-target_include_directories(webrtc_api_mock_video_decoder INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# api:mock_video_encoder
-add_library(webrtc_api_mock_video_encoder INTERFACE)
-target_link_libraries(webrtc_api_mock_video_encoder INTERFACE webrtc_api_video_codecs_video_codecs_api webrtc_test_test_support )
-target_include_directories(webrtc_api_mock_video_encoder INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
# api/neteq:custom_neteq_factory
add_library(webrtc_api_neteq_custom_neteq_factory ${WEBRTC_ROOT}/api/neteq/custom_neteq_factory.cc)
target_include_directories(webrtc_api_neteq_custom_neteq_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -526,13 +440,6 @@ target_compile_definitions(webrtc_api_neteq_tick_timer PRIVATE RTC_ENABLE_VP9 WE
target_compile_options(webrtc_api_neteq_tick_timer PRIVATE -fno-exceptions)
target_link_libraries(webrtc_api_neteq_tick_timer PUBLIC webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
-# api:network_emulation_manager_api
-add_library(webrtc_api_network_emulation_manager_api ${WEBRTC_ROOT}/api/test/network_emulation_manager.cc)
-target_include_directories(webrtc_api_network_emulation_manager_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_network_emulation_manager_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_network_emulation_manager_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_network_emulation_manager_api PUBLIC webrtc_api_array_view webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_test_network_emulation_network_emulation webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_rtc_base_rtc_base webrtc_rtc_base_network_constants webrtc_rtc_base_threading )
-
# api:network_state_predictor_api
add_library(webrtc_api_network_state_predictor_api INTERFACE)
target_include_directories(webrtc_api_network_state_predictor_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -573,12 +480,12 @@ target_include_directories(webrtc_api_priority.headers INTERFACE ${WEBRTC_ROOT}
# api:refcountedbase
add_library(webrtc_api_refcountedbase INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_macromagic webrtc_rtc_base_refcount )
target_include_directories(webrtc_api_refcountedbase INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:refcountedbase.headers
add_library(webrtc_api_refcountedbase.headers INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_macromagic.headers webrtc_rtc_base_refcount.headers )
target_include_directories(webrtc_api_refcountedbase.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_error
@@ -621,12 +528,12 @@ target_link_libraries(webrtc_api_rtc_event_log_rtc_event_log_factory PUBLIC webr
# api:rtc_stats_api
add_library(webrtc_api_rtc_stats_api INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_rtc_stats_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_stats_api.headers
add_library(webrtc_api_rtc_stats_api.headers INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtc_stats_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_headers
@@ -646,11 +553,11 @@ add_library(webrtc_api_rtp_packet_info ${WEBRTC_ROOT}/api/rtp_packet_info.cc)
target_include_directories(webrtc_api_rtp_packet_info PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_rtp_packet_info PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_rtp_packet_info PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api:rtp_packet_info.headers
add_library(webrtc_api_rtp_packet_info.headers INTERFACE)
-target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtp_packet_info.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_parameters
@@ -696,10 +603,6 @@ add_library(webrtc_api_simulated_network_api INTERFACE)
target_link_libraries(webrtc_api_simulated_network_api INTERFACE absl::optional webrtc_rtc_base_rtc_base )
target_include_directories(webrtc_api_simulated_network_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api:simulcast_test_fixture_api
-add_library(webrtc_api_simulcast_test_fixture_api INTERFACE)
-target_include_directories(webrtc_api_simulcast_test_fixture_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
# api/task_queue:default_task_queue_factory
add_library(webrtc_api_task_queue_default_task_queue_factory ${WEBRTC_ROOT}/api/task_queue/default_task_queue_factory_libevent.cc)
target_include_directories(webrtc_api_task_queue_default_task_queue_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -719,25 +622,6 @@ add_library(webrtc_api_task_queue_task_queue.headers INTERFACE)
target_link_libraries(webrtc_api_task_queue_task_queue.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_task_queue_task_queue.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api/test/network_emulation
-add_library(webrtc_api_test_network_emulation_network_emulation ${WEBRTC_ROOT}/api/test/network_emulation/network_emulation_interfaces.cc)
-target_include_directories(webrtc_api_test_network_emulation_network_emulation PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_test_network_emulation_network_emulation PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_test_network_emulation_network_emulation PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_test_network_emulation_network_emulation PUBLIC absl::optional webrtc_api_array_view webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address )
-
-# api/test/video:function_video_factory
-add_library(webrtc_api_test_video_function_video_factory INTERFACE)
-target_link_libraries(webrtc_api_test_video_function_video_factory INTERFACE webrtc_api_video_codecs_video_codecs_api webrtc_rtc_base_checks )
-target_include_directories(webrtc_api_test_video_function_video_factory INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# api:time_controller
-add_library(webrtc_api_time_controller ${WEBRTC_ROOT}/api/test/time_controller.cc)
-target_include_directories(webrtc_api_time_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_api_time_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_api_time_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_time_controller PUBLIC webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_threading webrtc_rtc_base_synchronization_yield_policy webrtc_system_wrappers_system_wrappers )
-
# api:transport_api
add_library(webrtc_api_transport_api ${WEBRTC_ROOT}/api/call/transport.cc)
target_include_directories(webrtc_api_transport_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -767,6 +651,11 @@ add_library(webrtc_api_transport_datagram_transport_interface INTERFACE)
target_link_libraries(webrtc_api_transport_datagram_transport_interface INTERFACE absl::optional webrtc_api_array_view webrtc_api_rtc_error webrtc_rtc_base_rtc_base_approved )
target_include_directories(webrtc_api_transport_datagram_transport_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# api/transport:datagram_transport_interface.headers
+add_library(webrtc_api_transport_datagram_transport_interface.headers INTERFACE)
+target_link_libraries(webrtc_api_transport_datagram_transport_interface.headers INTERFACE webrtc_api_array_view.headers webrtc_api_rtc_error.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_api_transport_datagram_transport_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# api/transport:enums
add_library(webrtc_api_transport_enums INTERFACE)
target_include_directories(webrtc_api_transport_enums INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -957,15 +846,15 @@ target_compile_options(webrtc_api_video_codecs_rtc_software_fallback_wrappers PR
target_link_libraries(webrtc_api_video_codecs_rtc_software_fallback_wrappers PUBLIC absl::core_headers absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_media_rtc_media_base webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# api/video_codecs:video_codecs_api
-add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder_factory.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
+add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
target_include_directories(webrtc_api_video_codecs_video_codecs_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_video_codecs_video_codecs_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_video_codecs_video_codecs_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api/video_codecs:video_codecs_api.headers
add_library(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api/video_codecs:vp8_temporal_layers_factory
@@ -1009,11 +898,6 @@ add_library(webrtc_api_video_recordable_encoded_frame.headers INTERFACE)
target_link_libraries(webrtc_api_video_recordable_encoded_frame.headers INTERFACE webrtc_api_array_view.headers webrtc_api_scoped_refptr.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_rtc_base_refcount.headers )
target_include_directories(webrtc_api_video_recordable_encoded_frame.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# api/video/test:mock_recordable_encoded_frame
-add_library(webrtc_api_video_test_mock_recordable_encoded_frame INTERFACE)
-target_link_libraries(webrtc_api_video_test_mock_recordable_encoded_frame INTERFACE webrtc_api_video_recordable_encoded_frame webrtc_test_test_support )
-target_include_directories(webrtc_api_video_test_mock_recordable_encoded_frame INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
# api/video:video_adaptation
add_library(webrtc_api_video_video_adaptation ${WEBRTC_ROOT}/api/video/video_adaptation_counters.cc)
target_include_directories(webrtc_api_video_video_adaptation PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -1144,7 +1028,7 @@ add_library(webrtc_audio_audio ${WEBRTC_ROOT}/audio/audio_level.cc ${WEBRTC_ROOT
target_include_directories(webrtc_audio_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_audio_audio PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_audio_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# audio/utility:audio_frame_operations
add_library(webrtc_audio_utility_audio_frame_operations ${WEBRTC_ROOT}/audio/utility/audio_frame_operations.cc ${WEBRTC_ROOT}/audio/utility/channel_mixer.cc ${WEBRTC_ROOT}/audio/utility/channel_mixing_matrix.cc)
@@ -1165,13 +1049,6 @@ target_compile_definitions(webrtc_call_adaptation_resource_adaptation PRIVATE RT
target_compile_options(webrtc_call_adaptation_resource_adaptation PRIVATE -fno-exceptions)
target_link_libraries(webrtc_call_adaptation_resource_adaptation PUBLIC absl::algorithm_container absl::optional absl::variant webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_task_queue_task_queue webrtc_api_video_video_adaptation webrtc_api_video_video_frame webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_experiments_balanced_degradation_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_to_queued_task )
-# call/adaptation:resource_adaptation_test_utilities
-add_library(webrtc_call_adaptation_resource_adaptation_test_utilities ${WEBRTC_ROOT}/call/adaptation/test/fake_adaptation_constraint.cc ${WEBRTC_ROOT}/call/adaptation/test/fake_frame_rate_provider.cc ${WEBRTC_ROOT}/call/adaptation/test/fake_resource.cc ${WEBRTC_ROOT}/call/adaptation/test/fake_video_stream_input_state_provider.cc)
-target_include_directories(webrtc_call_adaptation_resource_adaptation_test_utilities PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_call_adaptation_resource_adaptation_test_utilities PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_call_adaptation_resource_adaptation_test_utilities PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_adaptation_resource_adaptation_test_utilities PUBLIC absl::optional webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_task_queue_task_queue webrtc_api_video_video_stream_encoder webrtc_call_adaptation_resource_adaptation webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_to_queued_task webrtc_test_test_support )
-
# call:audio_sender_interface
add_library(webrtc_call_audio_sender_interface INTERFACE)
target_link_libraries(webrtc_call_audio_sender_interface INTERFACE webrtc_api_audio_audio_frame_api )
@@ -1201,18 +1078,18 @@ add_library(webrtc_call_call ${WEBRTC_ROOT}/call/call.cc ${WEBRTC_ROOT}/call/cal
target_include_directories(webrtc_call_call PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call PUBLIC absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
+target_link_libraries(webrtc_call_call PUBLIC absl::bind_front absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
# call:call_interfaces
add_library(webrtc_call_call_interfaces ${WEBRTC_ROOT}/call/audio_receive_stream.cc ${WEBRTC_ROOT}/call/audio_send_stream.cc ${WEBRTC_ROOT}/call/audio_state.cc ${WEBRTC_ROOT}/call/call_config.cc ${WEBRTC_ROOT}/call/flexfec_receive_stream.cc ${WEBRTC_ROOT}/call/syncable.cc)
target_include_directories(webrtc_call_call_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call_interfaces PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_call_audio_sender_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
+target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::bind_front absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_call_audio_sender_interface webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
# call:call_interfaces.headers
add_library(webrtc_call_call_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_call_audio_sender_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
+target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_call_audio_sender_interface.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
target_include_directories(webrtc_call_call_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:fake_network
@@ -1222,21 +1099,26 @@ target_compile_definitions(webrtc_call_fake_network PRIVATE RTC_ENABLE_VP9 WEBRT
target_compile_options(webrtc_call_fake_network PRIVATE -fno-exceptions)
target_link_libraries(webrtc_call_fake_network PUBLIC webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_call_call_interfaces webrtc_call_simulated_network webrtc_call_simulated_packet_receiver webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers )
-# call:mock_rtp_interfaces
-add_library(webrtc_call_mock_rtp_interfaces INTERFACE)
-target_link_libraries(webrtc_call_mock_rtp_interfaces INTERFACE webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_bitrate_settings webrtc_call_rtp_interfaces webrtc_modules_pacing_pacing webrtc_rtc_base_rtc_base webrtc_rtc_base_rate_limiter webrtc_rtc_base_network_sent_packet webrtc_test_test_support )
-target_include_directories(webrtc_call_mock_rtp_interfaces INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# call:receive_stream_interface
+add_library(webrtc_call_receive_stream_interface INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface INTERFACE webrtc_api_frame_transformer_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source )
+target_include_directories(webrtc_call_receive_stream_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# call:receive_stream_interface.headers
+add_library(webrtc_call_receive_stream_interface.headers INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_transport_rtp_rtp_source.headers )
+target_include_directories(webrtc_call_receive_stream_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:rtp_interfaces
add_library(webrtc_call_rtp_interfaces ${WEBRTC_ROOT}/call/rtp_config.cc)
target_include_directories(webrtc_call_rtp_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_rtp_interfaces PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_rtp_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue )
# call:rtp_interfaces.headers
add_library(webrtc_call_rtp_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers )
target_include_directories(webrtc_call_rtp_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:rtp_receiver
@@ -1276,11 +1158,11 @@ add_library(webrtc_call_video_stream_api ${WEBRTC_ROOT}/call/video_receive_strea
target_include_directories(webrtc_call_video_stream_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_video_stream_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_video_stream_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
# call:video_stream_api.headers
add_library(webrtc_call_video_stream_api.headers INTERFACE)
-target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_call_video_stream_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# common_audio
@@ -1300,7 +1182,7 @@ add_library(webrtc_common_audio_common_audio_c ${WEBRTC_ROOT}/common_audio/signa
target_include_directories(webrtc_common_audio_common_audio_c PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_common_audio_common_audio_c PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_common_audio_common_audio_c PRIVATE -Wstrict-prototypes)
-target_link_libraries(webrtc_common_audio_common_audio_c PUBLIC webrtc_fft_size_256 webrtc_spl_sqrt_floor webrtc_common_audio_common_audio_c_arm_asm webrtc_common_audio_common_audio_cc webrtc_rtc_base_checks webrtc_rtc_base_compile_assert_c webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_sanitizer webrtc_rtc_base_system_arch webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_common_audio_common_audio_c PUBLIC webrtc_fft_size_256 webrtc_spl_sqrt_floor webrtc_common_audio_common_audio_c_arm_asm webrtc_common_audio_common_audio_cc webrtc_rtc_base_checks webrtc_rtc_base_compile_assert_c webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_sanitizer webrtc_rtc_base_system_arch webrtc_system_wrappers_system_wrappers webrtc_common_audio_common_audio_neon )
# common_audio:common_audio_c.headers
add_library(webrtc_common_audio_common_audio_c.headers INTERFACE)
@@ -1400,13 +1282,6 @@ add_library(webrtc_common_video_common_video.headers INTERFACE)
target_link_libraries(webrtc_common_video_common_video.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_bitstream_parser_api.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_safe_minmax.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_rtc_export.headers webrtc_system_wrappers_metrics.headers )
target_include_directories(webrtc_common_video_common_video.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# common_video:common_video_unittests
- android_add_executable(TARGET webrtc_common_video_common_video_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/common_video/bitrate_adjuster_unittest.cc ${WEBRTC_ROOT}/common_video/frame_rate_estimator_unittest.cc ${WEBRTC_ROOT}/common_video/h264/h264_bitstream_parser_unittest.cc ${WEBRTC_ROOT}/common_video/h264/pps_parser_unittest.cc ${WEBRTC_ROOT}/common_video/h264/sps_parser_unittest.cc ${WEBRTC_ROOT}/common_video/h264/sps_vui_rewriter_unittest.cc ${WEBRTC_ROOT}/common_video/libyuv/libyuv_unittest.cc ${WEBRTC_ROOT}/common_video/video_frame_buffer_pool_unittest.cc ${WEBRTC_ROOT}/common_video/video_frame_unittest.cc)
-target_include_directories(webrtc_common_video_common_video_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_common_video_common_video_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_common_video_common_video_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_common_video_common_video_unittests PUBLIC gmock gtest absl::optional webrtc-yuv webrtc_api_scoped_refptr webrtc_api_units_time_delta webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_system_wrappers_system_wrappers webrtc_test_fileutils webrtc_test_frame_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
-
# common_video:frame_counts
add_library(webrtc_common_video_frame_counts INTERFACE)
target_include_directories(webrtc_common_video_frame_counts INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -1427,23 +1302,16 @@ add_library(webrtc_common_video_generic_frame_descriptor_generic_frame_descripto
target_link_libraries(webrtc_common_video_generic_frame_descriptor_generic_frame_descriptor.headers INTERFACE webrtc_api_array_view.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_video_video_codec_constants.headers webrtc_rtc_base_checks.headers )
target_include_directories(webrtc_common_video_generic_frame_descriptor_generic_frame_descriptor.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# logging:fake_rtc_event_log
-add_library(webrtc_logging_fake_rtc_event_log ${WEBRTC_ROOT}/logging/rtc_event_log/fake_rtc_event_log.cc ${WEBRTC_ROOT}/logging/rtc_event_log/fake_rtc_event_log_factory.cc)
-target_include_directories(webrtc_logging_fake_rtc_event_log PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_logging_fake_rtc_event_log PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_logging_fake_rtc_event_log PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_fake_rtc_event_log PUBLIC webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
-
# logging:ice_log
add_library(webrtc_logging_ice_log ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc ${WEBRTC_ROOT}/logging/rtc_event_log/ice_logger.cc)
target_include_directories(webrtc_logging_ice_log PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_ice_log PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_ice_log PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved )
# logging:ice_log.headers
add_library(webrtc_logging_ice_log.headers INTERFACE)
-target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_logging_ice_log.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# logging:rtc_event_audio
@@ -1451,41 +1319,41 @@ add_library(webrtc_logging_rtc_event_audio ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_audio PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
# logging:rtc_event_bwe
add_library(webrtc_logging_rtc_event_bwe ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_success.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_route_change.cc)
target_include_directories(webrtc_logging_rtc_event_bwe PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_bwe PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_bwe PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate )
+target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate webrtc_api_units_timestamp )
# logging:rtc_event_frame_events
add_library(webrtc_logging_rtc_event_frame_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_frame_decoded.cc)
target_include_directories(webrtc_logging_rtc_event_frame_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_frame_events PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_frame_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
# logging:rtc_event_generic_packet_events
add_library(webrtc_logging_rtc_event_generic_packet_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc)
target_include_directories(webrtc_logging_rtc_event_generic_packet_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_generic_packet_events PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_generic_packet_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_timeutils )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log2_proto
-add_library(webrtc_logging_rtc_event_log2_proto_bridge)
+add_library(webrtc_logging_rtc_event_log2_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log2_proto_bridge
+ TARGET webrtc_logging_rtc_event_log2_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log2.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log2_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto)
+target_link_libraries(webrtc_logging_rtc_event_log2_proto PUBLIC libprotobuf)
# logging:rtc_event_log_api
add_library(webrtc_logging_rtc_event_log_api INTERFACE)
@@ -1504,27 +1372,27 @@ add_library(webrtc_logging_rtc_event_log_impl_encoder ${WEBRTC_ROOT}/logging/rtc
target_include_directories(webrtc_logging_rtc_event_log_impl_encoder PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_log_impl_encoder PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_log_impl_encoder PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto_bridge webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto_bridge webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log_proto
-add_library(webrtc_logging_rtc_event_log_proto_bridge)
+add_library(webrtc_logging_rtc_event_log_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log_proto_bridge
+ TARGET webrtc_logging_rtc_event_log_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto)
+target_link_libraries(webrtc_logging_rtc_event_log_proto PUBLIC libprotobuf)
# logging:rtc_event_pacing
add_library(webrtc_logging_rtc_event_pacing ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_alr_state.cc)
target_include_directories(webrtc_logging_rtc_event_pacing PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_pacing PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_pacing PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log )
+target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp )
# logging:rtc_event_rtp_rtcp
add_library(webrtc_logging_rtc_event_rtp_rtcp ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc)
@@ -1538,7 +1406,7 @@ add_library(webrtc_logging_rtc_event_video ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_video PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
# logging:rtc_stream_config
add_library(webrtc_logging_rtc_stream_config ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_stream_config.cc)
@@ -1552,18 +1420,25 @@ add_library(webrtc_media_rtc_audio_video ${WEBRTC_ROOT}/media/engine/adm_helpers
target_include_directories(webrtc_media_rtc_audio_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_audio_video PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_audio_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+
+# media:rtc_data_dcsctp_transport
+add_library(webrtc_media_rtc_data_dcsctp_transport ${WEBRTC_ROOT}/media/sctp/dcsctp_transport.cc)
+target_include_directories(webrtc_media_rtc_data_dcsctp_transport PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_media_rtc_data_dcsctp_transport PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_media_rtc_data_dcsctp_transport PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_media_rtc_data_dcsctp_transport PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_net_dcsctp_public_factory webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_task_queue_timeout webrtc_p2p_rtc_p2p webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers )
# media:rtc_data_sctp_transport_factory
add_library(webrtc_media_rtc_data_sctp_transport_factory ${WEBRTC_ROOT}/media/sctp/sctp_transport_factory.cc)
target_include_directories(webrtc_media_rtc_data_sctp_transport_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_DCSCTP WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_data_sctp_transport_factory PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_system_unused )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_dcsctp_transport webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_system_unused webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
# media:rtc_data_sctp_transport_internal
add_library(webrtc_media_rtc_data_sctp_transport_internal INTERFACE)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_api_transport_datagram_transport_interface webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
target_include_directories(webrtc_media_rtc_data_sctp_transport_internal INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_data_usrsctp_transport
@@ -1594,21 +1469,16 @@ target_compile_definitions(webrtc_media_rtc_internal_video_codecs PRIVATE RTC_EN
target_compile_options(webrtc_media_rtc_internal_video_codecs PRIVATE -fno-exceptions)
target_link_libraries(webrtc_media_rtc_internal_video_codecs PUBLIC absl::strings webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_media_base webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_modules_video_coding_codecs_av1_libaom_av1_encoder webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export webrtc_test_fake_video_codecs )
-# media:rtc_media
-add_library(webrtc_media_rtc_media INTERFACE)
-target_link_libraries(webrtc_media_rtc_media INTERFACE webrtc_media_rtc_audio_video )
-target_include_directories(webrtc_media_rtc_media INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
# media:rtc_media_base
add_library(webrtc_media_rtc_media_base ${WEBRTC_ROOT}/media/base/adapted_video_track_source.cc ${WEBRTC_ROOT}/media/base/codec.cc ${WEBRTC_ROOT}/media/base/media_channel.cc ${WEBRTC_ROOT}/media/base/media_constants.cc ${WEBRTC_ROOT}/media/base/media_engine.cc ${WEBRTC_ROOT}/media/base/rid_description.cc ${WEBRTC_ROOT}/media/base/rtp_utils.cc ${WEBRTC_ROOT}/media/base/stream_params.cc ${WEBRTC_ROOT}/media/base/turn_utils.cc ${WEBRTC_ROOT}/media/base/video_adapter.cc ${WEBRTC_ROOT}/media/base/video_broadcaster.cc ${WEBRTC_ROOT}/media/base/video_common.cc ${WEBRTC_ROOT}/media/base/video_source_base.cc)
target_include_directories(webrtc_media_rtc_media_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_base PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_media_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_datagram_transport_interface webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# media:rtc_media_base.headers
add_library(webrtc_media_rtc_media_base.headers INTERFACE)
-target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_datagram_transport_interface.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_media_rtc_media_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_media_config
@@ -1619,27 +1489,6 @@ target_include_directories(webrtc_media_rtc_media_config INTERFACE ${WEBRTC_ROOT
add_library(webrtc_media_rtc_media_config.headers INTERFACE)
target_include_directories(webrtc_media_rtc_media_config.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# media:rtc_media_engine_defaults
-add_library(webrtc_media_rtc_media_engine_defaults ${WEBRTC_ROOT}/media/engine/webrtc_media_engine_defaults.cc)
-target_include_directories(webrtc_media_rtc_media_engine_defaults PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_media_engine_defaults PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_media_rtc_media_engine_defaults PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_engine_defaults PUBLIC webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_modules_audio_processing_api webrtc_rtc_base_checks webrtc_rtc_base_system_rtc_export )
-
-# media:rtc_media_tests_utils
-add_library(webrtc_media_rtc_media_tests_utils ${WEBRTC_ROOT}/media/base/fake_frame_source.cc ${WEBRTC_ROOT}/media/base/fake_media_engine.cc ${WEBRTC_ROOT}/media/base/fake_rtp.cc ${WEBRTC_ROOT}/media/base/fake_video_renderer.cc ${WEBRTC_ROOT}/media/base/test_utils.cc ${WEBRTC_ROOT}/media/engine/fake_webrtc_call.cc ${WEBRTC_ROOT}/media/engine/fake_webrtc_video_engine.cc)
-target_include_directories(webrtc_media_rtc_media_tests_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_media_tests_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_media_rtc_media_tests_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_tests_utils PUBLIC gmock gtest absl::algorithm_container absl::strings webrtc_api_call_api webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_mock_rtp_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support )
-
-# media:rtc_media_unittests
- android_add_executable(TARGET webrtc_media_rtc_media_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/media/engine/webrtc_voice_engine_unittest.cc ${WEBRTC_ROOT}/media/base/codec_unittest.cc ${WEBRTC_ROOT}/media/base/media_engine_unittest.cc ${WEBRTC_ROOT}/media/base/rtp_utils_unittest.cc ${WEBRTC_ROOT}/media/base/sdp_video_format_utils_unittest.cc ${WEBRTC_ROOT}/media/base/stream_params_unittest.cc ${WEBRTC_ROOT}/media/base/turn_utils_unittest.cc ${WEBRTC_ROOT}/media/base/video_adapter_unittest.cc ${WEBRTC_ROOT}/media/base/video_broadcaster_unittest.cc ${WEBRTC_ROOT}/media/base/video_common_unittest.cc ${WEBRTC_ROOT}/media/engine/encoder_simulcast_proxy_unittest.cc ${WEBRTC_ROOT}/media/engine/internal_decoder_factory_unittest.cc ${WEBRTC_ROOT}/media/engine/multiplex_codec_factory_unittest.cc ${WEBRTC_ROOT}/media/engine/null_webrtc_video_engine_unittest.cc ${WEBRTC_ROOT}/media/engine/payload_type_mapper_unittest.cc ${WEBRTC_ROOT}/media/engine/simulcast_encoder_adapter_unittest.cc ${WEBRTC_ROOT}/media/engine/simulcast_unittest.cc ${WEBRTC_ROOT}/media/engine/unhandled_packets_buffer_unittest.cc ${WEBRTC_ROOT}/media/engine/webrtc_media_engine_unittest.cc ${WEBRTC_ROOT}/media/engine/webrtc_video_engine_unittest.cc ${WEBRTC_ROOT}/media/sctp/usrsctp_transport_reliability_unittest.cc ${WEBRTC_ROOT}/media/sctp/usrsctp_transport_unittest.cc)
-target_include_directories(webrtc_media_rtc_media_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_media_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_media_rtc_media_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
-
# media:rtc_sdp_video_format_utils
add_library(webrtc_media_rtc_sdp_video_format_utils ${WEBRTC_ROOT}/media/base/sdp_video_format_utils.cc)
target_include_directories(webrtc_media_rtc_sdp_video_format_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -1667,31 +1516,31 @@ target_link_libraries(webrtc_modules_async_audio_processing_async_audio_processi
target_include_directories(webrtc_modules_async_audio_processing_async_audio_processing.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_config_proto
-add_library(webrtc_modules_audio_coding_ana_config_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_config_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_config_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_config_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/config.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_coding_ana_config_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_config_proto PUBLIC libprotobuf)
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_debug_dump_proto
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_debug_dump_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_debug_dump_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/debug_dump.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC libprotobuf)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
+target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC libprotobuf)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
# modules/audio_coding
add_library(webrtc_modules_audio_coding_audio_coding ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_receiver.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_remixing.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_resampler.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/audio_coding_module.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/call_statistics.cc)
@@ -1730,7 +1579,7 @@ add_library(webrtc_modules_audio_coding_audio_network_adaptor ${WEBRTC_ROOT}/mod
target_include_directories(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional libprotobuf webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_coding_ana_debug_dump_proto_bridge webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_coding_ana_debug_dump_proto webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
# modules/audio_coding:audio_network_adaptor_config
add_library(webrtc_modules_audio_coding_audio_network_adaptor_config ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc)
@@ -1888,7 +1737,7 @@ add_library(webrtc_modules_audio_coding_neteq_tools_minimal ${WEBRTC_ROOT}/modul
target_include_directories(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_CODEC_ISAC WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1)
target_compile_options(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_array_view webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/audio_coding:pcm16b
add_library(webrtc_modules_audio_coding_pcm16b ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/pcm16b_common.cc)
@@ -1908,7 +1757,7 @@ add_library(webrtc_modules_audio_coding_red ${WEBRTC_ROOT}/modules/audio_coding/
target_include_directories(webrtc_modules_audio_coding_red PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_red PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_red PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_cng
add_library(webrtc_modules_audio_coding_webrtc_cng ${WEBRTC_ROOT}/modules/audio_coding/codecs/cng/webrtc_cng.cc)
@@ -1929,7 +1778,7 @@ add_library(webrtc_modules_audio_coding_webrtc_opus ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_coding_webrtc_opus PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_webrtc_opus PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_CODEC_ISAC WEBRTC_HAS_NEON GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1)
target_compile_options(webrtc_modules_audio_coding_webrtc_opus PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional libprotobuf webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_opus_wrapper
add_library(webrtc_modules_audio_coding_webrtc_opus_wrapper ${WEBRTC_ROOT}/modules/audio_coding/codecs/opus/opus_interface.cc)
@@ -2004,11 +1853,6 @@ add_library(webrtc_modules_audio_device_audio_device_impl.headers INTERFACE)
target_link_libraries(webrtc_modules_audio_device_audio_device_impl.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_device_audio_device_api.headers webrtc_modules_audio_device_audio_device_buffer.headers webrtc_modules_audio_device_audio_device_default.headers webrtc_modules_audio_device_audio_device_generic.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_arch.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_audio_device_audio_device_impl.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# modules/audio_device:mock_audio_device
-add_library(webrtc_modules_audio_device_mock_audio_device INTERFACE)
-target_link_libraries(webrtc_modules_audio_device_mock_audio_device INTERFACE webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_buffer webrtc_modules_audio_device_audio_device_impl webrtc_rtc_base_refcount webrtc_test_test_support )
-target_include_directories(webrtc_modules_audio_device_mock_audio_device INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
# modules/audio_mixer:audio_frame_manipulator
add_library(webrtc_modules_audio_mixer_audio_frame_manipulator ${WEBRTC_ROOT}/modules/audio_mixer/audio_frame_manipulator.cc)
target_include_directories(webrtc_modules_audio_mixer_audio_frame_manipulator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2021,7 +1865,7 @@ add_library(webrtc_modules_audio_mixer_audio_mixer_impl ${WEBRTC_ROOT}/modules/a
target_include_directories(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_rtp_packet_info webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_conversions webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/audio_processing/aec3:adaptive_fir_filter
add_library(webrtc_modules_audio_processing_aec3_adaptive_fir_filter INTERFACE)
@@ -2128,11 +1972,11 @@ add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl ${WEBRTC_ROOT
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC libprotobuf webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto_bridge webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
# modules/audio_processing/aec_dump:aec_dump_impl.headers
add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_protobuf_utils.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/audio_processing:aec_dump_interface
@@ -2378,13 +2222,49 @@ add_library(webrtc_modules_audio_processing_agc_agc ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_processing_agc_agc PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_agc_agc PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_agc_agc PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor webrtc_modules_audio_processing_agc_clipping_predictor_evaluator webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/audio_processing/agc:agc.headers
add_library(webrtc_modules_audio_processing_agc_agc.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor.headers webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
target_include_directories(webrtc_modules_audio_processing_agc_agc.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# modules/audio_processing/agc:clipping_predictor
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer webrtc_modules_audio_processing_agc_gain_map webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_safe_minmax )
+
+# modules/audio_processing/agc:clipping_predictor.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_safe_minmax.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_evaluator
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_evaluator.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging )
+
+# modules/audio_processing/agc:clipping_predictor_evaluator.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_level_buffer.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved )
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# modules/audio_processing/agc:gain_control_interface
add_library(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE)
target_include_directories(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2508,24 +2388,17 @@ target_link_libraries(webrtc_modules_audio_processing_audio_processing_statistic
target_include_directories(webrtc_modules_audio_processing_audio_processing_statistics.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_processing:audioproc_debug_proto
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_processing_audioproc_debug_proto_bridge
+ TARGET webrtc_modules_audio_processing_audioproc_debug_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_processing/debug.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_processing
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
-target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC libprotobuf)
-
-# modules/audio_processing:audioproc_test_utils
-add_library(webrtc_modules_audio_processing_audioproc_test_utils ${WEBRTC_ROOT}/modules/audio_processing/test/audio_buffer_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/audio_processing_builder_for_testing.cc ${WEBRTC_ROOT}/modules/audio_processing/test/bitexactness_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/performance_timer.cc ${WEBRTC_ROOT}/modules/audio_processing/test/simulator_buffers.cc ${WEBRTC_ROOT}/modules/audio_processing/test/test_utils.cc)
-target_include_directories(webrtc_modules_audio_processing_audioproc_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_modules_audio_processing_audioproc_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_modules_audio_processing_audioproc_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_audioproc_test_utils PUBLIC gmock gtest absl::optional webrtc_api_array_view webrtc_api_audio_audio_frame_api webrtc_common_audio_common_audio webrtc_modules_audio_coding_neteq_input_audio_tools webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_buffer webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_arch webrtc_system_wrappers_system_wrappers webrtc_test_fileutils webrtc_test_test_support )
+target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto)
+target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC libprotobuf)
# modules/audio_processing/capture_levels_adjuster
add_library(webrtc_modules_audio_processing_capture_levels_adjuster_capture_levels_adjuster ${WEBRTC_ROOT}/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc ${WEBRTC_ROOT}/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc)
@@ -2563,11 +2436,6 @@ add_library(webrtc_modules_audio_processing_high_pass_filter.headers INTERFACE)
target_link_libraries(webrtc_modules_audio_processing_high_pass_filter.headers INTERFACE webrtc_api_array_view.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_utility_cascaded_biquad_filter.headers webrtc_rtc_base_checks.headers )
target_include_directories(webrtc_modules_audio_processing_high_pass_filter.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# modules/audio_processing:mocks
-add_library(webrtc_modules_audio_processing_mocks INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_mocks INTERFACE webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_audio_processing_statistics webrtc_test_test_support )
-target_include_directories(webrtc_modules_audio_processing_mocks INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
# modules/audio_processing/ns
add_library(webrtc_modules_audio_processing_ns_ns ${WEBRTC_ROOT}/modules/audio_processing/ns/fast_math.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/histograms.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/noise_estimator.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/noise_suppressor.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/ns_fft.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/prior_signal_model.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/prior_signal_model_estimator.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/quantile_noise_estimator.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/signal_model.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/signal_model_estimator.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/speech_probability_estimator.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/suppression_params.cc ${WEBRTC_ROOT}/modules/audio_processing/ns/wiener_filter.cc)
target_include_directories(webrtc_modules_audio_processing_ns_ns PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2685,11 +2553,11 @@ target_link_libraries(webrtc_modules_audio_processing_voice_detection.headers IN
target_include_directories(webrtc_modules_audio_processing_voice_detection.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/congestion_controller
-add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc)
+add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc ${WEBRTC_ROOT}/modules/congestion_controller/remb_throttler.cc)
target_include_directories(webrtc_modules_congestion_controller_congestion_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_congestion_controller_congestion_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_congestion_controller_congestion_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
+target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
# modules/congestion_controller/goog_cc:alr_detector
add_library(webrtc_modules_congestion_controller_goog_cc_alr_detector ${WEBRTC_ROOT}/modules/congestion_controller/goog_cc/alr_detector.cc)
@@ -2747,13 +2615,6 @@ target_compile_definitions(webrtc_modules_congestion_controller_goog_cc_pushback
target_compile_options(webrtc_modules_congestion_controller_goog_cc_pushback_controller PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_congestion_controller_goog_cc_pushback_controller PUBLIC absl::strings absl::optional webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_size webrtc_rtc_base_checks webrtc_rtc_base_experiments_rate_control_settings )
-# modules/congestion_controller/goog_cc:test_goog_cc_printer
-add_library(webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer ${WEBRTC_ROOT}/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc)
-target_include_directories(webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer PUBLIC absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_goog_cc webrtc_api_transport_network_control webrtc_api_units_timestamp webrtc_modules_congestion_controller_goog_cc_goog_cc webrtc_modules_congestion_controller_goog_cc_alr_detector webrtc_modules_congestion_controller_goog_cc_delay_based_bwe webrtc_modules_congestion_controller_goog_cc_estimators webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_rtc_base_checks webrtc_test_logging_log_writer )
-
# modules/congestion_controller/rtp:control_handler
add_library(webrtc_modules_congestion_controller_rtp_control_handler ${WEBRTC_ROOT}/modules/congestion_controller/rtp/control_handler.cc)
target_include_directories(webrtc_modules_congestion_controller_rtp_control_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2808,19 +2669,14 @@ target_compile_options(webrtc_modules_pacing_pacing PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_pacing_pacing PUBLIC absl::memory absl::strings absl::optional webrtc_api_function_view webrtc_api_sequence_checker webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_pacing webrtc_modules_module_api webrtc_modules_pacing_interval_budget webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/remote_bitrate_estimator
-add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
+add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/packet_arrival_map.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
target_include_directories(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
-
-# modules/rtp_rtcp:mock_rtp_rtcp
-add_library(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE)
-target_link_libraries(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE absl::optional webrtc_api_video_video_bitrate_allocation webrtc_modules_module_api webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_test_test_support )
-target_include_directories(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/rtp_rtcp
-add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
+add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/capture_clock_offset_updater.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE -fno-exceptions)
@@ -2831,11 +2687,11 @@ add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format ${WEBRTC_ROOT}/modules/rtp_r
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/rtp_rtcp:rtp_rtcp_format.headers
add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp:rtp_video_header
@@ -2956,13 +2812,6 @@ target_compile_definitions(webrtc_modules_video_coding_nack_module PRIVATE RTC_E
target_compile_options(webrtc_modules_video_coding_nack_module PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_video_coding_nack_module PUBLIC webrtc_api_sequence_checker webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
-# modules/video_coding:simulcast_test_fixture_impl
-add_library(webrtc_modules_video_coding_simulcast_test_fixture_impl ${WEBRTC_ROOT}/modules/video_coding/utility/simulcast_test_fixture_impl.cc)
-target_include_directories(webrtc_modules_video_coding_simulcast_test_fixture_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_modules_video_coding_simulcast_test_fixture_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_modules_video_coding_simulcast_test_fixture_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_video_coding_simulcast_test_fixture_impl PUBLIC webrtc_api_mock_video_decoder webrtc_api_mock_video_encoder webrtc_api_simulcast_test_fixture_api webrtc_api_video_encoded_image webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_test_test_support )
-
# modules/video_coding/svc:scalability_structures
add_library(webrtc_modules_video_coding_svc_scalability_structures ${WEBRTC_ROOT}/modules/video_coding/svc/create_scalability_structure.cc ${WEBRTC_ROOT}/modules/video_coding/svc/scalability_structure_full_svc.cc ${WEBRTC_ROOT}/modules/video_coding/svc/scalability_structure_key_svc.cc ${WEBRTC_ROOT}/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc ${WEBRTC_ROOT}/modules/video_coding/svc/scalability_structure_simulcast.cc)
target_include_directories(webrtc_modules_video_coding_svc_scalability_structures PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3050,7 +2899,7 @@ add_library(webrtc_modules_video_coding_webrtc_vp9 ${WEBRTC_ROOT}/modules/video_
target_include_directories(webrtc_modules_video_coding_webrtc_vp9 PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_video_coding_webrtc_vp9 PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_video_coding_webrtc_vp9 PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
# modules/video_coding:webrtc_vp9_helpers
add_library(webrtc_modules_video_coding_webrtc_vp9_helpers ${WEBRTC_ROOT}/modules/video_coding/codecs/vp9/svc_config.cc)
@@ -3077,29 +2926,214 @@ target_compile_definitions(webrtc_modules_video_processing_video_processing_neon
target_compile_options(webrtc_modules_video_processing_video_processing_neon PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_video_processing_video_processing_neon PUBLIC webrtc_modules_video_processing_denoiser_filter )
-# p2p:fake_ice_transport
-add_library(webrtc_p2p_fake_ice_transport INTERFACE)
-target_link_libraries(webrtc_p2p_fake_ice_transport INTERFACE absl::algorithm_container absl::optional webrtc_api_libjingle_peerconnection_api webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
-target_include_directories(webrtc_p2p_fake_ice_transport INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# p2p:fake_port_allocator
-add_library(webrtc_p2p_fake_port_allocator INTERFACE)
-target_link_libraries(webrtc_p2p_fake_port_allocator INTERFACE webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_net_helpers webrtc_rtc_base_threading )
-target_include_directories(webrtc_p2p_fake_port_allocator INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# p2p:p2p_server_utils
-add_library(webrtc_p2p_p2p_server_utils ${WEBRTC_ROOT}/p2p/base/stun_server.cc ${WEBRTC_ROOT}/p2p/base/turn_server.cc)
-target_include_directories(webrtc_p2p_p2p_server_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_p2p_p2p_server_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_p2p_p2p_server_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_p2p_p2p_server_utils PUBLIC absl::algorithm_container absl::memory webrtc_api_packet_socket_factory webrtc_api_sequence_checker webrtc_api_transport_stun_types webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
-
-# p2p:p2p_test_utils
-add_library(webrtc_p2p_p2p_test_utils ${WEBRTC_ROOT}/p2p/base/test_stun_server.cc)
-target_include_directories(webrtc_p2p_p2p_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_p2p_p2p_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_p2p_p2p_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_p2p_p2p_test_utils PUBLIC absl::algorithm_container absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_packet_socket_factory webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_transport_stun_types webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support )
+# net/dcsctp/common:internal_types
+add_library(webrtc_net_dcsctp_common_internal_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_internal_types INTERFACE webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_common_internal_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:math
+add_library(webrtc_net_dcsctp_common_math INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_math INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:pair_hash
+add_library(webrtc_net_dcsctp_common_pair_hash INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_pair_hash INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:sequence_numbers
+add_library(webrtc_net_dcsctp_common_sequence_numbers INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_sequence_numbers INTERFACE webrtc_net_dcsctp_common_internal_types )
+target_include_directories(webrtc_net_dcsctp_common_sequence_numbers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:str_join
+add_library(webrtc_net_dcsctp_common_str_join INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_str_join INTERFACE absl::strings webrtc_rtc_base_stringutils )
+target_include_directories(webrtc_net_dcsctp_common_str_join INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:bounded_io
+add_library(webrtc_net_dcsctp_packet_bounded_io INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_bounded_io INTERFACE webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_bounded_io INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:chunk
+add_library(webrtc_net_dcsctp_packet_chunk ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/abort_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_echo_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/data_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/error_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/forward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/idata_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/reconfig_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/sack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:chunk_validators
+add_library(webrtc_net_dcsctp_packet_chunk_validators ${WEBRTC_ROOT}/net/dcsctp/packet/chunk_validators.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk_validators PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk_validators PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk_validators PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk_validators PUBLIC webrtc_net_dcsctp_packet_chunk webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:crc32c
+add_library(webrtc_net_dcsctp_packet_crc32c ${WEBRTC_ROOT}/net/dcsctp/packet/crc32c.cc)
+target_include_directories(webrtc_net_dcsctp_packet_crc32c PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_crc32c PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_crc32c PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_crc32c PUBLIC crc32c webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:data
+add_library(webrtc_net_dcsctp_packet_data INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_data INTERFACE webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_data INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:error_cause
+add_library(webrtc_net_dcsctp_packet_error_cause ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/no_user_data_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/protocol_violation_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc)
+target_include_directories(webrtc_net_dcsctp_packet_error_cause PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_error_cause PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_error_cause PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_error_cause PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:parameter
+add_library(webrtc_net_dcsctp_packet_parameter ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/state_cookie_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/supported_extensions_parameter.cc)
+target_include_directories(webrtc_net_dcsctp_packet_parameter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_parameter PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_parameter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_parameter PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:sctp_packet
+add_library(webrtc_net_dcsctp_packet_sctp_packet ${WEBRTC_ROOT}/net/dcsctp/packet/sctp_packet.cc)
+target_include_directories(webrtc_net_dcsctp_packet_sctp_packet PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_sctp_packet PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_sctp_packet PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_sctp_packet PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_crc32c webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:tlv_trait
+add_library(webrtc_net_dcsctp_packet_tlv_trait ${WEBRTC_ROOT}/net/dcsctp/packet/tlv_trait.cc)
+target_include_directories(webrtc_net_dcsctp_packet_tlv_trait PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_tlv_trait PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_tlv_trait PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_tlv_trait PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/public:factory
+add_library(webrtc_net_dcsctp_public_factory ${WEBRTC_ROOT}/net/dcsctp/public/dcsctp_socket_factory.cc)
+target_include_directories(webrtc_net_dcsctp_public_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_public_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_public_factory PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_public_factory PUBLIC absl::strings webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_dcsctp_socket )
+
+# net/dcsctp/public:socket
+add_library(webrtc_net_dcsctp_public_socket INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_socket INTERFACE absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_public_socket INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:strong_alias
+add_library(webrtc_net_dcsctp_public_strong_alias INTERFACE)
+target_include_directories(webrtc_net_dcsctp_public_strong_alias INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:types
+add_library(webrtc_net_dcsctp_public_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_types INTERFACE webrtc_api_array_view webrtc_net_dcsctp_public_strong_alias )
+target_include_directories(webrtc_net_dcsctp_public_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:data_tracker
+add_library(webrtc_net_dcsctp_rx_data_tracker ${WEBRTC_ROOT}/net/dcsctp/rx/data_tracker.cc)
+target_include_directories(webrtc_net_dcsctp_rx_data_tracker PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_data_tracker PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_data_tracker PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_data_tracker PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_queue
+add_library(webrtc_net_dcsctp_rx_reassembly_queue ${WEBRTC_ROOT}/net/dcsctp/rx/reassembly_queue.cc)
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_queue PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_net_dcsctp_rx_traditional_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_streams
+add_library(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE absl::strings webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:traditional_reassembly_streams
+add_library(webrtc_net_dcsctp_rx_traditional_reassembly_streams ${WEBRTC_ROOT}/net/dcsctp/rx/traditional_reassembly_streams.cc)
+target_include_directories(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_traditional_reassembly_streams PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:context
+add_library(webrtc_net_dcsctp_socket_context INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_socket_context INTERFACE absl::strings webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_socket_context INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/socket:dcsctp_socket
+add_library(webrtc_net_dcsctp_socket_dcsctp_socket ${WEBRTC_ROOT}/net/dcsctp/socket/dcsctp_socket.cc ${WEBRTC_ROOT}/net/dcsctp/socket/state_cookie.cc)
+target_include_directories(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_dcsctp_socket PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_chunk_validators webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_socket_transmission_control_block webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_rr_send_queue webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:heartbeat_handler
+add_library(webrtc_net_dcsctp_socket_heartbeat_handler ${WEBRTC_ROOT}/net/dcsctp/socket/heartbeat_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_heartbeat_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:stream_reset_handler
+add_library(webrtc_net_dcsctp_socket_stream_reset_handler ${WEBRTC_ROOT}/net/dcsctp/socket/stream_reset_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_stream_reset_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:transmission_control_block
+add_library(webrtc_net_dcsctp_socket_transmission_control_block ${WEBRTC_ROOT}/net/dcsctp/socket/transmission_control_block.cc)
+target_include_directories(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_transmission_control_block PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/timer:task_queue_timeout
+add_library(webrtc_net_dcsctp_timer_task_queue_timeout ${WEBRTC_ROOT}/net/dcsctp/timer/task_queue_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_task_queue_timeout PUBLIC webrtc_api_array_view webrtc_api_task_queue_task_queue webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
+
+# net/dcsctp/timer
+add_library(webrtc_net_dcsctp_timer_timer ${WEBRTC_ROOT}/net/dcsctp/timer/timer.cc)
+target_include_directories(webrtc_net_dcsctp_timer_timer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_timer PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_timer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_timer PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_error_counter
+add_library(webrtc_net_dcsctp_tx_retransmission_error_counter ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_error_counter.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_error_counter PUBLIC absl::strings webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_queue
+add_library(webrtc_net_dcsctp_tx_retransmission_queue ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_timeout
+add_library(webrtc_net_dcsctp_tx_retransmission_timeout ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_timeout PUBLIC webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:rr_send_queue
+add_library(webrtc_net_dcsctp_tx_rr_send_queue ${WEBRTC_ROOT}/net/dcsctp/tx/rr_send_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_rr_send_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:send_queue
+add_library(webrtc_net_dcsctp_tx_send_queue INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_tx_send_queue INTERFACE absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_tx_send_queue INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# p2p:rtc_p2p
add_library(webrtc_p2p_rtc_p2p ${WEBRTC_ROOT}/p2p/base/async_stun_tcp_socket.cc ${WEBRTC_ROOT}/p2p/base/basic_async_resolver_factory.cc ${WEBRTC_ROOT}/p2p/base/basic_ice_controller.cc ${WEBRTC_ROOT}/p2p/base/basic_packet_socket_factory.cc ${WEBRTC_ROOT}/p2p/base/connection.cc ${WEBRTC_ROOT}/p2p/base/connection_info.cc ${WEBRTC_ROOT}/p2p/base/default_ice_transport_factory.cc ${WEBRTC_ROOT}/p2p/base/dtls_transport.cc ${WEBRTC_ROOT}/p2p/base/dtls_transport_internal.cc ${WEBRTC_ROOT}/p2p/base/ice_controller_interface.cc ${WEBRTC_ROOT}/p2p/base/ice_credentials_iterator.cc ${WEBRTC_ROOT}/p2p/base/ice_transport_internal.cc ${WEBRTC_ROOT}/p2p/base/p2p_constants.cc ${WEBRTC_ROOT}/p2p/base/p2p_transport_channel.cc ${WEBRTC_ROOT}/p2p/base/packet_transport_internal.cc ${WEBRTC_ROOT}/p2p/base/port.cc ${WEBRTC_ROOT}/p2p/base/port_allocator.cc ${WEBRTC_ROOT}/p2p/base/port_interface.cc ${WEBRTC_ROOT}/p2p/base/pseudo_tcp.cc ${WEBRTC_ROOT}/p2p/base/regathering_controller.cc ${WEBRTC_ROOT}/p2p/base/stun_port.cc ${WEBRTC_ROOT}/p2p/base/stun_request.cc ${WEBRTC_ROOT}/p2p/base/tcp_port.cc ${WEBRTC_ROOT}/p2p/base/transport_description.cc ${WEBRTC_ROOT}/p2p/base/transport_description_factory.cc ${WEBRTC_ROOT}/p2p/base/turn_port.cc ${WEBRTC_ROOT}/p2p/client/basic_port_allocator.cc ${WEBRTC_ROOT}/p2p/client/turn_port_factory.cc)
@@ -3118,7 +3152,7 @@ add_library(webrtc_pc_audio_rtp_receiver ${WEBRTC_ROOT}/pc/audio_rtp_receiver.cc
target_include_directories(webrtc_pc_audio_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_audio_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_audio_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
# pc:audio_track
add_library(webrtc_pc_audio_track ${WEBRTC_ROOT}/pc/audio_track.cc)
@@ -3132,43 +3166,21 @@ add_library(webrtc_pc_connection_context ${WEBRTC_ROOT}/pc/connection_context.cc
target_include_directories(webrtc_pc_connection_context PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_connection_context PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_connection_context PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
+target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
# pc:dtmf_sender
add_library(webrtc_pc_dtmf_sender ${WEBRTC_ROOT}/pc/dtmf_sender.cc)
target_include_directories(webrtc_pc_dtmf_sender PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_dtmf_sender PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_dtmf_sender PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
-
-# pc:integration_test_helpers
-add_library(webrtc_pc_integration_test_helpers ${WEBRTC_ROOT}/pc/test/integration_test_helpers.cc)
-target_include_directories(webrtc_pc_integration_test_helpers PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_pc_integration_test_helpers PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_pc_integration_test_helpers PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_pc_proxy webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:jitter_buffer_delay
add_library(webrtc_pc_jitter_buffer_delay ${WEBRTC_ROOT}/pc/jitter_buffer_delay.cc)
target_include_directories(webrtc_pc_jitter_buffer_delay PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_jitter_buffer_delay PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_jitter_buffer_delay PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_sequence_checker webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading )
-
-# pc:jitter_buffer_delay_interface
-add_library(webrtc_pc_jitter_buffer_delay_interface INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_interface INTERFACE absl::algorithm_container absl::strings absl::optional webrtc_media_rtc_media_base webrtc_rtc_base_refcount )
-target_include_directories(webrtc_pc_jitter_buffer_delay_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# pc:jitter_buffer_delay_proxy
-add_library(webrtc_pc_jitter_buffer_delay_proxy INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_proxy INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface )
-target_include_directories(webrtc_pc_jitter_buffer_delay_proxy INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# pc:libjingle_peerconnection
-add_library(webrtc_pc_libjingle_peerconnection INTERFACE)
-target_link_libraries(webrtc_pc_libjingle_peerconnection INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_pc_peerconnection )
-target_include_directories(webrtc_pc_libjingle_peerconnection INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::optional webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_safe_conversions webrtc_rtc_base_safe_minmax webrtc_rtc_base_system_no_unique_address )
# pc:media_protocol_names
add_library(webrtc_pc_media_protocol_names ${WEBRTC_ROOT}/pc/media_protocol_names.cc)
@@ -3183,13 +3195,6 @@ target_compile_definitions(webrtc_pc_media_stream PRIVATE RTC_ENABLE_VP9 WEBRTC_
target_compile_options(webrtc_pc_media_stream PRIVATE -fno-exceptions)
target_link_libraries(webrtc_pc_media_stream PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount )
-# pc:pc_test_utils
-add_library(webrtc_pc_pc_test_utils ${WEBRTC_ROOT}/pc/test/fake_audio_capture_module.cc ${WEBRTC_ROOT}/pc/test/peer_connection_test_wrapper.cc)
-target_include_directories(webrtc_pc_pc_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_pc_pc_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_pc_pc_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
-
# pc:peer_connection_message_handler
add_library(webrtc_pc_peer_connection_message_handler ${WEBRTC_ROOT}/pc/peer_connection_message_handler.cc)
target_include_directories(webrtc_pc_peer_connection_message_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3202,14 +3207,14 @@ add_library(webrtc_pc_peerconnection ${WEBRTC_ROOT}/pc/data_channel_controller.c
target_include_directories(webrtc_pc_peerconnection PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_peerconnection PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_sender webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
-# pc:peerconnection_unittests
- android_add_executable(TARGET webrtc_pc_peerconnection_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/data_channel_integrationtest.cc ${WEBRTC_ROOT}/pc/data_channel_unittest.cc ${WEBRTC_ROOT}/pc/dtmf_sender_unittest.cc ${WEBRTC_ROOT}/pc/ice_server_parsing_unittest.cc ${WEBRTC_ROOT}/pc/jitter_buffer_delay_unittest.cc ${WEBRTC_ROOT}/pc/jsep_session_description_unittest.cc ${WEBRTC_ROOT}/pc/local_audio_source_unittest.cc ${WEBRTC_ROOT}/pc/media_stream_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_adaptation_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_bundle_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_crypto_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_data_channel_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_end_to_end_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_factory_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_header_extension_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_histogram_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_ice_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_interface_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_jsep_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_media_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_rtp_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_signaling_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_simulcast_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.h ${WEBRTC_ROOT}/pc/proxy_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_integrationtest.cc ${WEBRTC_ROOT}/pc/rtc_stats_traversal_unittest.cc ${WEBRTC_ROOT}/pc/rtp_media_utils_unittest.cc ${WEBRTC_ROOT}/pc/rtp_parameters_conversion_unittest.cc ${WEBRTC_ROOT}/pc/rtp_sender_receiver_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transceiver_unittest.cc ${WEBRTC_ROOT}/pc/sctp_utils_unittest.cc ${WEBRTC_ROOT}/pc/sdp_serializer_unittest.cc ${WEBRTC_ROOT}/pc/stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/test/fake_audio_capture_module_unittest.cc ${WEBRTC_ROOT}/pc/test/test_sdp_strings.h ${WEBRTC_ROOT}/pc/track_media_info_map_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_track_source_unittest.cc ${WEBRTC_ROOT}/pc/video_track_unittest.cc ${WEBRTC_ROOT}/pc/webrtc_sdp_unittest.cc)
-target_include_directories(webrtc_pc_peerconnection_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_pc_peerconnection_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_pc_peerconnection_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+# pc:proxy
+add_library(webrtc_pc_proxy ${WEBRTC_ROOT}/pc/proxy.cc)
+target_include_directories(webrtc_pc_proxy PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_proxy PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_proxy PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_proxy PUBLIC webrtc_api_scoped_refptr webrtc_api_task_queue_task_queue webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# pc:remote_audio_source
add_library(webrtc_pc_remote_audio_source ${WEBRTC_ROOT}/pc/remote_audio_source.cc)
@@ -3218,38 +3223,26 @@ target_compile_definitions(webrtc_pc_remote_audio_source PRIVATE RTC_ENABLE_VP9
target_compile_options(webrtc_pc_remote_audio_source PRIVATE -fno-exceptions)
target_link_libraries(webrtc_pc_remote_audio_source PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_media_stream_interface webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_conversions webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex )
-# pc:rtc_pc
-add_library(webrtc_pc_rtc_pc INTERFACE)
-target_link_libraries(webrtc_pc_rtc_pc INTERFACE libsrtp webrtc_media_rtc_audio_video webrtc_pc_rtc_pc_base )
-target_include_directories(webrtc_pc_rtc_pc INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
# pc:rtc_pc_base
-add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/session_description.cc ${WEBRTC_ROOT}/pc/simulcast_description.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc)
+add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_collection.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc ${WEBRTC_ROOT}/pc/video_track_source_proxy.cc)
target_include_directories(webrtc_pc_rtc_pc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_base PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtc_pc_base PRIVATE -fno-exceptions -Ithird_party/libsrtp/include)
-target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
-
-# pc:rtc_pc_unittests
- android_add_executable(TARGET webrtc_pc_rtc_pc_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/channel_manager_unittest.cc ${WEBRTC_ROOT}/pc/channel_unittest.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/dtls_transport_unittest.cc ${WEBRTC_ROOT}/pc/ice_transport_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_unittest.cc ${WEBRTC_ROOT}/pc/media_session_unittest.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/sctp_transport_unittest.cc ${WEBRTC_ROOT}/pc/session_description_unittest.cc ${WEBRTC_ROOT}/pc/srtp_filter_unittest.cc ${WEBRTC_ROOT}/pc/srtp_session_unittest.cc ${WEBRTC_ROOT}/pc/srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/test/rtp_transport_test_util.h ${WEBRTC_ROOT}/pc/test/srtp_test_util.h ${WEBRTC_ROOT}/pc/used_ids_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_receiver_unittest.cc)
-target_include_directories(webrtc_pc_rtc_pc_unittests PRIVATE ${WEBRTC_ROOT}/pc/../third_party/libsrtp/srtp ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_pc_rtc_pc_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_pc_rtc_pc_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support )
+target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_proxy webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:rtp_parameters_conversion
add_library(webrtc_pc_rtp_parameters_conversion ${WEBRTC_ROOT}/pc/rtp_parameters_conversion.cc)
target_include_directories(webrtc_pc_rtp_parameters_conversion PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_parameters_conversion PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_parameters_conversion PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
+target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
# pc:rtp_receiver
add_library(webrtc_pc_rtp_receiver ${WEBRTC_ROOT}/pc/rtp_receiver.cc)
target_include_directories(webrtc_pc_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
# pc:rtp_sender
add_library(webrtc_pc_rtp_sender ${WEBRTC_ROOT}/pc/rtp_sender.cc)
@@ -3263,7 +3256,7 @@ add_library(webrtc_pc_rtp_transceiver ${WEBRTC_ROOT}/pc/rtp_transceiver.cc)
target_include_directories(webrtc_pc_rtp_transceiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_transceiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_transceiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_proxy webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_session_description webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:rtp_transmission_manager
add_library(webrtc_pc_rtp_transmission_manager ${WEBRTC_ROOT}/pc/rtp_transmission_manager.cc)
@@ -3272,18 +3265,25 @@ target_compile_definitions(webrtc_pc_rtp_transmission_manager PRIVATE RTC_ENABLE
target_compile_options(webrtc_pc_rtp_transmission_manager PRIVATE -fno-exceptions)
target_link_libraries(webrtc_pc_rtp_transmission_manager PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_media_rtc_media_base webrtc_pc_audio_rtp_receiver webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_third_party_sigslot_sigslot )
-# pc/scenario_tests:pc_scenario_tests
-add_library(webrtc_pc_scenario_tests_pc_scenario_tests ${WEBRTC_ROOT}/pc/scenario_tests/goog_cc_test.cc)
-target_include_directories(webrtc_pc_scenario_tests_pc_scenario_tests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_pc_scenario_tests_pc_scenario_tests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_pc_scenario_tests_pc_scenario_tests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_scenario_tests_pc_scenario_tests PUBLIC webrtc_api_rtc_stats_api webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_test_field_trial webrtc_test_test_support webrtc_test_peer_scenario_peer_scenario )
-
# pc:sdp_state_provider
add_library(webrtc_pc_sdp_state_provider INTERFACE)
target_link_libraries(webrtc_pc_sdp_state_provider INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_pc_rtc_pc_base )
target_include_directories(webrtc_pc_sdp_state_provider INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# pc:session_description
+add_library(webrtc_pc_session_description ${WEBRTC_ROOT}/pc/session_description.cc)
+target_include_directories(webrtc_pc_session_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_session_description PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_session_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_session_description PUBLIC absl::algorithm_container absl::memory webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_simulcast_description webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
+# pc:simulcast_description
+add_library(webrtc_pc_simulcast_description ${WEBRTC_ROOT}/pc/simulcast_description.cc)
+target_include_directories(webrtc_pc_simulcast_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_simulcast_description PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_simulcast_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_simulcast_description PUBLIC webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
# pc:stats_collector_interface
add_library(webrtc_pc_stats_collector_interface INTERFACE)
target_link_libraries(webrtc_pc_stats_collector_interface INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface )
@@ -3308,7 +3308,7 @@ add_library(webrtc_pc_video_rtp_receiver ${WEBRTC_ROOT}/pc/video_rtp_receiver.cc
target_include_directories(webrtc_pc_video_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_video_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_video_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address )
# pc:video_rtp_track_source
add_library(webrtc_pc_video_rtp_track_source ${WEBRTC_ROOT}/pc/video_rtp_track_source.cc)
@@ -3557,13 +3557,6 @@ target_include_directories(webrtc_rtc_base_gtest_prod INTERFACE ${WEBRTC_ROOT} $
add_library(webrtc_rtc_base_gtest_prod.headers INTERFACE)
target_include_directories(webrtc_rtc_base_gtest_prod.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# rtc_base:gunit_helpers
-add_library(webrtc_rtc_base_gunit_helpers ${WEBRTC_ROOT}/rtc_base/gunit.cc)
-target_include_directories(webrtc_rtc_base_gunit_helpers PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_rtc_base_gunit_helpers PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_rtc_base_gunit_helpers PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_gunit_helpers PUBLIC absl::strings webrtc_rtc_base_rtc_base webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_test_test_support )
-
# rtc_base:ignore_wundef
add_library(webrtc_rtc_base_ignore_wundef INTERFACE)
target_include_directories(webrtc_rtc_base_ignore_wundef INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3676,7 +3669,7 @@ add_library(webrtc_rtc_base_platform_thread ${WEBRTC_ROOT}/rtc_base/platform_thr
target_include_directories(webrtc_rtc_base_platform_thread PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_platform_thread PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_platform_thread PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::strings webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::memory absl::strings absl::optional webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
# rtc_base:platform_thread.headers
add_library(webrtc_rtc_base_platform_thread.headers INTERFACE)
@@ -3695,6 +3688,15 @@ add_library(webrtc_rtc_base_platform_thread_types.headers INTERFACE)
target_link_libraries(webrtc_rtc_base_platform_thread_types.headers INTERFACE webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_platform_thread_types.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# rtc_base:protobuf_utils
+add_library(webrtc_rtc_base_protobuf_utils INTERFACE)
+target_link_libraries(webrtc_rtc_base_protobuf_utils INTERFACE libprotobuf )
+target_include_directories(webrtc_rtc_base_protobuf_utils INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# rtc_base:protobuf_utils.headers
+add_library(webrtc_rtc_base_protobuf_utils.headers INTERFACE)
+target_include_directories(webrtc_rtc_base_protobuf_utils.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# rtc_base:rate_limiter
add_library(webrtc_rtc_base_rate_limiter ${WEBRTC_ROOT}/rtc_base/rate_limiter.cc)
target_include_directories(webrtc_rtc_base_rate_limiter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3704,12 +3706,12 @@ target_link_libraries(webrtc_rtc_base_rate_limiter PUBLIC absl::optional webrtc_
# rtc_base:refcount
add_library(webrtc_rtc_base_refcount INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_rtc_base_macromagic )
+target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_macromagic )
target_include_directories(webrtc_rtc_base_refcount INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:refcount.headers
add_library(webrtc_rtc_base_refcount.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_rtc_base_macromagic.headers )
+target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_refcount.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base
@@ -3717,11 +3719,11 @@ add_library(webrtc_rtc_base_rtc_base ${WEBRTC_ROOT}/rtc_base/ifaddrs_converter.c
target_include_directories(webrtc_rtc_base_rtc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_rtc_base PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_rtc_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# rtc_base:rtc_base.headers
add_library(webrtc_rtc_base_rtc_base.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_rtc_base_rtc_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:rtc_base_approved
@@ -3755,13 +3757,6 @@ add_library(webrtc_rtc_base_rtc_event.headers INTERFACE)
target_link_libraries(webrtc_rtc_base_rtc_event.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_synchronization_yield_policy.headers webrtc_rtc_base_system_warn_current_thread_is_deadlocked.headers )
target_include_directories(webrtc_rtc_base_rtc_event.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# rtc_base:rtc_json
-add_library(webrtc_rtc_base_rtc_json ${WEBRTC_ROOT}/rtc_base/strings/json.cc)
-target_include_directories(webrtc_rtc_base_rtc_json PRIVATE ${WEBRTC_ROOT}/rtc_base/../third_party/jsoncpp/source/include ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_rtc_base_rtc_json PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_EXTERNAL_JSON WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_rtc_base_rtc_json PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_rtc_json PUBLIC jsoncpp webrtc_rtc_base_stringutils )
-
# rtc_base:rtc_numerics
add_library(webrtc_rtc_base_rtc_numerics ${WEBRTC_ROOT}/rtc_base/numerics/event_based_exponential_moving_average.cc ${WEBRTC_ROOT}/rtc_base/numerics/exp_filter.cc ${WEBRTC_ROOT}/rtc_base/numerics/moving_average.cc)
target_include_directories(webrtc_rtc_base_rtc_numerics PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3781,13 +3776,6 @@ target_compile_definitions(webrtc_rtc_base_rtc_operations_chain PRIVATE RTC_ENAB
target_compile_options(webrtc_rtc_base_rtc_operations_chain PRIVATE -fno-exceptions)
target_link_libraries(webrtc_rtc_base_rtc_operations_chain PUBLIC absl::optional webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_system_no_unique_address )
-# rtc_base:rtc_stats_counters
-add_library(webrtc_rtc_base_rtc_stats_counters ${WEBRTC_ROOT}/rtc_base/numerics/event_rate_counter.cc ${WEBRTC_ROOT}/rtc_base/numerics/sample_stats.cc)
-target_include_directories(webrtc_rtc_base_rtc_stats_counters PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_rtc_base_rtc_stats_counters PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_rtc_base_rtc_stats_counters PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_rtc_stats_counters PUBLIC webrtc_api_numerics_numerics webrtc_api_units_data_rate webrtc_api_units_time_delta webrtc_api_units_timestamp )
-
# rtc_base:rtc_task_queue
add_library(webrtc_rtc_base_rtc_task_queue ${WEBRTC_ROOT}/rtc_base/task_queue.cc)
target_include_directories(webrtc_rtc_base_rtc_task_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -4029,23 +4017,16 @@ target_include_directories(webrtc_rtc_base_system_warn_current_thread_is_deadloc
add_library(webrtc_rtc_base_system_warn_current_thread_is_deadlocked.headers INTERFACE)
target_include_directories(webrtc_rtc_base_system_warn_current_thread_is_deadlocked.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# rtc_base:task_queue_for_test
-add_library(webrtc_rtc_base_task_queue_for_test ${WEBRTC_ROOT}/rtc_base/task_queue_for_test.cc)
-target_include_directories(webrtc_rtc_base_task_queue_for_test PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_rtc_base_task_queue_for_test PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_rtc_base_task_queue_for_test PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_queue_for_test PUBLIC absl::strings webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_utils_to_queued_task )
-
# rtc_base/task_utils:pending_task_safety_flag
add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag ${WEBRTC_ROOT}/rtc_base/task_utils/pending_task_safety_flag.cc)
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_system_no_unique_address )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_system_no_unique_address )
# rtc_base/task_utils:pending_task_safety_flag.headers
add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_refcount.headers webrtc_rtc_base_system_no_unique_address.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_system_no_unique_address.headers )
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:repeating_task
@@ -4053,11 +4034,11 @@ add_library(webrtc_rtc_base_task_utils_repeating_task ${WEBRTC_ROOT}/rtc_base/ta
target_include_directories(webrtc_rtc_base_task_utils_repeating_task PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_repeating_task PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_repeating_task PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
# rtc_base/task_utils:repeating_task.headers
add_library(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:to_queued_task
@@ -4097,11 +4078,11 @@ add_library(webrtc_rtc_base_threading ${WEBRTC_ROOT}/rtc_base/async_resolver.cc
target_include_directories(webrtc_rtc_base_threading PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_threading PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_threading PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_threading PUBLIC absl::algorithm_container webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_rtc_base_threading PUBLIC absl::algorithm_container webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# rtc_base:threading.headers
add_library(webrtc_rtc_base_threading.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
+target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
target_include_directories(webrtc_rtc_base_threading.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/time:timestamp_extrapolator
@@ -4210,27 +4191,6 @@ add_library(webrtc_system_wrappers_system_wrappers.headers INTERFACE)
target_link_libraries(webrtc_system_wrappers_system_wrappers.headers INTERFACE webrtc_api_array_view.headers webrtc_api_units_timestamp.headers webrtc_modules_module_api_public.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_arch.headers webrtc_rtc_base_system_rtc_export.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_system_wrappers_system_wrappers.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# test:audio_codec_mocks
-add_library(webrtc_test_audio_codec_mocks ${WEBRTC_ROOT}/test/mock_audio_decoder.cc ${WEBRTC_ROOT}/test/mock_audio_encoder.cc)
-target_include_directories(webrtc_test_audio_codec_mocks PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_audio_codec_mocks PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_test_audio_codec_mocks PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_audio_codec_mocks PUBLIC absl::memory webrtc_api_array_view webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_test_test_support )
-
-# test:direct_transport
-add_library(webrtc_test_direct_transport ${WEBRTC_ROOT}/test/direct_transport.cc)
-target_include_directories(webrtc_test_direct_transport PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_direct_transport PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_direct_transport PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_direct_transport PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_simulated_packet_receiver webrtc_rtc_base_macromagic webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_test_rtp_test_utils )
-
-# test:encoder_settings
-add_library(webrtc_test_encoder_settings ${WEBRTC_ROOT}/test/encoder_settings.cc)
-target_include_directories(webrtc_test_encoder_settings PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_encoder_settings PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_encoder_settings PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_encoder_settings PUBLIC webrtc_api_scoped_refptr webrtc_api_video_codecs_video_codecs_api webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_rtc_base_checks webrtc_rtc_base_refcount )
-
# test:fake_video_codecs
add_library(webrtc_test_fake_video_codecs ${WEBRTC_ROOT}/test/configurable_frame_size_encoder.cc ${WEBRTC_ROOT}/test/fake_decoder.cc ${WEBRTC_ROOT}/test/fake_encoder.cc ${WEBRTC_ROOT}/test/fake_vp8_decoder.cc ${WEBRTC_ROOT}/test/fake_vp8_encoder.cc)
target_include_directories(webrtc_test_fake_video_codecs PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -4256,53 +4216,6 @@ target_link_libraries(webrtc_test_fileutils PUBLIC absl::optional webrtc_rtc_bas
add_library(webrtc_test_fileutils_override_api INTERFACE)
target_include_directories(webrtc_test_fileutils_override_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# test:frame_generator_impl
-add_library(webrtc_test_frame_generator_impl ${WEBRTC_ROOT}/test/frame_generator.cc ${WEBRTC_ROOT}/test/testsupport/ivf_video_frame_generator.cc)
-target_include_directories(webrtc_test_frame_generator_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_frame_generator_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_frame_generator_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_frame_generator_impl PUBLIC absl::optional webrtc_api_frame_generator_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_video_encoded_image webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_test_frame_utils )
-
-# test:frame_utils
-add_library(webrtc_test_frame_utils ${WEBRTC_ROOT}/test/frame_utils.cc)
-target_include_directories(webrtc_test_frame_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_frame_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_frame_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_frame_utils PUBLIC webrtc_api_scoped_refptr webrtc_api_video_video_frame )
-
-# test/logging:log_writer
-add_library(webrtc_test_logging_log_writer ${WEBRTC_ROOT}/test/logging/file_log_writer.cc ${WEBRTC_ROOT}/test/logging/log_writer.cc ${WEBRTC_ROOT}/test/logging/memory_log_writer.cc)
-target_include_directories(webrtc_test_logging_log_writer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_logging_log_writer PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_logging_log_writer PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_logging_log_writer PUBLIC absl::optional webrtc_api_libjingle_logging_api webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_stringutils webrtc_test_fileutils )
-
-# test:mock_transport
-add_library(webrtc_test_mock_transport ${WEBRTC_ROOT}/test/mock_transport.cc)
-target_include_directories(webrtc_test_mock_transport PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_mock_transport PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_test_mock_transport PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_mock_transport PUBLIC webrtc_api_transport_api webrtc_test_test_support )
-
-# test/network:emulated_network
-add_library(webrtc_test_network_emulated_network ${WEBRTC_ROOT}/test/network/cross_traffic.cc ${WEBRTC_ROOT}/test/network/emulated_network_manager.cc ${WEBRTC_ROOT}/test/network/emulated_turn_server.cc ${WEBRTC_ROOT}/test/network/fake_network_socket_server.cc ${WEBRTC_ROOT}/test/network/network_emulation.cc ${WEBRTC_ROOT}/test/network/network_emulation_manager.cc ${WEBRTC_ROOT}/test/network/traffic_route.cc)
-target_include_directories(webrtc_test_network_emulated_network PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_network_emulated_network PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_network_emulated_network PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
-
-# test/pc/sctp:fake_sctp_transport
-add_library(webrtc_test_pc_sctp_fake_sctp_transport INTERFACE)
-target_link_libraries(webrtc_test_pc_sctp_fake_sctp_transport INTERFACE webrtc_media_rtc_data_sctp_transport_internal )
-target_include_directories(webrtc_test_pc_sctp_fake_sctp_transport INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# test/peer_scenario
-add_library(webrtc_test_peer_scenario_peer_scenario ${WEBRTC_ROOT}/test/peer_scenario/peer_scenario.cc ${WEBRTC_ROOT}/test/peer_scenario/peer_scenario_client.cc ${WEBRTC_ROOT}/test/peer_scenario/scenario_connection.cc ${WEBRTC_ROOT}/test/peer_scenario/sdp_callbacks.cc ${WEBRTC_ROOT}/test/peer_scenario/signaling_route.cc)
-target_include_directories(webrtc_test_peer_scenario_peer_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_peer_scenario_peer_scenario PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_test_peer_scenario_peer_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
-
# test:perf_test
add_library(webrtc_test_perf_test ${WEBRTC_ROOT}/test/testsupport/perf_result_reporter.cc ${WEBRTC_ROOT}/test/testsupport/perf_test.cc ${WEBRTC_ROOT}/test/testsupport/perf_test_histogram_writer.cc)
target_include_directories(webrtc_test_perf_test PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -4322,34 +4235,6 @@ add_library(webrtc_test_rtc_expect_death INTERFACE)
target_link_libraries(webrtc_test_rtc_expect_death INTERFACE webrtc_test_test_support )
target_include_directories(webrtc_test_rtc_expect_death INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# test:rtp_test_utils
-add_library(webrtc_test_rtp_test_utils ${WEBRTC_ROOT}/test/rtcp_packet_parser.cc ${WEBRTC_ROOT}/test/rtp_file_reader.cc ${WEBRTC_ROOT}/test/rtp_file_writer.cc ${WEBRTC_ROOT}/test/rtp_header_parser.cc)
-target_include_directories(webrtc_test_rtp_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_rtp_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_rtp_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_rtp_test_utils PUBLIC webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
-
-# test/scenario:column_printer
-add_library(webrtc_test_scenario_column_printer ${WEBRTC_ROOT}/test/scenario/column_printer.cc)
-target_include_directories(webrtc_test_scenario_column_printer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_scenario_column_printer PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_scenario_column_printer PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_scenario_column_printer PUBLIC webrtc_rtc_base_macromagic webrtc_rtc_base_stringutils webrtc_test_logging_log_writer )
-
-# test/scenario
-add_library(webrtc_test_scenario_scenario ${WEBRTC_ROOT}/test/scenario/audio_stream.cc ${WEBRTC_ROOT}/test/scenario/call_client.cc ${WEBRTC_ROOT}/test/scenario/hardware_codecs.cc ${WEBRTC_ROOT}/test/scenario/network_node.cc ${WEBRTC_ROOT}/test/scenario/performance_stats.cc ${WEBRTC_ROOT}/test/scenario/scenario.cc ${WEBRTC_ROOT}/test/scenario/scenario_config.cc ${WEBRTC_ROOT}/test/scenario/stats_collection.cc ${WEBRTC_ROOT}/test/scenario/video_frame_matcher.cc ${WEBRTC_ROOT}/test/scenario/video_stream.cc)
-target_include_directories(webrtc_test_scenario_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_scenario_scenario PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_test_scenario_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_scenario_scenario PUBLIC absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
-
-# test:test_common
-add_library(webrtc_test_test_common ${WEBRTC_ROOT}/test/call_test.cc ${WEBRTC_ROOT}/test/drifting_clock.cc ${WEBRTC_ROOT}/test/layer_filtering_transport.cc ${WEBRTC_ROOT}/test/run_loop.cc)
-target_include_directories(webrtc_test_test_common PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_test_common PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
-target_compile_options(webrtc_test_test_common PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_test_common PUBLIC webrtc_modules_video_capture_video_capture_internal_impl absl::optional webrtc_api_create_frame_generator webrtc_api_frame_generator_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_simulated_network webrtc_call_simulated_packet_receiver webrtc_call_video_stream_api webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_rtc_event webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_direct_transport webrtc_test_encoder_settings webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_mock_transport webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_video_test_common )
-
# test:test_main
add_library(webrtc_test_test_main ${WEBRTC_ROOT}/test/test_main.cc)
target_include_directories(webrtc_test_test_main PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -4369,20 +4254,6 @@ add_library(webrtc_test_test_support INTERFACE)
target_link_libraries(webrtc_test_test_support INTERFACE gmock gtest webrtc_rtc_base_ignore_wundef )
target_include_directories(webrtc_test_test_support INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-# test/time_controller
-add_library(webrtc_test_time_controller_time_controller ${WEBRTC_ROOT}/test/time_controller/external_time_controller.cc ${WEBRTC_ROOT}/test/time_controller/real_time_controller.cc ${WEBRTC_ROOT}/test/time_controller/simulated_process_thread.cc ${WEBRTC_ROOT}/test/time_controller/simulated_task_queue.cc ${WEBRTC_ROOT}/test/time_controller/simulated_thread.cc ${WEBRTC_ROOT}/test/time_controller/simulated_time_controller.cc)
-target_include_directories(webrtc_test_time_controller_time_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_time_controller_time_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_time_controller_time_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_time_controller_time_controller PUBLIC absl::strings webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_synchronization_yield_policy webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
-
-# test:video_test_common
-add_library(webrtc_test_video_test_common ${WEBRTC_ROOT}/test/fake_texture_frame.cc ${WEBRTC_ROOT}/test/frame_forwarder.cc ${WEBRTC_ROOT}/test/frame_generator_capturer.cc ${WEBRTC_ROOT}/test/mappable_native_buffer.cc ${WEBRTC_ROOT}/test/test_video_capturer.cc)
-target_include_directories(webrtc_test_video_test_common PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_test_video_test_common PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
-target_compile_options(webrtc_test_video_test_common PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_video_test_common PUBLIC absl::algorithm_container absl::strings webrtc_api_array_view webrtc_api_create_frame_generator webrtc_api_frame_generator_api webrtc_api_scoped_refptr webrtc_api_task_queue_task_queue webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_refcount webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_test_fileutils webrtc_test_frame_utils )
-
# video/adaptation:video_adaptation
add_library(webrtc_video_adaptation_video_adaptation ${WEBRTC_ROOT}/video/adaptation/balanced_constraint.cc ${WEBRTC_ROOT}/video/adaptation/bitrate_constraint.cc ${WEBRTC_ROOT}/video/adaptation/encode_usage_resource.cc ${WEBRTC_ROOT}/video/adaptation/overuse_frame_detector.cc ${WEBRTC_ROOT}/video/adaptation/pixel_limit_resource.cc ${WEBRTC_ROOT}/video/adaptation/quality_rampup_experiment_helper.cc ${WEBRTC_ROOT}/video/adaptation/quality_scaler_resource.cc ${WEBRTC_ROOT}/video/adaptation/video_stream_encoder_resource.cc ${WEBRTC_ROOT}/video/adaptation/video_stream_encoder_resource_manager.cc)
target_include_directories(webrtc_video_adaptation_video_adaptation PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -4402,7 +4273,7 @@ add_library(webrtc_video_video ${WEBRTC_ROOT}/video/buffered_frame_decryptor.cc
target_include_directories(webrtc_video_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_video_video PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ARCH_ARM64 WEBRTC_HAS_NEON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_video_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
+target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
# video:video_stream_encoder_impl
add_library(webrtc_video_video_stream_encoder_impl ${WEBRTC_ROOT}/video/alignment_adjuster.cc ${WEBRTC_ROOT}/video/encoder_bitrate_adjuster.cc ${WEBRTC_ROOT}/video/encoder_overshoot_detector.cc ${WEBRTC_ROOT}/video/frame_encode_metadata_writer.cc ${WEBRTC_ROOT}/video/video_source_sink_controller.cc ${WEBRTC_ROOT}/video/video_stream_encoder.cc)
diff --git a/linux_x86_64.cmake b/linux_x86_64.cmake
index 5ab263a2a7..cc8a417331 100644
--- a/linux_x86_64.cmake
+++ b/linux_x86_64.cmake
@@ -1,4 +1,4 @@
-# Generated on 04/21/21 for target: Linux
+# Generated on 06/23/21 for target: Linux
# This is an autogenerated file by calling:
# ./import-webrtc.py --target webrtc_api_video_codecs_builtin_video_decoder_factory --target webrtc_api_video_codecs_builtin_video_encoder_factory --target webrtc_api_libjingle_peerconnection_api --target webrtc_pc_peerconnection --target webrtc_api_create_peerconnection_factory --target webrtc_api_audio_codecs_builtin_audio_decoder_factory --target webrtc_api_audio_codecs_builtin_audio_encoder_factory --target webrtc_common_audio_common_audio_unittests --target webrtc_common_video_common_video_unittests --target webrtc_media_rtc_media_unittests --target webrtc_modules_audio_coding_audio_decoder_unittests --target webrtc_pc_peerconnection_unittests --target webrtc_pc_rtc_pc_unittests --root /usr/local/google/home/jansene/src/webrtc_g3/ --platform Linux BUILD .
@@ -6,11 +6,11 @@
# Re-running this script will require you to merge in the latest upstream-master for webrtc
# Expecting jsoncpp at 9059f5cad030ba11d37818847443a53918c327b1
-# Expecting libaom at 6c93db7ff63430d2e5dcdfc42e84e3a0514da608
-# Expecting libsrtp2 at 7990ca64c616b150a9cb4714601c4a3b0c84fe91
+# Expecting libaom at 12287adee94fc3b1f5349d3f4bd85cea4e57f62b
+# Expecting libsrtp2 at 5b7c744eb8310250ccc534f3f86a2015b3887a0a
# Expecting libvpx at 61edec1efbea1c02d71857e2aff9426d9cd2df4e
-# Expecting libyuv at 64994843e652443df2d5201c6ae3fb725097360f
-# Expecting usrsctp at 70d42ae95a1de83bd317c8cc9503f894671d1392
+# Expecting libyuv at 49ebc996aa8c4bdf89c1b5ea461eb677234c61cc
+# Expecting usrsctp at 22ba62ffe79c3881581ab430368bf3764d9533eb
@@ -294,12 +294,12 @@ target_include_directories(webrtc_api_call_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_
# api:callfactory_api
add_library(webrtc_api_callfactory_api INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_call_rtp_interfaces webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_callfactory_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:callfactory_api.headers
add_library(webrtc_api_callfactory_api.headers INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_call_rtp_interfaces.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_callfactory_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:create_frame_generator
@@ -328,7 +328,7 @@ add_library(webrtc_api_create_time_controller ${WEBRTC_ROOT}/api/test/create_tim
target_include_directories(webrtc_api_create_time_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_create_time_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_create_time_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_test_time_controller_time_controller )
# api/crypto:frame_decryptor_interface
add_library(webrtc_api_crypto_frame_decryptor_interface INTERFACE)
@@ -429,15 +429,15 @@ add_library(webrtc_api_libjingle_logging_api.headers INTERFACE)
target_include_directories(webrtc_api_libjingle_logging_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:libjingle_peerconnection_api
-add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/proxy.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
+add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
target_include_directories(webrtc_api_libjingle_peerconnection_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_libjingle_peerconnection_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_libjingle_peerconnection_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_call_rtp_interfaces webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# api:libjingle_peerconnection_api.headers
add_library(webrtc_api_libjingle_peerconnection_api.headers INTERFACE)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_call_rtp_interfaces.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_libjingle_peerconnection_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:media_stream_interface
@@ -573,12 +573,12 @@ target_include_directories(webrtc_api_priority.headers INTERFACE ${WEBRTC_ROOT}
# api:refcountedbase
add_library(webrtc_api_refcountedbase INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_macromagic webrtc_rtc_base_refcount )
target_include_directories(webrtc_api_refcountedbase INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:refcountedbase.headers
add_library(webrtc_api_refcountedbase.headers INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_macromagic.headers webrtc_rtc_base_refcount.headers )
target_include_directories(webrtc_api_refcountedbase.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_error
@@ -621,12 +621,12 @@ target_link_libraries(webrtc_api_rtc_event_log_rtc_event_log_factory PUBLIC webr
# api:rtc_stats_api
add_library(webrtc_api_rtc_stats_api INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_rtc_stats_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_stats_api.headers
add_library(webrtc_api_rtc_stats_api.headers INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtc_stats_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_headers
@@ -646,11 +646,11 @@ add_library(webrtc_api_rtp_packet_info ${WEBRTC_ROOT}/api/rtp_packet_info.cc)
target_include_directories(webrtc_api_rtp_packet_info PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_rtp_packet_info PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_rtp_packet_info PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api:rtp_packet_info.headers
add_library(webrtc_api_rtp_packet_info.headers INTERFACE)
-target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtp_packet_info.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_parameters
@@ -767,6 +767,11 @@ add_library(webrtc_api_transport_datagram_transport_interface INTERFACE)
target_link_libraries(webrtc_api_transport_datagram_transport_interface INTERFACE absl::optional webrtc_api_array_view webrtc_api_rtc_error webrtc_rtc_base_rtc_base_approved )
target_include_directories(webrtc_api_transport_datagram_transport_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# api/transport:datagram_transport_interface.headers
+add_library(webrtc_api_transport_datagram_transport_interface.headers INTERFACE)
+target_link_libraries(webrtc_api_transport_datagram_transport_interface.headers INTERFACE webrtc_api_array_view.headers webrtc_api_rtc_error.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_api_transport_datagram_transport_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# api/transport:enums
add_library(webrtc_api_transport_enums INTERFACE)
target_include_directories(webrtc_api_transport_enums INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -957,15 +962,15 @@ target_compile_options(webrtc_api_video_codecs_rtc_software_fallback_wrappers PR
target_link_libraries(webrtc_api_video_codecs_rtc_software_fallback_wrappers PUBLIC absl::core_headers absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_media_rtc_media_base webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# api/video_codecs:video_codecs_api
-add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder_factory.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
+add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
target_include_directories(webrtc_api_video_codecs_video_codecs_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_video_codecs_video_codecs_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_video_codecs_video_codecs_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api/video_codecs:video_codecs_api.headers
add_library(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api/video_codecs:vp8_temporal_layers_factory
@@ -1144,7 +1149,7 @@ add_library(webrtc_audio_audio ${WEBRTC_ROOT}/audio/audio_level.cc ${WEBRTC_ROOT
target_include_directories(webrtc_audio_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_audio_audio PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_audio_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# audio/utility:audio_frame_operations
add_library(webrtc_audio_utility_audio_frame_operations ${WEBRTC_ROOT}/audio/utility/audio_frame_operations.cc ${WEBRTC_ROOT}/audio/utility/channel_mixer.cc ${WEBRTC_ROOT}/audio/utility/channel_mixing_matrix.cc)
@@ -1201,18 +1206,18 @@ add_library(webrtc_call_call ${WEBRTC_ROOT}/call/call.cc ${WEBRTC_ROOT}/call/cal
target_include_directories(webrtc_call_call PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call PUBLIC absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
+target_link_libraries(webrtc_call_call PUBLIC absl::bind_front absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
# call:call_interfaces
add_library(webrtc_call_call_interfaces ${WEBRTC_ROOT}/call/audio_receive_stream.cc ${WEBRTC_ROOT}/call/audio_send_stream.cc ${WEBRTC_ROOT}/call/audio_state.cc ${WEBRTC_ROOT}/call/call_config.cc ${WEBRTC_ROOT}/call/flexfec_receive_stream.cc ${WEBRTC_ROOT}/call/syncable.cc)
target_include_directories(webrtc_call_call_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call_interfaces PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_call_audio_sender_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
+target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::bind_front absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_call_audio_sender_interface webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
# call:call_interfaces.headers
add_library(webrtc_call_call_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_call_audio_sender_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
+target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_call_audio_sender_interface.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
target_include_directories(webrtc_call_call_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:fake_network
@@ -1227,16 +1232,26 @@ add_library(webrtc_call_mock_rtp_interfaces INTERFACE)
target_link_libraries(webrtc_call_mock_rtp_interfaces INTERFACE webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_bitrate_settings webrtc_call_rtp_interfaces webrtc_modules_pacing_pacing webrtc_rtc_base_rtc_base webrtc_rtc_base_rate_limiter webrtc_rtc_base_network_sent_packet webrtc_test_test_support )
target_include_directories(webrtc_call_mock_rtp_interfaces INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# call:receive_stream_interface
+add_library(webrtc_call_receive_stream_interface INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface INTERFACE webrtc_api_frame_transformer_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source )
+target_include_directories(webrtc_call_receive_stream_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# call:receive_stream_interface.headers
+add_library(webrtc_call_receive_stream_interface.headers INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_transport_rtp_rtp_source.headers )
+target_include_directories(webrtc_call_receive_stream_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# call:rtp_interfaces
add_library(webrtc_call_rtp_interfaces ${WEBRTC_ROOT}/call/rtp_config.cc)
target_include_directories(webrtc_call_rtp_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_rtp_interfaces PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_rtp_interfaces PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue )
# call:rtp_interfaces.headers
add_library(webrtc_call_rtp_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers )
target_include_directories(webrtc_call_rtp_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:rtp_receiver
@@ -1276,11 +1291,11 @@ add_library(webrtc_call_video_stream_api ${WEBRTC_ROOT}/call/video_receive_strea
target_include_directories(webrtc_call_video_stream_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_video_stream_api PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_video_stream_api PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
# call:video_stream_api.headers
add_library(webrtc_call_video_stream_api.headers INTERFACE)
-target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_call_video_stream_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# common_audio
@@ -1439,11 +1454,11 @@ add_library(webrtc_logging_ice_log ${WEBRTC_ROOT}/logging/rtc_event_log/events/r
target_include_directories(webrtc_logging_ice_log PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_ice_log PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_ice_log PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved )
# logging:ice_log.headers
add_library(webrtc_logging_ice_log.headers INTERFACE)
-target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_logging_ice_log.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# logging:rtc_event_audio
@@ -1451,41 +1466,41 @@ add_library(webrtc_logging_rtc_event_audio ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_audio PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_audio PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
# logging:rtc_event_bwe
add_library(webrtc_logging_rtc_event_bwe ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_success.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_route_change.cc)
target_include_directories(webrtc_logging_rtc_event_bwe PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_bwe PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_bwe PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate )
+target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate webrtc_api_units_timestamp )
# logging:rtc_event_frame_events
add_library(webrtc_logging_rtc_event_frame_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_frame_decoded.cc)
target_include_directories(webrtc_logging_rtc_event_frame_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_frame_events PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_frame_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
# logging:rtc_event_generic_packet_events
add_library(webrtc_logging_rtc_event_generic_packet_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc)
target_include_directories(webrtc_logging_rtc_event_generic_packet_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_generic_packet_events PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_generic_packet_events PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_timeutils )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log2_proto
-add_library(webrtc_logging_rtc_event_log2_proto_bridge)
+add_library(webrtc_logging_rtc_event_log2_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log2_proto_bridge
+ TARGET webrtc_logging_rtc_event_log2_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log2.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log2_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto)
+target_link_libraries(webrtc_logging_rtc_event_log2_proto PUBLIC libprotobuf)
# logging:rtc_event_log_api
add_library(webrtc_logging_rtc_event_log_api INTERFACE)
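One note on the rename above: webrtc_logging_rtc_event_log2_proto_bridge becomes webrtc_logging_rtc_event_log2_proto, while the webrtc_logging_rtc_event_log2_proto_lib ALIAS is kept, so anything that links through the alias is unaffected by the rename. A hedged sketch (my_event_log_tool and parse_log.cc are hypothetical, not part of this change):

    # Both names resolve to the same generated protobuf library.
    add_executable(my_event_log_tool parse_log.cc)          # hypothetical consumer
    target_link_libraries(my_event_log_tool PRIVATE
        webrtc_logging_rtc_event_log2_proto_lib)            # ALIAS of webrtc_logging_rtc_event_log2_proto

Consumers that referenced the old _proto_bridge name directly, such as rtc_event_log_impl_encoder further down, are updated in this same patch.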
@@ -1504,27 +1519,27 @@ add_library(webrtc_logging_rtc_event_log_impl_encoder ${WEBRTC_ROOT}/logging/rtc
target_include_directories(webrtc_logging_rtc_event_log_impl_encoder PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_log_impl_encoder PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_log_impl_encoder PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto_bridge webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto_bridge webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log_proto
-add_library(webrtc_logging_rtc_event_log_proto_bridge)
+add_library(webrtc_logging_rtc_event_log_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log_proto_bridge
+ TARGET webrtc_logging_rtc_event_log_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto)
+target_link_libraries(webrtc_logging_rtc_event_log_proto PUBLIC libprotobuf)
# logging:rtc_event_pacing
add_library(webrtc_logging_rtc_event_pacing ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_alr_state.cc)
target_include_directories(webrtc_logging_rtc_event_pacing PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_pacing PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_pacing PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log )
+target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp )
# logging:rtc_event_rtp_rtcp
add_library(webrtc_logging_rtc_event_rtp_rtcp ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc)
@@ -1538,7 +1553,7 @@ add_library(webrtc_logging_rtc_event_video ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_video PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
# logging:rtc_stream_config
add_library(webrtc_logging_rtc_stream_config ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_stream_config.cc)
@@ -1552,18 +1567,25 @@ add_library(webrtc_media_rtc_audio_video ${WEBRTC_ROOT}/media/engine/adm_helpers
target_include_directories(webrtc_media_rtc_audio_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_audio_video PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_audio_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+
+# media:rtc_data_dcsctp_transport
+add_library(webrtc_media_rtc_data_dcsctp_transport ${WEBRTC_ROOT}/media/sctp/dcsctp_transport.cc)
+target_include_directories(webrtc_media_rtc_data_dcsctp_transport PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_media_rtc_data_dcsctp_transport PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_media_rtc_data_dcsctp_transport PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_media_rtc_data_dcsctp_transport PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_net_dcsctp_public_factory webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_task_queue_timeout webrtc_p2p_rtc_p2p webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers )
# media:rtc_data_sctp_transport_factory
add_library(webrtc_media_rtc_data_sctp_transport_factory ${WEBRTC_ROOT}/media/sctp/sctp_transport_factory.cc)
target_include_directories(webrtc_media_rtc_data_sctp_transport_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_DCSCTP WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_data_sctp_transport_factory PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_system_unused )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_dcsctp_transport webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_system_unused webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
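(Note: the factory now builds with both WEBRTC_HAVE_DCSCTP and WEBRTC_HAVE_USRSCTP and links both SCTP backends unconditionally. A sketch of how a port could make the dcsctp backend optional; the WEBRTC_USE_DCSCTP option is hypothetical and does not exist in this file:)
# Hypothetical toggle, shown only as a sketch of an optional backend.
option(WEBRTC_USE_DCSCTP "Build the dcsctp-based SCTP transport backend" ON)
if(WEBRTC_USE_DCSCTP)
  target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE WEBRTC_HAVE_DCSCTP)
  target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_media_rtc_data_dcsctp_transport)
endif()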
# media:rtc_data_sctp_transport_internal
add_library(webrtc_media_rtc_data_sctp_transport_internal INTERFACE)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_api_transport_datagram_transport_interface webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
target_include_directories(webrtc_media_rtc_data_sctp_transport_internal INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_data_usrsctp_transport
@@ -1604,11 +1626,11 @@ add_library(webrtc_media_rtc_media_base ${WEBRTC_ROOT}/media/base/adapted_video_
target_include_directories(webrtc_media_rtc_media_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_base PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_media_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_datagram_transport_interface webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# media:rtc_media_base.headers
add_library(webrtc_media_rtc_media_base.headers INTERFACE)
-target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_datagram_transport_interface.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_media_rtc_media_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_media_config
@@ -1638,7 +1660,7 @@ target_link_libraries(webrtc_media_rtc_media_tests_utils PUBLIC gmock gtest absl
target_include_directories(webrtc_media_rtc_media_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_media_rtc_media_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
# media:rtc_sdp_video_format_utils
add_library(webrtc_media_rtc_sdp_video_format_utils ${WEBRTC_ROOT}/media/base/sdp_video_format_utils.cc)
@@ -1667,31 +1689,31 @@ target_link_libraries(webrtc_modules_async_audio_processing_async_audio_processi
target_include_directories(webrtc_modules_async_audio_processing_async_audio_processing.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_config_proto
-add_library(webrtc_modules_audio_coding_ana_config_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_config_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_config_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_config_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/config.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_coding_ana_config_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_config_proto PUBLIC libprotobuf)
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_debug_dump_proto
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_debug_dump_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_debug_dump_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/debug_dump.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC libprotobuf)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
+target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC libprotobuf)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
# modules/audio_coding
add_library(webrtc_modules_audio_coding_audio_coding ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_receiver.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_remixing.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_resampler.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/audio_coding_module.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/call_statistics.cc)
@@ -1730,7 +1752,7 @@ add_library(webrtc_modules_audio_coding_audio_network_adaptor ${WEBRTC_ROOT}/mod
target_include_directories(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional libprotobuf webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_coding_ana_debug_dump_proto_bridge webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_coding_ana_debug_dump_proto webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
# modules/audio_coding:audio_network_adaptor_config
add_library(webrtc_modules_audio_coding_audio_network_adaptor_config ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc)
@@ -1881,7 +1903,7 @@ add_library(webrtc_modules_audio_coding_neteq_tools_minimal ${WEBRTC_ROOT}/modul
target_include_directories(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_CODEC_ISAC)
target_compile_options(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_array_view webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/audio_coding:pcm16b
add_library(webrtc_modules_audio_coding_pcm16b ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/pcm16b_common.cc)
@@ -1901,7 +1923,7 @@ add_library(webrtc_modules_audio_coding_red ${WEBRTC_ROOT}/modules/audio_coding/
target_include_directories(webrtc_modules_audio_coding_red PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_red PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_red PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_cng
add_library(webrtc_modules_audio_coding_webrtc_cng ${WEBRTC_ROOT}/modules/audio_coding/codecs/cng/webrtc_cng.cc)
@@ -1922,7 +1944,7 @@ add_library(webrtc_modules_audio_coding_webrtc_opus ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_coding_webrtc_opus PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_webrtc_opus PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_CODEC_ISAC)
target_compile_options(webrtc_modules_audio_coding_webrtc_opus PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional libprotobuf webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_opus_wrapper
add_library(webrtc_modules_audio_coding_webrtc_opus_wrapper ${WEBRTC_ROOT}/modules/audio_coding/codecs/opus/opus_interface.cc)
@@ -2014,7 +2036,7 @@ add_library(webrtc_modules_audio_mixer_audio_mixer_impl ${WEBRTC_ROOT}/modules/a
target_include_directories(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_rtp_packet_info webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_conversions webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/audio_processing/aec3:adaptive_fir_filter
add_library(webrtc_modules_audio_processing_aec3_adaptive_fir_filter INTERFACE)
@@ -2133,11 +2155,11 @@ add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl ${WEBRTC_ROOT
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC libprotobuf webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto_bridge webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
# modules/audio_processing/aec_dump:aec_dump_impl.headers
add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_protobuf_utils.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/audio_processing:aec_dump_interface
@@ -2395,13 +2417,49 @@ add_library(webrtc_modules_audio_processing_agc_agc ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_processing_agc_agc PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_agc_agc PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_agc_agc PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor webrtc_modules_audio_processing_agc_clipping_predictor_evaluator webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/audio_processing/agc:agc.headers
add_library(webrtc_modules_audio_processing_agc_agc.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor.headers webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
target_include_directories(webrtc_modules_audio_processing_agc_agc.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# modules/audio_processing/agc:clipping_predictor
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer webrtc_modules_audio_processing_agc_gain_map webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_safe_minmax )
+
+# modules/audio_processing/agc:clipping_predictor.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_safe_minmax.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_evaluator
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_evaluator.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging )
+
+# modules/audio_processing/agc:clipping_predictor_evaluator.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_level_buffer.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved )
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
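(Note: every new agc target added above repeats the same add_library / include / definitions / options sequence plus a parallel `.headers` INTERFACE target. A sketch of a helper that could capture that repetition; `webrtc_add_module` is hypothetical and is not defined anywhere in this file:)
# Hypothetical helper mirroring the repeated pattern above, shown only as a sketch.
function(webrtc_add_module name)
  add_library(${name} ${ARGN})
  target_include_directories(${name} PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
  target_compile_definitions(${name} PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX)
  target_compile_options(${name} PRIVATE -fno-exceptions)
  # Callers would still add target_link_libraries(${name} PUBLIC ...) and the
  # matching ${name}.headers INTERFACE target by hand, as the generated file does.
endfunction()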
# modules/audio_processing/agc:gain_control_interface
add_library(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE)
target_include_directories(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2525,17 +2583,17 @@ target_link_libraries(webrtc_modules_audio_processing_audio_processing_statistic
target_include_directories(webrtc_modules_audio_processing_audio_processing_statistics.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_processing:audioproc_debug_proto
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_processing_audioproc_debug_proto_bridge
+ TARGET webrtc_modules_audio_processing_audioproc_debug_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_processing/debug.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_processing
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
-target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto)
+target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC libprotobuf)
# modules/audio_processing:audioproc_test_utils
add_library(webrtc_modules_audio_processing_audioproc_test_utils ${WEBRTC_ROOT}/modules/audio_processing/test/audio_buffer_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/audio_processing_builder_for_testing.cc ${WEBRTC_ROOT}/modules/audio_processing/test/bitexactness_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/performance_timer.cc ${WEBRTC_ROOT}/modules/audio_processing/test/simulator_buffers.cc ${WEBRTC_ROOT}/modules/audio_processing/test/test_utils.cc)
@@ -2702,11 +2760,11 @@ target_link_libraries(webrtc_modules_audio_processing_voice_detection.headers IN
target_include_directories(webrtc_modules_audio_processing_voice_detection.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/congestion_controller
-add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc)
+add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc ${WEBRTC_ROOT}/modules/congestion_controller/remb_throttler.cc)
target_include_directories(webrtc_modules_congestion_controller_congestion_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_congestion_controller_congestion_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_congestion_controller_congestion_controller PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
+target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
# modules/congestion_controller/goog_cc:alr_detector
add_library(webrtc_modules_congestion_controller_goog_cc_alr_detector ${WEBRTC_ROOT}/modules/congestion_controller/goog_cc/alr_detector.cc)
@@ -2825,11 +2883,11 @@ target_compile_options(webrtc_modules_pacing_pacing PRIVATE -fno-exceptions)
target_link_libraries(webrtc_modules_pacing_pacing PUBLIC absl::memory absl::strings absl::optional webrtc_api_function_view webrtc_api_sequence_checker webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_pacing webrtc_modules_module_api webrtc_modules_pacing_interval_budget webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/remote_bitrate_estimator
-add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
+add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/packet_arrival_map.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
target_include_directories(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/rtp_rtcp:mock_rtp_rtcp
add_library(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE)
@@ -2837,7 +2895,7 @@ target_link_libraries(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE absl::opti
target_include_directories(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp
-add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
+add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/capture_clock_offset_updater.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE -fno-exceptions)
@@ -2848,11 +2906,11 @@ add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format ${WEBRTC_ROOT}/modules/rtp_r
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/rtp_rtcp:rtp_rtcp_format.headers
add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp:rtp_video_header
@@ -3067,7 +3125,7 @@ add_library(webrtc_modules_video_coding_webrtc_vp9 ${WEBRTC_ROOT}/modules/video_
target_include_directories(webrtc_modules_video_coding_webrtc_vp9 PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_video_coding_webrtc_vp9 PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_video_coding_webrtc_vp9 PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
# modules/video_coding:webrtc_vp9_helpers
add_library(webrtc_modules_video_coding_webrtc_vp9_helpers ${WEBRTC_ROOT}/modules/video_coding/codecs/vp9/svc_config.cc)
@@ -3094,6 +3152,215 @@ target_compile_definitions(webrtc_modules_video_processing_video_processing_sse2
target_compile_options(webrtc_modules_video_processing_video_processing_sse2 PRIVATE -fno-exceptions -msse2)
target_link_libraries(webrtc_modules_video_processing_video_processing_sse2 PUBLIC webrtc_modules_video_processing_denoiser_filter webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+# net/dcsctp/common:internal_types
+add_library(webrtc_net_dcsctp_common_internal_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_internal_types INTERFACE webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_common_internal_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:math
+add_library(webrtc_net_dcsctp_common_math INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_math INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:pair_hash
+add_library(webrtc_net_dcsctp_common_pair_hash INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_pair_hash INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:sequence_numbers
+add_library(webrtc_net_dcsctp_common_sequence_numbers INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_sequence_numbers INTERFACE webrtc_net_dcsctp_common_internal_types )
+target_include_directories(webrtc_net_dcsctp_common_sequence_numbers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:str_join
+add_library(webrtc_net_dcsctp_common_str_join INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_str_join INTERFACE absl::strings webrtc_rtc_base_stringutils )
+target_include_directories(webrtc_net_dcsctp_common_str_join INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:bounded_io
+add_library(webrtc_net_dcsctp_packet_bounded_io INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_bounded_io INTERFACE webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_bounded_io INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:chunk
+add_library(webrtc_net_dcsctp_packet_chunk ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/abort_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_echo_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/data_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/error_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/forward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/idata_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/reconfig_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/sack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:chunk_validators
+add_library(webrtc_net_dcsctp_packet_chunk_validators ${WEBRTC_ROOT}/net/dcsctp/packet/chunk_validators.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk_validators PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk_validators PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk_validators PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk_validators PUBLIC webrtc_net_dcsctp_packet_chunk webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:crc32c
+add_library(webrtc_net_dcsctp_packet_crc32c ${WEBRTC_ROOT}/net/dcsctp/packet/crc32c.cc)
+target_include_directories(webrtc_net_dcsctp_packet_crc32c PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_crc32c PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_crc32c PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_crc32c PUBLIC crc32c webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:data
+add_library(webrtc_net_dcsctp_packet_data INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_data INTERFACE webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_data INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:error_cause
+add_library(webrtc_net_dcsctp_packet_error_cause ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/no_user_data_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/protocol_violation_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc)
+target_include_directories(webrtc_net_dcsctp_packet_error_cause PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_error_cause PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_error_cause PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_error_cause PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:parameter
+add_library(webrtc_net_dcsctp_packet_parameter ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/state_cookie_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/supported_extensions_parameter.cc)
+target_include_directories(webrtc_net_dcsctp_packet_parameter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_parameter PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_parameter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_parameter PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:sctp_packet
+add_library(webrtc_net_dcsctp_packet_sctp_packet ${WEBRTC_ROOT}/net/dcsctp/packet/sctp_packet.cc)
+target_include_directories(webrtc_net_dcsctp_packet_sctp_packet PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_sctp_packet PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_sctp_packet PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_sctp_packet PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_crc32c webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:tlv_trait
+add_library(webrtc_net_dcsctp_packet_tlv_trait ${WEBRTC_ROOT}/net/dcsctp/packet/tlv_trait.cc)
+target_include_directories(webrtc_net_dcsctp_packet_tlv_trait PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_tlv_trait PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_tlv_trait PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_packet_tlv_trait PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/public:factory
+add_library(webrtc_net_dcsctp_public_factory ${WEBRTC_ROOT}/net/dcsctp/public/dcsctp_socket_factory.cc)
+target_include_directories(webrtc_net_dcsctp_public_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_public_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_public_factory PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_public_factory PUBLIC absl::strings webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_dcsctp_socket )
+
+# net/dcsctp/public:socket
+add_library(webrtc_net_dcsctp_public_socket INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_socket INTERFACE absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_public_socket INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:strong_alias
+add_library(webrtc_net_dcsctp_public_strong_alias INTERFACE)
+target_include_directories(webrtc_net_dcsctp_public_strong_alias INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:types
+add_library(webrtc_net_dcsctp_public_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_types INTERFACE webrtc_api_array_view webrtc_net_dcsctp_public_strong_alias )
+target_include_directories(webrtc_net_dcsctp_public_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:data_tracker
+add_library(webrtc_net_dcsctp_rx_data_tracker ${WEBRTC_ROOT}/net/dcsctp/rx/data_tracker.cc)
+target_include_directories(webrtc_net_dcsctp_rx_data_tracker PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_data_tracker PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_data_tracker PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_data_tracker PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_queue
+add_library(webrtc_net_dcsctp_rx_reassembly_queue ${WEBRTC_ROOT}/net/dcsctp/rx/reassembly_queue.cc)
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_queue PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_net_dcsctp_rx_traditional_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_streams
+add_library(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE absl::strings webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:traditional_reassembly_streams
+add_library(webrtc_net_dcsctp_rx_traditional_reassembly_streams ${WEBRTC_ROOT}/net/dcsctp/rx/traditional_reassembly_streams.cc)
+target_include_directories(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_rx_traditional_reassembly_streams PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:context
+add_library(webrtc_net_dcsctp_socket_context INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_socket_context INTERFACE absl::strings webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_socket_context INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/socket:dcsctp_socket
+add_library(webrtc_net_dcsctp_socket_dcsctp_socket ${WEBRTC_ROOT}/net/dcsctp/socket/dcsctp_socket.cc ${WEBRTC_ROOT}/net/dcsctp/socket/state_cookie.cc)
+target_include_directories(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_dcsctp_socket PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_chunk_validators webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_socket_transmission_control_block webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_rr_send_queue webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:heartbeat_handler
+add_library(webrtc_net_dcsctp_socket_heartbeat_handler ${WEBRTC_ROOT}/net/dcsctp/socket/heartbeat_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_heartbeat_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:stream_reset_handler
+add_library(webrtc_net_dcsctp_socket_stream_reset_handler ${WEBRTC_ROOT}/net/dcsctp/socket/stream_reset_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_stream_reset_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:transmission_control_block
+add_library(webrtc_net_dcsctp_socket_transmission_control_block ${WEBRTC_ROOT}/net/dcsctp/socket/transmission_control_block.cc)
+target_include_directories(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_socket_transmission_control_block PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/timer:task_queue_timeout
+add_library(webrtc_net_dcsctp_timer_task_queue_timeout ${WEBRTC_ROOT}/net/dcsctp/timer/task_queue_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_task_queue_timeout PUBLIC webrtc_api_array_view webrtc_api_task_queue_task_queue webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
+
+# net/dcsctp/timer
+add_library(webrtc_net_dcsctp_timer_timer ${WEBRTC_ROOT}/net/dcsctp/timer/timer.cc)
+target_include_directories(webrtc_net_dcsctp_timer_timer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_timer PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_timer PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_timer_timer PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_error_counter
+add_library(webrtc_net_dcsctp_tx_retransmission_error_counter ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_error_counter.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_error_counter PUBLIC absl::strings webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_queue
+add_library(webrtc_net_dcsctp_tx_retransmission_queue ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_timeout
+add_library(webrtc_net_dcsctp_tx_retransmission_timeout ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_timeout PUBLIC webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:rr_send_queue
+add_library(webrtc_net_dcsctp_tx_rr_send_queue ${WEBRTC_ROOT}/net/dcsctp/tx/rr_send_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_net_dcsctp_tx_rr_send_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:send_queue
+add_library(webrtc_net_dcsctp_tx_send_queue INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_tx_send_queue INTERFACE absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_tx_send_queue INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# p2p:fake_ice_transport
add_library(webrtc_p2p_fake_ice_transport INTERFACE)
target_link_libraries(webrtc_p2p_fake_ice_transport INTERFACE absl::algorithm_container absl::optional webrtc_api_libjingle_peerconnection_api webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
@@ -3135,7 +3402,7 @@ add_library(webrtc_pc_audio_rtp_receiver ${WEBRTC_ROOT}/pc/audio_rtp_receiver.cc
target_include_directories(webrtc_pc_audio_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_audio_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_audio_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
# pc:audio_track
add_library(webrtc_pc_audio_track ${WEBRTC_ROOT}/pc/audio_track.cc)
@@ -3149,38 +3416,28 @@ add_library(webrtc_pc_connection_context ${WEBRTC_ROOT}/pc/connection_context.cc
target_include_directories(webrtc_pc_connection_context PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_connection_context PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_connection_context PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
+target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
# pc:dtmf_sender
add_library(webrtc_pc_dtmf_sender ${WEBRTC_ROOT}/pc/dtmf_sender.cc)
target_include_directories(webrtc_pc_dtmf_sender PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_dtmf_sender PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_dtmf_sender PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_pc_proxy webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:integration_test_helpers
add_library(webrtc_pc_integration_test_helpers ${WEBRTC_ROOT}/pc/test/integration_test_helpers.cc)
target_include_directories(webrtc_pc_integration_test_helpers PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_integration_test_helpers PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_integration_test_helpers PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
# pc:jitter_buffer_delay
add_library(webrtc_pc_jitter_buffer_delay ${WEBRTC_ROOT}/pc/jitter_buffer_delay.cc)
target_include_directories(webrtc_pc_jitter_buffer_delay PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_jitter_buffer_delay PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_jitter_buffer_delay PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_sequence_checker webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading )
-
-# pc:jitter_buffer_delay_interface
-add_library(webrtc_pc_jitter_buffer_delay_interface INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_interface INTERFACE absl::algorithm_container absl::strings absl::optional webrtc_media_rtc_media_base webrtc_rtc_base_refcount )
-target_include_directories(webrtc_pc_jitter_buffer_delay_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# pc:jitter_buffer_delay_proxy
-add_library(webrtc_pc_jitter_buffer_delay_proxy INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_proxy INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface )
-target_include_directories(webrtc_pc_jitter_buffer_delay_proxy INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::optional webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_safe_conversions webrtc_rtc_base_safe_minmax webrtc_rtc_base_system_no_unique_address )
# pc:libjingle_peerconnection
add_library(webrtc_pc_libjingle_peerconnection INTERFACE)
@@ -3205,7 +3462,7 @@ add_library(webrtc_pc_pc_test_utils ${WEBRTC_ROOT}/pc/test/fake_audio_capture_mo
target_include_directories(webrtc_pc_pc_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_pc_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_pc_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
# pc:peer_connection_message_handler
add_library(webrtc_pc_peer_connection_message_handler ${WEBRTC_ROOT}/pc/peer_connection_message_handler.cc)
@@ -3219,14 +3476,21 @@ add_library(webrtc_pc_peerconnection ${WEBRTC_ROOT}/pc/data_channel_controller.c
target_include_directories(webrtc_pc_peerconnection PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_peerconnection PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_sender webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:peerconnection_unittests
android_add_executable(TARGET webrtc_pc_peerconnection_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/data_channel_integrationtest.cc ${WEBRTC_ROOT}/pc/data_channel_unittest.cc ${WEBRTC_ROOT}/pc/dtmf_sender_unittest.cc ${WEBRTC_ROOT}/pc/ice_server_parsing_unittest.cc ${WEBRTC_ROOT}/pc/jitter_buffer_delay_unittest.cc ${WEBRTC_ROOT}/pc/jsep_session_description_unittest.cc ${WEBRTC_ROOT}/pc/local_audio_source_unittest.cc ${WEBRTC_ROOT}/pc/media_stream_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_adaptation_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_bundle_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_crypto_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_data_channel_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_end_to_end_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_factory_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_header_extension_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_histogram_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_ice_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_interface_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_jsep_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_media_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_rtp_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_signaling_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_simulcast_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.h ${WEBRTC_ROOT}/pc/proxy_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_integrationtest.cc ${WEBRTC_ROOT}/pc/rtc_stats_traversal_unittest.cc ${WEBRTC_ROOT}/pc/rtp_media_utils_unittest.cc ${WEBRTC_ROOT}/pc/rtp_parameters_conversion_unittest.cc ${WEBRTC_ROOT}/pc/rtp_sender_receiver_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transceiver_unittest.cc ${WEBRTC_ROOT}/pc/sctp_utils_unittest.cc ${WEBRTC_ROOT}/pc/sdp_serializer_unittest.cc ${WEBRTC_ROOT}/pc/stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/test/fake_audio_capture_module_unittest.cc ${WEBRTC_ROOT}/pc/test/test_sdp_strings.h ${WEBRTC_ROOT}/pc/track_media_info_map_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_track_source_unittest.cc ${WEBRTC_ROOT}/pc/video_track_unittest.cc ${WEBRTC_ROOT}/pc/webrtc_sdp_unittest.cc)
target_include_directories(webrtc_pc_peerconnection_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_peerconnection_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 
-webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 
+webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+
+# pc:proxy
+add_library(webrtc_pc_proxy ${WEBRTC_ROOT}/pc/proxy.cc)
+target_include_directories(webrtc_pc_proxy PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_proxy PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_proxy PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_proxy PUBLIC webrtc_api_scoped_refptr webrtc_api_task_queue_task_queue webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# pc:remote_audio_source
add_library(webrtc_pc_remote_audio_source ${WEBRTC_ROOT}/pc/remote_audio_source.cc)
@@ -3241,32 +3505,32 @@ target_link_libraries(webrtc_pc_rtc_pc INTERFACE libsrtp webrtc_media_rtc_audio_
target_include_directories(webrtc_pc_rtc_pc INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# pc:rtc_pc_base
-add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/session_description.cc ${WEBRTC_ROOT}/pc/simulcast_description.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc)
+add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_collection.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc ${WEBRTC_ROOT}/pc/video_track_source_proxy.cc)
target_include_directories(webrtc_pc_rtc_pc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_base PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtc_pc_base PRIVATE -fno-exceptions -Ithird_party/libsrtp/include)
-target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_proxy webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:rtc_pc_unittests
android_add_executable(TARGET webrtc_pc_rtc_pc_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/channel_manager_unittest.cc ${WEBRTC_ROOT}/pc/channel_unittest.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/dtls_transport_unittest.cc ${WEBRTC_ROOT}/pc/ice_transport_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_unittest.cc ${WEBRTC_ROOT}/pc/media_session_unittest.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/sctp_transport_unittest.cc ${WEBRTC_ROOT}/pc/session_description_unittest.cc ${WEBRTC_ROOT}/pc/srtp_filter_unittest.cc ${WEBRTC_ROOT}/pc/srtp_session_unittest.cc ${WEBRTC_ROOT}/pc/srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/test/rtp_transport_test_util.h ${WEBRTC_ROOT}/pc/test/srtp_test_util.h ${WEBRTC_ROOT}/pc/used_ids_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_receiver_unittest.cc)
target_include_directories(webrtc_pc_rtc_pc_unittests PRIVATE ${WEBRTC_ROOT}/pc/../third_party/libsrtp/srtp ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_rtc_pc_unittests PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support )
+target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support )
# pc:rtp_parameters_conversion
add_library(webrtc_pc_rtp_parameters_conversion ${WEBRTC_ROOT}/pc/rtp_parameters_conversion.cc)
target_include_directories(webrtc_pc_rtp_parameters_conversion PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_parameters_conversion PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_parameters_conversion PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
+target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
# pc:rtp_receiver
add_library(webrtc_pc_rtp_receiver ${WEBRTC_ROOT}/pc/rtp_receiver.cc)
target_include_directories(webrtc_pc_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
# pc:rtp_sender
add_library(webrtc_pc_rtp_sender ${WEBRTC_ROOT}/pc/rtp_sender.cc)
@@ -3280,7 +3544,7 @@ add_library(webrtc_pc_rtp_transceiver ${WEBRTC_ROOT}/pc/rtp_transceiver.cc)
target_include_directories(webrtc_pc_rtp_transceiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_transceiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_transceiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_proxy webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_session_description webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:rtp_transmission_manager
add_library(webrtc_pc_rtp_transmission_manager ${WEBRTC_ROOT}/pc/rtp_transmission_manager.cc)
@@ -3301,6 +3565,20 @@ add_library(webrtc_pc_sdp_state_provider INTERFACE)
target_link_libraries(webrtc_pc_sdp_state_provider INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_pc_rtc_pc_base )
target_include_directories(webrtc_pc_sdp_state_provider INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# pc:session_description
+add_library(webrtc_pc_session_description ${WEBRTC_ROOT}/pc/session_description.cc)
+target_include_directories(webrtc_pc_session_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_session_description PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_session_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_session_description PUBLIC absl::algorithm_container absl::memory webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_simulcast_description webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
+# pc:simulcast_description
+add_library(webrtc_pc_simulcast_description ${WEBRTC_ROOT}/pc/simulcast_description.cc)
+target_include_directories(webrtc_pc_simulcast_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_simulcast_description PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_simulcast_description PRIVATE -fno-exceptions)
+target_link_libraries(webrtc_pc_simulcast_description PUBLIC webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
# pc:stats_collector_interface
add_library(webrtc_pc_stats_collector_interface INTERFACE)
target_link_libraries(webrtc_pc_stats_collector_interface INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface )
@@ -3325,7 +3603,7 @@ add_library(webrtc_pc_video_rtp_receiver ${WEBRTC_ROOT}/pc/video_rtp_receiver.cc
target_include_directories(webrtc_pc_video_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_video_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_video_rtp_receiver PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address )
# pc:video_rtp_track_source
add_library(webrtc_pc_video_rtp_track_source ${WEBRTC_ROOT}/pc/video_rtp_track_source.cc)
@@ -3693,7 +3971,7 @@ add_library(webrtc_rtc_base_platform_thread ${WEBRTC_ROOT}/rtc_base/platform_thr
target_include_directories(webrtc_rtc_base_platform_thread PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_platform_thread PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_platform_thread PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::strings webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::memory absl::strings absl::optional webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
# rtc_base:platform_thread.headers
add_library(webrtc_rtc_base_platform_thread.headers INTERFACE)
@@ -3712,6 +3990,15 @@ add_library(webrtc_rtc_base_platform_thread_types.headers INTERFACE)
target_link_libraries(webrtc_rtc_base_platform_thread_types.headers INTERFACE webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_platform_thread_types.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# rtc_base:protobuf_utils
+add_library(webrtc_rtc_base_protobuf_utils INTERFACE)
+target_link_libraries(webrtc_rtc_base_protobuf_utils INTERFACE libprotobuf )
+target_include_directories(webrtc_rtc_base_protobuf_utils INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# rtc_base:protobuf_utils.headers
+add_library(webrtc_rtc_base_protobuf_utils.headers INTERFACE)
+target_include_directories(webrtc_rtc_base_protobuf_utils.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# rtc_base:rate_limiter
add_library(webrtc_rtc_base_rate_limiter ${WEBRTC_ROOT}/rtc_base/rate_limiter.cc)
target_include_directories(webrtc_rtc_base_rate_limiter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3721,12 +4008,12 @@ target_link_libraries(webrtc_rtc_base_rate_limiter PUBLIC absl::optional webrtc_
# rtc_base:refcount
add_library(webrtc_rtc_base_refcount INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_rtc_base_macromagic )
+target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_macromagic )
target_include_directories(webrtc_rtc_base_refcount INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:refcount.headers
add_library(webrtc_rtc_base_refcount.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_rtc_base_macromagic.headers )
+target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_refcount.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base
@@ -3734,11 +4021,11 @@ add_library(webrtc_rtc_base_rtc_base ${WEBRTC_ROOT}/rtc_base/ifaddrs_converter.c
target_include_directories(webrtc_rtc_base_rtc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_rtc_base PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_rtc_base PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# rtc_base:rtc_base.headers
add_library(webrtc_rtc_base_rtc_base.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_rtc_base_rtc_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:rtc_base_approved
@@ -4058,11 +4345,11 @@ add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag ${WEBRTC_ROOT}/r
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_system_no_unique_address )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_system_no_unique_address )
# rtc_base/task_utils:pending_task_safety_flag.headers
add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_refcount.headers webrtc_rtc_base_system_no_unique_address.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_system_no_unique_address.headers )
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:repeating_task
@@ -4070,11 +4357,11 @@ add_library(webrtc_rtc_base_task_utils_repeating_task ${WEBRTC_ROOT}/rtc_base/ta
target_include_directories(webrtc_rtc_base_task_utils_repeating_task PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_repeating_task PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_repeating_task PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
# rtc_base/task_utils:repeating_task.headers
add_library(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:to_queued_task
@@ -4114,11 +4401,11 @@ add_library(webrtc_rtc_base_threading ${WEBRTC_ROOT}/rtc_base/async_resolver.cc
target_include_directories(webrtc_rtc_base_threading PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_threading PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_threading PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_rtc_base_threading PUBLIC absl::algorithm_container webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_rtc_base_threading PUBLIC absl::algorithm_container webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# rtc_base:threading.headers
add_library(webrtc_rtc_base_threading.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
+target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
target_include_directories(webrtc_rtc_base_threading.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/time:timestamp_extrapolator
@@ -4306,7 +4593,7 @@ add_library(webrtc_test_network_emulated_network ${WEBRTC_ROOT}/test/network/cro
target_include_directories(webrtc_test_network_emulated_network PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_network_emulated_network PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_test_network_emulated_network PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
# test/pc/sctp:fake_sctp_transport
add_library(webrtc_test_pc_sctp_fake_sctp_transport INTERFACE)
@@ -4318,7 +4605,7 @@ add_library(webrtc_test_peer_scenario_peer_scenario ${WEBRTC_ROOT}/test/peer_sce
target_include_directories(webrtc_test_peer_scenario_peer_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_peer_scenario_peer_scenario PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_test_peer_scenario_peer_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
# test:perf_test
add_library(webrtc_test_perf_test ${WEBRTC_ROOT}/test/testsupport/perf_result_reporter.cc ${WEBRTC_ROOT}/test/testsupport/perf_test.cc ${WEBRTC_ROOT}/test/testsupport/perf_test_histogram_writer.cc)
@@ -4344,7 +4631,7 @@ add_library(webrtc_test_rtp_test_utils ${WEBRTC_ROOT}/test/rtcp_packet_parser.cc
target_include_directories(webrtc_test_rtp_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_rtp_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_test_rtp_test_utils PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_rtp_test_utils PUBLIC webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
+target_link_libraries(webrtc_test_rtp_test_utils PUBLIC absl::optional webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
# test/scenario:column_printer
add_library(webrtc_test_scenario_column_printer ${WEBRTC_ROOT}/test/scenario/column_printer.cc)
@@ -4358,7 +4645,7 @@ add_library(webrtc_test_scenario_scenario ${WEBRTC_ROOT}/test/scenario/audio_str
target_include_directories(webrtc_test_scenario_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_scenario_scenario PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_test_scenario_scenario PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_test_scenario_scenario PUBLIC absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
+target_link_libraries(webrtc_test_scenario_scenario PUBLIC absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
# test:test_common
add_library(webrtc_test_test_common ${WEBRTC_ROOT}/test/call_test.cc ${WEBRTC_ROOT}/test/drifting_clock.cc ${WEBRTC_ROOT}/test/layer_filtering_transport.cc ${WEBRTC_ROOT}/test/run_loop.cc)
@@ -4419,7 +4706,7 @@ add_library(webrtc_video_video ${WEBRTC_ROOT}/video/buffered_frame_decryptor.cc
target_include_directories(webrtc_video_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_video_video PRIVATE RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_video_video PRIVATE -fno-exceptions)
-target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
+target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
# video:video_stream_encoder_impl
add_library(webrtc_video_video_stream_encoder_impl ${WEBRTC_ROOT}/video/alignment_adjuster.cc ${WEBRTC_ROOT}/video/encoder_bitrate_adjuster.cc ${WEBRTC_ROOT}/video/encoder_overshoot_detector.cc ${WEBRTC_ROOT}/video/frame_encode_metadata_writer.cc ${WEBRTC_ROOT}/video/video_source_sink_controller.cc ${WEBRTC_ROOT}/video/video_stream_encoder.cc)
diff --git a/logging/BUILD.gn b/logging/BUILD.gn
index 519f357345..90a05f7c49 100644
--- a/logging/BUILD.gn
+++ b/logging/BUILD.gn
@@ -53,6 +53,7 @@ rtc_library("rtc_event_pacing") {
deps = [
"../api:scoped_refptr",
"../api/rtc_event_log",
+ "../api/units:timestamp",
]
absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
@@ -73,6 +74,7 @@ rtc_library("rtc_event_audio") {
":rtc_stream_config",
"../api:scoped_refptr",
"../api/rtc_event_log",
+ "../api/units:timestamp",
"../modules/audio_coding:audio_network_adaptor_config",
"../rtc_base:checks",
]
@@ -101,6 +103,7 @@ rtc_library("rtc_event_bwe") {
"../api:scoped_refptr",
"../api/rtc_event_log",
"../api/units:data_rate",
+ "../api/units:timestamp",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
@@ -115,6 +118,7 @@ rtc_library("rtc_event_frame_events") {
]
deps = [
"../api/rtc_event_log",
+ "../api/units:timestamp",
"../api/video:video_frame",
"../rtc_base:timeutils",
]
@@ -136,6 +140,7 @@ rtc_library("rtc_event_generic_packet_events") {
]
deps = [
"../api/rtc_event_log",
+ "../api/units:timestamp",
"../rtc_base:timeutils",
]
absl_deps = [
@@ -179,6 +184,7 @@ rtc_library("rtc_event_video") {
":rtc_stream_config",
"../api:scoped_refptr",
"../api/rtc_event_log",
+ "../api/units:timestamp",
"../rtc_base:checks",
]
absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
@@ -452,6 +458,7 @@ rtc_library("ice_log") {
"../api:libjingle_logging_api",
"../api:libjingle_peerconnection_api", # For api/dtls_transport_interface.h
"../api/rtc_event_log",
+ "../api/units:timestamp",
"../rtc_base:rtc_base_approved",
]
absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
diff --git a/logging/g3doc/rtc_event_log.md b/logging/g3doc/rtc_event_log.md
new file mode 100644
index 0000000000..c7996e0b42
--- /dev/null
+++ b/logging/g3doc/rtc_event_log.md
@@ -0,0 +1,85 @@
+# RTC event log
+
+<?% config.freshness.owner = 'terelius' %?>
+<?% config.freshness.reviewed = '2021-06-02' %?>
+
+## Overview
+
+RTC event logs can be enabled to capture in-depth information about sent and
+received packets and the internal state of some WebRTC components. The logs are
+useful to understand network behavior and to debug issues around connectivity,
+bandwidth estimation and audio jitter buffers.
+
+The contents include:
+
+* Sent and received RTP headers
+* Full RTCP feedback
+* ICE candidates, pings and responses
+* Bandwidth estimator events, including loss-based estimate, delay-based
+ estimate, probe results and ALR state
+* Audio network adaptation settings
+* Audio playout events
+
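+Logging is enabled per peer connection. A minimal sketch of turning it on,
+assuming an already-constructed `webrtc::PeerConnectionInterface`; the output
+path and flush period below are placeholders, not recommendations:
+
+```
+#include <memory>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log_output_file.h"
+
+void EnableEventLog(webrtc::PeerConnectionInterface* peer_connection) {
+  // Write events to a file; the path is only an example.
+  auto output =
+      std::make_unique<webrtc::RtcEventLogOutputFile>("/tmp/rtc_event.log");
+  // Flush buffered events roughly every 5 seconds.
+  peer_connection->StartRtcEventLog(std::move(output),
+                                    /*output_period_ms=*/5000);
+  // When the call ends: peer_connection->StopRtcEventLog();
+}
+```
+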
+## Binary wire format
+
+No guarantees are made on the wire format, and the format may change without
+prior notice. To maintain compatibility with past and future formats, analysis
+tools should be built on top of the provided
+[rtc_event_log_parser.h](https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.h).
+
+In particular, an analysis tool should *not* read the log as a protobuf.
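+
+A minimal sketch of such a tool, assuming the whole log fits in memory; the
+parse call and the `bwe_delay_updates()` accessor mirror the ones exercised in
+the encoder unit tests:
+
+```
+#include <fstream>
+#include <iostream>
+#include <sstream>
+
+#include "logging/rtc_event_log/rtc_event_log_parser.h"
+
+int main(int argc, char** argv) {
+  if (argc < 2) {
+    std::cerr << "Usage: " << argv[0] << " /path/to/log_file\n";
+    return 1;
+  }
+  // Read the binary log into memory.
+  std::ifstream file(argv[1], std::ios::binary);
+  std::stringstream contents;
+  contents << file.rdbuf();
+
+  // Parse through the supported interface instead of decoding the protobuf.
+  webrtc::ParsedRtcEventLog parsed_log;
+  if (!parsed_log.ParseString(contents.str()).ok()) {
+    std::cerr << "Failed to parse event log\n";
+    return 1;
+  }
+  std::cout << "Delay-based BWE updates: "
+            << parsed_log.bwe_delay_updates().size() << "\n";
+  return 0;
+}
+```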
+
+## Visualization
+
+Since the logs contain a substantial amount of data, it is usually convenient to
+get an overview by visualizing them as a set of plots. Use the command:
+
+```
+out/Default/event_log_visualizer /path/to/log_file | python
+```
+
+This visualization requires matplotlib to be installed. The tool is capable of
+producing a substantial number of plots, of which only a handful are generated
+by default. You can select which plots are generated through the `--plot=`
+command line argument. For example, the command
+
+```
+out/Default/event_log_visualizer \
+ --plot=incoming_packet_sizes,incoming_stream_bitrate \
+ /path/to/log_file | python
+```
+
+plots the sizes of incoming packets and the bitrate per incoming stream.
+
+You can get a full list of options for the `--plot` argument through
+
+```
+out/Default/event_log_visualizer --list_plots /path/to/log_file
+```
+
+You can also synchronize the x-axis between all plots (so zooming or
+panning in one plot affects all of them) by adding the command line
+argument `--shared_xaxis`.
+
+
+## Viewing the raw log contents as text
+
+If you know which format version the log file uses, you can view the raw
+contents as text. For version 1, you can use the command
+
+```
+out/Default/protoc --decode webrtc.rtclog.EventStream \
+ ./logging/rtc_event_log/rtc_event_log.proto < /path/to/log_file
+```
+
+Similarly, you can use
+
+```
+out/Default/protoc --decode webrtc.rtclog2.EventStream \
+ ./logging/rtc_event_log/rtc_event_log2.proto < /path/to/log_file
+```
+
+for logs that use version 2. However, note that not all of the contents will be
+human readable. Some fields are based on the raw RTP format or may be encoded as
+deltas relative to previous fields. Such fields will be printed as a list of
+bytes.
diff --git a/logging/rtc_event_log/encoder/delta_encoding.cc b/logging/rtc_event_log/encoder/delta_encoding.cc
index 022fb9c163..7bccdabdc8 100644
--- a/logging/rtc_event_log/encoder/delta_encoding.cc
+++ b/logging/rtc_event_log/encoder/delta_encoding.cc
@@ -693,7 +693,7 @@ bool FixedLengthDeltaDecoder::IsSuitableDecoderFor(const std::string& input) {
uint32_t encoding_type_bits;
const bool result =
- reader.ReadBits(&encoding_type_bits, kBitsInHeaderForEncodingType);
+ reader.ReadBits(kBitsInHeaderForEncodingType, encoding_type_bits);
RTC_DCHECK(result);
const auto encoding_type = static_cast<EncodingType>(encoding_type_bits);
@@ -729,7 +729,7 @@ std::unique_ptr<FixedLengthDeltaDecoder> FixedLengthDeltaDecoder::Create(
// Encoding type
uint32_t encoding_type_bits;
const bool result =
- reader->ReadBits(&encoding_type_bits, kBitsInHeaderForEncodingType);
+ reader->ReadBits(kBitsInHeaderForEncodingType, encoding_type_bits);
RTC_DCHECK(result);
const EncodingType encoding = static_cast<EncodingType>(encoding_type_bits);
if (encoding != EncodingType::kFixedSizeUnsignedDeltasNoEarlyWrapNoOpt &&
@@ -742,7 +742,7 @@ std::unique_ptr<FixedLengthDeltaDecoder> FixedLengthDeltaDecoder::Create(
uint32_t read_buffer;
// delta_width_bits
- if (!reader->ReadBits(&read_buffer, kBitsInHeaderForDeltaWidthBits)) {
+ if (!reader->ReadBits(kBitsInHeaderForDeltaWidthBits, read_buffer)) {
return nullptr;
}
RTC_DCHECK_LE(read_buffer, 64 - 1); // See encoding for -1's rationale.
@@ -759,20 +759,20 @@ std::unique_ptr<FixedLengthDeltaDecoder> FixedLengthDeltaDecoder::Create(
value_width_bits = kDefaultValueWidthBits;
} else {
// signed_deltas
- if (!reader->ReadBits(&read_buffer, kBitsInHeaderForSignedDeltas)) {
+ if (!reader->ReadBits(kBitsInHeaderForSignedDeltas, read_buffer)) {
return nullptr;
}
signed_deltas = rtc::dchecked_cast<bool>(read_buffer);
// values_optional
- if (!reader->ReadBits(&read_buffer, kBitsInHeaderForValuesOptional)) {
+ if (!reader->ReadBits(kBitsInHeaderForValuesOptional, read_buffer)) {
return nullptr;
}
RTC_DCHECK_LE(read_buffer, 1);
values_optional = rtc::dchecked_cast<bool>(read_buffer);
// value_width_bits
- if (!reader->ReadBits(&read_buffer, kBitsInHeaderForValueWidthBits)) {
+ if (!reader->ReadBits(kBitsInHeaderForValueWidthBits, read_buffer)) {
return nullptr;
}
RTC_DCHECK_LE(read_buffer, 64 - 1); // See encoding for -1's rationale.
@@ -813,7 +813,7 @@ std::vector<absl::optional<uint64_t>> FixedLengthDeltaDecoder::Decode() {
if (params_.values_optional()) {
for (size_t i = 0; i < num_of_deltas_; ++i) {
uint32_t exists;
- if (!reader_->ReadBits(&exists, 1u)) {
+ if (!reader_->ReadBits(1u, exists)) {
RTC_LOG(LS_WARNING) << "Failed to read existence-indicating bit.";
return std::vector<absl::optional<uint64_t>>();
}
@@ -877,7 +877,7 @@ bool FixedLengthDeltaDecoder::ParseDelta(uint64_t* delta) {
uint32_t higher_bits;
if (higher_bit_count > 0) {
- if (!reader_->ReadBits(&higher_bits, higher_bit_count)) {
+ if (!reader_->ReadBits(higher_bit_count, higher_bits)) {
RTC_LOG(LS_WARNING) << "Failed to read higher half of delta.";
return false;
}
@@ -885,7 +885,7 @@ bool FixedLengthDeltaDecoder::ParseDelta(uint64_t* delta) {
higher_bits = 0;
}
- if (!reader_->ReadBits(&lower_bits, lower_bit_count)) {
+ if (!reader_->ReadBits(lower_bit_count, lower_bits)) {
RTC_LOG(LS_WARNING) << "Failed to read lower half of delta.";
return false;
}
diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc
index dfbad7669a..2bd7507853 100644
--- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc
+++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc
@@ -15,6 +15,7 @@
#include <vector>
#include "absl/types/optional.h"
+#include "api/array_view.h"
#include "api/network_state_predictor.h"
#include "api/rtp_headers.h"
#include "api/rtp_parameters.h"
@@ -593,14 +594,14 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacketOutgoing(
std::string RtcEventLogEncoderLegacy::EncodeRtpPacketIncoming(
const RtcEventRtpPacketIncoming& event) {
- return EncodeRtpPacket(event.timestamp_us(), event.header(),
+ return EncodeRtpPacket(event.timestamp_us(), event.RawHeader(),
event.packet_length(), PacedPacketInfo::kNotAProbe,
true);
}
std::string RtcEventLogEncoderLegacy::EncodeRtpPacketOutgoing(
const RtcEventRtpPacketOutgoing& event) {
- return EncodeRtpPacket(event.timestamp_us(), event.header(),
+ return EncodeRtpPacket(event.timestamp_us(), event.RawHeader(),
event.packet_length(), event.probe_cluster_id(),
false);
}
@@ -736,7 +737,7 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket(
std::string RtcEventLogEncoderLegacy::EncodeRtpPacket(
int64_t timestamp_us,
- const webrtc::RtpPacket& header,
+ rtc::ArrayView<const uint8_t> header,
size_t packet_length,
int probe_cluster_id,
bool is_incoming) {
diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h
index 3105dc1e68..37296e797f 100644
--- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h
+++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h
@@ -15,6 +15,7 @@
#include <memory>
#include <string>
+#include "api/array_view.h"
#include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h"
#include "rtc_base/buffer.h"
@@ -94,7 +95,7 @@ class RtcEventLogEncoderLegacy final : public RtcEventLogEncoder {
const rtc::Buffer& packet,
bool is_incoming);
std::string EncodeRtpPacket(int64_t timestamp_us,
- const RtpPacket& header,
+ rtc::ArrayView<const uint8_t> header,
size_t packet_length,
int probe_cluster_id,
bool is_incoming);
diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc
index f0a307973e..9400c864bf 100644
--- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc
+++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc
@@ -394,12 +394,12 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
// Base event
const EventType* const base_event = batch[0];
proto_batch->set_timestamp_ms(base_event->timestamp_ms());
- proto_batch->set_marker(base_event->header().Marker());
+ proto_batch->set_marker(base_event->Marker());
// TODO(terelius): Is payload type needed?
- proto_batch->set_payload_type(base_event->header().PayloadType());
- proto_batch->set_sequence_number(base_event->header().SequenceNumber());
- proto_batch->set_rtp_timestamp(base_event->header().Timestamp());
- proto_batch->set_ssrc(base_event->header().Ssrc());
+ proto_batch->set_payload_type(base_event->PayloadType());
+ proto_batch->set_sequence_number(base_event->SequenceNumber());
+ proto_batch->set_rtp_timestamp(base_event->Timestamp());
+ proto_batch->set_ssrc(base_event->Ssrc());
proto_batch->set_payload_size(base_event->payload_length());
proto_batch->set_header_size(base_event->header_length());
proto_batch->set_padding_size(base_event->padding_length());
@@ -408,8 +408,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
absl::optional<uint64_t> base_transport_sequence_number;
{
uint16_t seqnum;
- if (base_event->header().template GetExtension<TransportSequenceNumber>(
- &seqnum)) {
+ if (base_event->template GetExtension<TransportSequenceNumber>(&seqnum)) {
proto_batch->set_transport_sequence_number(seqnum);
base_transport_sequence_number = seqnum;
}
@@ -418,8 +417,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
absl::optional<uint64_t> unsigned_base_transmission_time_offset;
{
int32_t offset;
- if (base_event->header().template GetExtension<TransmissionOffset>(
- &offset)) {
+ if (base_event->template GetExtension<TransmissionOffset>(&offset)) {
proto_batch->set_transmission_time_offset(offset);
unsigned_base_transmission_time_offset = ToUnsigned(offset);
}
@@ -428,8 +426,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
absl::optional<uint64_t> base_absolute_send_time;
{
uint32_t sendtime;
- if (base_event->header().template GetExtension<AbsoluteSendTime>(
- &sendtime)) {
+ if (base_event->template GetExtension<AbsoluteSendTime>(&sendtime)) {
proto_batch->set_absolute_send_time(sendtime);
base_absolute_send_time = sendtime;
}
@@ -438,8 +435,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
absl::optional<uint64_t> base_video_rotation;
{
VideoRotation video_rotation;
- if (base_event->header().template GetExtension<VideoOrientation>(
- &video_rotation)) {
+ if (base_event->template GetExtension<VideoOrientation>(&video_rotation)) {
proto_batch->set_video_rotation(
ConvertVideoRotationToCVOByte(video_rotation));
base_video_rotation = ConvertVideoRotationToCVOByte(video_rotation);
@@ -451,8 +447,8 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
{
bool voice_activity;
uint8_t audio_level;
- if (base_event->header().template GetExtension<AudioLevel>(&voice_activity,
- &audio_level)) {
+ if (base_event->template GetExtension<AudioLevel>(&voice_activity,
+ &audio_level)) {
RTC_DCHECK_LE(audio_level, 0x7Fu);
base_audio_level = audio_level;
proto_batch->set_audio_level(audio_level);
@@ -484,9 +480,9 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
// marker (RTP base)
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
- values[i] = event->header().Marker();
+ values[i] = event->Marker();
}
- encoded_deltas = EncodeDeltas(base_event->header().Marker(), values);
+ encoded_deltas = EncodeDeltas(base_event->Marker(), values);
if (!encoded_deltas.empty()) {
proto_batch->set_marker_deltas(encoded_deltas);
}
@@ -494,9 +490,9 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
// payload_type (RTP base)
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
- values[i] = event->header().PayloadType();
+ values[i] = event->PayloadType();
}
- encoded_deltas = EncodeDeltas(base_event->header().PayloadType(), values);
+ encoded_deltas = EncodeDeltas(base_event->PayloadType(), values);
if (!encoded_deltas.empty()) {
proto_batch->set_payload_type_deltas(encoded_deltas);
}
@@ -504,9 +500,9 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
// sequence_number (RTP base)
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
- values[i] = event->header().SequenceNumber();
+ values[i] = event->SequenceNumber();
}
- encoded_deltas = EncodeDeltas(base_event->header().SequenceNumber(), values);
+ encoded_deltas = EncodeDeltas(base_event->SequenceNumber(), values);
if (!encoded_deltas.empty()) {
proto_batch->set_sequence_number_deltas(encoded_deltas);
}
@@ -514,9 +510,9 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
// rtp_timestamp (RTP base)
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
- values[i] = event->header().Timestamp();
+ values[i] = event->Timestamp();
}
- encoded_deltas = EncodeDeltas(base_event->header().Timestamp(), values);
+ encoded_deltas = EncodeDeltas(base_event->Timestamp(), values);
if (!encoded_deltas.empty()) {
proto_batch->set_rtp_timestamp_deltas(encoded_deltas);
}
@@ -524,9 +520,9 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
// ssrc (RTP base)
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
- values[i] = event->header().Ssrc();
+ values[i] = event->Ssrc();
}
- encoded_deltas = EncodeDeltas(base_event->header().Ssrc(), values);
+ encoded_deltas = EncodeDeltas(base_event->Ssrc(), values);
if (!encoded_deltas.empty()) {
proto_batch->set_ssrc_deltas(encoded_deltas);
}
@@ -565,8 +561,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
uint16_t seqnum;
- if (event->header().template GetExtension<TransportSequenceNumber>(
- &seqnum)) {
+ if (event->template GetExtension<TransportSequenceNumber>(&seqnum)) {
values[i] = seqnum;
} else {
values[i].reset();
@@ -581,7 +576,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
int32_t offset;
- if (event->header().template GetExtension<TransmissionOffset>(&offset)) {
+ if (event->template GetExtension<TransmissionOffset>(&offset)) {
values[i] = ToUnsigned(offset);
} else {
values[i].reset();
@@ -596,7 +591,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
uint32_t sendtime;
- if (event->header().template GetExtension<AbsoluteSendTime>(&sendtime)) {
+ if (event->template GetExtension<AbsoluteSendTime>(&sendtime)) {
values[i] = sendtime;
} else {
values[i].reset();
@@ -611,8 +606,7 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
for (size_t i = 0; i < values.size(); ++i) {
const EventType* event = batch[i + 1];
VideoRotation video_rotation;
- if (event->header().template GetExtension<VideoOrientation>(
- &video_rotation)) {
+ if (event->template GetExtension<VideoOrientation>(&video_rotation)) {
values[i] = ConvertVideoRotationToCVOByte(video_rotation);
} else {
values[i].reset();
@@ -628,8 +622,8 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
const EventType* event = batch[i + 1];
bool voice_activity;
uint8_t audio_level;
- if (event->header().template GetExtension<AudioLevel>(&voice_activity,
- &audio_level)) {
+ if (event->template GetExtension<AudioLevel>(&voice_activity,
+ &audio_level)) {
RTC_DCHECK_LE(audio_level, 0x7Fu);
values[i] = audio_level;
} else {
@@ -646,8 +640,8 @@ void EncodeRtpPacket(const std::vector<const EventType*>& batch,
const EventType* event = batch[i + 1];
bool voice_activity;
uint8_t audio_level;
- if (event->header().template GetExtension<AudioLevel>(&voice_activity,
- &audio_level)) {
+ if (event->template GetExtension<AudioLevel>(&voice_activity,
+ &audio_level)) {
RTC_DCHECK_LE(audio_level, 0x7Fu);
values[i] = voice_activity;
} else {
@@ -823,14 +817,14 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch(
case RtcEvent::Type::RtpPacketIncoming: {
auto* rtc_event =
static_cast<const RtcEventRtpPacketIncoming* const>(it->get());
- auto& v = incoming_rtp_packets[rtc_event->header().Ssrc()];
+ auto& v = incoming_rtp_packets[rtc_event->Ssrc()];
v.emplace_back(rtc_event);
break;
}
case RtcEvent::Type::RtpPacketOutgoing: {
auto* rtc_event =
static_cast<const RtcEventRtpPacketOutgoing* const>(it->get());
- auto& v = outgoing_rtp_packets[rtc_event->header().Ssrc()];
+ auto& v = outgoing_rtp_packets[rtc_event->Ssrc()];
v.emplace_back(rtc_event);
break;
}
diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
index 458b5af894..063d425af5 100644
--- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
+++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
@@ -49,12 +49,12 @@ class RtcEventLogEncoderTest
RtcEventLogEncoderTest()
: seed_(std::get<0>(GetParam())),
prng_(seed_),
- encoding_(std::get<1>(GetParam())),
+ encoding_type_(std::get<1>(GetParam())),
event_count_(std::get<2>(GetParam())),
force_repeated_fields_(std::get<3>(GetParam())),
gen_(seed_ * 880001UL),
- verifier_(encoding_) {
- switch (encoding_) {
+ verifier_(encoding_type_) {
+ switch (encoding_type_) {
case RtcEventLog::EncodingType::Legacy:
encoder_ = std::make_unique<RtcEventLogEncoderLegacy>();
break;
@@ -62,6 +62,8 @@ class RtcEventLogEncoderTest
encoder_ = std::make_unique<RtcEventLogEncoderNewFormat>();
break;
}
+ encoded_ =
+ encoder_->EncodeLogStart(rtc::TimeMillis(), rtc::TimeUTCMillis());
}
~RtcEventLogEncoderTest() override = default;
@@ -89,11 +91,12 @@ class RtcEventLogEncoderTest
ParsedRtcEventLog parsed_log_;
const uint64_t seed_;
Random prng_;
- const RtcEventLog::EncodingType encoding_;
+ const RtcEventLog::EncodingType encoding_type_;
const size_t event_count_;
const bool force_repeated_fields_;
test::EventGenerator gen_;
test::EventVerifier verifier_;
+ std::string encoded_;
};
void RtcEventLogEncoderTest::TestRtcEventAudioNetworkAdaptation(
@@ -105,8 +108,8 @@ void RtcEventLogEncoderTest::TestRtcEventAudioNetworkAdaptation(
history_.push_back(event->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& ana_configs = parsed_log_.audio_network_adaptation_events();
ASSERT_EQ(ana_configs.size(), events.size());
@@ -167,7 +170,7 @@ void RtcEventLogEncoderTest::TestRtpPackets() {
// TODO(terelius): Test extensions for legacy encoding, too.
RtpHeaderExtensionMap extension_map;
- if (encoding_ != RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ != RtcEventLog::EncodingType::Legacy) {
extension_map = gen_.NewRtpHeaderExtensionMap(true);
}
@@ -185,8 +188,8 @@ void RtcEventLogEncoderTest::TestRtpPackets() {
}
// Encode and parse.
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
// For each SSRC, make sure the RTP packets associated with it to have been
// correctly encoded and parsed.
@@ -212,8 +215,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAlrState) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& alr_state_events = parsed_log_.alr_state_events();
ASSERT_EQ(alr_state_events.size(), event_count_);
@@ -223,7 +226,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAlrState) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventRouteChange) {
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
return;
}
std::vector<std::unique_ptr<RtcEventRouteChange>> events(event_count_);
@@ -233,8 +236,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRouteChange) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& route_change_events = parsed_log_.route_change_events();
ASSERT_EQ(route_change_events.size(), event_count_);
@@ -244,7 +247,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRouteChange) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventRemoteEstimate) {
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
return;
}
std::vector<std::unique_ptr<RtcEventRemoteEstimate>> events(event_count_);
@@ -255,8 +258,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRemoteEstimate) {
history_.push_back(std::make_unique<RtcEventRemoteEstimate>(*events[i]));
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& parsed_events = parsed_log_.remote_estimate_events();
ASSERT_EQ(parsed_events.size(), event_count_);
@@ -409,8 +412,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioPlayout) {
original_events_by_ssrc[ssrc].push_back(std::move(event));
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& parsed_playout_events_by_ssrc =
parsed_log_.audio_playout_events();
@@ -445,8 +448,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioReceiveStreamConfig) {
gen_.NewAudioReceiveStreamConfig(ssrc, extensions);
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& audio_recv_configs = parsed_log_.audio_recv_configs();
ASSERT_EQ(audio_recv_configs.size(), 1u);
@@ -461,8 +464,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioSendStreamConfig) {
gen_.NewAudioSendStreamConfig(ssrc, extensions);
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& audio_send_configs = parsed_log_.audio_send_configs();
ASSERT_EQ(audio_send_configs.size(), 1u);
@@ -479,8 +482,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateDelayBased) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& bwe_delay_updates = parsed_log_.bwe_delay_updates();
ASSERT_EQ(bwe_delay_updates.size(), event_count_);
@@ -499,8 +502,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateLossBased) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& bwe_loss_updates = parsed_log_.bwe_loss_updates();
ASSERT_EQ(bwe_loss_updates.size(), event_count_);
@@ -511,7 +514,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateLossBased) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketReceived) {
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
return;
}
std::vector<std::unique_ptr<RtcEventGenericPacketReceived>> events(
@@ -523,8 +526,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketReceived) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& packets_received = parsed_log_.generic_packets_received();
ASSERT_EQ(packets_received.size(), event_count_);
@@ -536,7 +539,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketReceived) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketSent) {
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
return;
}
std::vector<std::unique_ptr<RtcEventGenericPacketSent>> events(event_count_);
@@ -547,8 +550,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketSent) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& packets_sent = parsed_log_.generic_packets_sent();
ASSERT_EQ(packets_sent.size(), event_count_);
@@ -559,7 +562,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketSent) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventGenericAcksReceived) {
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
return;
}
std::vector<std::unique_ptr<RtcEventGenericAckReceived>> events(event_count_);
@@ -570,8 +573,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericAcksReceived) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& decoded_events = parsed_log_.generic_acks_received();
ASSERT_EQ(decoded_events.size(), event_count_);
@@ -590,12 +593,11 @@ TEST_P(RtcEventLogEncoderTest, RtcEventDtlsTransportState) {
history_.push_back(events[i]->Copy());
}
- const std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& dtls_transport_states = parsed_log_.dtls_transport_states();
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
ASSERT_EQ(dtls_transport_states.size(), 0u);
return;
}
@@ -616,12 +618,11 @@ TEST_P(RtcEventLogEncoderTest, RtcEventDtlsWritableState) {
history_.push_back(events[i]->Copy());
}
- const std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& dtls_writable_states = parsed_log_.dtls_writable_states();
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
ASSERT_EQ(dtls_writable_states.size(), 0u);
return;
}
@@ -654,15 +655,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventFrameDecoded) {
original_events_by_ssrc[ssrc].push_back(std::move(event));
}
- const std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- auto status = parsed_log_.ParseString(encoded);
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ auto status = parsed_log_.ParseString(encoded_);
if (!status.ok())
RTC_LOG(LS_ERROR) << status.message();
ASSERT_TRUE(status.ok());
const auto& decoded_frames_by_ssrc = parsed_log_.decoded_frames();
- if (encoding_ == RtcEventLog::EncodingType::Legacy) {
+ if (encoding_type_ == RtcEventLog::EncodingType::Legacy) {
ASSERT_EQ(decoded_frames_by_ssrc.size(), 0u);
return;
}
@@ -695,8 +695,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventIceCandidatePairConfig) {
gen_.NewIceCandidatePairConfig();
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& ice_candidate_pair_configs =
parsed_log_.ice_candidate_pair_configs();
@@ -710,8 +710,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventIceCandidatePair) {
std::unique_ptr<RtcEventIceCandidatePair> event = gen_.NewIceCandidatePair();
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& ice_candidate_pair_events =
parsed_log_.ice_candidate_pair_events();
@@ -721,31 +721,35 @@ TEST_P(RtcEventLogEncoderTest, RtcEventIceCandidatePair) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStarted) {
- const int64_t timestamp_us = rtc::TimeMicros();
- const int64_t utc_time_us = rtc::TimeUTCMicros();
+ const int64_t timestamp_ms = prng_.Rand(1'000'000'000);
+ const int64_t utc_time_ms = prng_.Rand(1'000'000'000);
- std::string encoded = encoder_->EncodeLogStart(timestamp_us, utc_time_us);
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ // Overwrite the previously encoded LogStart event.
+ encoded_ = encoder_->EncodeLogStart(timestamp_ms * 1000, utc_time_ms * 1000);
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& start_log_events = parsed_log_.start_log_events();
ASSERT_EQ(start_log_events.size(), 1u);
- verifier_.VerifyLoggedStartEvent(timestamp_us, utc_time_us,
+ verifier_.VerifyLoggedStartEvent(timestamp_ms * 1000, utc_time_ms * 1000,
start_log_events[0]);
}
TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStopped) {
- const int64_t start_timestamp_us = rtc::TimeMicros();
- const int64_t start_utc_time_us = rtc::TimeUTCMicros();
- std::string encoded =
- encoder_->EncodeLogStart(start_timestamp_us, start_utc_time_us);
-
- const int64_t stop_timestamp_us = rtc::TimeMicros();
- encoded += encoder_->EncodeLogEnd(stop_timestamp_us);
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ const int64_t start_timestamp_ms = prng_.Rand(1'000'000'000);
+ const int64_t start_utc_time_ms = prng_.Rand(1'000'000'000);
+
+ // Overwrite the previously encoded LogStart event.
+ encoded_ = encoder_->EncodeLogStart(start_timestamp_ms * 1000,
+ start_utc_time_ms * 1000);
+
+ const int64_t stop_timestamp_ms =
+ prng_.Rand(start_timestamp_ms, 2'000'000'000);
+ encoded_ += encoder_->EncodeLogEnd(stop_timestamp_ms * 1000);
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& stop_log_events = parsed_log_.stop_log_events();
ASSERT_EQ(stop_log_events.size(), 1u);
- verifier_.VerifyLoggedStopEvent(stop_timestamp_us, stop_log_events[0]);
+ verifier_.VerifyLoggedStopEvent(stop_timestamp_ms * 1000, stop_log_events[0]);
}
// TODO(eladalon/terelius): Test with multiple events in the batch.
@@ -754,8 +758,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventProbeClusterCreated) {
gen_.NewProbeClusterCreated();
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& bwe_probe_cluster_created_events =
parsed_log_.bwe_probe_cluster_created_events();
@@ -770,8 +774,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventProbeResultFailure) {
gen_.NewProbeResultFailure();
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& bwe_probe_failure_events = parsed_log_.bwe_probe_failure_events();
ASSERT_EQ(bwe_probe_failure_events.size(), 1u);
@@ -785,8 +789,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventProbeResultSuccess) {
gen_.NewProbeResultSuccess();
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& bwe_probe_success_events = parsed_log_.bwe_probe_success_events();
ASSERT_EQ(bwe_probe_success_events.size(), 1u);
@@ -809,8 +813,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPacketIncoming) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& incoming_rtcp_packets = parsed_log_.incoming_rtcp_packets();
ASSERT_EQ(incoming_rtcp_packets.size(), event_count_);
@@ -830,8 +834,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPacketOutgoing) {
history_.push_back(events[i]->Copy());
}
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& outgoing_rtcp_packets = parsed_log_.outgoing_rtcp_packets();
ASSERT_EQ(outgoing_rtcp_packets.size(), event_count_);
@@ -852,9 +856,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpReceiverReport) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::ReceiverReport> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewReceiverReport();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -867,15 +871,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpReceiverReport) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& receiver_reports = parsed_log_.receiver_reports(direction);
ASSERT_EQ(receiver_reports.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedReceiverReport(timestamps_us[i], events[i],
+ verifier_.VerifyLoggedReceiverReport(timestamps_ms[i], events[i],
receiver_reports[i]);
}
}
@@ -891,9 +894,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpSenderReport) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::SenderReport> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewSenderReport();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -906,15 +909,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpSenderReport) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& sender_reports = parsed_log_.sender_reports(direction);
ASSERT_EQ(sender_reports.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedSenderReport(timestamps_us[i], events[i],
+ verifier_.VerifyLoggedSenderReport(timestamps_ms[i], events[i],
sender_reports[i]);
}
}
@@ -930,9 +932,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpExtendedReports) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::ExtendedReports> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewExtendedReports();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -945,15 +947,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpExtendedReports) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& extended_reports = parsed_log_.extended_reports(direction);
ASSERT_EQ(extended_reports.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedExtendedReports(timestamps_us[i], events[i],
+ verifier_.VerifyLoggedExtendedReports(timestamps_ms[i], events[i],
extended_reports[i]);
}
}
@@ -969,9 +970,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpFir) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::Fir> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewFir();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -984,15 +985,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpFir) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& firs = parsed_log_.firs(direction);
ASSERT_EQ(firs.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedFir(timestamps_us[i], events[i], firs[i]);
+ verifier_.VerifyLoggedFir(timestamps_ms[i], events[i], firs[i]);
}
}
}
@@ -1007,9 +1007,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPli) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::Pli> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewPli();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -1022,15 +1022,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPli) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& plis = parsed_log_.plis(direction);
ASSERT_EQ(plis.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedPli(timestamps_us[i], events[i], plis[i]);
+ verifier_.VerifyLoggedPli(timestamps_ms[i], events[i], plis[i]);
}
}
}
@@ -1045,9 +1044,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpBye) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::Bye> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewBye();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -1060,15 +1059,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpBye) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& byes = parsed_log_.byes(direction);
ASSERT_EQ(byes.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedBye(timestamps_us[i], events[i], byes[i]);
+ verifier_.VerifyLoggedBye(timestamps_ms[i], events[i], byes[i]);
}
}
}
@@ -1083,9 +1081,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpNack) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::Nack> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewNack();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -1098,15 +1096,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpNack) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& nacks = parsed_log_.nacks(direction);
ASSERT_EQ(nacks.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedNack(timestamps_us[i], events[i], nacks[i]);
+ verifier_.VerifyLoggedNack(timestamps_ms[i], events[i], nacks[i]);
}
}
}
@@ -1121,9 +1118,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpRemb) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::Remb> events(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events[i] = gen_.NewRemb();
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -1136,15 +1133,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpRemb) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& rembs = parsed_log_.rembs(direction);
ASSERT_EQ(rembs.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedRemb(timestamps_us[i], events[i], rembs[i]);
+ verifier_.VerifyLoggedRemb(timestamps_ms[i], events[i], rembs[i]);
}
}
}
@@ -1160,9 +1156,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpTransportFeedback) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::TransportFeedback> events;
events.reserve(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events.emplace_back(gen_.NewTransportFeedback());
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -1175,16 +1171,15 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpTransportFeedback) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& transport_feedbacks =
parsed_log_.transport_feedbacks(direction);
ASSERT_EQ(transport_feedbacks.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedTransportFeedback(timestamps_us[i], events[i],
+ verifier_.VerifyLoggedTransportFeedback(timestamps_ms[i], events[i],
transport_feedbacks[i]);
}
}
@@ -1201,9 +1196,9 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpLossNotification) {
for (auto direction : {kIncomingPacket, kOutgoingPacket}) {
std::vector<rtcp::LossNotification> events;
events.reserve(event_count_);
- std::vector<int64_t> timestamps_us(event_count_);
+ std::vector<int64_t> timestamps_ms(event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- timestamps_us[i] = rtc::TimeMicros();
+ timestamps_ms[i] = rtc::TimeMillis();
events.emplace_back(gen_.NewLossNotification());
rtc::Buffer buffer = events[i].Build();
if (direction == kIncomingPacket) {
@@ -1216,15 +1211,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpLossNotification) {
fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000)));
}
- std::string encoded =
- encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& loss_notifications = parsed_log_.loss_notifications(direction);
ASSERT_EQ(loss_notifications.size(), event_count_);
for (size_t i = 0; i < event_count_; ++i) {
- verifier_.VerifyLoggedLossNotification(timestamps_us[i], events[i],
+ verifier_.VerifyLoggedLossNotification(timestamps_ms[i], events[i],
loss_notifications[i]);
}
}
@@ -1246,8 +1240,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventVideoReceiveStreamConfig) {
gen_.NewVideoReceiveStreamConfig(ssrc, extensions);
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& video_recv_configs = parsed_log_.video_recv_configs();
ASSERT_EQ(video_recv_configs.size(), 1u);
@@ -1262,8 +1256,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventVideoSendStreamConfig) {
gen_.NewVideoSendStreamConfig(ssrc, extensions);
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
- ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
+ ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
const auto& video_send_configs = parsed_log_.video_send_configs();
ASSERT_EQ(video_send_configs.size(), 1u);
@@ -1283,8 +1277,8 @@ INSTANTIATE_TEST_SUITE_P(
class RtcEventLogEncoderSimpleTest
: public ::testing::TestWithParam<RtcEventLog::EncodingType> {
protected:
- RtcEventLogEncoderSimpleTest() : encoding_(GetParam()) {
- switch (encoding_) {
+ RtcEventLogEncoderSimpleTest() : encoding_type_(GetParam()) {
+ switch (encoding_type_) {
case RtcEventLog::EncodingType::Legacy:
encoder_ = std::make_unique<RtcEventLogEncoderLegacy>();
break;
@@ -1292,13 +1286,16 @@ class RtcEventLogEncoderSimpleTest
encoder_ = std::make_unique<RtcEventLogEncoderNewFormat>();
break;
}
+ encoded_ =
+ encoder_->EncodeLogStart(rtc::TimeMillis(), rtc::TimeUTCMillis());
}
~RtcEventLogEncoderSimpleTest() override = default;
std::deque<std::unique_ptr<RtcEvent>> history_;
std::unique_ptr<RtcEventLogEncoder> encoder_;
ParsedRtcEventLog parsed_log_;
- const RtcEventLog::EncodingType encoding_;
+ const RtcEventLog::EncodingType encoding_type_;
+ std::string encoded_;
};
TEST_P(RtcEventLogEncoderSimpleTest, RtcEventLargeCompoundRtcpPacketIncoming) {
@@ -1320,9 +1317,9 @@ TEST_P(RtcEventLogEncoderSimpleTest, RtcEventLargeCompoundRtcpPacketIncoming) {
EXPECT_GT(packet.size(), static_cast<size_t>(IP_PACKET_SIZE));
auto event = std::make_unique<RtcEventRtcpPacketIncoming>(packet);
history_.push_back(event->Copy());
- std::string encoded = encoder_->EncodeBatch(history_.begin(), history_.end());
+ encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
- ParsedRtcEventLog::ParseStatus status = parsed_log_.ParseString(encoded);
+ ParsedRtcEventLog::ParseStatus status = parsed_log_.ParseString(encoded_);
ASSERT_TRUE(status.ok()) << status.message();
const auto& incoming_rtcp_packets = parsed_log_.incoming_rtcp_packets();
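The tests above now accumulate into a single log: the fixture seeds encoded_ with a LogStart event and each test appends its batch before re-parsing the whole string. A minimal sketch of that pattern, assuming the fixture members and headers used by the unittest above (not a complete test file):

#include <deque>
#include <memory>
#include <string>

#include "api/rtc_event_log/rtc_event.h"
#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h"
#include "logging/rtc_event_log/rtc_event_log_parser.h"
#include "rtc_base/time_utils.h"
#include "test/gtest.h"

class EncodeAndParseSketch : public ::testing::Test {
 protected:
  EncodeAndParseSketch()
      : encoder_(std::make_unique<webrtc::RtcEventLogEncoderNewFormat>()) {
    // Seed the log so the parser always sees a LogStart event first.
    encoded_ = encoder_->EncodeLogStart(rtc::TimeMillis(), rtc::TimeUTCMillis());
  }

  void EncodeHistoryAndParse() {
    // Append the next batch to the running log instead of replacing it.
    encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end());
    ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok());
  }

  std::deque<std::unique_ptr<webrtc::RtcEvent>> history_;
  std::unique_ptr<webrtc::RtcEventLogEncoder> encoder_;
  webrtc::ParsedRtcEventLog parsed_log_;
  std::string encoded_;
};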
diff --git a/logging/rtc_event_log/encoder/var_int.cc b/logging/rtc_event_log/encoder/var_int.cc
index 59c971376e..f2819c0c73 100644
--- a/logging/rtc_event_log/encoder/var_int.cc
+++ b/logging/rtc_event_log/encoder/var_int.cc
@@ -64,7 +64,7 @@ size_t DecodeVarInt(rtc::BitBuffer* input, uint64_t* output) {
uint64_t decoded = 0;
for (size_t i = 0; i < kMaxVarIntLengthBytes; ++i) {
uint8_t byte;
- if (!input->ReadUInt8(&byte)) {
+ if (!input->ReadUInt8(byte)) {
return 0;
}
decoded +=
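For reference, a hedged usage sketch of DecodeVarInt with the signature shown in the hunk header above; the encoded input bytes are an assumed caller-provided buffer:

#include <cstddef>
#include <cstdint>

#include "logging/rtc_event_log/encoder/var_int.h"
#include "rtc_base/bit_buffer.h"

uint64_t DecodeOneVarInt(const uint8_t* data, size_t size) {
  rtc::BitBuffer reader(data, size);
  uint64_t value = 0;
  // DecodeVarInt returns 0 on failure, e.g. when ReadUInt8() runs out of
  // input or no terminating byte appears within kMaxVarIntLengthBytes.
  if (webrtc::DecodeVarInt(&reader, &value) == 0) {
    return 0;
  }
  return value;
}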
diff --git a/logging/rtc_event_log/events/rtc_event_alr_state.h b/logging/rtc_event_log/events/rtc_event_alr_state.h
index 3ad0f005fb..74d66015ef 100644
--- a/logging/rtc_event_log/events/rtc_event_alr_state.h
+++ b/logging/rtc_event_log/events/rtc_event_alr_state.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -39,13 +40,13 @@ class RtcEventAlrState final : public RtcEvent {
struct LoggedAlrStateEvent {
LoggedAlrStateEvent() = default;
- LoggedAlrStateEvent(int64_t timestamp_us, bool in_alr)
- : timestamp_us(timestamp_us), in_alr(in_alr) {}
+ LoggedAlrStateEvent(Timestamp timestamp, bool in_alr)
+ : timestamp(timestamp), in_alr(in_alr) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
bool in_alr;
};
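The same conversion recurs in every Logged* struct below: the raw int64_t timestamp_us member becomes a webrtc::Timestamp, and log_time_us()/log_time_ms() delegate to its unit accessors. A small sketch using LoggedAlrStateEvent from the diff above (the values are illustrative):

#include <cstdint>

#include "api/units/timestamp.h"
#include "logging/rtc_event_log/events/rtc_event_alr_state.h"

void TimestampAccessorSketch() {
  webrtc::LoggedAlrStateEvent event(webrtc::Timestamp::Millis(1234),
                                    /*in_alr=*/true);
  // Both helpers now derive their value from the same Timestamp member.
  int64_t us = event.log_time_us();  // 1234000
  int64_t ms = event.log_time_ms();  // 1234
  (void)us;
  (void)ms;
}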
diff --git a/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h b/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h
index 2b183bb307..aeeb28e218 100644
--- a/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h
+++ b/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h"
namespace webrtc {
@@ -43,14 +44,14 @@ class RtcEventAudioNetworkAdaptation final : public RtcEvent {
struct LoggedAudioNetworkAdaptationEvent {
LoggedAudioNetworkAdaptationEvent() = default;
- LoggedAudioNetworkAdaptationEvent(int64_t timestamp_us,
+ LoggedAudioNetworkAdaptationEvent(Timestamp timestamp,
const AudioEncoderRuntimeConfig& config)
- : timestamp_us(timestamp_us), config(config) {}
+ : timestamp(timestamp), config(config) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
AudioEncoderRuntimeConfig config;
};
diff --git a/logging/rtc_event_log/events/rtc_event_audio_playout.h b/logging/rtc_event_log/events/rtc_event_audio_playout.h
index 83825217a1..00d07a65bf 100644
--- a/logging/rtc_event_log/events/rtc_event_audio_playout.h
+++ b/logging/rtc_event_log/events/rtc_event_audio_playout.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -41,13 +42,13 @@ class RtcEventAudioPlayout final : public RtcEvent {
struct LoggedAudioPlayoutEvent {
LoggedAudioPlayoutEvent() = default;
- LoggedAudioPlayoutEvent(int64_t timestamp_us, uint32_t ssrc)
- : timestamp_us(timestamp_us), ssrc(ssrc) {}
+ LoggedAudioPlayoutEvent(Timestamp timestamp, uint32_t ssrc)
+ : timestamp(timestamp), ssrc(ssrc) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
uint32_t ssrc;
};
diff --git a/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h b/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h
index 1edd8e1e46..ccf76025e6 100644
--- a/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h
+++ b/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
#include "logging/rtc_event_log/rtc_stream_config.h"
namespace webrtc {
@@ -42,13 +43,13 @@ class RtcEventAudioReceiveStreamConfig final : public RtcEvent {
struct LoggedAudioRecvConfig {
LoggedAudioRecvConfig() = default;
- LoggedAudioRecvConfig(int64_t timestamp_us, const rtclog::StreamConfig config)
- : timestamp_us(timestamp_us), config(config) {}
+ LoggedAudioRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config)
+ : timestamp(timestamp), config(config) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtclog::StreamConfig config;
};
diff --git a/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h b/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h
index d3c60683b4..4e93871ae8 100644
--- a/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h
+++ b/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h
@@ -41,13 +41,13 @@ class RtcEventAudioSendStreamConfig final : public RtcEvent {
struct LoggedAudioSendConfig {
LoggedAudioSendConfig() = default;
- LoggedAudioSendConfig(int64_t timestamp_us, const rtclog::StreamConfig config)
- : timestamp_us(timestamp_us), config(config) {}
+ LoggedAudioSendConfig(Timestamp timestamp, const rtclog::StreamConfig config)
+ : timestamp(timestamp), config(config) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtclog::StreamConfig config;
};
} // namespace webrtc
diff --git a/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h b/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h
index a83ea8b693..522f98fd8d 100644
--- a/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h
+++ b/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h
@@ -17,6 +17,7 @@
#include "api/network_state_predictor.h"
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -45,17 +46,17 @@ class RtcEventBweUpdateDelayBased final : public RtcEvent {
struct LoggedBweDelayBasedUpdate {
LoggedBweDelayBasedUpdate() = default;
- LoggedBweDelayBasedUpdate(int64_t timestamp_us,
+ LoggedBweDelayBasedUpdate(Timestamp timestamp,
int32_t bitrate_bps,
BandwidthUsage detector_state)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
bitrate_bps(bitrate_bps),
detector_state(detector_state) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int32_t bitrate_bps;
BandwidthUsage detector_state;
};
diff --git a/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h b/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h
index b638f1ac16..b031658ea2 100644
--- a/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h
+++ b/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -47,19 +48,19 @@ class RtcEventBweUpdateLossBased final : public RtcEvent {
struct LoggedBweLossBasedUpdate {
LoggedBweLossBasedUpdate() = default;
- LoggedBweLossBasedUpdate(int64_t timestamp_us,
+ LoggedBweLossBasedUpdate(Timestamp timestamp,
int32_t bitrate_bps,
uint8_t fraction_lost,
int32_t expected_packets)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
bitrate_bps(bitrate_bps),
fraction_lost(fraction_lost),
expected_packets(expected_packets) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int32_t bitrate_bps;
uint8_t fraction_lost;
int32_t expected_packets;
diff --git a/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h b/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h
index af35a3f3bc..9a3eecb3d3 100644
--- a/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h
+++ b/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h
@@ -15,6 +15,7 @@
#include "api/dtls_transport_interface.h"
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -41,10 +42,10 @@ class RtcEventDtlsTransportState : public RtcEvent {
};
struct LoggedDtlsTransportState {
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
DtlsTransportState dtls_transport_state;
};
diff --git a/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h b/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h
index c3ecce00ef..c0cc5b87ef 100644
--- a/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h
+++ b/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -41,10 +42,10 @@ struct LoggedDtlsWritableState {
LoggedDtlsWritableState() = default;
explicit LoggedDtlsWritableState(bool writable) : writable(writable) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
bool writable;
};
diff --git a/logging/rtc_event_log/events/rtc_event_frame_decoded.h b/logging/rtc_event_log/events/rtc_event_frame_decoded.h
index c549aa8831..4a6bb90d02 100644
--- a/logging/rtc_event_log/events/rtc_event_frame_decoded.h
+++ b/logging/rtc_event_log/events/rtc_event_frame_decoded.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
#include "api/video/video_codec_type.h"
namespace webrtc {
@@ -56,10 +57,10 @@ class RtcEventFrameDecoded final : public RtcEvent {
};
struct LoggedFrameDecoded {
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int64_t render_time_ms;
uint32_t ssrc;
int width;
diff --git a/logging/rtc_event_log/events/rtc_event_generic_ack_received.h b/logging/rtc_event_log/events/rtc_event_generic_ack_received.h
index 76e3cc24c4..75fc83c8b8 100644
--- a/logging/rtc_event_log/events/rtc_event_generic_ack_received.h
+++ b/logging/rtc_event_log/events/rtc_event_generic_ack_received.h
@@ -16,6 +16,7 @@
#include "absl/types/optional.h"
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -77,19 +78,19 @@ class RtcEventGenericAckReceived final : public RtcEvent {
struct LoggedGenericAckReceived {
LoggedGenericAckReceived() = default;
- LoggedGenericAckReceived(int64_t timestamp_us,
+ LoggedGenericAckReceived(Timestamp timestamp,
int64_t packet_number,
int64_t acked_packet_number,
absl::optional<int64_t> receive_acked_packet_time_ms)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
packet_number(packet_number),
acked_packet_number(acked_packet_number),
receive_acked_packet_time_ms(receive_acked_packet_time_ms) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int64_t packet_number;
int64_t acked_packet_number;
absl::optional<int64_t> receive_acked_packet_time_ms;
diff --git a/logging/rtc_event_log/events/rtc_event_generic_packet_received.h b/logging/rtc_event_log/events/rtc_event_generic_packet_received.h
index 45e5e4cc44..428e7b3806 100644
--- a/logging/rtc_event_log/events/rtc_event_generic_packet_received.h
+++ b/logging/rtc_event_log/events/rtc_event_generic_packet_received.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -45,17 +46,17 @@ class RtcEventGenericPacketReceived final : public RtcEvent {
struct LoggedGenericPacketReceived {
LoggedGenericPacketReceived() = default;
- LoggedGenericPacketReceived(int64_t timestamp_us,
+ LoggedGenericPacketReceived(Timestamp timestamp,
int64_t packet_number,
int packet_length)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
packet_number(packet_number),
packet_length(packet_length) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int64_t packet_number;
int packet_length;
};
diff --git a/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h b/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h
index 9ebafbe2ec..6e626e63a1 100644
--- a/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h
+++ b/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -62,24 +63,24 @@ class RtcEventGenericPacketSent final : public RtcEvent {
struct LoggedGenericPacketSent {
LoggedGenericPacketSent() = default;
- LoggedGenericPacketSent(int64_t timestamp_us,
+ LoggedGenericPacketSent(Timestamp timestamp,
int64_t packet_number,
size_t overhead_length,
size_t payload_length,
size_t padding_length)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
packet_number(packet_number),
overhead_length(overhead_length),
payload_length(payload_length),
padding_length(padding_length) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
size_t packet_length() const {
return payload_length + padding_length + overhead_length;
}
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int64_t packet_number;
size_t overhead_length;
size_t payload_length;
diff --git a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h
index 717ddf360d..1f4d825a99 100644
--- a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h
+++ b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -56,19 +57,19 @@ class RtcEventIceCandidatePair final : public RtcEvent {
struct LoggedIceCandidatePairEvent {
LoggedIceCandidatePairEvent() = default;
- LoggedIceCandidatePairEvent(int64_t timestamp_us,
+ LoggedIceCandidatePairEvent(Timestamp timestamp,
IceCandidatePairEventType type,
uint32_t candidate_pair_id,
uint32_t transaction_id)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
type(type),
candidate_pair_id(candidate_pair_id),
transaction_id(transaction_id) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
IceCandidatePairEventType type;
uint32_t candidate_pair_id;
uint32_t transaction_id;
diff --git a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h
index ab2eaf2422..465a799780 100644
--- a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h
+++ b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -113,10 +114,10 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent {
};
struct LoggedIceCandidatePairConfig {
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
IceCandidatePairConfigType type;
uint32_t candidate_pair_id;
IceCandidateType local_candidate_type;
diff --git a/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h b/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h
index f3221b91fd..974a0c9a5c 100644
--- a/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h
+++ b/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -50,21 +51,21 @@ class RtcEventProbeClusterCreated final : public RtcEvent {
struct LoggedBweProbeClusterCreatedEvent {
LoggedBweProbeClusterCreatedEvent() = default;
- LoggedBweProbeClusterCreatedEvent(int64_t timestamp_us,
+ LoggedBweProbeClusterCreatedEvent(Timestamp timestamp,
int32_t id,
int32_t bitrate_bps,
uint32_t min_packets,
uint32_t min_bytes)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
id(id),
bitrate_bps(bitrate_bps),
min_packets(min_packets),
min_bytes(min_bytes) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int32_t id;
int32_t bitrate_bps;
uint32_t min_packets;
diff --git a/logging/rtc_event_log/events/rtc_event_probe_result_failure.h b/logging/rtc_event_log/events/rtc_event_probe_result_failure.h
index 868c30b61c..fa61b314b4 100644
--- a/logging/rtc_event_log/events/rtc_event_probe_result_failure.h
+++ b/logging/rtc_event_log/events/rtc_event_probe_result_failure.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -50,15 +51,15 @@ class RtcEventProbeResultFailure final : public RtcEvent {
struct LoggedBweProbeFailureEvent {
LoggedBweProbeFailureEvent() = default;
- LoggedBweProbeFailureEvent(int64_t timestamp_us,
+ LoggedBweProbeFailureEvent(Timestamp timestamp,
int32_t id,
ProbeFailureReason failure_reason)
- : timestamp_us(timestamp_us), id(id), failure_reason(failure_reason) {}
+ : timestamp(timestamp), id(id), failure_reason(failure_reason) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int32_t id;
ProbeFailureReason failure_reason;
};
diff --git a/logging/rtc_event_log/events/rtc_event_probe_result_success.h b/logging/rtc_event_log/events/rtc_event_probe_result_success.h
index e3746681f6..d00cfa81d6 100644
--- a/logging/rtc_event_log/events/rtc_event_probe_result_success.h
+++ b/logging/rtc_event_log/events/rtc_event_probe_result_success.h
@@ -16,6 +16,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -43,15 +44,15 @@ class RtcEventProbeResultSuccess final : public RtcEvent {
struct LoggedBweProbeSuccessEvent {
LoggedBweProbeSuccessEvent() = default;
- LoggedBweProbeSuccessEvent(int64_t timestamp_us,
+ LoggedBweProbeSuccessEvent(Timestamp timestamp,
int32_t id,
int32_t bitrate_bps)
- : timestamp_us(timestamp_us), id(id), bitrate_bps(bitrate_bps) {}
+ : timestamp(timestamp), id(id), bitrate_bps(bitrate_bps) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
int32_t id;
int32_t bitrate_bps;
};
diff --git a/logging/rtc_event_log/events/rtc_event_remote_estimate.h b/logging/rtc_event_log/events/rtc_event_remote_estimate.h
index 29b0c47195..956e05f682 100644
--- a/logging/rtc_event_log/events/rtc_event_remote_estimate.h
+++ b/logging/rtc_event_log/events/rtc_event_remote_estimate.h
@@ -15,6 +15,7 @@
#include "absl/types/optional.h"
#include "api/rtc_event_log/rtc_event.h"
#include "api/units/data_rate.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -37,10 +38,10 @@ class RtcEventRemoteEstimate final : public RtcEvent {
struct LoggedRemoteEstimateEvent {
LoggedRemoteEstimateEvent() = default;
- int64_t log_time_us() const { return timestamp_ms * 1000; }
- int64_t log_time_ms() const { return timestamp_ms; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_ms;
+ Timestamp timestamp = Timestamp::MinusInfinity();
absl::optional<DataRate> link_capacity_lower;
absl::optional<DataRate> link_capacity_upper;
};
diff --git a/logging/rtc_event_log/events/rtc_event_route_change.h b/logging/rtc_event_log/events/rtc_event_route_change.h
index 455a832141..4a4e9aef80 100644
--- a/logging/rtc_event_log/events/rtc_event_route_change.h
+++ b/logging/rtc_event_log/events/rtc_event_route_change.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
namespace webrtc {
@@ -41,15 +42,13 @@ class RtcEventRouteChange final : public RtcEvent {
struct LoggedRouteChangeEvent {
LoggedRouteChangeEvent() = default;
- LoggedRouteChangeEvent(int64_t timestamp_ms,
- bool connected,
- uint32_t overhead)
- : timestamp_ms(timestamp_ms), connected(connected), overhead(overhead) {}
+ LoggedRouteChangeEvent(Timestamp timestamp, bool connected, uint32_t overhead)
+ : timestamp(timestamp), connected(connected), overhead(overhead) {}
- int64_t log_time_us() const { return timestamp_ms * 1000; }
- int64_t log_time_ms() const { return timestamp_ms; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_ms;
+ Timestamp timestamp = Timestamp::MinusInfinity();
bool connected;
uint32_t overhead;
};
diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc
index 4e505bdbf1..4cf33a238f 100644
--- a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc
+++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc
@@ -18,22 +18,11 @@ namespace webrtc {
RtcEventRtpPacketIncoming::RtcEventRtpPacketIncoming(
const RtpPacketReceived& packet)
- : payload_length_(packet.payload_size()),
- header_length_(packet.headers_size()),
- padding_length_(packet.padding_size()) {
- header_.CopyHeaderFrom(packet);
- RTC_DCHECK_EQ(packet.size(),
- payload_length_ + header_length_ + padding_length_);
-}
+ : packet_(packet) {}
RtcEventRtpPacketIncoming::RtcEventRtpPacketIncoming(
const RtcEventRtpPacketIncoming& other)
- : RtcEvent(other.timestamp_us_),
- payload_length_(other.payload_length_),
- header_length_(other.header_length_),
- padding_length_(other.padding_length_) {
- header_.CopyHeaderFrom(other.header_);
-}
+ : RtcEvent(other.timestamp_us_), packet_(other.packet_) {}
RtcEventRtpPacketIncoming::~RtcEventRtpPacketIncoming() = default;
diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h
index 8d13dc6e87..ee48fa360b 100644
--- a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h
+++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h
@@ -11,8 +11,12 @@
#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_INCOMING_H_
#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_INCOMING_H_
+#include <cstddef>
+#include <cstdint>
#include <memory>
+#include <utility>
+#include "api/array_view.h"
#include "api/rtc_event_log/rtc_event.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
@@ -32,22 +36,33 @@ class RtcEventRtpPacketIncoming final : public RtcEvent {
std::unique_ptr<RtcEventRtpPacketIncoming> Copy() const;
- size_t packet_length() const {
- return payload_length_ + header_length_ + padding_length_;
+ size_t packet_length() const { return packet_.size(); }
+
+ rtc::ArrayView<const uint8_t> RawHeader() const {
+ return rtc::MakeArrayView(packet_.data(), header_length());
+ }
+ uint32_t Ssrc() const { return packet_.Ssrc(); }
+ uint32_t Timestamp() const { return packet_.Timestamp(); }
+ uint16_t SequenceNumber() const { return packet_.SequenceNumber(); }
+ uint8_t PayloadType() const { return packet_.PayloadType(); }
+ bool Marker() const { return packet_.Marker(); }
+ template <typename ExtensionTrait, typename... Args>
+ bool GetExtension(Args&&... args) const {
+ return packet_.GetExtension<ExtensionTrait>(std::forward<Args>(args)...);
+ }
+ template <typename ExtensionTrait>
+ bool HasExtension() const {
+ return packet_.HasExtension<ExtensionTrait>();
}
- const RtpPacket& header() const { return header_; }
- size_t payload_length() const { return payload_length_; }
- size_t header_length() const { return header_length_; }
- size_t padding_length() const { return padding_length_; }
+ size_t payload_length() const { return packet_.payload_size(); }
+ size_t header_length() const { return packet_.headers_size(); }
+ size_t padding_length() const { return packet_.padding_size(); }
private:
RtcEventRtpPacketIncoming(const RtcEventRtpPacketIncoming& other);
- RtpPacket header_; // Only the packet's header will be stored here.
- const size_t payload_length_; // Media payload, excluding header and padding.
- const size_t header_length_; // RTP header.
- const size_t padding_length_; // RTP padding.
+ const RtpPacket packet_;
};
} // namespace webrtc
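Since RtcEventRtpPacketIncoming now stores the whole RtpPacket, header fields and extensions are read through the forwarding accessors above instead of header(). A hedged sketch; the TransmissionOffset extension and its include path are illustrative choices, not part of this diff:

#include <cstdint>

#include "api/array_view.h"
#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"  // TransmissionOffset (assumed path)

void InspectIncoming(const webrtc::RtcEventRtpPacketIncoming& event) {
  uint32_t ssrc = event.Ssrc();
  uint16_t seq = event.SequenceNumber();
  rtc::ArrayView<const uint8_t> raw_header = event.RawHeader();
  // Extension lookups forward to the stored packet.
  int32_t transmission_offset = 0;
  if (event.GetExtension<webrtc::TransmissionOffset>(&transmission_offset)) {
    // Use the offset together with ssrc/seq as needed.
  }
  (void)ssrc;
  (void)seq;
  (void)raw_header;
}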
diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc
index e5324bf1a3..a6a4d99702 100644
--- a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc
+++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc
@@ -19,24 +19,13 @@ namespace webrtc {
RtcEventRtpPacketOutgoing::RtcEventRtpPacketOutgoing(
const RtpPacketToSend& packet,
int probe_cluster_id)
- : payload_length_(packet.payload_size()),
- header_length_(packet.headers_size()),
- padding_length_(packet.padding_size()),
- probe_cluster_id_(probe_cluster_id) {
- header_.CopyHeaderFrom(packet);
- RTC_DCHECK_EQ(packet.size(),
- payload_length_ + header_length_ + padding_length_);
-}
+ : packet_(packet), probe_cluster_id_(probe_cluster_id) {}
RtcEventRtpPacketOutgoing::RtcEventRtpPacketOutgoing(
const RtcEventRtpPacketOutgoing& other)
: RtcEvent(other.timestamp_us_),
- payload_length_(other.payload_length_),
- header_length_(other.header_length_),
- padding_length_(other.padding_length_),
- probe_cluster_id_(other.probe_cluster_id_) {
- header_.CopyHeaderFrom(other.header_);
-}
+ packet_(other.packet_),
+ probe_cluster_id_(other.probe_cluster_id_) {}
RtcEventRtpPacketOutgoing::~RtcEventRtpPacketOutgoing() = default;
diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h
index de4abcc904..9ef5b1afdd 100644
--- a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h
+++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h
@@ -11,8 +11,12 @@
#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_OUTGOING_H_
#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_OUTGOING_H_
+#include <cstddef>
+#include <cstdint>
#include <memory>
+#include <utility>
+#include "api/array_view.h"
#include "api/rtc_event_log/rtc_event.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
@@ -33,23 +37,34 @@ class RtcEventRtpPacketOutgoing final : public RtcEvent {
std::unique_ptr<RtcEventRtpPacketOutgoing> Copy() const;
- size_t packet_length() const {
- return payload_length_ + header_length_ + padding_length_;
+ size_t packet_length() const { return packet_.size(); }
+
+ rtc::ArrayView<const uint8_t> RawHeader() const {
+ return rtc::MakeArrayView(packet_.data(), header_length());
+ }
+ uint32_t Ssrc() const { return packet_.Ssrc(); }
+ uint32_t Timestamp() const { return packet_.Timestamp(); }
+ uint16_t SequenceNumber() const { return packet_.SequenceNumber(); }
+ uint8_t PayloadType() const { return packet_.PayloadType(); }
+ bool Marker() const { return packet_.Marker(); }
+ template <typename ExtensionTrait, typename... Args>
+ bool GetExtension(Args&&... args) const {
+ return packet_.GetExtension<ExtensionTrait>(std::forward<Args>(args)...);
+ }
+ template <typename ExtensionTrait>
+ bool HasExtension() const {
+ return packet_.HasExtension<ExtensionTrait>();
}
- const RtpPacket& header() const { return header_; }
- size_t payload_length() const { return payload_length_; }
- size_t header_length() const { return header_length_; }
- size_t padding_length() const { return padding_length_; }
+ size_t payload_length() const { return packet_.payload_size(); }
+ size_t header_length() const { return packet_.headers_size(); }
+ size_t padding_length() const { return packet_.padding_size(); }
int probe_cluster_id() const { return probe_cluster_id_; }
private:
RtcEventRtpPacketOutgoing(const RtcEventRtpPacketOutgoing& other);
- RtpPacket header_; // Only the packet's header will be stored here.
- const size_t payload_length_; // Media payload, excluding header and padding.
- const size_t header_length_; // RTP header.
- const size_t padding_length_; // RTP padding.
+ const RtpPacket packet_;
// TODO(eladalon): Delete |probe_cluster_id_| along with legacy encoding.
const int probe_cluster_id_;
};
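The header change above makes every accessor forward to the stored packet, and RawHeader() exposes the serialized header bytes as an rtc::ArrayView. A hedged sketch of a consumer reading those fields follows; it uses only the accessors visible in this diff, plus TransportSequenceNumber from modules/rtp_rtcp/source/rtp_header_extensions.h, and the function name DescribeOutgoingPacket is illustrative.

// Sketch only: uses just the accessors visible in this diff.
#include <cstdint>
#include <cstdio>

#include "api/array_view.h"
#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"

namespace webrtc {

// Hypothetical consumer: prints the basic RTP fields of an outgoing event.
void DescribeOutgoingPacket(const RtcEventRtpPacketOutgoing& event) {
  std::printf("ssrc=%u seq=%u pt=%u header=%zu payload=%zu padding=%zu\n",
              static_cast<unsigned>(event.Ssrc()),
              static_cast<unsigned>(event.SequenceNumber()),
              static_cast<unsigned>(event.PayloadType()),
              event.header_length(), event.payload_length(),
              event.padding_length());

  // The serialized header: header_length() bytes starting at the packet data.
  rtc::ArrayView<const uint8_t> raw = event.RawHeader();
  std::printf("raw header bytes: %zu\n", raw.size());

  // Extensions are queried through the forwarding templates. No `template`
  // keyword is needed here because `event` is not a dependent type.
  uint16_t transport_seq = 0;
  if (event.GetExtension<TransportSequenceNumber>(&transport_seq)) {
    std::printf("transport-wide seq=%u\n",
                static_cast<unsigned>(transport_seq));
  }
}

}  // namespace webrtc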
diff --git a/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h b/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h
index 2bf52476a1..e7b9061872 100644
--- a/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h
+++ b/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
#include "logging/rtc_event_log/rtc_stream_config.h"
namespace webrtc {
@@ -42,13 +43,13 @@ class RtcEventVideoReceiveStreamConfig final : public RtcEvent {
struct LoggedVideoRecvConfig {
LoggedVideoRecvConfig() = default;
- LoggedVideoRecvConfig(int64_t timestamp_us, const rtclog::StreamConfig config)
- : timestamp_us(timestamp_us), config(config) {}
+ LoggedVideoRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config)
+ : timestamp(timestamp), config(config) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtclog::StreamConfig config;
};
diff --git a/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h b/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h
index cf95afc4d8..e72e75e49d 100644
--- a/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h
+++ b/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h
@@ -14,6 +14,7 @@
#include <memory>
#include "api/rtc_event_log/rtc_event.h"
+#include "api/units/timestamp.h"
#include "logging/rtc_event_log/rtc_stream_config.h"
namespace webrtc {
@@ -41,13 +42,13 @@ class RtcEventVideoSendStreamConfig final : public RtcEvent {
struct LoggedVideoSendConfig {
LoggedVideoSendConfig() = default;
- LoggedVideoSendConfig(int64_t timestamp_us, const rtclog::StreamConfig config)
- : timestamp_us(timestamp_us), config(config) {}
+ LoggedVideoSendConfig(Timestamp timestamp, const rtclog::StreamConfig config)
+ : timestamp(timestamp), config(config) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtclog::StreamConfig config;
};
} // namespace webrtc
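LoggedVideoRecvConfig above (and its send-side twin below) now stores a webrtc::Timestamp defaulted to MinusInfinity instead of a raw int64_t, with log_time_us()/log_time_ms() delegating to Timestamp::us()/ms(). A minimal sketch of those conversion semantics, assuming api/units/timestamp.h as included in the hunk:

// Sketch only: the Timestamp conversions used by the Logged* structs here.
#include <cassert>

#include "api/units/timestamp.h"

int main() {
  using webrtc::Timestamp;

  // A config logged at t = 1234 ms.
  Timestamp t = Timestamp::Millis(1234);
  assert(t.us() == 1'234'000);  // what log_time_us() returns
  assert(t.ms() == 1234);       // what log_time_ms() returns

  // Default-constructed Logged* entries use MinusInfinity as a "not set"
  // sentinel, replacing a previously uninitialized int64_t.
  Timestamp unset = Timestamp::MinusInfinity();
  assert(unset.IsMinusInfinity());
  return 0;
}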
diff --git a/logging/rtc_event_log/logged_events.cc b/logging/rtc_event_log/logged_events.cc
index dd0a8aae2a..5ef3de11c0 100644
--- a/logging/rtc_event_log/logged_events.cc
+++ b/logging/rtc_event_log/logged_events.cc
@@ -40,13 +40,13 @@ LoggedPacketInfo::LoggedPacketInfo(const LoggedPacketInfo&) = default;
LoggedPacketInfo::~LoggedPacketInfo() {}
-LoggedRtcpPacket::LoggedRtcpPacket(int64_t timestamp_us,
+LoggedRtcpPacket::LoggedRtcpPacket(Timestamp timestamp,
const std::vector<uint8_t>& packet)
- : timestamp_us(timestamp_us), raw_data(packet) {}
+ : timestamp(timestamp), raw_data(packet) {}
-LoggedRtcpPacket::LoggedRtcpPacket(int64_t timestamp_us,
+LoggedRtcpPacket::LoggedRtcpPacket(Timestamp timestamp,
const std::string& packet)
- : timestamp_us(timestamp_us), raw_data(packet.size()) {
+ : timestamp(timestamp), raw_data(packet.size()) {
memcpy(raw_data.data(), packet.data(), packet.size());
}
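Both LoggedRtcpPacket constructors now take a Timestamp; the std::string overload still memcpy's the payload bytes into raw_data. A short usage sketch, assuming logged_events.h as modified in this diff and an illustrative helper name:

// Sketch only: constructs the two LoggedRtcpPacket variants defined above.
#include <cstdint>
#include <string>
#include <vector>

#include "api/units/timestamp.h"
#include "logging/rtc_event_log/logged_events.h"

namespace webrtc {

// Hypothetical helper showing that both constructors yield identical bytes.
void BuildLoggedRtcpPackets() {
  const Timestamp log_time = Timestamp::Millis(42);

  // From a byte vector (e.g. a packet captured off the wire).
  std::vector<uint8_t> bytes = {0x80, 0xc9, 0x00, 0x01};
  LoggedRtcpPacket from_vector(log_time, bytes);

  // From a std::string (legacy path); the constructor copies the characters
  // into raw_data, so both variants end up byte-identical.
  std::string str(bytes.begin(), bytes.end());
  LoggedRtcpPacket from_string(log_time, str);
}

}  // namespace webrtc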
diff --git a/logging/rtc_event_log/logged_events.h b/logging/rtc_event_log/logged_events.h
index da7653d392..5bce658c30 100644
--- a/logging/rtc_event_log/logged_events.h
+++ b/logging/rtc_event_log/logged_events.h
@@ -37,19 +37,19 @@ namespace webrtc {
// adding a vptr.
struct LoggedRtpPacket {
- LoggedRtpPacket(int64_t timestamp_us,
+ LoggedRtpPacket(Timestamp timestamp,
RTPHeader header,
size_t header_length,
size_t total_length)
- : timestamp_us(timestamp_us),
+ : timestamp(timestamp),
header(header),
header_length(header_length),
total_length(total_length) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp;
// TODO(terelius): This allocates space for 15 CSRCs even if none are used.
RTPHeader header;
size_t header_length;
@@ -57,145 +57,145 @@ struct LoggedRtpPacket {
};
struct LoggedRtpPacketIncoming {
- LoggedRtpPacketIncoming(int64_t timestamp_us,
+ LoggedRtpPacketIncoming(Timestamp timestamp,
RTPHeader header,
size_t header_length,
size_t total_length)
- : rtp(timestamp_us, header, header_length, total_length) {}
- int64_t log_time_us() const { return rtp.timestamp_us; }
- int64_t log_time_ms() const { return rtp.timestamp_us / 1000; }
+ : rtp(timestamp, header, header_length, total_length) {}
+ int64_t log_time_us() const { return rtp.timestamp.us(); }
+ int64_t log_time_ms() const { return rtp.timestamp.ms(); }
LoggedRtpPacket rtp;
};
struct LoggedRtpPacketOutgoing {
- LoggedRtpPacketOutgoing(int64_t timestamp_us,
+ LoggedRtpPacketOutgoing(Timestamp timestamp,
RTPHeader header,
size_t header_length,
size_t total_length)
- : rtp(timestamp_us, header, header_length, total_length) {}
- int64_t log_time_us() const { return rtp.timestamp_us; }
- int64_t log_time_ms() const { return rtp.timestamp_us / 1000; }
+ : rtp(timestamp, header, header_length, total_length) {}
+ int64_t log_time_us() const { return rtp.timestamp.us(); }
+ int64_t log_time_ms() const { return rtp.timestamp.ms(); }
LoggedRtpPacket rtp;
};
struct LoggedRtcpPacket {
- LoggedRtcpPacket(int64_t timestamp_us, const std::vector<uint8_t>& packet);
- LoggedRtcpPacket(int64_t timestamp_us, const std::string& packet);
+ LoggedRtcpPacket(Timestamp timestamp, const std::vector<uint8_t>& packet);
+ LoggedRtcpPacket(Timestamp timestamp, const std::string& packet);
LoggedRtcpPacket(const LoggedRtcpPacket&);
~LoggedRtcpPacket();
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp;
std::vector<uint8_t> raw_data;
};
struct LoggedRtcpPacketIncoming {
- LoggedRtcpPacketIncoming(int64_t timestamp_us,
+ LoggedRtcpPacketIncoming(Timestamp timestamp,
const std::vector<uint8_t>& packet)
- : rtcp(timestamp_us, packet) {}
- LoggedRtcpPacketIncoming(uint64_t timestamp_us, const std::string& packet)
- : rtcp(timestamp_us, packet) {}
+ : rtcp(timestamp, packet) {}
+ LoggedRtcpPacketIncoming(Timestamp timestamp, const std::string& packet)
+ : rtcp(timestamp, packet) {}
- int64_t log_time_us() const { return rtcp.timestamp_us; }
- int64_t log_time_ms() const { return rtcp.timestamp_us / 1000; }
+ int64_t log_time_us() const { return rtcp.timestamp.us(); }
+ int64_t log_time_ms() const { return rtcp.timestamp.ms(); }
LoggedRtcpPacket rtcp;
};
struct LoggedRtcpPacketOutgoing {
- LoggedRtcpPacketOutgoing(int64_t timestamp_us,
+ LoggedRtcpPacketOutgoing(Timestamp timestamp,
const std::vector<uint8_t>& packet)
- : rtcp(timestamp_us, packet) {}
- LoggedRtcpPacketOutgoing(uint64_t timestamp_us, const std::string& packet)
- : rtcp(timestamp_us, packet) {}
+ : rtcp(timestamp, packet) {}
+ LoggedRtcpPacketOutgoing(Timestamp timestamp, const std::string& packet)
+ : rtcp(timestamp, packet) {}
- int64_t log_time_us() const { return rtcp.timestamp_us; }
- int64_t log_time_ms() const { return rtcp.timestamp_us / 1000; }
+ int64_t log_time_us() const { return rtcp.timestamp.us(); }
+ int64_t log_time_ms() const { return rtcp.timestamp.ms(); }
LoggedRtcpPacket rtcp;
};
struct LoggedRtcpPacketReceiverReport {
LoggedRtcpPacketReceiverReport() = default;
- LoggedRtcpPacketReceiverReport(int64_t timestamp_us,
+ LoggedRtcpPacketReceiverReport(Timestamp timestamp,
const rtcp::ReceiverReport& rr)
- : timestamp_us(timestamp_us), rr(rr) {}
+ : timestamp(timestamp), rr(rr) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::ReceiverReport rr;
};
struct LoggedRtcpPacketSenderReport {
LoggedRtcpPacketSenderReport() = default;
- LoggedRtcpPacketSenderReport(int64_t timestamp_us,
+ LoggedRtcpPacketSenderReport(Timestamp timestamp,
const rtcp::SenderReport& sr)
- : timestamp_us(timestamp_us), sr(sr) {}
+ : timestamp(timestamp), sr(sr) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::SenderReport sr;
};
struct LoggedRtcpPacketExtendedReports {
LoggedRtcpPacketExtendedReports() = default;
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::ExtendedReports xr;
};
struct LoggedRtcpPacketRemb {
LoggedRtcpPacketRemb() = default;
- LoggedRtcpPacketRemb(int64_t timestamp_us, const rtcp::Remb& remb)
- : timestamp_us(timestamp_us), remb(remb) {}
+ LoggedRtcpPacketRemb(Timestamp timestamp, const rtcp::Remb& remb)
+ : timestamp(timestamp), remb(remb) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::Remb remb;
};
struct LoggedRtcpPacketNack {
LoggedRtcpPacketNack() = default;
- LoggedRtcpPacketNack(int64_t timestamp_us, const rtcp::Nack& nack)
- : timestamp_us(timestamp_us), nack(nack) {}
+ LoggedRtcpPacketNack(Timestamp timestamp, const rtcp::Nack& nack)
+ : timestamp(timestamp), nack(nack) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::Nack nack;
};
struct LoggedRtcpPacketFir {
LoggedRtcpPacketFir() = default;
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::Fir fir;
};
struct LoggedRtcpPacketPli {
LoggedRtcpPacketPli() = default;
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::Pli pli;
};
@@ -204,62 +204,64 @@ struct LoggedRtcpPacketTransportFeedback {
: transport_feedback(/*include_timestamps=*/true, /*include_lost*/ true) {
}
LoggedRtcpPacketTransportFeedback(
- int64_t timestamp_us,
+ Timestamp timestamp,
const rtcp::TransportFeedback& transport_feedback)
- : timestamp_us(timestamp_us), transport_feedback(transport_feedback) {}
+ : timestamp(timestamp), transport_feedback(transport_feedback) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::TransportFeedback transport_feedback;
};
struct LoggedRtcpPacketLossNotification {
LoggedRtcpPacketLossNotification() = default;
LoggedRtcpPacketLossNotification(
- int64_t timestamp_us,
+ Timestamp timestamp,
const rtcp::LossNotification& loss_notification)
- : timestamp_us(timestamp_us), loss_notification(loss_notification) {}
+ : timestamp(timestamp), loss_notification(loss_notification) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::LossNotification loss_notification;
};
struct LoggedRtcpPacketBye {
LoggedRtcpPacketBye() = default;
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp = Timestamp::MinusInfinity();
rtcp::Bye bye;
};
struct LoggedStartEvent {
- explicit LoggedStartEvent(int64_t timestamp_us)
- : LoggedStartEvent(timestamp_us, timestamp_us / 1000) {}
+ explicit LoggedStartEvent(Timestamp timestamp)
+ : LoggedStartEvent(timestamp, timestamp) {}
- LoggedStartEvent(int64_t timestamp_us, int64_t utc_start_time_ms)
- : timestamp_us(timestamp_us), utc_start_time_ms(utc_start_time_ms) {}
+ LoggedStartEvent(Timestamp timestamp, Timestamp utc_start_time)
+ : timestamp(timestamp), utc_start_time(utc_start_time) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
- int64_t utc_start_time_ms;
+ Timestamp utc_time() const { return utc_start_time; }
+
+ Timestamp timestamp;
+ Timestamp utc_start_time;
};
struct LoggedStopEvent {
- explicit LoggedStopEvent(int64_t timestamp_us) : timestamp_us(timestamp_us) {}
+ explicit LoggedStopEvent(Timestamp timestamp) : timestamp(timestamp) {}
- int64_t log_time_us() const { return timestamp_us; }
- int64_t log_time_ms() const { return timestamp_us / 1000; }
+ int64_t log_time_us() const { return timestamp.us(); }
+ int64_t log_time_ms() const { return timestamp.ms(); }
- int64_t timestamp_us;
+ Timestamp timestamp;
};
struct InferredRouteChangeEvent {
@@ -337,8 +339,5 @@ struct LoggedIceEvent {
};
-
-
-
} // namespace webrtc
#endif // LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_
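Every Logged* struct in this header now follows the same shape: a Timestamp member plus log_time_us()/log_time_ms() helpers, with LoggedStartEvent additionally exposing utc_time(). A short sketch of that pattern in use, based only on the constructors and accessors shown above:

// Sketch only: exercises the LoggedStartEvent/LoggedStopEvent API above.
#include <cstdio>

#include "api/units/timestamp.h"
#include "logging/rtc_event_log/logged_events.h"

namespace webrtc {

void PrintLogSpan() {
  // The start event carries both the monotonic log time and the UTC wall
  // time; the stop event carries only the log time.
  LoggedStartEvent start(Timestamp::Millis(10),
                         Timestamp::Millis(1624400000000));
  LoggedStopEvent stop(Timestamp::Millis(5010));

  std::printf("log spans %lld ms, started at UTC %lld ms\n",
              static_cast<long long>(stop.log_time_ms() - start.log_time_ms()),
              static_cast<long long>(start.utc_time().ms()));
}

}  // namespace webrtc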
diff --git a/logging/rtc_event_log/rtc_event_log_impl.cc b/logging/rtc_event_log/rtc_event_log_impl.cc
index 4a272f08cf..700f639311 100644
--- a/logging/rtc_event_log/rtc_event_log_impl.cc
+++ b/logging/rtc_event_log/rtc_event_log_impl.cc
@@ -90,8 +90,8 @@ bool RtcEventLogImpl::StartLogging(std::unique_ptr<RtcEventLogOutput> output,
return false;
}
- const int64_t timestamp_us = rtc::TimeMicros();
- const int64_t utc_time_us = rtc::TimeUTCMicros();
+ const int64_t timestamp_us = rtc::TimeMillis() * 1000;
+ const int64_t utc_time_us = rtc::TimeUTCMillis() * 1000;
RTC_LOG(LS_INFO) << "Starting WebRTC event log. (Timestamp, UTC) = "
"("
<< timestamp_us << ", " << utc_time_us << ").";
@@ -253,7 +253,7 @@ void RtcEventLogImpl::StopOutput() {
void RtcEventLogImpl::StopLoggingInternal() {
if (event_output_) {
RTC_DCHECK(event_output_->IsActive());
- const int64_t timestamp_us = rtc::TimeMicros();
+ const int64_t timestamp_us = rtc::TimeMillis() * 1000;
event_output_->Write(event_encoder_->EncodeLogEnd(timestamp_us));
}
StopOutput();
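The writer above now derives the start/stop timestamps from rtc::TimeMillis() * 1000 rather than rtc::TimeMicros(), so the encoded log boundaries are truncated to whole milliseconds and line up with the millisecond-resolution timestamps the parser produces below. A small sketch of the difference, assuming rtc::TimeMillis()/rtc::TimeMicros() come from rtc_base/time_utils.h:

// Sketch only: illustrates the millisecond truncation introduced above.
#include <cstdint>
#include <cstdio>

#include "rtc_base/time_utils.h"

void CompareClockGranularity() {
  const int64_t us_full = rtc::TimeMicros();          // e.g. ...123456
  const int64_t us_trunc = rtc::TimeMillis() * 1000;  // e.g. ...123000

  // Both values are microsecond counts, but the second always ends in 000,
  // which is what the event log start/stop records now contain.
  std::printf("full=%lld truncated=%lld\n",
              static_cast<long long>(us_full),
              static_cast<long long>(us_trunc));
}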
diff --git a/logging/rtc_event_log/rtc_event_log_parser.cc b/logging/rtc_event_log/rtc_event_log_parser.cc
index 6cdaa75bb7..d10e4f987a 100644
--- a/logging/rtc_event_log/rtc_event_log_parser.cc
+++ b/logging/rtc_event_log/rtc_event_log_parser.cc
@@ -390,7 +390,7 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets(
RTC_PARSE_CHECK_OR_RETURN(!proto.has_voice_activity());
}
(*rtp_packets_map)[header.ssrc].emplace_back(
- proto.timestamp_ms() * 1000, header, proto.header_size(),
+ Timestamp::Millis(proto.timestamp_ms()), header, proto.header_size(),
proto.payload_size() + header.headerLength + header.paddingLength);
}
@@ -592,7 +592,7 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets(
!voice_activity_values[i].has_value());
}
(*rtp_packets_map)[header.ssrc].emplace_back(
- 1000 * timestamp_ms, header, header.headerLength,
+ Timestamp::Millis(timestamp_ms), header, header.headerLength,
payload_size_values[i].value() + header.headerLength +
header.paddingLength);
}
@@ -615,7 +615,8 @@ ParsedRtcEventLog::ParseStatus StoreRtcpPackets(
!IdenticalRtcpContents(rtcp_packets->back().rtcp.raw_data,
proto.raw_packet())) {
// Base event
- rtcp_packets->emplace_back(proto.timestamp_ms() * 1000, proto.raw_packet());
+ rtcp_packets->emplace_back(Timestamp::Millis(proto.timestamp_ms()),
+ proto.raw_packet());
}
const size_t number_of_deltas =
@@ -653,7 +654,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpPackets(
continue;
}
std::string data(raw_packet_values[i]);
- rtcp_packets->emplace_back(1000 * timestamp_ms, data);
+ rtcp_packets->emplace_back(Timestamp::Millis(timestamp_ms), data);
}
return ParsedRtcEventLog::ParseStatus::Success();
}
@@ -672,6 +673,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks(
std::vector<LoggedRtcpPacketBye>* bye_list,
std::vector<LoggedRtcpPacketTransportFeedback>* transport_feedback_list,
std::vector<LoggedRtcpPacketLossNotification>* loss_notification_list) {
+ Timestamp timestamp = Timestamp::Micros(timestamp_us);
rtcp::CommonHeader header;
for (const uint8_t* block = packet_begin; block < packet_end;
block = header.NextPacket()) {
@@ -679,44 +681,44 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks(
if (header.type() == rtcp::TransportFeedback::kPacketType &&
header.fmt() == rtcp::TransportFeedback::kFeedbackMessageType) {
LoggedRtcpPacketTransportFeedback parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.transport_feedback.Parse(header))
transport_feedback_list->push_back(std::move(parsed_block));
} else if (header.type() == rtcp::SenderReport::kPacketType) {
LoggedRtcpPacketSenderReport parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.sr.Parse(header)) {
sr_list->push_back(std::move(parsed_block));
}
} else if (header.type() == rtcp::ReceiverReport::kPacketType) {
LoggedRtcpPacketReceiverReport parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.rr.Parse(header)) {
rr_list->push_back(std::move(parsed_block));
}
} else if (header.type() == rtcp::ExtendedReports::kPacketType) {
LoggedRtcpPacketExtendedReports parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.xr.Parse(header)) {
xr_list->push_back(std::move(parsed_block));
}
} else if (header.type() == rtcp::Fir::kPacketType &&
header.fmt() == rtcp::Fir::kFeedbackMessageType) {
LoggedRtcpPacketFir parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.fir.Parse(header)) {
fir_list->push_back(std::move(parsed_block));
}
} else if (header.type() == rtcp::Pli::kPacketType &&
header.fmt() == rtcp::Pli::kFeedbackMessageType) {
LoggedRtcpPacketPli parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.pli.Parse(header)) {
pli_list->push_back(std::move(parsed_block));
}
} else if (header.type() == rtcp::Bye::kPacketType) {
LoggedRtcpPacketBye parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.bye.Parse(header)) {
bye_list->push_back(std::move(parsed_block));
}
@@ -725,7 +727,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks(
bool type_found = false;
if (!type_found) {
LoggedRtcpPacketRemb parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.remb.Parse(header)) {
remb_list->push_back(std::move(parsed_block));
type_found = true;
@@ -733,7 +735,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks(
}
if (!type_found) {
LoggedRtcpPacketLossNotification parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.loss_notification.Parse(header)) {
loss_notification_list->push_back(std::move(parsed_block));
type_found = true;
@@ -742,7 +744,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks(
} else if (header.type() == rtcp::Nack::kPacketType &&
header.fmt() == rtcp::Nack::kFeedbackMessageType) {
LoggedRtcpPacketNack parsed_block;
- parsed_block.timestamp_us = timestamp_us;
+ parsed_block.timestamp = timestamp;
if (parsed_block.nack.Parse(header)) {
nack_list->push_back(std::move(parsed_block));
}
@@ -1170,7 +1172,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream(
// Set up convenience wrappers around the most commonly used RTCP types.
for (const auto& incoming : incoming_rtcp_packets_) {
- const int64_t timestamp_us = incoming.rtcp.timestamp_us;
+ const int64_t timestamp_us = incoming.rtcp.timestamp.us();
const uint8_t* packet_begin = incoming.rtcp.raw_data.data();
const uint8_t* packet_end = packet_begin + incoming.rtcp.raw_data.size();
auto status = StoreRtcpBlocks(
@@ -1182,7 +1184,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream(
}
for (const auto& outgoing : outgoing_rtcp_packets_) {
- const int64_t timestamp_us = outgoing.rtcp.timestamp_us;
+ const int64_t timestamp_us = outgoing.rtcp.timestamp.us();
const uint8_t* packet_begin = outgoing.rtcp.raw_data.data();
const uint8_t* packet_end = packet_begin + outgoing.rtcp.raw_data.size();
auto status = StoreRtcpBlocks(
@@ -1374,7 +1376,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent(
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
int64_t timestamp_us = event.timestamp_us();
- video_recv_configs_.emplace_back(timestamp_us, config.value());
+ video_recv_configs_.emplace_back(Timestamp::Micros(timestamp_us),
+ config.value());
incoming_rtp_extensions_maps_[config.value().remote_ssrc] =
RtpHeaderExtensionMap(config.value().rtp_extensions);
incoming_rtp_extensions_maps_[config.value().rtx_ssrc] =
@@ -1388,7 +1391,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent(
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
int64_t timestamp_us = event.timestamp_us();
- video_send_configs_.emplace_back(timestamp_us, config.value());
+ video_send_configs_.emplace_back(Timestamp::Micros(timestamp_us),
+ config.value());
outgoing_rtp_extensions_maps_[config.value().local_ssrc] =
RtpHeaderExtensionMap(config.value().rtp_extensions);
outgoing_rtp_extensions_maps_[config.value().rtx_ssrc] =
@@ -1402,7 +1406,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent(
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
int64_t timestamp_us = event.timestamp_us();
- audio_recv_configs_.emplace_back(timestamp_us, config.value());
+ audio_recv_configs_.emplace_back(Timestamp::Micros(timestamp_us),
+ config.value());
incoming_rtp_extensions_maps_[config.value().remote_ssrc] =
RtpHeaderExtensionMap(config.value().rtp_extensions);
break;
@@ -1413,7 +1418,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent(
return config.status();
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
int64_t timestamp_us = event.timestamp_us();
- audio_send_configs_.emplace_back(timestamp_us, config.value());
+ audio_send_configs_.emplace_back(Timestamp::Micros(timestamp_us),
+ config.value());
outgoing_rtp_extensions_maps_[config.value().local_ssrc] =
RtpHeaderExtensionMap(config.value().rtp_extensions);
break;
@@ -1446,11 +1452,13 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent(
int64_t timestamp_us = event.timestamp_us();
if (direction == kIncomingPacket) {
incoming_rtp_packets_map_[parsed_header.ssrc].push_back(
- LoggedRtpPacketIncoming(timestamp_us, parsed_header, header_length,
+ LoggedRtpPacketIncoming(Timestamp::Micros(timestamp_us),
+ parsed_header, header_length,
total_length));
} else {
outgoing_rtp_packets_map_[parsed_header.ssrc].push_back(
- LoggedRtpPacketOutgoing(timestamp_us, parsed_header, header_length,
+ LoggedRtpPacketOutgoing(Timestamp::Micros(timestamp_us),
+ parsed_header, header_length,
total_length));
}
break;
@@ -1469,24 +1477,26 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent(
if (packet == last_incoming_rtcp_packet_)
break;
incoming_rtcp_packets_.push_back(
- LoggedRtcpPacketIncoming(timestamp_us, packet));
+ LoggedRtcpPacketIncoming(Timestamp::Micros(timestamp_us), packet));
last_incoming_rtcp_packet_ = packet;
} else {
outgoing_rtcp_packets_.push_back(
- LoggedRtcpPacketOutgoing(timestamp_us, packet));
+ LoggedRtcpPacketOutgoing(Timestamp::Micros(timestamp_us), packet));
}
break;
}
case rtclog::Event::LOG_START: {
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
int64_t timestamp_us = event.timestamp_us();
- start_log_events_.push_back(LoggedStartEvent(timestamp_us));
+ start_log_events_.push_back(
+ LoggedStartEvent(Timestamp::Micros(timestamp_us)));
break;
}
case rtclog::Event::LOG_END: {
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
int64_t timestamp_us = event.timestamp_us();
- stop_log_events_.push_back(LoggedStopEvent(timestamp_us));
+ stop_log_events_.push_back(
+ LoggedStopEvent(Timestamp::Micros(timestamp_us)));
break;
}
case rtclog::Event::AUDIO_PLAYOUT_EVENT: {
@@ -1805,7 +1815,7 @@ ParsedRtcEventLog::GetAudioPlayout(const rtclog::Event& event) const {
const rtclog::AudioPlayoutEvent& playout_event = event.audio_playout_event();
LoggedAudioPlayoutEvent res;
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
- res.timestamp_us = event.timestamp_us();
+ res.timestamp = Timestamp::Micros(event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(playout_event.has_local_ssrc());
res.ssrc = playout_event.local_ssrc();
return res;
@@ -1821,7 +1831,7 @@ ParsedRtcEventLog::GetLossBasedBweUpdate(const rtclog::Event& event) const {
LoggedBweLossBasedUpdate bwe_update;
RTC_CHECK(event.has_timestamp_us());
- bwe_update.timestamp_us = event.timestamp_us();
+ bwe_update.timestamp = Timestamp::Micros(event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(loss_event.has_bitrate_bps());
bwe_update.bitrate_bps = loss_event.bitrate_bps();
RTC_PARSE_CHECK_OR_RETURN(loss_event.has_fraction_loss());
@@ -1842,7 +1852,7 @@ ParsedRtcEventLog::GetDelayBasedBweUpdate(const rtclog::Event& event) const {
LoggedBweDelayBasedUpdate res;
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
- res.timestamp_us = event.timestamp_us();
+ res.timestamp = Timestamp::Micros(event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(delay_event.has_bitrate_bps());
res.bitrate_bps = delay_event.bitrate_bps();
RTC_PARSE_CHECK_OR_RETURN(delay_event.has_detector_state());
@@ -1861,7 +1871,7 @@ ParsedRtcEventLog::GetAudioNetworkAdaptation(const rtclog::Event& event) const {
LoggedAudioNetworkAdaptationEvent res;
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
- res.timestamp_us = event.timestamp_us();
+ res.timestamp = Timestamp::Micros(event.timestamp_us());
if (ana_event.has_bitrate_bps())
res.config.bitrate_bps = ana_event.bitrate_bps();
if (ana_event.has_enable_fec())
@@ -1887,7 +1897,7 @@ ParsedRtcEventLog::GetBweProbeClusterCreated(const rtclog::Event& event) const {
const rtclog::BweProbeCluster& pcc_event = event.probe_cluster();
LoggedBweProbeClusterCreatedEvent res;
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
- res.timestamp_us = event.timestamp_us();
+ res.timestamp = Timestamp::Micros(event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(pcc_event.has_id());
res.id = pcc_event.id();
RTC_PARSE_CHECK_OR_RETURN(pcc_event.has_bitrate_bps());
@@ -1912,7 +1922,7 @@ ParsedRtcEventLog::GetBweProbeFailure(const rtclog::Event& event) const {
LoggedBweProbeFailureEvent res;
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
- res.timestamp_us = event.timestamp_us();
+ res.timestamp = Timestamp::Micros(event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(pr_event.has_id());
res.id = pr_event.id();
RTC_PARSE_CHECK_OR_RETURN(pr_event.has_result());
@@ -1945,7 +1955,7 @@ ParsedRtcEventLog::GetBweProbeSuccess(const rtclog::Event& event) const {
LoggedBweProbeSuccessEvent res;
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
- res.timestamp_us = event.timestamp_us();
+ res.timestamp = Timestamp::Micros(event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(pr_event.has_id());
res.id = pr_event.id();
RTC_PARSE_CHECK_OR_RETURN(pr_event.has_bitrate_bps());
@@ -1962,7 +1972,7 @@ ParsedRtcEventLog::GetAlrState(const rtclog::Event& event) const {
const rtclog::AlrState& alr_event = event.alr_state();
LoggedAlrStateEvent res;
RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us());
- res.timestamp_us = event.timestamp_us();
+ res.timestamp = Timestamp::Micros(event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(alr_event.has_in_alr());
res.in_alr = alr_event.in_alr();
@@ -1979,7 +1989,7 @@ ParsedRtcEventLog::GetIceCandidatePairConfig(
const rtclog::IceCandidatePairConfig& config =
rtc_event.ice_candidate_pair_config();
RTC_CHECK(rtc_event.has_timestamp_us());
- res.timestamp_us = rtc_event.timestamp_us();
+ res.timestamp = Timestamp::Micros(rtc_event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(config.has_config_type());
res.type = GetRuntimeIceCandidatePairConfigType(config.config_type());
RTC_PARSE_CHECK_OR_RETURN(config.has_candidate_pair_id());
@@ -2018,7 +2028,7 @@ ParsedRtcEventLog::GetIceCandidatePairEvent(
const rtclog::IceCandidatePairEvent& event =
rtc_event.ice_candidate_pair_event();
RTC_CHECK(rtc_event.has_timestamp_us());
- res.timestamp_us = rtc_event.timestamp_us();
+ res.timestamp = Timestamp::Micros(rtc_event.timestamp_us());
RTC_PARSE_CHECK_OR_RETURN(event.has_event_type());
res.type = GetRuntimeIceCandidatePairEventType(event.event_type());
RTC_PARSE_CHECK_OR_RETURN(event.has_candidate_pair_id());
@@ -2404,7 +2414,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreAlrStateEvent(
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_in_alr());
LoggedAlrStateEvent alr_event;
- alr_event.timestamp_us = proto.timestamp_ms() * 1000;
+ alr_event.timestamp = Timestamp::Millis(proto.timestamp_ms());
alr_event.in_alr = proto.in_alr();
alr_state_events_.push_back(alr_event);
@@ -2418,7 +2428,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreRouteChangeEvent(
RTC_PARSE_CHECK_OR_RETURN(proto.has_connected());
RTC_PARSE_CHECK_OR_RETURN(proto.has_overhead());
LoggedRouteChangeEvent route_event;
- route_event.timestamp_ms = proto.timestamp_ms();
+ route_event.timestamp = Timestamp::Millis(proto.timestamp_ms());
route_event.connected = proto.connected();
route_event.overhead = proto.overhead();
@@ -2432,7 +2442,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreRemoteEstimateEvent(
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
// Base event
LoggedRemoteEstimateEvent base_event;
- base_event.timestamp_ms = proto.timestamp_ms();
+ base_event.timestamp = Timestamp::Millis(proto.timestamp_ms());
absl::optional<uint64_t> base_link_capacity_lower_kbps;
if (proto.has_link_capacity_lower_kbps()) {
@@ -2480,7 +2490,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreRemoteEstimateEvent(
for (size_t i = 0; i < number_of_deltas; ++i) {
LoggedRemoteEstimateEvent event;
RTC_PARSE_CHECK_OR_RETURN(timestamp_ms_values[i].has_value());
- event.timestamp_ms = *timestamp_ms_values[i];
+ event.timestamp = Timestamp::Millis(*timestamp_ms_values[i]);
if (link_capacity_lower_kbps_values[i])
event.link_capacity_lower =
DataRate::KilobitsPerSec(*link_capacity_lower_kbps_values[i]);
@@ -2499,7 +2509,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreAudioPlayoutEvent(
// Base event
audio_playout_events_[proto.local_ssrc()].emplace_back(
- 1000 * proto.timestamp_ms(), proto.local_ssrc());
+ Timestamp::Millis(proto.timestamp_ms()), proto.local_ssrc());
const size_t number_of_deltas =
proto.has_number_of_deltas() ? proto.number_of_deltas() : 0u;
@@ -2531,8 +2541,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreAudioPlayoutEvent(
const uint32_t local_ssrc =
static_cast<uint32_t>(local_ssrc_values[i].value());
- audio_playout_events_[local_ssrc].emplace_back(1000 * timestamp_ms,
- local_ssrc);
+ audio_playout_events_[local_ssrc].emplace_back(
+ Timestamp::Millis(timestamp_ms), local_ssrc);
}
return ParseStatus::Success();
}
@@ -2565,8 +2575,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreStartEvent(
RTC_PARSE_CHECK_OR_RETURN(proto.has_version());
RTC_PARSE_CHECK_OR_RETURN(proto.has_utc_time_ms());
RTC_PARSE_CHECK_OR_RETURN_EQ(proto.version(), 2);
- LoggedStartEvent start_event(proto.timestamp_ms() * 1000,
- proto.utc_time_ms());
+ LoggedStartEvent start_event(Timestamp::Millis(proto.timestamp_ms()),
+ Timestamp::Millis(proto.utc_time_ms()));
start_log_events_.push_back(start_event);
return ParseStatus::Success();
@@ -2575,7 +2585,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreStartEvent(
ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreStopEvent(
const rtclog2::EndLogEvent& proto) {
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- LoggedStopEvent stop_event(proto.timestamp_ms() * 1000);
+ LoggedStopEvent stop_event(Timestamp::Millis(proto.timestamp_ms()));
stop_log_events_.push_back(stop_event);
return ParseStatus::Success();
@@ -2589,7 +2599,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweLossBasedUpdate(
RTC_PARSE_CHECK_OR_RETURN(proto.has_total_packets());
// Base event
- bwe_loss_updates_.emplace_back(1000 * proto.timestamp_ms(),
+ bwe_loss_updates_.emplace_back(Timestamp::Millis(proto.timestamp_ms()),
proto.bitrate_bps(), proto.fraction_loss(),
proto.total_packets());
@@ -2645,7 +2655,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweLossBasedUpdate(
const uint32_t total_packets =
static_cast<uint32_t>(total_packets_values[i].value());
- bwe_loss_updates_.emplace_back(1000 * timestamp_ms, bitrate_bps,
+ bwe_loss_updates_.emplace_back(Timestamp::Millis(timestamp_ms), bitrate_bps,
fraction_loss, total_packets);
}
return ParseStatus::Success();
@@ -2660,7 +2670,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweDelayBasedUpdate(
// Base event
const BandwidthUsage base_detector_state =
GetRuntimeDetectorState(proto.detector_state());
- bwe_delay_updates_.emplace_back(1000 * proto.timestamp_ms(),
+ bwe_delay_updates_.emplace_back(Timestamp::Millis(proto.timestamp_ms()),
proto.bitrate_bps(), base_detector_state);
const size_t number_of_deltas =
@@ -2704,7 +2714,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweDelayBasedUpdate(
static_cast<rtclog2::DelayBasedBweUpdates::DetectorState>(
detector_state_values[i].value());
- bwe_delay_updates_.emplace_back(1000 * timestamp_ms, bitrate_bps,
+ bwe_delay_updates_.emplace_back(Timestamp::Millis(timestamp_ms),
+ bitrate_bps,
GetRuntimeDetectorState(detector_state));
}
return ParseStatus::Success();
@@ -2714,7 +2725,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweProbeClusterCreated(
const rtclog2::BweProbeCluster& proto) {
LoggedBweProbeClusterCreatedEvent probe_cluster;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- probe_cluster.timestamp_us = proto.timestamp_ms() * 1000;
+ probe_cluster.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_id());
probe_cluster.id = proto.id();
RTC_PARSE_CHECK_OR_RETURN(proto.has_bitrate_bps());
@@ -2734,7 +2745,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweProbeSuccessEvent(
const rtclog2::BweProbeResultSuccess& proto) {
LoggedBweProbeSuccessEvent probe_result;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- probe_result.timestamp_us = proto.timestamp_ms() * 1000;
+ probe_result.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_id());
probe_result.id = proto.id();
RTC_PARSE_CHECK_OR_RETURN(proto.has_bitrate_bps());
@@ -2750,7 +2761,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweProbeFailureEvent(
const rtclog2::BweProbeResultFailure& proto) {
LoggedBweProbeFailureEvent probe_result;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- probe_result.timestamp_us = proto.timestamp_ms() * 1000;
+ probe_result.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_id());
probe_result.id = proto.id();
RTC_PARSE_CHECK_OR_RETURN(proto.has_failure());
@@ -2773,7 +2784,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreFrameDecodedEvents(
RTC_PARSE_CHECK_OR_RETURN(proto.has_qp());
LoggedFrameDecoded base_frame;
- base_frame.timestamp_us = 1000 * proto.timestamp_ms();
+ base_frame.timestamp = Timestamp::Millis(proto.timestamp_ms());
base_frame.ssrc = proto.ssrc();
base_frame.render_time_ms = proto.render_time_ms();
base_frame.width = proto.width();
@@ -2836,7 +2847,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreFrameDecodedEvents(
RTC_PARSE_CHECK_OR_RETURN(timestamp_ms_values[i].has_value());
RTC_PARSE_CHECK_OR_RETURN(
ToSigned(timestamp_ms_values[i].value(), &timestamp_ms));
- frame.timestamp_us = 1000 * timestamp_ms;
+ frame.timestamp = Timestamp::Millis(timestamp_ms);
RTC_PARSE_CHECK_OR_RETURN(ssrc_values[i].has_value());
RTC_PARSE_CHECK_OR_RETURN_LE(ssrc_values[i].value(),
@@ -2881,7 +2892,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericAckReceivedEvent(
base_receive_acked_packet_time_ms = proto.receive_acked_packet_time_ms();
}
generic_acks_received_.push_back(
- {proto.timestamp_ms() * 1000, proto.packet_number(),
+ {Timestamp::Millis(proto.timestamp_ms()), proto.packet_number(),
proto.acked_packet_number(), base_receive_acked_packet_time_ms});
const size_t number_of_deltas =
@@ -2940,8 +2951,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericAckReceivedEvent(
ToSigned(receive_acked_packet_time_ms_values[i].value(), &value));
receive_acked_packet_time_ms = value;
}
- generic_acks_received_.push_back({timestamp_ms * 1000, packet_number,
- acked_packet_number,
+ generic_acks_received_.push_back({Timestamp::Millis(timestamp_ms),
+ packet_number, acked_packet_number,
receive_acked_packet_time_ms});
}
return ParseStatus::Success();
@@ -2958,7 +2969,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericPacketSentEvent(
RTC_PARSE_CHECK_OR_RETURN(proto.has_padding_length());
generic_packets_sent_.push_back(
- {proto.timestamp_ms() * 1000, proto.packet_number(),
+ {Timestamp::Millis(proto.timestamp_ms()), proto.packet_number(),
static_cast<size_t>(proto.overhead_length()),
static_cast<size_t>(proto.payload_length()),
static_cast<size_t>(proto.padding_length())});
@@ -3005,7 +3016,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericPacketSentEvent(
RTC_PARSE_CHECK_OR_RETURN(payload_length_values[i].has_value());
RTC_PARSE_CHECK_OR_RETURN(padding_length_values[i].has_value());
generic_packets_sent_.push_back(
- {timestamp_ms * 1000, packet_number,
+ {Timestamp::Millis(timestamp_ms), packet_number,
static_cast<size_t>(overhead_length_values[i].value()),
static_cast<size_t>(payload_length_values[i].value()),
static_cast<size_t>(padding_length_values[i].value())});
@@ -3022,7 +3033,7 @@ ParsedRtcEventLog::StoreGenericPacketReceivedEvent(
RTC_PARSE_CHECK_OR_RETURN(proto.has_packet_number());
RTC_PARSE_CHECK_OR_RETURN(proto.has_packet_length());
- generic_packets_received_.push_back({proto.timestamp_ms() * 1000,
+ generic_packets_received_.push_back({Timestamp::Millis(proto.timestamp_ms()),
proto.packet_number(),
proto.packet_length()});
@@ -3060,7 +3071,7 @@ ParsedRtcEventLog::StoreGenericPacketReceivedEvent(
int32_t packet_length =
static_cast<int32_t>(packet_length_values[i].value());
generic_packets_received_.push_back(
- {timestamp_ms * 1000, packet_number, packet_length});
+ {Timestamp::Millis(timestamp_ms), packet_number, packet_length});
}
return ParseStatus::Success();
}
@@ -3095,8 +3106,8 @@ ParsedRtcEventLog::StoreAudioNetworkAdaptationEvent(
// Note: Encoding N as N-1 only done for |num_channels_deltas|.
runtime_config.num_channels = proto.num_channels();
}
- audio_network_adaptation_events_.emplace_back(1000 * proto.timestamp_ms(),
- runtime_config);
+ audio_network_adaptation_events_.emplace_back(
+ Timestamp::Millis(proto.timestamp_ms()), runtime_config);
}
const size_t number_of_deltas =
@@ -3217,8 +3228,8 @@ ParsedRtcEventLog::StoreAudioNetworkAdaptationEvent(
runtime_config.num_channels =
rtc::checked_cast<size_t>(num_channels_values[i].value());
}
- audio_network_adaptation_events_.emplace_back(1000 * timestamp_ms,
- runtime_config);
+ audio_network_adaptation_events_.emplace_back(
+ Timestamp::Millis(timestamp_ms), runtime_config);
}
return ParseStatus::Success();
}
@@ -3227,7 +3238,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreDtlsTransportState(
const rtclog2::DtlsTransportStateEvent& proto) {
LoggedDtlsTransportState dtls_state;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- dtls_state.timestamp_us = proto.timestamp_ms() * 1000;
+ dtls_state.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_dtls_transport_state());
dtls_state.dtls_transport_state =
@@ -3241,7 +3252,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreDtlsWritableState(
const rtclog2::DtlsWritableState& proto) {
LoggedDtlsWritableState dtls_writable_state;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- dtls_writable_state.timestamp_us = proto.timestamp_ms() * 1000;
+ dtls_writable_state.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_writable());
dtls_writable_state.writable = proto.writable();
@@ -3253,7 +3264,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreIceCandidatePairConfig(
const rtclog2::IceCandidatePairConfig& proto) {
LoggedIceCandidatePairConfig ice_config;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- ice_config.timestamp_us = proto.timestamp_ms() * 1000;
+ ice_config.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_config_type());
ice_config.type = GetRuntimeIceCandidatePairConfigType(proto.config_type());
@@ -3291,7 +3302,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreIceCandidateEvent(
const rtclog2::IceCandidatePairEvent& proto) {
LoggedIceCandidatePairEvent ice_event;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- ice_event.timestamp_us = proto.timestamp_ms() * 1000;
+ ice_event.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_event_type());
ice_event.type = GetRuntimeIceCandidatePairEventType(proto.event_type());
RTC_PARSE_CHECK_OR_RETURN(proto.has_candidate_pair_id());
@@ -3311,7 +3322,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreVideoRecvConfig(
const rtclog2::VideoRecvStreamConfig& proto) {
LoggedVideoRecvConfig stream;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- stream.timestamp_us = proto.timestamp_ms() * 1000;
+ stream.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_remote_ssrc());
stream.config.remote_ssrc = proto.remote_ssrc();
RTC_PARSE_CHECK_OR_RETURN(proto.has_local_ssrc());
@@ -3331,7 +3342,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreVideoSendConfig(
const rtclog2::VideoSendStreamConfig& proto) {
LoggedVideoSendConfig stream;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- stream.timestamp_us = proto.timestamp_ms() * 1000;
+ stream.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_ssrc());
stream.config.local_ssrc = proto.ssrc();
if (proto.has_rtx_ssrc()) {
@@ -3349,7 +3360,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreAudioRecvConfig(
const rtclog2::AudioRecvStreamConfig& proto) {
LoggedAudioRecvConfig stream;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- stream.timestamp_us = proto.timestamp_ms() * 1000;
+ stream.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_remote_ssrc());
stream.config.remote_ssrc = proto.remote_ssrc();
RTC_PARSE_CHECK_OR_RETURN(proto.has_local_ssrc());
@@ -3366,7 +3377,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreAudioSendConfig(
const rtclog2::AudioSendStreamConfig& proto) {
LoggedAudioSendConfig stream;
RTC_PARSE_CHECK_OR_RETURN(proto.has_timestamp_ms());
- stream.timestamp_us = proto.timestamp_ms() * 1000;
+ stream.timestamp = Timestamp::Millis(proto.timestamp_ms());
RTC_PARSE_CHECK_OR_RETURN(proto.has_ssrc());
stream.config.local_ssrc = proto.ssrc();
if (proto.has_header_extensions()) {
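Throughout the parser changes above, proto fields carrying milliseconds are wrapped with Timestamp::Millis() and legacy microsecond fields with Timestamp::Micros(), replacing the earlier hand-written `* 1000` conversions. A minimal sketch of the two paths, assuming api/units/timestamp.h; the local variable names stand in for the proto accessors used above.

// Sketch only: the two conversion paths used by the parser above.
#include <cassert>
#include <cstdint>

#include "api/units/timestamp.h"

int main() {
  using webrtc::Timestamp;

  // New-format protos store milliseconds (proto.timestamp_ms()).
  int64_t proto_timestamp_ms = 2500;
  Timestamp from_ms = Timestamp::Millis(proto_timestamp_ms);
  assert(from_ms.us() == 2'500'000);

  // Legacy events store microseconds (event.timestamp_us()).
  int64_t legacy_timestamp_us = 2'500'000;
  Timestamp from_us = Timestamp::Micros(legacy_timestamp_us);
  assert(from_us.ms() == 2500);
  return 0;
}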
diff --git a/logging/rtc_event_log/rtc_event_log_unittest.cc b/logging/rtc_event_log/rtc_event_log_unittest.cc
index dca7fb7774..5535bec44f 100644
--- a/logging/rtc_event_log/rtc_event_log_unittest.cc
+++ b/logging/rtc_event_log/rtc_event_log_unittest.cc
@@ -944,7 +944,7 @@ TEST_P(RtcEventLogCircularBufferTest, KeepsMostRecentEvents) {
EXPECT_LT(probe_success_events.size(), kNumEvents);
ASSERT_GT(probe_success_events.size(), 1u);
- int64_t first_timestamp_us = probe_success_events[0].timestamp_us;
+ int64_t first_timestamp_ms = probe_success_events[0].timestamp.ms();
uint32_t first_id = probe_success_events[0].id;
int32_t first_bitrate_bps = probe_success_events[0].bitrate_bps;
// We want to reset the time to what we used when generating the events, but
@@ -953,7 +953,7 @@ TEST_P(RtcEventLogCircularBufferTest, KeepsMostRecentEvents) {
// destroyed before the new one is created, so we have to reset() first.
fake_clock.reset();
fake_clock = std::make_unique<rtc::ScopedFakeClock>();
- fake_clock->SetTime(Timestamp::Micros(first_timestamp_us));
+ fake_clock->SetTime(Timestamp::Millis(first_timestamp_ms));
for (size_t i = 1; i < probe_success_events.size(); i++) {
fake_clock->AdvanceTime(TimeDelta::Millis(10));
verifier_.VerifyLoggedBweProbeSuccessEvent(
diff --git a/logging/rtc_event_log/rtc_event_log_unittest_helper.cc b/logging/rtc_event_log/rtc_event_log_unittest_helper.cc
index 7ad0ad68e8..0960c98502 100644
--- a/logging/rtc_event_log/rtc_event_log_unittest_helper.cc
+++ b/logging/rtc_event_log/rtc_event_log_unittest_helper.cc
@@ -931,7 +931,8 @@ void EventVerifier::VerifyLoggedIceCandidatePairEvent(
}
}
-void VerifyLoggedRtpHeader(const RtpPacket& original_header,
+template <typename Event>
+void VerifyLoggedRtpHeader(const Event& original_header,
const RTPHeader& logged_header) {
// Standard RTP header.
EXPECT_EQ(original_header.Marker(), logged_header.markerBit);
@@ -940,53 +941,57 @@ void VerifyLoggedRtpHeader(const RtpPacket& original_header,
EXPECT_EQ(original_header.Timestamp(), logged_header.timestamp);
EXPECT_EQ(original_header.Ssrc(), logged_header.ssrc);
- EXPECT_EQ(original_header.headers_size(), logged_header.headerLength);
+ EXPECT_EQ(original_header.header_length(), logged_header.headerLength);
// TransmissionOffset header extension.
- ASSERT_EQ(original_header.HasExtension<TransmissionOffset>(),
+ ASSERT_EQ(original_header.template HasExtension<TransmissionOffset>(),
logged_header.extension.hasTransmissionTimeOffset);
if (logged_header.extension.hasTransmissionTimeOffset) {
int32_t offset;
- ASSERT_TRUE(original_header.GetExtension<TransmissionOffset>(&offset));
+ ASSERT_TRUE(
+ original_header.template GetExtension<TransmissionOffset>(&offset));
EXPECT_EQ(offset, logged_header.extension.transmissionTimeOffset);
}
// AbsoluteSendTime header extension.
- ASSERT_EQ(original_header.HasExtension<AbsoluteSendTime>(),
+ ASSERT_EQ(original_header.template HasExtension<AbsoluteSendTime>(),
logged_header.extension.hasAbsoluteSendTime);
if (logged_header.extension.hasAbsoluteSendTime) {
uint32_t sendtime;
- ASSERT_TRUE(original_header.GetExtension<AbsoluteSendTime>(&sendtime));
+ ASSERT_TRUE(
+ original_header.template GetExtension<AbsoluteSendTime>(&sendtime));
EXPECT_EQ(sendtime, logged_header.extension.absoluteSendTime);
}
// TransportSequenceNumber header extension.
- ASSERT_EQ(original_header.HasExtension<TransportSequenceNumber>(),
+ ASSERT_EQ(original_header.template HasExtension<TransportSequenceNumber>(),
logged_header.extension.hasTransportSequenceNumber);
if (logged_header.extension.hasTransportSequenceNumber) {
uint16_t seqnum;
- ASSERT_TRUE(original_header.GetExtension<TransportSequenceNumber>(&seqnum));
+ ASSERT_TRUE(original_header.template GetExtension<TransportSequenceNumber>(
+ &seqnum));
EXPECT_EQ(seqnum, logged_header.extension.transportSequenceNumber);
}
// AudioLevel header extension.
- ASSERT_EQ(original_header.HasExtension<AudioLevel>(),
+ ASSERT_EQ(original_header.template HasExtension<AudioLevel>(),
logged_header.extension.hasAudioLevel);
if (logged_header.extension.hasAudioLevel) {
bool voice_activity;
uint8_t audio_level;
- ASSERT_TRUE(original_header.GetExtension<AudioLevel>(&voice_activity,
- &audio_level));
+ ASSERT_TRUE(original_header.template GetExtension<AudioLevel>(
+ &voice_activity, &audio_level));
EXPECT_EQ(voice_activity, logged_header.extension.voiceActivity);
EXPECT_EQ(audio_level, logged_header.extension.audioLevel);
}
// VideoOrientation header extension.
- ASSERT_EQ(original_header.HasExtension<VideoOrientation>(),
+ ASSERT_EQ(original_header.template HasExtension<VideoOrientation>(),
logged_header.extension.hasVideoRotation);
if (logged_header.extension.hasVideoRotation) {
uint8_t rotation;
- ASSERT_TRUE(original_header.GetExtension<VideoOrientation>(&rotation));
+ ASSERT_TRUE(
+ original_header.template GetExtension<VideoOrientation>(&rotation));
EXPECT_EQ(ConvertCVOByteToVideoRotation(rotation),
logged_header.extension.videoRotation);
}
@@ -1015,8 +1020,7 @@ void EventVerifier::VerifyLoggedRtpPacketIncoming(
const LoggedRtpPacketIncoming& logged_event) const {
EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms());
- EXPECT_EQ(original_event.header().headers_size(),
- logged_event.rtp.header_length);
+ EXPECT_EQ(original_event.header_length(), logged_event.rtp.header_length);
EXPECT_EQ(original_event.packet_length(), logged_event.rtp.total_length);
@@ -1025,7 +1029,7 @@ void EventVerifier::VerifyLoggedRtpPacketIncoming(
EXPECT_EQ(original_event.padding_length(),
logged_event.rtp.header.paddingLength);
- VerifyLoggedRtpHeader(original_event.header(), logged_event.rtp.header);
+ VerifyLoggedRtpHeader(original_event, logged_event.rtp.header);
}
void EventVerifier::VerifyLoggedRtpPacketOutgoing(
@@ -1033,8 +1037,7 @@ void EventVerifier::VerifyLoggedRtpPacketOutgoing(
const LoggedRtpPacketOutgoing& logged_event) const {
EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms());
- EXPECT_EQ(original_event.header().headers_size(),
- logged_event.rtp.header_length);
+ EXPECT_EQ(original_event.header_length(), logged_event.rtp.header_length);
EXPECT_EQ(original_event.packet_length(), logged_event.rtp.total_length);
@@ -1046,7 +1049,7 @@ void EventVerifier::VerifyLoggedRtpPacketOutgoing(
// TODO(terelius): Probe cluster ID isn't parsed, used or tested. Unless
// someone has a strong reason to keep it, it'll be removed.
- VerifyLoggedRtpHeader(original_event.header(), logged_event.rtp.header);
+ VerifyLoggedRtpHeader(original_event, logged_event.rtp.header);
}
void EventVerifier::VerifyLoggedGenericPacketSent(
@@ -1121,10 +1124,10 @@ void EventVerifier::VerifyReportBlock(
}
void EventVerifier::VerifyLoggedSenderReport(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::SenderReport& original_sr,
const LoggedRtcpPacketSenderReport& logged_sr) {
- EXPECT_EQ(log_time_us, logged_sr.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_sr.log_time_ms());
EXPECT_EQ(original_sr.sender_ssrc(), logged_sr.sr.sender_ssrc());
EXPECT_EQ(original_sr.ntp(), logged_sr.sr.ntp());
EXPECT_EQ(original_sr.rtp_timestamp(), logged_sr.sr.rtp_timestamp());
@@ -1141,10 +1144,10 @@ void EventVerifier::VerifyLoggedSenderReport(
}
void EventVerifier::VerifyLoggedReceiverReport(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::ReceiverReport& original_rr,
const LoggedRtcpPacketReceiverReport& logged_rr) {
- EXPECT_EQ(log_time_us, logged_rr.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_rr.log_time_ms());
EXPECT_EQ(original_rr.sender_ssrc(), logged_rr.rr.sender_ssrc());
ASSERT_EQ(original_rr.report_blocks().size(),
logged_rr.rr.report_blocks().size());
@@ -1155,10 +1158,10 @@ void EventVerifier::VerifyLoggedReceiverReport(
}
void EventVerifier::VerifyLoggedExtendedReports(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::ExtendedReports& original_xr,
const LoggedRtcpPacketExtendedReports& logged_xr) {
- EXPECT_EQ(log_time_us, logged_xr.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_xr.log_time_ms());
EXPECT_EQ(original_xr.sender_ssrc(), logged_xr.xr.sender_ssrc());
EXPECT_EQ(original_xr.rrtr().has_value(), logged_xr.xr.rrtr().has_value());
@@ -1196,10 +1199,10 @@ void EventVerifier::VerifyLoggedExtendedReports(
}
}
-void EventVerifier::VerifyLoggedFir(int64_t log_time_us,
+void EventVerifier::VerifyLoggedFir(int64_t log_time_ms,
const rtcp::Fir& original_fir,
const LoggedRtcpPacketFir& logged_fir) {
- EXPECT_EQ(log_time_us, logged_fir.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_fir.log_time_ms());
EXPECT_EQ(original_fir.sender_ssrc(), logged_fir.fir.sender_ssrc());
const auto& original_requests = original_fir.requests();
const auto& logged_requests = logged_fir.fir.requests();
@@ -1210,35 +1213,35 @@ void EventVerifier::VerifyLoggedFir(int64_t log_time_us,
}
}
-void EventVerifier::VerifyLoggedPli(int64_t log_time_us,
+void EventVerifier::VerifyLoggedPli(int64_t log_time_ms,
const rtcp::Pli& original_pli,
const LoggedRtcpPacketPli& logged_pli) {
- EXPECT_EQ(log_time_us, logged_pli.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_pli.log_time_ms());
EXPECT_EQ(original_pli.sender_ssrc(), logged_pli.pli.sender_ssrc());
EXPECT_EQ(original_pli.media_ssrc(), logged_pli.pli.media_ssrc());
}
-void EventVerifier::VerifyLoggedBye(int64_t log_time_us,
+void EventVerifier::VerifyLoggedBye(int64_t log_time_ms,
const rtcp::Bye& original_bye,
const LoggedRtcpPacketBye& logged_bye) {
- EXPECT_EQ(log_time_us, logged_bye.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_bye.log_time_ms());
EXPECT_EQ(original_bye.sender_ssrc(), logged_bye.bye.sender_ssrc());
EXPECT_EQ(original_bye.csrcs(), logged_bye.bye.csrcs());
EXPECT_EQ(original_bye.reason(), logged_bye.bye.reason());
}
-void EventVerifier::VerifyLoggedNack(int64_t log_time_us,
+void EventVerifier::VerifyLoggedNack(int64_t log_time_ms,
const rtcp::Nack& original_nack,
const LoggedRtcpPacketNack& logged_nack) {
- EXPECT_EQ(log_time_us, logged_nack.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_nack.log_time_ms());
EXPECT_EQ(original_nack.packet_ids(), logged_nack.nack.packet_ids());
}
void EventVerifier::VerifyLoggedTransportFeedback(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::TransportFeedback& original_transport_feedback,
const LoggedRtcpPacketTransportFeedback& logged_transport_feedback) {
- EXPECT_EQ(log_time_us, logged_transport_feedback.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_transport_feedback.log_time_ms());
ASSERT_EQ(
original_transport_feedback.GetReceivedPackets().size(),
logged_transport_feedback.transport_feedback.GetReceivedPackets().size());
@@ -1255,19 +1258,19 @@ void EventVerifier::VerifyLoggedTransportFeedback(
}
}
-void EventVerifier::VerifyLoggedRemb(int64_t log_time_us,
+void EventVerifier::VerifyLoggedRemb(int64_t log_time_ms,
const rtcp::Remb& original_remb,
const LoggedRtcpPacketRemb& logged_remb) {
- EXPECT_EQ(log_time_us, logged_remb.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_remb.log_time_ms());
EXPECT_EQ(original_remb.ssrcs(), logged_remb.remb.ssrcs());
EXPECT_EQ(original_remb.bitrate_bps(), logged_remb.remb.bitrate_bps());
}
void EventVerifier::VerifyLoggedLossNotification(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::LossNotification& original_loss_notification,
const LoggedRtcpPacketLossNotification& logged_loss_notification) {
- EXPECT_EQ(log_time_us, logged_loss_notification.log_time_us());
+ EXPECT_EQ(log_time_ms, logged_loss_notification.log_time_ms());
EXPECT_EQ(original_loss_notification.last_decoded(),
logged_loss_notification.loss_notification.last_decoded());
EXPECT_EQ(original_loss_notification.last_received(),
@@ -1282,7 +1285,7 @@ void EventVerifier::VerifyLoggedStartEvent(
const LoggedStartEvent& logged_event) const {
EXPECT_EQ(start_time_us / 1000, logged_event.log_time_ms());
if (encoding_type_ == RtcEventLog::EncodingType::NewFormat) {
- EXPECT_EQ(utc_start_time_us / 1000, logged_event.utc_start_time_ms);
+ EXPECT_EQ(utc_start_time_us / 1000, logged_event.utc_start_time.ms());
}
}
diff --git a/logging/rtc_event_log/rtc_event_log_unittest_helper.h b/logging/rtc_event_log/rtc_event_log_unittest_helper.h
index 94cf3d5ae7..eb16592271 100644
--- a/logging/rtc_event_log/rtc_event_log_unittest_helper.h
+++ b/logging/rtc_event_log/rtc_event_log_unittest_helper.h
@@ -260,38 +260,38 @@ class EventVerifier {
const RtcEventRtcpPacketOutgoing& original_event,
const LoggedRtcpPacketOutgoing& logged_event) const;
- void VerifyLoggedSenderReport(int64_t log_time_us,
+ void VerifyLoggedSenderReport(int64_t log_time_ms,
const rtcp::SenderReport& original_sr,
const LoggedRtcpPacketSenderReport& logged_sr);
void VerifyLoggedReceiverReport(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::ReceiverReport& original_rr,
const LoggedRtcpPacketReceiverReport& logged_rr);
void VerifyLoggedExtendedReports(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::ExtendedReports& original_xr,
const LoggedRtcpPacketExtendedReports& logged_xr);
- void VerifyLoggedFir(int64_t log_time_us,
+ void VerifyLoggedFir(int64_t log_time_ms,
const rtcp::Fir& original_fir,
const LoggedRtcpPacketFir& logged_fir);
- void VerifyLoggedPli(int64_t log_time_us,
+ void VerifyLoggedPli(int64_t log_time_ms,
const rtcp::Pli& original_pli,
const LoggedRtcpPacketPli& logged_pli);
- void VerifyLoggedBye(int64_t log_time_us,
+ void VerifyLoggedBye(int64_t log_time_ms,
const rtcp::Bye& original_bye,
const LoggedRtcpPacketBye& logged_bye);
- void VerifyLoggedNack(int64_t log_time_us,
+ void VerifyLoggedNack(int64_t log_time_ms,
const rtcp::Nack& original_nack,
const LoggedRtcpPacketNack& logged_nack);
void VerifyLoggedTransportFeedback(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::TransportFeedback& original_transport_feedback,
const LoggedRtcpPacketTransportFeedback& logged_transport_feedback);
- void VerifyLoggedRemb(int64_t log_time_us,
+ void VerifyLoggedRemb(int64_t log_time_ms,
const rtcp::Remb& original_remb,
const LoggedRtcpPacketRemb& logged_remb);
void VerifyLoggedLossNotification(
- int64_t log_time_us,
+ int64_t log_time_ms,
const rtcp::LossNotification& original_loss_notification,
const LoggedRtcpPacketLossNotification& logged_loss_notification);
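Note on the signatures above: the verifiers now take the expected log time in milliseconds and compare it against log_time_ms() on the logged event, replacing the old microsecond-based check. A minimal usage sketch, assuming the verifier, the original packet and the parsed logged event already exist in a test fixture:

  // Hedged sketch, not part of the patch: the first argument is now in ms.
  int64_t expected_log_time_ms = 1234;  // whatever the fake clock read at log time
  verifier.VerifyLoggedFir(expected_log_time_ms, original_fir, logged_fir);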
diff --git a/logging/rtc_event_log/rtc_event_processor_unittest.cc b/logging/rtc_event_log/rtc_event_processor_unittest.cc
index 4ec5abee5e..b0cec25f1f 100644
--- a/logging/rtc_event_log/rtc_event_processor_unittest.cc
+++ b/logging/rtc_event_log/rtc_event_processor_unittest.cc
@@ -29,7 +29,7 @@ std::vector<LoggedStartEvent> CreateEventList(
std::initializer_list<int64_t> timestamp_list) {
std::vector<LoggedStartEvent> v;
for (int64_t timestamp_ms : timestamp_list) {
- v.emplace_back(timestamp_ms * 1000); // Convert ms to us.
+ v.emplace_back(Timestamp::Millis(timestamp_ms));
}
return v;
}
@@ -41,7 +41,7 @@ CreateRandomEventLists(size_t num_lists, size_t num_elements, uint64_t seed) {
for (size_t elem = 0; elem < num_elements; elem++) {
uint32_t i = prng.Rand(0u, num_lists - 1);
int64_t timestamp_ms = elem;
- lists[i].emplace_back(timestamp_ms * 1000);
+ lists[i].emplace_back(Timestamp::Millis(timestamp_ms));
}
return lists;
}
@@ -146,8 +146,8 @@ TEST(RtcEventProcessor, DifferentTypes) {
result.push_back(elem.log_time_ms());
};
- std::vector<LoggedStartEvent> events1{LoggedStartEvent(2000)};
- std::vector<LoggedStopEvent> events2{LoggedStopEvent(1000)};
+ std::vector<LoggedStartEvent> events1{LoggedStartEvent(Timestamp::Millis(2))};
+ std::vector<LoggedStopEvent> events2{LoggedStopEvent(Timestamp::Millis(1))};
RtcEventProcessor processor;
processor.AddEvents(events1, f1);
processor.AddEvents(events2, f2);
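With the constructor change above, event lists in the processor tests are built from webrtc::Timestamp values rather than raw microsecond integers. A minimal sketch of the new pattern (the helper name is illustrative):

  std::vector<LoggedStartEvent> MakeStartEvents(
      std::initializer_list<int64_t> times_ms) {
    std::vector<LoggedStartEvent> events;
    for (int64_t t_ms : times_ms)
      events.emplace_back(Timestamp::Millis(t_ms));  // previously: t_ms * 1000 (us)
    return events;
  }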
diff --git a/media/BUILD.gn b/media/BUILD.gn
index 29ba403fa8..30f642a25d 100644
--- a/media/BUILD.gn
+++ b/media/BUILD.gn
@@ -81,6 +81,7 @@ rtc_library("rtc_media_base") {
"../api/crypto:frame_decryptor_interface",
"../api/crypto:frame_encryptor_interface",
"../api/crypto:options",
+ "../api/transport:datagram_transport_interface",
"../api/transport:stun_types",
"../api/transport:webrtc_key_value_config",
"../api/transport/rtp:rtp_source",
@@ -105,7 +106,10 @@ rtc_library("rtc_media_base") {
"../rtc_base:stringutils",
"../rtc_base/synchronization:mutex",
"../rtc_base/system:file_wrapper",
+ "../rtc_base/system:no_unique_address",
"../rtc_base/system:rtc_export",
+ "../rtc_base/task_utils:pending_task_safety_flag",
+ "../rtc_base/task_utils:to_queued_task",
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
]
@@ -379,6 +383,7 @@ rtc_library("rtc_media_engine_defaults") {
rtc_source_set("rtc_data_sctp_transport_internal") {
sources = [ "sctp/sctp_transport_internal.h" ]
deps = [
+ "../api/transport:datagram_transport_interface",
"../media:rtc_media_base",
"../p2p:rtc_p2p",
"../rtc_base:rtc_base_approved",
@@ -387,6 +392,36 @@ rtc_source_set("rtc_data_sctp_transport_internal") {
]
}
+if (rtc_build_dcsctp) {
+ rtc_library("rtc_data_dcsctp_transport") {
+ sources = [
+ "sctp/dcsctp_transport.cc",
+ "sctp/dcsctp_transport.h",
+ ]
+ deps = [
+ ":rtc_data_sctp_transport_internal",
+ "../api:array_view",
+ "../media:rtc_media_base",
+ "../net/dcsctp/public:factory",
+ "../net/dcsctp/public:socket",
+ "../net/dcsctp/public:types",
+ "../net/dcsctp/timer:task_queue_timeout",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:rtc_base_approved",
+ "../rtc_base:threading",
+ "../rtc_base/task_utils:pending_task_safety_flag",
+ "../rtc_base/task_utils:to_queued_task",
+ "../rtc_base/third_party/sigslot:sigslot",
+ "../system_wrappers",
+ ]
+    absl_deps = [
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+}
+
if (rtc_build_usrsctp) {
rtc_library("rtc_data_usrsctp_transport") {
defines = [
@@ -426,11 +461,22 @@ rtc_library("rtc_data_sctp_transport_factory") {
":rtc_data_sctp_transport_internal",
"../api/transport:sctp_transport_factory_interface",
"../rtc_base:threading",
+ "../rtc_base/experiments:field_trial_parser",
"../rtc_base/system:unused",
]
if (rtc_enable_sctp) {
- assert(rtc_build_usrsctp, "An SCTP backend is required to enable SCTP")
+ assert(rtc_build_dcsctp || rtc_build_usrsctp,
+ "An SCTP backend is required to enable SCTP")
+ }
+
+ if (rtc_build_dcsctp) {
+ defines += [ "WEBRTC_HAVE_DCSCTP" ]
+ deps += [
+ ":rtc_data_dcsctp_transport",
+ "../system_wrappers",
+ "../system_wrappers:field_trial",
+ ]
}
if (rtc_build_usrsctp) {
@@ -576,6 +622,7 @@ if (rtc_include_tests) {
"../modules/audio_processing:api",
"../modules/audio_processing:mocks",
"../modules/rtp_rtcp",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
"../modules/video_coding:simulcast_test_fixture_impl",
"../modules/video_coding:video_codec_interface",
"../modules/video_coding:webrtc_h264",
@@ -593,6 +640,7 @@ if (rtc_include_tests) {
"../rtc_base/experiments:min_video_bitrate_experiment",
"../rtc_base/synchronization:mutex",
"../rtc_base/third_party/sigslot",
+ "../system_wrappers:field_trial",
"../test:audio_codec_mocks",
"../test:fake_video_codecs",
"../test:field_trial",
diff --git a/media/DEPS b/media/DEPS
index 5b4d9f93b5..127e3ef081 100644
--- a/media/DEPS
+++ b/media/DEPS
@@ -11,6 +11,7 @@ include_rules = [
"+modules/video_capture",
"+modules/video_coding",
"+modules/video_coding/utility",
+ "+net/dcsctp",
"+p2p",
"+sound",
"+system_wrappers",
diff --git a/media/base/codec.cc b/media/base/codec.cc
index e8a591e44b..cb6913e76a 100644
--- a/media/base/codec.cc
+++ b/media/base/codec.cc
@@ -58,18 +58,6 @@ bool IsSameCodecSpecific(const std::string& name1,
return true;
}
-bool IsCodecInList(
- const webrtc::SdpVideoFormat& format,
- const std::vector<webrtc::SdpVideoFormat>& existing_formats) {
- for (auto existing_format : existing_formats) {
- if (IsSameCodec(format.name, format.parameters, existing_format.name,
- existing_format.parameters)) {
- return true;
- }
- }
- return false;
-}
-
} // namespace
FeedbackParams::FeedbackParams() = default;
@@ -396,25 +384,6 @@ bool VideoCodec::ValidateCodecFormat() const {
return true;
}
-RtpDataCodec::RtpDataCodec(int id, const std::string& name)
- : Codec(id, name, kDataCodecClockrate) {}
-
-RtpDataCodec::RtpDataCodec() : Codec() {
- clockrate = kDataCodecClockrate;
-}
-
-RtpDataCodec::RtpDataCodec(const RtpDataCodec& c) = default;
-RtpDataCodec::RtpDataCodec(RtpDataCodec&& c) = default;
-RtpDataCodec& RtpDataCodec::operator=(const RtpDataCodec& c) = default;
-RtpDataCodec& RtpDataCodec::operator=(RtpDataCodec&& c) = default;
-
-std::string RtpDataCodec::ToString() const {
- char buf[256];
- rtc::SimpleStringBuilder sb(buf);
- sb << "RtpDataCodec[" << id << ":" << name << "]";
- return sb.str();
-}
-
bool HasLntf(const Codec& codec) {
return codec.HasFeedbackParam(
FeedbackParam(kRtcpFbParamLntf, kParamValueEmpty));
@@ -452,6 +421,8 @@ const VideoCodec* FindMatchingCodec(
return nullptr;
}
+// TODO(crbug.com/1187565): Remove once downstream projects stop using this
+// method in favor of SdpVideoFormat::IsSameCodec().
bool IsSameCodec(const std::string& name1,
const CodecParameterMap& params1,
const std::string& name2,
@@ -493,7 +464,7 @@ void AddH264ConstrainedBaselineProfileToSupportedFormats(
std::copy_if(cbr_supported_formats.begin(), cbr_supported_formats.end(),
std::back_inserter(*supported_formats),
[supported_formats](const webrtc::SdpVideoFormat& format) {
- return !IsCodecInList(format, *supported_formats);
+ return !format.IsCodecInList(*supported_formats);
});
if (supported_formats->size() > original_size) {
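The removed free helper has an equivalent on SdpVideoFormat itself (IsCodecInList(), added elsewhere in this patch), so call sites now ask the format directly. A minimal sketch; the format parameters are illustrative:

  webrtc::SdpVideoFormat h264_cbp("H264", {{"profile-level-id", "42e01f"}});
  std::vector<webrtc::SdpVideoFormat> supported =
      encoder_factory->GetSupportedFormats();  // any list of SdpVideoFormat works
  if (!h264_cbp.IsCodecInList(supported)) {
    // Not equivalent to anything the factory supports.
  }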
diff --git a/media/base/codec.h b/media/base/codec.h
index c3be2334ce..c7c99bf732 100644
--- a/media/base/codec.h
+++ b/media/base/codec.h
@@ -202,23 +202,6 @@ struct RTC_EXPORT VideoCodec : public Codec {
void SetDefaultParameters();
};
-struct RtpDataCodec : public Codec {
- RtpDataCodec(int id, const std::string& name);
- RtpDataCodec();
- RtpDataCodec(const RtpDataCodec& c);
- RtpDataCodec(RtpDataCodec&& c);
- ~RtpDataCodec() override = default;
-
- RtpDataCodec& operator=(const RtpDataCodec& c);
- RtpDataCodec& operator=(RtpDataCodec&& c);
-
- std::string ToString() const;
-};
-
-// For backwards compatibility
-// TODO(bugs.webrtc.org/10597): Remove when no longer needed.
-typedef RtpDataCodec DataCodec;
-
// Get the codec setting associated with |payload_type|. If there
// is no codec associated with that payload type it returns nullptr.
template <class Codec>
diff --git a/media/base/codec_unittest.cc b/media/base/codec_unittest.cc
index 3586760a14..23bae7b7fe 100644
--- a/media/base/codec_unittest.cc
+++ b/media/base/codec_unittest.cc
@@ -19,7 +19,6 @@
using cricket::AudioCodec;
using cricket::Codec;
-using cricket::DataCodec;
using cricket::FeedbackParam;
using cricket::kCodecParamAssociatedPayloadType;
using cricket::kCodecParamMaxBitrate;
@@ -31,7 +30,8 @@ class TestCodec : public Codec {
TestCodec(int id, const std::string& name, int clockrate)
: Codec(id, name, clockrate) {}
TestCodec() : Codec() {}
- TestCodec(const TestCodec& c) : Codec(c) {}
+ TestCodec(const TestCodec& c) = default;
+ TestCodec& operator=(const TestCodec& c) = default;
};
TEST(CodecTest, TestCodecOperators) {
@@ -303,27 +303,6 @@ TEST(CodecTest, TestH264CodecMatches) {
}
}
-TEST(CodecTest, TestDataCodecMatches) {
- // Test a codec with a static payload type.
- DataCodec c0(34, "D");
- EXPECT_TRUE(c0.Matches(DataCodec(34, "")));
- EXPECT_FALSE(c0.Matches(DataCodec(96, "D")));
- EXPECT_FALSE(c0.Matches(DataCodec(96, "")));
-
- // Test a codec with a dynamic payload type.
- DataCodec c1(96, "D");
- EXPECT_TRUE(c1.Matches(DataCodec(96, "D")));
- EXPECT_TRUE(c1.Matches(DataCodec(97, "D")));
- EXPECT_TRUE(c1.Matches(DataCodec(96, "d")));
- EXPECT_TRUE(c1.Matches(DataCodec(97, "d")));
- EXPECT_TRUE(c1.Matches(DataCodec(35, "d")));
- EXPECT_TRUE(c1.Matches(DataCodec(42, "d")));
- EXPECT_TRUE(c1.Matches(DataCodec(63, "d")));
- EXPECT_FALSE(c1.Matches(DataCodec(96, "")));
- EXPECT_FALSE(c1.Matches(DataCodec(95, "D")));
- EXPECT_FALSE(c1.Matches(DataCodec(34, "D")));
-}
-
TEST(CodecTest, TestSetParamGetParamAndRemoveParam) {
AudioCodec codec;
codec.SetParam("a", "1");
diff --git a/media/base/fake_media_engine.cc b/media/base/fake_media_engine.cc
index c96b5a4caf..aa8e2325b6 100644
--- a/media/base/fake_media_engine.cc
+++ b/media/base/fake_media_engine.cc
@@ -18,6 +18,7 @@
#include "rtc_base/checks.h"
namespace cricket {
+using webrtc::TaskQueueBase;
FakeVoiceMediaChannel::DtmfInfo::DtmfInfo(uint32_t ssrc,
int event_code,
@@ -49,8 +50,11 @@ AudioSource* FakeVoiceMediaChannel::VoiceChannelAudioSink::source() const {
}
FakeVoiceMediaChannel::FakeVoiceMediaChannel(FakeVoiceEngine* engine,
- const AudioOptions& options)
- : engine_(engine), max_bps_(-1) {
+ const AudioOptions& options,
+ TaskQueueBase* network_thread)
+ : RtpHelper<VoiceMediaChannel>(network_thread),
+ engine_(engine),
+ max_bps_(-1) {
output_scalings_[0] = 1.0; // For default channel.
SetOptions(options);
}
@@ -253,8 +257,11 @@ bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info,
}
FakeVideoMediaChannel::FakeVideoMediaChannel(FakeVideoEngine* engine,
- const VideoOptions& options)
- : engine_(engine), max_bps_(-1) {
+ const VideoOptions& options,
+ TaskQueueBase* network_thread)
+ : RtpHelper<VideoMediaChannel>(network_thread),
+ engine_(engine),
+ max_bps_(-1) {
SetOptions(options);
}
FakeVideoMediaChannel::~FakeVideoMediaChannel() {
@@ -440,7 +447,8 @@ VoiceMediaChannel* FakeVoiceEngine::CreateMediaChannel(
return nullptr;
}
- FakeVoiceMediaChannel* ch = new FakeVoiceMediaChannel(this, options);
+ FakeVoiceMediaChannel* ch =
+ new FakeVoiceMediaChannel(this, options, call->network_thread());
channels_.push_back(ch);
return ch;
}
@@ -506,7 +514,8 @@ VideoMediaChannel* FakeVideoEngine::CreateMediaChannel(
return nullptr;
}
- FakeVideoMediaChannel* ch = new FakeVideoMediaChannel(this, options);
+ FakeVideoMediaChannel* ch =
+ new FakeVideoMediaChannel(this, options, call->network_thread());
channels_.emplace_back(ch);
return ch;
}
diff --git a/media/base/fake_media_engine.h b/media/base/fake_media_engine.h
index d683b7e1d7..e4f7b6659f 100644
--- a/media/base/fake_media_engine.h
+++ b/media/base/fake_media_engine.h
@@ -11,6 +11,7 @@
#ifndef MEDIA_BASE_FAKE_MEDIA_ENGINE_H_
#define MEDIA_BASE_FAKE_MEDIA_ENGINE_H_
+#include <atomic>
#include <list>
#include <map>
#include <memory>
@@ -42,8 +43,9 @@ class FakeVoiceEngine;
template <class Base>
class RtpHelper : public Base {
public:
- RtpHelper()
- : sending_(false),
+ explicit RtpHelper(webrtc::TaskQueueBase* network_thread)
+ : Base(network_thread),
+ sending_(false),
playout_(false),
fail_set_send_codecs_(false),
fail_set_recv_codecs_(false),
@@ -283,7 +285,10 @@ class RtpHelper : public Base {
bool fail_set_recv_codecs() const { return fail_set_recv_codecs_; }
private:
- bool sending_;
+ // TODO(bugs.webrtc.org/12783): This flag is used from more than one thread.
+ // As a workaround for tsan, it's currently std::atomic but that might not
+ // be the appropriate fix.
+ std::atomic<bool> sending_;
bool playout_;
std::vector<RtpExtension> recv_extensions_;
std::vector<RtpExtension> send_extensions_;
@@ -314,8 +319,9 @@ class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
int event_code;
int duration;
};
- explicit FakeVoiceMediaChannel(FakeVoiceEngine* engine,
- const AudioOptions& options);
+ FakeVoiceMediaChannel(FakeVoiceEngine* engine,
+ const AudioOptions& options,
+ webrtc::TaskQueueBase* network_thread);
~FakeVoiceMediaChannel();
const std::vector<AudioCodec>& recv_codecs() const;
const std::vector<AudioCodec>& send_codecs() const;
@@ -406,7 +412,9 @@ bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info,
class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
public:
- FakeVideoMediaChannel(FakeVideoEngine* engine, const VideoOptions& options);
+ FakeVideoMediaChannel(FakeVideoEngine* engine,
+ const VideoOptions& options,
+ webrtc::TaskQueueBase* network_thread);
~FakeVideoMediaChannel();
diff --git a/media/base/fake_network_interface.h b/media/base/fake_network_interface.h
index 02d53f6781..4023037731 100644
--- a/media/base/fake_network_interface.h
+++ b/media/base/fake_network_interface.h
@@ -83,14 +83,12 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
return static_cast<int>(sent_ssrcs_.size());
}
- // Note: callers are responsible for deleting the returned buffer.
- const rtc::CopyOnWriteBuffer* GetRtpPacket(int index)
- RTC_LOCKS_EXCLUDED(mutex_) {
+ rtc::CopyOnWriteBuffer GetRtpPacket(int index) RTC_LOCKS_EXCLUDED(mutex_) {
webrtc::MutexLock lock(&mutex_);
if (index >= static_cast<int>(rtp_packets_.size())) {
- return NULL;
+ return {};
}
- return new rtc::CopyOnWriteBuffer(rtp_packets_[index]);
+ return rtp_packets_[index];
}
int NumRtcpPackets() RTC_LOCKS_EXCLUDED(mutex_) {
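GetRtpPacket() now returns the rtc::CopyOnWriteBuffer by value (an empty buffer for an out-of-range index), so test callers no longer own a heap allocation. A brief before/after sketch, assuming a FakeNetworkInterface instance named network:

  // Before: const rtc::CopyOnWriteBuffer* p = network.GetRtpPacket(0); ...; delete p;
  rtc::CopyOnWriteBuffer packet = network.GetRtpPacket(0);
  if (packet.size() > 0) {
    // Inspect packet.cdata() / packet.size(); copy-on-write keeps the copy cheap.
  }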
diff --git a/media/base/media_channel.cc b/media/base/media_channel.cc
index d2370d2ccf..01b043b828 100644
--- a/media/base/media_channel.cc
+++ b/media/base/media_channel.cc
@@ -10,21 +10,40 @@
#include "media/base/media_channel.h"
+#include "media/base/rtp_utils.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+
namespace cricket {
+using webrtc::FrameDecryptorInterface;
+using webrtc::FrameEncryptorInterface;
+using webrtc::FrameTransformerInterface;
+using webrtc::PendingTaskSafetyFlag;
+using webrtc::TaskQueueBase;
+using webrtc::ToQueuedTask;
+using webrtc::VideoTrackInterface;
VideoOptions::VideoOptions()
- : content_hint(webrtc::VideoTrackInterface::ContentHint::kNone) {}
+ : content_hint(VideoTrackInterface::ContentHint::kNone) {}
VideoOptions::~VideoOptions() = default;
-MediaChannel::MediaChannel(const MediaConfig& config)
- : enable_dscp_(config.enable_dscp) {}
+MediaChannel::MediaChannel(const MediaConfig& config,
+ TaskQueueBase* network_thread)
+ : enable_dscp_(config.enable_dscp),
+ network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()),
+ network_thread_(network_thread) {}
-MediaChannel::MediaChannel() : enable_dscp_(false) {}
+MediaChannel::MediaChannel(TaskQueueBase* network_thread)
+ : enable_dscp_(false),
+ network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()),
+ network_thread_(network_thread) {}
-MediaChannel::~MediaChannel() {}
+MediaChannel::~MediaChannel() {
+ RTC_DCHECK(!network_interface_);
+}
void MediaChannel::SetInterface(NetworkInterface* iface) {
- webrtc::MutexLock lock(&network_interface_mutex_);
+ RTC_DCHECK_RUN_ON(network_thread_);
+ iface ? network_safety_->SetAlive() : network_safety_->SetNotAlive();
network_interface_ = iface;
UpdateDscp();
}
@@ -35,13 +54,13 @@ int MediaChannel::GetRtpSendTimeExtnId() const {
void MediaChannel::SetFrameEncryptor(
uint32_t ssrc,
- rtc::scoped_refptr<webrtc::FrameEncryptorInterface> frame_encryptor) {
+ rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) {
// Placeholder should be pure virtual once internal supports it.
}
void MediaChannel::SetFrameDecryptor(
uint32_t ssrc,
- rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
// Placeholder should be pure virtual once internal supports it.
}
@@ -59,9 +78,8 @@ bool MediaChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet,
int MediaChannel::SetOption(NetworkInterface::SocketType type,
rtc::Socket::Option opt,
- int option)
- RTC_LOCKS_EXCLUDED(network_interface_mutex_) {
- webrtc::MutexLock lock(&network_interface_mutex_);
+ int option) {
+ RTC_DCHECK_RUN_ON(network_thread_);
return SetOptionLocked(type, opt, option);
}
@@ -79,11 +97,11 @@ bool MediaChannel::ExtmapAllowMixed() const {
void MediaChannel::SetEncoderToPacketizerFrameTransformer(
uint32_t ssrc,
- rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {}
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {}
void MediaChannel::SetDepacketizerToDecoderFrameTransformer(
uint32_t ssrc,
- rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {}
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {}
int MediaChannel::SetOptionLocked(NetworkInterface::SocketType type,
rtc::Socket::Option opt,
@@ -100,35 +118,45 @@ bool MediaChannel::DscpEnabled() const {
// This is the DSCP value used for both RTP and RTCP channels if DSCP is
// enabled. It can be changed at any time via |SetPreferredDscp|.
rtc::DiffServCodePoint MediaChannel::PreferredDscp() const {
- webrtc::MutexLock lock(&network_interface_mutex_);
+ RTC_DCHECK_RUN_ON(network_thread_);
return preferred_dscp_;
}
-int MediaChannel::SetPreferredDscp(rtc::DiffServCodePoint preferred_dscp) {
- webrtc::MutexLock lock(&network_interface_mutex_);
- if (preferred_dscp == preferred_dscp_) {
- return 0;
+void MediaChannel::SetPreferredDscp(rtc::DiffServCodePoint new_dscp) {
+ if (!network_thread_->IsCurrent()) {
+ // This is currently the common path as the derived channel classes
+ // get called on the worker thread. There are still some tests though
+ // that call directly on the network thread.
+ network_thread_->PostTask(ToQueuedTask(
+ network_safety_, [this, new_dscp]() { SetPreferredDscp(new_dscp); }));
+ return;
}
- preferred_dscp_ = preferred_dscp;
- return UpdateDscp();
+
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (new_dscp == preferred_dscp_)
+ return;
+
+ preferred_dscp_ = new_dscp;
+ UpdateDscp();
+}
+
+rtc::scoped_refptr<PendingTaskSafetyFlag> MediaChannel::network_safety() {
+ return network_safety_;
}
-int MediaChannel::UpdateDscp() {
+void MediaChannel::UpdateDscp() {
rtc::DiffServCodePoint value =
enable_dscp_ ? preferred_dscp_ : rtc::DSCP_DEFAULT;
int ret =
SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value);
- if (ret == 0) {
- ret = SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP,
- value);
- }
- return ret;
+ if (ret == 0)
+ SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, value);
}
bool MediaChannel::DoSendPacket(rtc::CopyOnWriteBuffer* packet,
bool rtcp,
const rtc::PacketOptions& options) {
- webrtc::MutexLock lock(&network_interface_mutex_);
+ RTC_DCHECK_RUN_ON(network_thread_);
if (!network_interface_)
return false;
@@ -136,6 +164,54 @@ bool MediaChannel::DoSendPacket(rtc::CopyOnWriteBuffer* packet,
: network_interface_->SendRtcp(packet, options);
}
+void MediaChannel::SendRtp(const uint8_t* data,
+ size_t len,
+ const webrtc::PacketOptions& options) {
+ auto send =
+ [this, packet_id = options.packet_id,
+ included_in_feedback = options.included_in_feedback,
+ included_in_allocation = options.included_in_allocation,
+ packet = rtc::CopyOnWriteBuffer(data, len, kMaxRtpPacketLen)]() mutable {
+ rtc::PacketOptions rtc_options;
+ rtc_options.packet_id = packet_id;
+ if (DscpEnabled()) {
+ rtc_options.dscp = PreferredDscp();
+ }
+ rtc_options.info_signaled_after_sent.included_in_feedback =
+ included_in_feedback;
+ rtc_options.info_signaled_after_sent.included_in_allocation =
+ included_in_allocation;
+ SendPacket(&packet, rtc_options);
+ };
+
+ // TODO(bugs.webrtc.org/11993): ModuleRtpRtcpImpl2 and related classes (e.g.
+ // RTCPSender) aren't aware of the network thread and may trigger calls to
+ // this function from different threads. Update those classes to keep
+ // network traffic on the network thread.
+ if (network_thread_->IsCurrent()) {
+ send();
+ } else {
+ network_thread_->PostTask(ToQueuedTask(network_safety_, std::move(send)));
+ }
+}
+
+void MediaChannel::SendRtcp(const uint8_t* data, size_t len) {
+ auto send = [this, packet = rtc::CopyOnWriteBuffer(
+ data, len, kMaxRtpPacketLen)]() mutable {
+ rtc::PacketOptions rtc_options;
+ if (DscpEnabled()) {
+ rtc_options.dscp = PreferredDscp();
+ }
+ SendRtcp(&packet, rtc_options);
+ };
+
+ if (network_thread_->IsCurrent()) {
+ send();
+ } else {
+ network_thread_->PostTask(ToQueuedTask(network_safety_, std::move(send)));
+ }
+}
+
MediaSenderInfo::MediaSenderInfo() = default;
MediaSenderInfo::~MediaSenderInfo() = default;
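MediaChannel now relies on a network thread instead of the removed mutex: network_interface_ and preferred_dscp_ are RTC_GUARDED_BY(network_thread_), and calls landing on other threads are re-posted with ToQueuedTask bound to the PendingTaskSafetyFlag, so a queued task becomes a no-op once the flag is marked not-alive (see SetInterface above). A reduced sketch of the same pattern used by SetPreferredDscp and SendRtp, with assumed members thread_ and safety_ standing in for the ones added here:

  // Illustrative members: webrtc::TaskQueueBase* thread_ and a
  // rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety_.
  void MyObject::TouchNetworkState(int value) {
    if (!thread_->IsCurrent()) {
      thread_->PostTask(webrtc::ToQueuedTask(
          safety_, [this, value] { TouchNetworkState(value); }));
      return;
    }
    RTC_DCHECK_RUN_ON(thread_);
    // Members guarded by thread_ can be touched here.
  }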
diff --git a/media/base/media_channel.h b/media/base/media_channel.h
index 8a67c2a6e5..a4a925e912 100644
--- a/media/base/media_channel.h
+++ b/media/base/media_channel.h
@@ -26,6 +26,7 @@
#include "api/media_stream_interface.h"
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
+#include "api/transport/data_channel_transport_interface.h"
#include "api/transport/rtp/rtp_source.h"
#include "api/video/video_content_type.h"
#include "api/video/video_sink_interface.h"
@@ -50,7 +51,7 @@
#include "rtc_base/socket.h"
#include "rtc_base/string_encode.h"
#include "rtc_base/strings/string_builder.h"
-#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
namespace rtc {
class Timing;
@@ -168,15 +169,15 @@ class MediaChannel {
virtual ~NetworkInterface() {}
};
- explicit MediaChannel(const MediaConfig& config);
- MediaChannel();
+ MediaChannel(const MediaConfig& config,
+ webrtc::TaskQueueBase* network_thread);
+ explicit MediaChannel(webrtc::TaskQueueBase* network_thread);
virtual ~MediaChannel();
virtual cricket::MediaType media_type() const = 0;
// Sets the abstract interface class for sending RTP/RTCP data.
- virtual void SetInterface(NetworkInterface* iface)
- RTC_LOCKS_EXCLUDED(network_interface_mutex_);
+ virtual void SetInterface(NetworkInterface* iface);
// Called on the network when an RTP packet is received.
virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) = 0;
@@ -248,7 +249,7 @@ class MediaChannel {
int SetOption(NetworkInterface::SocketType type,
rtc::Socket::Option opt,
- int option) RTC_LOCKS_EXCLUDED(network_interface_mutex_);
+ int option);
// Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285.
// Set to true if it's allowed to mix one- and two-byte RTP header extensions
@@ -272,38 +273,42 @@ class MediaChannel {
protected:
int SetOptionLocked(NetworkInterface::SocketType type,
rtc::Socket::Option opt,
- int option)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_mutex_);
+ int option) RTC_RUN_ON(network_thread_);
bool DscpEnabled() const;
// This is the DSCP value used for both RTP and RTCP channels if DSCP is
// enabled. It can be changed at any time via |SetPreferredDscp|.
- rtc::DiffServCodePoint PreferredDscp() const
- RTC_LOCKS_EXCLUDED(network_interface_mutex_);
+ rtc::DiffServCodePoint PreferredDscp() const;
+ void SetPreferredDscp(rtc::DiffServCodePoint new_dscp);
- int SetPreferredDscp(rtc::DiffServCodePoint preferred_dscp)
- RTC_LOCKS_EXCLUDED(network_interface_mutex_);
+ rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_safety();
+
+ // Utility implementation for derived classes (video/voice) that applies
+ // the packet options and passes the data onwards to `SendPacket`.
+ void SendRtp(const uint8_t* data,
+ size_t len,
+ const webrtc::PacketOptions& options);
+
+ void SendRtcp(const uint8_t* data, size_t len);
private:
// Apply the preferred DSCP setting to the underlying network interface RTP
// and RTCP channels. If DSCP is disabled, then apply the default DSCP value.
- int UpdateDscp() RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_mutex_);
+ void UpdateDscp() RTC_RUN_ON(network_thread_);
bool DoSendPacket(rtc::CopyOnWriteBuffer* packet,
bool rtcp,
- const rtc::PacketOptions& options)
- RTC_LOCKS_EXCLUDED(network_interface_mutex_);
+ const rtc::PacketOptions& options);
const bool enable_dscp_;
- // |network_interface_| can be accessed from the worker_thread and
- // from any MediaEngine threads. This critical section is to protect accessing
- // of network_interface_ object.
- mutable webrtc::Mutex network_interface_mutex_;
- NetworkInterface* network_interface_
- RTC_GUARDED_BY(network_interface_mutex_) = nullptr;
- rtc::DiffServCodePoint preferred_dscp_
- RTC_GUARDED_BY(network_interface_mutex_) = rtc::DSCP_DEFAULT;
+ const rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_safety_
+ RTC_PT_GUARDED_BY(network_thread_);
+ webrtc::TaskQueueBase* const network_thread_;
+ NetworkInterface* network_interface_ RTC_GUARDED_BY(network_thread_) =
+ nullptr;
+ rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) =
+ rtc::DSCP_DEFAULT;
bool extmap_allow_mixed_ = false;
};
@@ -761,9 +766,11 @@ struct AudioRecvParameters : RtpParameters<AudioCodec> {};
class VoiceMediaChannel : public MediaChannel, public Delayable {
public:
- VoiceMediaChannel() {}
- explicit VoiceMediaChannel(const MediaConfig& config)
- : MediaChannel(config) {}
+ explicit VoiceMediaChannel(webrtc::TaskQueueBase* network_thread)
+ : MediaChannel(network_thread) {}
+ VoiceMediaChannel(const MediaConfig& config,
+ webrtc::TaskQueueBase* network_thread)
+ : MediaChannel(config, network_thread) {}
~VoiceMediaChannel() override {}
cricket::MediaType media_type() const override;
@@ -831,9 +838,11 @@ struct VideoRecvParameters : RtpParameters<VideoCodec> {};
class VideoMediaChannel : public MediaChannel, public Delayable {
public:
- VideoMediaChannel() {}
- explicit VideoMediaChannel(const MediaConfig& config)
- : MediaChannel(config) {}
+ explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread)
+ : MediaChannel(network_thread) {}
+ VideoMediaChannel(const MediaConfig& config,
+ webrtc::TaskQueueBase* network_thread)
+ : MediaChannel(config, network_thread) {}
~VideoMediaChannel() override {}
cricket::MediaType media_type() const override;
@@ -884,15 +893,6 @@ class VideoMediaChannel : public MediaChannel, public Delayable {
virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
};
-enum DataMessageType {
- // Chrome-Internal use only. See SctpDataMediaChannel for the actual PPID
- // values.
- DMT_NONE = 0,
- DMT_CONTROL = 1,
- DMT_BINARY = 2,
- DMT_TEXT = 3,
-};
-
// Info about data received in DataMediaChannel. For use in
// DataMediaChannel::SignalDataReceived and in all of the signals that
// signal fires, on up the chain.
@@ -901,34 +901,11 @@ struct ReceiveDataParams {
// SCTP data channels use SIDs.
int sid = 0;
// The type of message (binary, text, or control).
- DataMessageType type = DMT_TEXT;
+ webrtc::DataMessageType type = webrtc::DataMessageType::kText;
// A per-stream value incremented per packet in the stream.
int seq_num = 0;
};
-struct SendDataParams {
- // The in-packet stream indentifier.
- int sid = 0;
- // The type of message (binary, text, or control).
- DataMessageType type = DMT_TEXT;
-
- // TODO(pthatcher): Make |ordered| and |reliable| true by default?
- // For SCTP, whether to send messages flagged as ordered or not.
- // If false, messages can be received out of order.
- bool ordered = false;
- // For SCTP, whether the messages are sent reliably or not.
- // If false, messages may be lost.
- bool reliable = false;
- // For SCTP, if reliable == false, provide partial reliability by
- // resending up to this many times. Either count or millis
- // is supported, not both at the same time.
- int max_rtx_count = 0;
- // For SCTP, if reliable == false, provide partial reliability by
- // resending for up to this many milliseconds. Either count or millis
- // is supported, not both at the same time.
- int max_rtx_ms = 0;
-};
-
enum SendDataResult { SDR_SUCCESS, SDR_ERROR, SDR_BLOCK };
} // namespace cricket
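With SendDataParams and the cricket-local DataMessageType enum removed, ReceiveDataParams now carries webrtc::DataMessageType from the data_channel_transport_interface.h header included above. A small illustrative sketch:

  cricket::ReceiveDataParams params;
  params.sid = 1;                                  // SCTP stream id
  params.type = webrtc::DataMessageType::kBinary;  // was cricket::DMT_BINARY
  params.seq_num = 42;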
diff --git a/media/base/media_constants.cc b/media/base/media_constants.cc
index 2ac382510b..17a8a83bd0 100644
--- a/media/base/media_constants.cc
+++ b/media/base/media_constants.cc
@@ -13,8 +13,6 @@
namespace cricket {
const int kVideoCodecClockrate = 90000;
-const int kDataCodecClockrate = 90000;
-const int kRtpDataMaxBandwidth = 30720; // bps
const int kVideoMtu = 1200;
const int kVideoRtpSendBufferSize = 65536;
@@ -97,9 +95,6 @@ const char kCodecParamMinBitrate[] = "x-google-min-bitrate";
const char kCodecParamStartBitrate[] = "x-google-start-bitrate";
const char kCodecParamMaxQuantization[] = "x-google-max-quantization";
-const int kGoogleRtpDataCodecPlType = 109;
-const char kGoogleRtpDataCodecName[] = "google-data";
-
const char kComfortNoiseCodecName[] = "CN";
const char kVp8CodecName[] = "VP8";
diff --git a/media/base/media_constants.h b/media/base/media_constants.h
index 16b97caacb..bf7f0c3047 100644
--- a/media/base/media_constants.h
+++ b/media/base/media_constants.h
@@ -20,8 +20,6 @@
namespace cricket {
extern const int kVideoCodecClockrate;
-extern const int kDataCodecClockrate;
-extern const int kRtpDataMaxBandwidth; // bps
extern const int kVideoMtu;
extern const int kVideoRtpSendBufferSize;
@@ -119,12 +117,6 @@ extern const char kCodecParamMinBitrate[];
extern const char kCodecParamStartBitrate[];
extern const char kCodecParamMaxQuantization[];
-// We put the data codec names here so callers of DataEngine::CreateChannel
-// don't have to import rtpdataengine.h to get the codec names they want to
-// pass in.
-extern const int kGoogleRtpDataCodecPlType;
-extern const char kGoogleRtpDataCodecName[];
-
extern const char kComfortNoiseCodecName[];
RTC_EXPORT extern const char kVp8CodecName[];
diff --git a/media/base/turn_utils.h b/media/base/turn_utils.h
index ed8e282ba7..82e492c028 100644
--- a/media/base/turn_utils.h
+++ b/media/base/turn_utils.h
@@ -18,8 +18,6 @@
namespace cricket {
-struct PacketOptions;
-
// Finds data location within a TURN Channel Message or TURN Send Indication
// message.
bool RTC_EXPORT UnwrapTurnPacket(const uint8_t* packet,
diff --git a/media/base/video_source_base.cc b/media/base/video_source_base.cc
index d057a24ad8..2454902069 100644
--- a/media/base/video_source_base.cc
+++ b/media/base/video_source_base.cc
@@ -10,6 +10,8 @@
#include "media/base/video_source_base.h"
+#include <algorithm>
+
#include "absl/algorithm/container.h"
#include "rtc_base/checks.h"
@@ -52,4 +54,51 @@ VideoSourceBase::SinkPair* VideoSourceBase::FindSinkPair(
return nullptr;
}
+VideoSourceBaseGuarded::VideoSourceBaseGuarded() = default;
+VideoSourceBaseGuarded::~VideoSourceBaseGuarded() = default;
+
+void VideoSourceBaseGuarded::AddOrUpdateSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) {
+ RTC_DCHECK_RUN_ON(&source_sequence_);
+ RTC_DCHECK(sink != nullptr);
+
+ SinkPair* sink_pair = FindSinkPair(sink);
+ if (!sink_pair) {
+ sinks_.push_back(SinkPair(sink, wants));
+ } else {
+ sink_pair->wants = wants;
+ }
+}
+
+void VideoSourceBaseGuarded::RemoveSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK_RUN_ON(&source_sequence_);
+ RTC_DCHECK(sink != nullptr);
+ RTC_DCHECK(FindSinkPair(sink));
+ sinks_.erase(std::remove_if(sinks_.begin(), sinks_.end(),
+ [sink](const SinkPair& sink_pair) {
+ return sink_pair.sink == sink;
+ }),
+ sinks_.end());
+}
+
+VideoSourceBaseGuarded::SinkPair* VideoSourceBaseGuarded::FindSinkPair(
+ const VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK_RUN_ON(&source_sequence_);
+ auto sink_pair_it = absl::c_find_if(
+ sinks_,
+ [sink](const SinkPair& sink_pair) { return sink_pair.sink == sink; });
+ if (sink_pair_it != sinks_.end()) {
+ return &*sink_pair_it;
+ }
+ return nullptr;
+}
+
+const std::vector<VideoSourceBaseGuarded::SinkPair>&
+VideoSourceBaseGuarded::sink_pairs() const {
+ RTC_DCHECK_RUN_ON(&source_sequence_);
+ return sinks_;
+}
+
} // namespace rtc
diff --git a/media/base/video_source_base.h b/media/base/video_source_base.h
index 59b7dab164..2644723aa7 100644
--- a/media/base/video_source_base.h
+++ b/media/base/video_source_base.h
@@ -17,10 +17,14 @@
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "api/video/video_source_interface.h"
+#include "rtc_base/system/no_unique_address.h"
namespace rtc {
-// VideoSourceBase is not thread safe.
+// VideoSourceBase is not thread safe. Before using this class, consider using
+// VideoSourceBaseGuarded below instead, which is an identical implementation
+// but applies a sequence checker to help protect internal state.
+// TODO(bugs.webrtc.org/12780): Delete this class.
class VideoSourceBase : public VideoSourceInterface<webrtc::VideoFrame> {
public:
VideoSourceBase();
@@ -44,6 +48,36 @@ class VideoSourceBase : public VideoSourceInterface<webrtc::VideoFrame> {
std::vector<SinkPair> sinks_;
};
+// VideoSourceBaseGuarded assumes that operations related to sinks occur on the
+// same TQ/thread that the object was constructed on.
+class VideoSourceBaseGuarded : public VideoSourceInterface<webrtc::VideoFrame> {
+ public:
+ VideoSourceBaseGuarded();
+ ~VideoSourceBaseGuarded() override;
+
+ void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) override;
+ void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ protected:
+ struct SinkPair {
+ SinkPair(VideoSinkInterface<webrtc::VideoFrame>* sink, VideoSinkWants wants)
+ : sink(sink), wants(wants) {}
+ VideoSinkInterface<webrtc::VideoFrame>* sink;
+ VideoSinkWants wants;
+ };
+
+ SinkPair* FindSinkPair(const VideoSinkInterface<webrtc::VideoFrame>* sink);
+ const std::vector<SinkPair>& sink_pairs() const;
+
+  // Keep the `source_sequence_` checker protected to allow subclasses the
+ // ability to call Detach() if/when appropriate.
+ RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker source_sequence_;
+
+ private:
+ std::vector<SinkPair> sinks_ RTC_GUARDED_BY(&source_sequence_);
+};
+
} // namespace rtc
#endif // MEDIA_BASE_VIDEO_SOURCE_BASE_H_
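VideoSourceBaseGuarded pins all sink bookkeeping to the sequence it was constructed on through the SequenceChecker member. A minimal sketch of a derived source; the class name and frame-delivery method are illustrative:

  class FrameSource : public rtc::VideoSourceBaseGuarded {
   public:
    // Must run on the construction sequence, like AddOrUpdateSink/RemoveSink.
    void DeliverFrame(const webrtc::VideoFrame& frame) {
      for (const auto& pair : sink_pairs())  // sequence-checked accessor
        pair.sink->OnFrame(frame);
    }
  };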
diff --git a/media/engine/fake_webrtc_call.cc b/media/engine/fake_webrtc_call.cc
index 76a70aaa57..e8c7f6e0c9 100644
--- a/media/engine/fake_webrtc_call.cc
+++ b/media/engine/fake_webrtc_call.cc
@@ -96,9 +96,31 @@ bool FakeAudioReceiveStream::DeliverRtp(const uint8_t* packet,
return true;
}
-void FakeAudioReceiveStream::Reconfigure(
- const webrtc::AudioReceiveStream::Config& config) {
- config_ = config;
+void FakeAudioReceiveStream::SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {
+ config_.frame_transformer = std::move(frame_transformer);
+}
+
+void FakeAudioReceiveStream::SetDecoderMap(
+ std::map<int, webrtc::SdpAudioFormat> decoder_map) {
+ config_.decoder_map = std::move(decoder_map);
+}
+
+void FakeAudioReceiveStream::SetUseTransportCcAndNackHistory(
+ bool use_transport_cc,
+ int history_ms) {
+ config_.rtp.transport_cc = use_transport_cc;
+ config_.rtp.nack.rtp_history_ms = history_ms;
+}
+
+void FakeAudioReceiveStream::SetFrameDecryptor(
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
+ config_.frame_decryptor = std::move(frame_decryptor);
+}
+
+void FakeAudioReceiveStream::SetRtpExtensions(
+ std::vector<webrtc::RtpExtension> extensions) {
+ config_.rtp.extensions = std::move(extensions);
}
webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats(
@@ -646,6 +668,18 @@ void FakeCall::SignalChannelNetworkState(webrtc::MediaType media,
void FakeCall::OnAudioTransportOverheadChanged(
int transport_overhead_per_packet) {}
+void FakeCall::OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream,
+ uint32_t local_ssrc) {
+ auto& fake_stream = static_cast<FakeAudioReceiveStream&>(stream);
+ fake_stream.SetLocalSsrc(local_ssrc);
+}
+
+void FakeCall::OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream,
+ const std::string& sync_group) {
+ auto& fake_stream = static_cast<FakeAudioReceiveStream&>(stream);
+ fake_stream.SetSyncGroup(sync_group);
+}
+
void FakeCall::OnSentPacket(const rtc::SentPacket& sent_packet) {
last_sent_packet_ = sent_packet;
if (sent_packet.packet_id >= 0) {
diff --git a/media/engine/fake_webrtc_call.h b/media/engine/fake_webrtc_call.h
index fd383dadd1..20e65d45f4 100644
--- a/media/engine/fake_webrtc_call.h
+++ b/media/engine/fake_webrtc_call.h
@@ -100,12 +100,31 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream {
return base_mininum_playout_delay_ms_;
}
+ void SetLocalSsrc(uint32_t local_ssrc) {
+ config_.rtp.local_ssrc = local_ssrc;
+ }
+
+ void SetSyncGroup(const std::string& sync_group) {
+ config_.sync_group = sync_group;
+ }
+
private:
- // webrtc::AudioReceiveStream implementation.
- void Reconfigure(const webrtc::AudioReceiveStream::Config& config) override;
+ const webrtc::ReceiveStream::RtpConfig& rtp_config() const override {
+ return config_.rtp;
+ }
void Start() override { started_ = true; }
void Stop() override { started_ = false; }
bool IsRunning() const override { return started_; }
+ void SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+ override;
+ void SetDecoderMap(
+ std::map<int, webrtc::SdpAudioFormat> decoder_map) override;
+ void SetUseTransportCcAndNackHistory(bool use_transport_cc,
+ int history_ms) override;
+ void SetFrameDecryptor(rtc::scoped_refptr<webrtc::FrameDecryptorInterface>
+ frame_decryptor) override;
+ void SetRtpExtensions(std::vector<webrtc::RtpExtension> extensions) override;
webrtc::AudioReceiveStream::Stats GetStats(
bool get_and_clear_legacy_stats) const override;
@@ -243,6 +262,9 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream {
private:
// webrtc::VideoReceiveStream implementation.
+ const webrtc::ReceiveStream::RtpConfig& rtp_config() const override {
+ return config_.rtp;
+ }
void Start() override;
void Stop() override;
@@ -269,7 +291,11 @@ class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream {
explicit FakeFlexfecReceiveStream(
const webrtc::FlexfecReceiveStream::Config& config);
- const webrtc::FlexfecReceiveStream::Config& GetConfig() const override;
+ const webrtc::ReceiveStream::RtpConfig& rtp_config() const override {
+ return config_.rtp;
+ }
+
+ const webrtc::FlexfecReceiveStream::Config& GetConfig() const;
private:
webrtc::FlexfecReceiveStream::Stats GetStats() const override;
@@ -373,6 +399,10 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
webrtc::NetworkState state) override;
void OnAudioTransportOverheadChanged(
int transport_overhead_per_packet) override;
+ void OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream,
+ uint32_t local_ssrc) override;
+ void OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream,
+ const std::string& sync_group) override;
void OnSentPacket(const rtc::SentPacket& sent_packet) override;
webrtc::TaskQueueBase* const network_thread_;
diff --git a/media/engine/internal_decoder_factory.cc b/media/engine/internal_decoder_factory.cc
index 1c084846a2..a8d1f00009 100644
--- a/media/engine/internal_decoder_factory.cc
+++ b/media/engine/internal_decoder_factory.cc
@@ -23,23 +23,6 @@
namespace webrtc {
-namespace {
-
-bool IsFormatSupported(
- const std::vector<webrtc::SdpVideoFormat>& supported_formats,
- const webrtc::SdpVideoFormat& format) {
- for (const webrtc::SdpVideoFormat& supported_format : supported_formats) {
- if (cricket::IsSameCodec(format.name, format.parameters,
- supported_format.name,
- supported_format.parameters)) {
- return true;
- }
- }
- return false;
-}
-
-} // namespace
-
std::vector<SdpVideoFormat> InternalDecoderFactory::GetSupportedFormats()
const {
std::vector<SdpVideoFormat> formats;
@@ -55,7 +38,7 @@ std::vector<SdpVideoFormat> InternalDecoderFactory::GetSupportedFormats()
std::unique_ptr<VideoDecoder> InternalDecoderFactory::CreateVideoDecoder(
const SdpVideoFormat& format) {
- if (!IsFormatSupported(GetSupportedFormats(), format)) {
+ if (!format.IsCodecInList(GetSupportedFormats())) {
RTC_LOG(LS_WARNING) << "Trying to create decoder for unsupported format. "
<< format.ToString();
return nullptr;
diff --git a/media/engine/payload_type_mapper.cc b/media/engine/payload_type_mapper.cc
index 8c85f02fe8..cbc0a5340d 100644
--- a/media/engine/payload_type_mapper.cc
+++ b/media/engine/payload_type_mapper.cc
@@ -61,7 +61,6 @@ PayloadTypeMapper::PayloadTypeMapper()
// Payload type assignments currently used by WebRTC.
// Includes data to reduce collisions (and thus reassignments)
- {{kGoogleRtpDataCodecName, 0, 0}, kGoogleRtpDataCodecPlType},
{{kIlbcCodecName, 8000, 1}, 102},
{{kIsacCodecName, 16000, 1}, 103},
{{kIsacCodecName, 32000, 1}, 104},
@@ -73,6 +72,8 @@ PayloadTypeMapper::PayloadTypeMapper()
{{kCodecParamMinPTime, "10"},
{kCodecParamUseInbandFec, kParamValueTrue}}},
111},
+ // RED for opus is assigned in the lower range, starting at the top.
+ {{kRedCodecName, 48000, 2}, 63},
// TODO(solenberg): Remove the hard coded 16k,32k,48k DTMF once we
// assign payload types dynamically for send side as well.
{{kDtmfCodecName, 48000, 1}, 110},
diff --git a/media/engine/payload_type_mapper_unittest.cc b/media/engine/payload_type_mapper_unittest.cc
index fa6864b48a..9c29827fa9 100644
--- a/media/engine/payload_type_mapper_unittest.cc
+++ b/media/engine/payload_type_mapper_unittest.cc
@@ -46,13 +46,8 @@ TEST_F(PayloadTypeMapperTest, StaticPayloadTypes) {
}
TEST_F(PayloadTypeMapperTest, WebRTCPayloadTypes) {
- // Tests that the payload mapper knows about the audio and data formats we've
+ // Tests that the payload mapper knows about the audio formats we've
// been using in WebRTC, with their hard coded values.
- auto data_mapping = [this](const char* name) {
- return mapper_.FindMappingFor({name, 0, 0});
- };
- EXPECT_EQ(kGoogleRtpDataCodecPlType, data_mapping(kGoogleRtpDataCodecName));
-
EXPECT_EQ(102, mapper_.FindMappingFor({kIlbcCodecName, 8000, 1}));
EXPECT_EQ(103, mapper_.FindMappingFor({kIsacCodecName, 16000, 1}));
EXPECT_EQ(104, mapper_.FindMappingFor({kIsacCodecName, 32000, 1}));
@@ -63,6 +58,7 @@ TEST_F(PayloadTypeMapperTest, WebRTCPayloadTypes) {
48000,
2,
{{"minptime", "10"}, {"useinbandfec", "1"}}}));
+ EXPECT_EQ(63, mapper_.FindMappingFor({kRedCodecName, 48000, 2}));
// TODO(solenberg): Remove 16k, 32k, 48k DTMF checks once these payload types
// are dynamically assigned.
EXPECT_EQ(110, mapper_.FindMappingFor({kDtmfCodecName, 48000, 1}));
diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc
index bee7b23c4e..3af022ab17 100644
--- a/media/engine/simulcast_encoder_adapter.cc
+++ b/media/engine/simulcast_encoder_adapter.cc
@@ -344,20 +344,24 @@ int SimulcastEncoderAdapter::InitEncode(
// Two distinct scenarios:
// * Singlecast (total_streams_count == 1) or simulcast with simulcast-capable
- // underlaying encoder implementation. SEA operates in bypass mode: original
- // settings are passed to the underlaying encoder, frame encode complete
- // callback is not intercepted.
+  //   underlying encoder implementation if active_streams_count > 1. SEA
+  //   operates in bypass mode: original settings are passed to the underlying
+ // encoder, frame encode complete callback is not intercepted.
// * Multi-encoder simulcast or singlecast if layers are deactivated
- // (total_streams_count > 1 and active_streams_count >= 1). SEA creates
- // N=active_streams_count encoders and configures each to produce a single
- // stream.
-
+ // (active_streams_count >= 1). SEA creates N=active_streams_count encoders
+ // and configures each to produce a single stream.
+
+ int active_streams_count = CountActiveStreams(*inst);
+ // If we only have a single active layer it is better to create an encoder
+ // with only one configured layer than creating it with all-but-one disabled
+ // layers because that way we control scaling.
+ bool separate_encoders_needed =
+ !encoder_context->encoder().GetEncoderInfo().supports_simulcast ||
+ active_streams_count == 1;
  // Singlecast or simulcast with simulcast-capable underlying encoder.
- if (total_streams_count_ == 1 ||
- encoder_context->encoder().GetEncoderInfo().supports_simulcast) {
+ if (total_streams_count_ == 1 || !separate_encoders_needed) {
int ret = encoder_context->encoder().InitEncode(&codec_, settings);
if (ret >= 0) {
- int active_streams_count = CountActiveStreams(*inst);
stream_contexts_.emplace_back(
/*parent=*/nullptr, std::move(encoder_context),
/*framerate_controller=*/nullptr, /*stream_idx=*/0, codec_.width,
diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc
index b90f2fc416..a74a2c3785 100644
--- a/media/engine/simulcast_encoder_adapter_unittest.cc
+++ b/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -1006,8 +1006,8 @@ TEST_F(TestSimulcastEncoderAdapterFake,
EXPECT_TRUE(adapter_->GetEncoderInfo().supports_native_handle);
rtc::scoped_refptr<VideoFrameBuffer> buffer(
- new rtc::RefCountedObject<FakeNativeBufferI420>(1280, 720,
- /*allow_to_i420=*/false));
+ rtc::make_ref_counted<FakeNativeBufferI420>(1280, 720,
+ /*allow_to_i420=*/false));
VideoFrame input_frame = VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_timestamp_rtp(100)
@@ -1043,8 +1043,8 @@ TEST_F(TestSimulcastEncoderAdapterFake, NativeHandleForwardingOnlyIfSupported) {
EXPECT_TRUE(adapter_->GetEncoderInfo().supports_native_handle);
rtc::scoped_refptr<VideoFrameBuffer> buffer(
- new rtc::RefCountedObject<FakeNativeBufferI420>(1280, 720,
- /*allow_to_i420=*/true));
+ rtc::make_ref_counted<FakeNativeBufferI420>(1280, 720,
+ /*allow_to_i420=*/true));
VideoFrame input_frame = VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_timestamp_rtp(100)
diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc
index cc2a181bbf..897aa77ac5 100644
--- a/media/engine/webrtc_video_engine.cc
+++ b/media/engine/webrtc_video_engine.cc
@@ -39,7 +39,6 @@
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/strings/string_builder.h"
-#include "rtc_base/thread.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
@@ -48,6 +47,7 @@ namespace cricket {
namespace {
const int kMinLayerSize = 16;
+constexpr int64_t kUnsignaledSsrcCooldownMs = rtc::kNumMillisecsPerSec / 2;
const char* StreamTypeToString(
webrtc::VideoSendStream::StreamStats::StreamType type) {
@@ -106,10 +106,10 @@ void AddDefaultFeedbackParams(VideoCodec* codec,
}
}
-// This function will assign dynamic payload types (in the range [96, 127]) to
-// the input codecs, and also add ULPFEC, RED, FlexFEC, and associated RTX
-// codecs for recognized codecs (VP8, VP9, H264, and RED). It will also add
-// default feedback params to the codecs.
+// This function will assign dynamic payload types (in the range [96, 127]
+// and then [35, 63]) to the input codecs, and also add ULPFEC, RED, FlexFEC,
+// and associated RTX codecs for recognized codecs (VP8, VP9, H264, and RED).
+// It will also add default feedback params to the codecs.
 // is_decoder_factory is needed to keep track of the implicit assumption that any
 // H264 decoder also supports constrained baseline profile.
// Also, is_decoder_factory is used to decide whether FlexFEC video format
@@ -134,16 +134,6 @@ std::vector<VideoCodec> GetPayloadTypesAndDefaultCodecs(
if (supported_formats.empty())
return std::vector<VideoCodec>();
- // Due to interoperability issues with old Chrome/WebRTC versions only use
- // the lower range for new codecs.
- static const int kFirstDynamicPayloadTypeLowerRange = 35;
- static const int kLastDynamicPayloadTypeLowerRange = 65;
-
- static const int kFirstDynamicPayloadTypeUpperRange = 96;
- static const int kLastDynamicPayloadTypeUpperRange = 127;
- int payload_type_upper = kFirstDynamicPayloadTypeUpperRange;
- int payload_type_lower = kFirstDynamicPayloadTypeLowerRange;
-
supported_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName));
supported_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName));
@@ -163,60 +153,65 @@ std::vector<VideoCodec> GetPayloadTypesAndDefaultCodecs(
supported_formats.push_back(flexfec_format);
}
+ // Due to interoperability issues with old Chrome/WebRTC versions that
+  // ignore the [35, 63] range, prefer the lower range for new codecs.
+ static const int kFirstDynamicPayloadTypeLowerRange = 35;
+ static const int kLastDynamicPayloadTypeLowerRange = 63;
+
+ static const int kFirstDynamicPayloadTypeUpperRange = 96;
+ static const int kLastDynamicPayloadTypeUpperRange = 127;
+ int payload_type_upper = kFirstDynamicPayloadTypeUpperRange;
+ int payload_type_lower = kFirstDynamicPayloadTypeLowerRange;
+
std::vector<VideoCodec> output_codecs;
for (const webrtc::SdpVideoFormat& format : supported_formats) {
VideoCodec codec(format);
bool isCodecValidForLowerRange =
absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName) ||
absl::EqualsIgnoreCase(codec.name, kAv1CodecName);
- if (!isCodecValidForLowerRange) {
- codec.id = payload_type_upper++;
- } else {
- codec.id = payload_type_lower++;
- }
- AddDefaultFeedbackParams(&codec, trials);
- output_codecs.push_back(codec);
+ bool isFecCodec = absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName) ||
+ absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName);
- if (payload_type_upper > kLastDynamicPayloadTypeUpperRange) {
- RTC_LOG(LS_ERROR)
- << "Out of dynamic payload types [96,127], skipping the rest.";
- // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12194):
- // continue in lower range.
- break;
- }
+ // Check if we ran out of payload types.
if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) {
// TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248):
// return an error.
- RTC_LOG(LS_ERROR)
- << "Out of dynamic payload types [35,65], skipping the rest.";
+ RTC_LOG(LS_ERROR) << "Out of dynamic payload types [35,63] after "
+ "fallback from [96, 127], skipping the rest.";
+ RTC_DCHECK_EQ(payload_type_upper, kLastDynamicPayloadTypeUpperRange);
break;
}
- // Add associated RTX codec for non-FEC codecs.
- if (!absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName) &&
- !absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName)) {
- if (!isCodecValidForLowerRange) {
- output_codecs.push_back(
- VideoCodec::CreateRtxCodec(payload_type_upper++, codec.id));
- } else {
- output_codecs.push_back(
- VideoCodec::CreateRtxCodec(payload_type_lower++, codec.id));
- }
+ // Lower range gets used for "new" codecs or when running out of payload
+ // types in the upper range.
+ if (isCodecValidForLowerRange ||
+ payload_type_upper >= kLastDynamicPayloadTypeUpperRange) {
+ codec.id = payload_type_lower++;
+ } else {
+ codec.id = payload_type_upper++;
+ }
+ AddDefaultFeedbackParams(&codec, trials);
+ output_codecs.push_back(codec);
- if (payload_type_upper > kLastDynamicPayloadTypeUpperRange) {
- RTC_LOG(LS_ERROR)
- << "Out of dynamic payload types [96,127], skipping rtx.";
- // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12194):
- // continue in lower range.
- break;
- }
+ // Add associated RTX codec for non-FEC codecs.
+ if (!isFecCodec) {
+ // Check if we ran out of payload types.
if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) {
// TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248):
// return an error.
- RTC_LOG(LS_ERROR)
- << "Out of dynamic payload types [35,65], skipping rtx.";
+ RTC_LOG(LS_ERROR) << "Out of dynamic payload types [35,63] after "
+ "fallback from [96, 127], skipping the rest.";
+ RTC_DCHECK_EQ(payload_type_upper, kLastDynamicPayloadTypeUpperRange);
break;
}
+ if (isCodecValidForLowerRange ||
+ payload_type_upper >= kLastDynamicPayloadTypeUpperRange) {
+ output_codecs.push_back(
+ VideoCodec::CreateRtxCodec(payload_type_lower++, codec.id));
+ } else {
+ output_codecs.push_back(
+ VideoCodec::CreateRtxCodec(payload_type_upper++, codec.id));
+ }
}
}
return output_codecs;
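Net effect of the rewritten loop: the upper dynamic range [96, 127] is consumed first, the lower range [35, 63] is reserved for codecs explicitly flagged for it (FlexFEC, AV1) and for overflow from the upper range, and every non-FEC codec takes a second id for its associated RTX codec from the same range it landed in. A purely illustrative assignment walk-through (actual ids depend on the formats the factories report):

  // VP8     -> 96,  RTX -> 97
  // VP9     -> 98,  RTX -> 99
  // H264    -> 100, RTX -> 101
  // AV1     -> 35,  RTX -> 36   (lower range: flagged as a "new" codec)
  // RED     -> 102, RTX -> 103
  // ULPFEC  -> 104              (FEC: no RTX)
  // FlexFEC -> 37               (lower range, FEC: no RTX)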
@@ -502,7 +497,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
webrtc::VideoCodecH264 h264_settings =
webrtc::VideoEncoder::GetDefaultH264Settings();
h264_settings.frameDroppingOn = frame_dropping;
- return new rtc::RefCountedObject<
+ return rtc::make_ref_counted<
webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
}
if (absl::EqualsIgnoreCase(codec.name, kVp8CodecName)) {
@@ -512,7 +507,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
// VP8 denoising is enabled by default.
vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
vp8_settings.frameDroppingOn = frame_dropping;
- return new rtc::RefCountedObject<
+ return rtc::make_ref_counted<
webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
}
if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) {
@@ -562,7 +557,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
vp9_settings.flexibleMode = vp9_settings.numberOfSpatialLayers > 1;
vp9_settings.interLayerPred = webrtc::InterLayerPredMode::kOn;
}
- return new rtc::RefCountedObject<
+ return rtc::make_ref_counted<
webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
}
return nullptr;
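The settings objects above (and the test buffers later in this patch) are now created with rtc::make_ref_counted<T>(...) instead of `new rtc::RefCountedObject<T>(...)`, which yields the scoped_refptr directly. A minimal sketch reusing the vp8_settings object shown above:

  // Before: rtc::scoped_refptr<T> p(new rtc::RefCountedObject<T>(args...));
  auto settings = rtc::make_ref_counted<
      webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
  // settings is an rtc::scoped_refptr to the new object; no manual AddRef/Release.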
@@ -703,8 +698,8 @@ WebRtcVideoChannel::WebRtcVideoChannel(
webrtc::VideoEncoderFactory* encoder_factory,
webrtc::VideoDecoderFactory* decoder_factory,
webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory)
- : VideoMediaChannel(config),
- worker_thread_(rtc::Thread::Current()),
+ : VideoMediaChannel(config, call->network_thread()),
+ worker_thread_(call->worker_thread()),
call_(call),
unsignalled_ssrc_handler_(&default_unsignalled_ssrc_handler_),
video_config_(config.video),
@@ -759,8 +754,8 @@ WebRtcVideoChannel::SelectSendVideoCodecs(
// following the spec in https://tools.ietf.org/html/rfc6184#section-8.2.2
// since we should limit the encode level to the lower of local and remote
// level when level asymmetry is not allowed.
- if (IsSameCodec(format_it->name, format_it->parameters,
- remote_codec.codec.name, remote_codec.codec.params)) {
+ if (format_it->IsSameCodec(
+ {remote_codec.codec.name, remote_codec.codec.params})) {
encoders.push_back(remote_codec);
// To allow the VideoEncoderFactory to keep information about which
@@ -954,8 +949,8 @@ void WebRtcVideoChannel::RequestEncoderSwitch(
RTC_DCHECK_RUN_ON(&thread_checker_);
for (const VideoCodecSettings& codec_setting : negotiated_codecs_) {
- if (IsSameCodec(format.name, format.parameters, codec_setting.codec.name,
- codec_setting.codec.params)) {
+ if (format.IsSameCodec(
+ {codec_setting.codec.name, codec_setting.codec.params})) {
VideoCodecSettings new_codec_setting = codec_setting;
for (const auto& kv : format.parameters) {
new_codec_setting.codec.params[kv.first] = kv.second;
@@ -1473,7 +1468,7 @@ bool WebRtcVideoChannel::AddRecvStream(const StreamParams& sp,
for (uint32_t used_ssrc : sp.ssrcs)
receive_ssrcs_.insert(used_ssrc);
- webrtc::VideoReceiveStream::Config config(this);
+ webrtc::VideoReceiveStream::Config config(this, decoder_factory_);
webrtc::FlexfecReceiveStream::Config flexfec_config(this);
ConfigureReceiverRtp(&config, &flexfec_config, sp);
@@ -1488,8 +1483,8 @@ bool WebRtcVideoChannel::AddRecvStream(const StreamParams& sp,
config.frame_transformer = unsignaled_frame_transformer_;
receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
- this, call_, sp, std::move(config), decoder_factory_, default_stream,
- recv_codecs_, flexfec_config);
+ this, call_, sp, std::move(config), default_stream, recv_codecs_,
+ flexfec_config);
return true;
}
@@ -1540,14 +1535,14 @@ void WebRtcVideoChannel::ConfigureReceiverRtp(
// TODO(brandtr): Generalize when we add support for multistream protection.
flexfec_config->payload_type = recv_flexfec_payload_type_;
if (!IsDisabled(call_->trials(), "WebRTC-FlexFEC-03-Advertised") &&
- sp.GetFecFrSsrc(ssrc, &flexfec_config->remote_ssrc)) {
+ sp.GetFecFrSsrc(ssrc, &flexfec_config->rtp.remote_ssrc)) {
flexfec_config->protected_media_ssrcs = {ssrc};
- flexfec_config->local_ssrc = config->rtp.local_ssrc;
+ flexfec_config->rtp.local_ssrc = config->rtp.local_ssrc;
flexfec_config->rtcp_mode = config->rtp.rtcp_mode;
// TODO(brandtr): We should be spec-compliant and set |transport_cc| here
// based on the rtcp-fb for the FlexFEC codec, not the media codec.
- flexfec_config->transport_cc = config->rtp.transport_cc;
- flexfec_config->rtp_header_extensions = config->rtp.extensions;
+ flexfec_config->rtp.transport_cc = config->rtp.transport_cc;
+ flexfec_config->rtp.extensions = config->rtp.extensions;
}
}
@@ -1571,6 +1566,7 @@ void WebRtcVideoChannel::ResetUnsignaledRecvStream() {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_LOG(LS_INFO) << "ResetUnsignaledRecvStream.";
unsignaled_stream_params_ = StreamParams();
+ last_unsignalled_ssrc_creation_time_ms_ = absl::nullopt;
// Delete any created default streams. This is needed to avoid SSRC collisions
// in Call's RtpDemuxer, in the case that |this| has created a default video
@@ -1773,7 +1769,23 @@ void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet,
if (demuxer_criteria_id_ != demuxer_criteria_completed_id_) {
return;
}
-
+ // Ignore unknown ssrcs if we recently created an unsignalled receive
+ // stream since this shouldn't happen frequently. Getting into a state
+ // of creating decoders on every packet eats up processing time (e.g.
+ // https://crbug.com/1069603) and this cooldown prevents that.
+ if (last_unsignalled_ssrc_creation_time_ms_.has_value()) {
+ int64_t now_ms = rtc::TimeMillis();
+ if (now_ms - last_unsignalled_ssrc_creation_time_ms_.value() <
+ kUnsignaledSsrcCooldownMs) {
+ // We've already created an unsignalled ssrc stream within the last
+ // 0.5 s; ignore this packet with a warning.
+ RTC_LOG(LS_WARNING)
+ << "Another unsignalled ssrc packet arrived shortly after the "
+ << "creation of an unsignalled ssrc stream. Dropping packet.";
+ return;
+ }
+ }
+ // Let the unsignalled ssrc handler decide whether to drop or deliver.
switch (unsignalled_ssrc_handler_->OnUnsignalledSsrc(this, ssrc)) {
case UnsignalledSsrcHandler::kDropPacket:
return;
@@ -1786,6 +1798,7 @@ void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet,
webrtc::PacketReceiver::DELIVERY_OK) {
RTC_LOG(LS_WARNING) << "Failed to deliver RTP packet on re-delivery.";
}
+ last_unsignalled_ssrc_creation_time_ms_ = rtc::TimeMillis();
}));
}
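
The early return added above rate-limits creation of unsignalled receive streams. A minimal sketch of the same guard, assuming the 500 ms cooldown suggested by the test constant kUnsignalledReceiveStreamCooldownMs; the class and names below are illustrative, not part of the patch.

#include <cstdint>
#include "absl/types/optional.h"

class UnsignalledSsrcCooldown {
 public:
  explicit UnsignalledSsrcCooldown(int64_t cooldown_ms = 500)
      : cooldown_ms_(cooldown_ms) {}

  // Returns true when a packet with an unknown ssrc should be dropped
  // because a default receive stream was created too recently.
  bool ShouldDrop(int64_t now_ms) const {
    return last_creation_ms_.has_value() &&
           now_ms - *last_creation_ms_ < cooldown_ms_;
  }

  // Call after a default (unsignalled) receive stream has been created.
  void OnStreamCreated(int64_t now_ms) { last_creation_ms_ = now_ms; }

 private:
  const int64_t cooldown_ms_;
  absl::optional<int64_t> last_creation_ms_;
};
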
@@ -1870,7 +1883,7 @@ void WebRtcVideoChannel::OnNetworkRouteChanged(
}
void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK_RUN_ON(&network_thread_checker_);
MediaChannel::SetInterface(iface);
// Set the RTP recv/send buffer to a bigger size.
@@ -2019,27 +2032,13 @@ std::vector<webrtc::RtpSource> WebRtcVideoChannel::GetSources(
bool WebRtcVideoChannel::SendRtp(const uint8_t* data,
size_t len,
const webrtc::PacketOptions& options) {
- rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
- rtc::PacketOptions rtc_options;
- rtc_options.packet_id = options.packet_id;
- if (DscpEnabled()) {
- rtc_options.dscp = PreferredDscp();
- }
- rtc_options.info_signaled_after_sent.included_in_feedback =
- options.included_in_feedback;
- rtc_options.info_signaled_after_sent.included_in_allocation =
- options.included_in_allocation;
- return MediaChannel::SendPacket(&packet, rtc_options);
+ MediaChannel::SendRtp(data, len, options);
+ return true;
}
bool WebRtcVideoChannel::SendRtcp(const uint8_t* data, size_t len) {
- rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
- rtc::PacketOptions rtc_options;
- if (DscpEnabled()) {
- rtc_options.dscp = PreferredDscp();
- }
-
- return MediaChannel::SendRtcp(&packet, rtc_options);
+ MediaChannel::SendRtcp(data, len);
+ return true;
}
WebRtcVideoChannel::WebRtcVideoSendStream::VideoSendStreamParameters::
@@ -2066,7 +2065,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream(
// TODO(deadbeef): Don't duplicate information between send_params,
// rtp_extensions, options, etc.
const VideoSendParameters& send_params)
- : worker_thread_(rtc::Thread::Current()),
+ : worker_thread_(call->worker_thread()),
ssrcs_(sp.ssrcs),
ssrc_groups_(sp.ssrc_groups),
call_(call),
@@ -2546,7 +2545,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(
int max_qp = kDefaultQpMax;
codec.GetParam(kCodecParamMaxQuantization, &max_qp);
encoder_config.video_stream_factory =
- new rtc::RefCountedObject<EncoderStreamFactory>(
+ rtc::make_ref_counted<EncoderStreamFactory>(
codec.name, max_qp, is_screencast, parameters_.conference_mode);
return encoder_config;
@@ -2676,15 +2675,18 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos(
stream_stats.rtp_stats.retransmitted.payload_bytes;
info.retransmitted_packets_sent =
stream_stats.rtp_stats.retransmitted.packets;
- info.packets_lost = stream_stats.rtcp_stats.packets_lost;
info.firs_rcvd = stream_stats.rtcp_packet_type_counts.fir_packets;
info.nacks_rcvd = stream_stats.rtcp_packet_type_counts.nack_packets;
info.plis_rcvd = stream_stats.rtcp_packet_type_counts.pli_packets;
if (stream_stats.report_block_data.has_value()) {
- info.report_block_datas.push_back(stream_stats.report_block_data.value());
+ info.packets_lost =
+ stream_stats.report_block_data->report_block().packets_lost;
+ info.fraction_lost =
+ static_cast<float>(
+ stream_stats.report_block_data->report_block().fraction_lost) /
+ (1 << 8);
+ info.report_block_datas.push_back(*stream_stats.report_block_data);
}
- info.fraction_lost =
- static_cast<float>(stream_stats.rtcp_stats.fraction_lost) / (1 << 8);
info.qp_sum = stream_stats.qp_sum;
info.total_encode_time_ms = stream_stats.total_encode_time_ms;
info.total_encoded_bytes_target = stream_stats.total_encoded_bytes_target;
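
The stats mapping above now reads packets_lost and fraction_lost from the RTCP report block data. fraction_lost is an 8-bit fixed-point value, so dividing by 1 << 8 yields the loss fraction as a float; a minimal sketch of that conversion (the function name is illustrative):

#include <cstdint>

// Converts the Q8 fixed-point fraction_lost from an RTCP report block
// into a float in [0, 1). For example, 64 maps to 0.25.
float LossFractionFromQ8(uint8_t fraction_lost_q8) {
  return static_cast<float>(fraction_lost_q8) / (1 << 8);
}
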
@@ -2817,7 +2819,6 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
webrtc::Call* call,
const StreamParams& sp,
webrtc::VideoReceiveStream::Config config,
- webrtc::VideoDecoderFactory* decoder_factory,
bool default_stream,
const std::vector<VideoCodecSettings>& recv_codecs,
const webrtc::FlexfecReceiveStream::Config& flexfec_config)
@@ -2829,10 +2830,10 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
config_(std::move(config)),
flexfec_config_(flexfec_config),
flexfec_stream_(nullptr),
- decoder_factory_(decoder_factory),
sink_(NULL),
first_frame_timestamp_(-1),
estimated_remote_start_ntp_time_ms_(0) {
+ RTC_DCHECK(config_.decoder_factory);
config_.renderer = this;
ConfigureCodecs(recv_codecs);
flexfec_config_.payload_type = flexfec_config.payload_type;
@@ -2877,16 +2878,12 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetRtpParameters() const {
void WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureCodecs(
const std::vector<VideoCodecSettings>& recv_codecs) {
RTC_DCHECK(!recv_codecs.empty());
+
config_.decoders.clear();
config_.rtp.rtx_associated_payload_types.clear();
config_.rtp.raw_payload_types.clear();
- config_.decoder_factory = decoder_factory_;
for (const auto& recv_codec : recv_codecs) {
- webrtc::SdpVideoFormat video_format(recv_codec.codec.name,
- recv_codec.codec.params);
-
webrtc::VideoReceiveStream::Decoder decoder;
- decoder.video_format = video_format;
decoder.payload_type = recv_codec.codec.id;
decoder.video_format =
webrtc::SdpVideoFormat(recv_codec.codec.name, recv_codec.codec.params);
@@ -2931,7 +2928,7 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetLocalSsrc(
}
config_.rtp.local_ssrc = local_ssrc;
- flexfec_config_.local_ssrc = local_ssrc;
+ flexfec_config_.rtp.local_ssrc = local_ssrc;
RTC_LOG(LS_INFO)
<< "RecreateWebRtcVideoStream (recv) because of SetLocalSsrc; local_ssrc="
<< local_ssrc;
@@ -2964,7 +2961,7 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFeedbackParameters(
config_.rtp.rtcp_mode = rtcp_mode;
// TODO(brandtr): We should be spec-compliant and set |transport_cc| here
// based on the rtcp-fb for the FlexFEC codec, not the media codec.
- flexfec_config_.transport_cc = config_.rtp.transport_cc;
+ flexfec_config_.rtp.transport_cc = config_.rtp.transport_cc;
flexfec_config_.rtcp_mode = config_.rtp.rtcp_mode;
RTC_LOG(LS_INFO) << "RecreateWebRtcVideoStream (recv) because of "
"SetFeedbackParameters; nack="
@@ -2979,10 +2976,18 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters(
ConfigureCodecs(*params.codec_settings);
video_needs_recreation = true;
}
+
if (params.rtp_header_extensions) {
- config_.rtp.extensions = *params.rtp_header_extensions;
- flexfec_config_.rtp_header_extensions = *params.rtp_header_extensions;
- video_needs_recreation = true;
+ if (config_.rtp.extensions != *params.rtp_header_extensions) {
+ config_.rtp.extensions = *params.rtp_header_extensions;
+ video_needs_recreation = true;
+ }
+
+ if (flexfec_config_.rtp.extensions != *params.rtp_header_extensions) {
+ flexfec_config_.rtp.extensions = *params.rtp_header_extensions;
+ if (flexfec_stream_ || flexfec_config_.IsCompleteAndEnabled())
+ video_needs_recreation = true;
+ }
}
if (params.flexfec_payload_type) {
flexfec_config_.payload_type = *params.flexfec_payload_type;
@@ -2990,7 +2995,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters(
// configured and instead of recreating the video stream, reconfigure the
// flexfec object from within the rtp callback (soon to be on the network
// thread).
- video_needs_recreation = true;
+ if (flexfec_stream_ || flexfec_config_.IsCompleteAndEnabled())
+ video_needs_recreation = true;
}
if (video_needs_recreation) {
RecreateWebRtcVideoStream();
@@ -3021,7 +3027,6 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() {
webrtc::VideoReceiveStream::Config config = config_.Copy();
config.rtp.protected_by_flexfec = (flexfec_stream_ != nullptr);
config.rtp.packet_sink_ = flexfec_stream_;
- config.stream_id = stream_params_.id;
stream_ = call_->CreateVideoReceiveStream(std::move(config));
if (base_minimum_playout_delay_ms) {
stream_->SetBaseMinimumPlayoutDelayMs(
diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h
index 0ec7216e6d..e79ebbf24a 100644
--- a/media/engine/webrtc_video_engine.h
+++ b/media/engine/webrtc_video_engine.h
@@ -40,13 +40,8 @@
namespace webrtc {
class VideoDecoderFactory;
class VideoEncoderFactory;
-struct MediaConfig;
} // namespace webrtc
-namespace rtc {
-class Thread;
-} // namespace rtc
-
namespace cricket {
class WebRtcVideoChannel;
@@ -402,7 +397,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
RTC_EXCLUSIVE_LOCKS_REQUIRED(&thread_checker_);
webrtc::SequenceChecker thread_checker_;
- rtc::Thread* worker_thread_;
+ webrtc::TaskQueueBase* const worker_thread_;
const std::vector<uint32_t> ssrcs_ RTC_GUARDED_BY(&thread_checker_);
const std::vector<SsrcGroup> ssrc_groups_ RTC_GUARDED_BY(&thread_checker_);
webrtc::Call* const call_;
@@ -441,7 +436,6 @@ class WebRtcVideoChannel : public VideoMediaChannel,
webrtc::Call* call,
const StreamParams& sp,
webrtc::VideoReceiveStream::Config config,
- webrtc::VideoDecoderFactory* decoder_factory,
bool default_stream,
const std::vector<VideoCodecSettings>& recv_codecs,
const webrtc::FlexfecReceiveStream::Config& flexfec_config);
@@ -506,8 +500,6 @@ class WebRtcVideoChannel : public VideoMediaChannel,
webrtc::FlexfecReceiveStream::Config flexfec_config_;
webrtc::FlexfecReceiveStream* flexfec_stream_;
- webrtc::VideoDecoderFactory* const decoder_factory_;
-
webrtc::Mutex sink_lock_;
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink_
RTC_GUARDED_BY(sink_lock_);
@@ -553,7 +545,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
void FillSendAndReceiveCodecStats(VideoMediaInfo* video_media_info)
RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_);
- rtc::Thread* const worker_thread_;
+ webrtc::TaskQueueBase* const worker_thread_;
webrtc::ScopedTaskSafety task_safety_;
webrtc::SequenceChecker network_thread_checker_;
webrtc::SequenceChecker thread_checker_;
@@ -593,6 +585,8 @@ class WebRtcVideoChannel : public VideoMediaChannel,
// is a risk of receiving ssrcs for other, recently added m= sections.
uint32_t demuxer_criteria_id_ RTC_GUARDED_BY(thread_checker_) = 0;
uint32_t demuxer_criteria_completed_id_ RTC_GUARDED_BY(thread_checker_) = 0;
+ absl::optional<int64_t> last_unsignalled_ssrc_creation_time_ms_
+ RTC_GUARDED_BY(thread_checker_);
std::set<uint32_t> send_ssrcs_ RTC_GUARDED_BY(thread_checker_);
std::set<uint32_t> receive_ssrcs_ RTC_GUARDED_BY(thread_checker_);
diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc
index 4270e274f5..89cc1f4d9f 100644
--- a/media/engine/webrtc_video_engine_unittest.cc
+++ b/media/engine/webrtc_video_engine_unittest.cc
@@ -51,17 +51,19 @@
#include "media/engine/fake_webrtc_video_engine.h"
#include "media/engine/simulcast.h"
#include "media/engine/webrtc_voice_engine.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "rtc_base/arraysize.h"
+#include "rtc_base/event.h"
#include "rtc_base/experiments/min_video_bitrate_experiment.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/gunit.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/field_trial.h"
#include "test/fake_decoder.h"
#include "test/field_trial.h"
#include "test/frame_forwarder.h"
#include "test/gmock.h"
-#include "test/rtp_header_parser.h"
using ::testing::_;
using ::testing::Contains;
@@ -95,6 +97,7 @@ static const uint32_t kSsrcs3[] = {1, 2, 3};
static const uint32_t kRtxSsrcs1[] = {4};
static const uint32_t kFlexfecSsrc = 5;
static const uint32_t kIncomingUnsignalledSsrc = 0xC0FFEE;
+static const int64_t kUnsignalledReceiveStreamCooldownMs = 500;
constexpr uint32_t kRtpHeaderSize = 12;
@@ -1417,6 +1420,10 @@ class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test {
channel_->SetRecvParameters(parameters);
}
+ ~WebRtcVideoChannelEncodedFrameCallbackTest() override {
+ channel_->SetInterface(nullptr);
+ }
+
void DeliverKeyFrame(uint32_t ssrc) {
webrtc::RtpPacket packet;
packet.SetMarker(true);
@@ -1544,7 +1551,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
webrtc::CreateBuiltinVideoDecoderFactory(),
field_trials_) {}
- virtual void SetUp() {
+ void SetUp() override {
// One testcase calls SetUp in a loop, only create call_ once.
if (!call_) {
webrtc::Call::Config call_config(&event_log_);
@@ -1586,6 +1593,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
// Make the second renderer available for use by a new stream.
EXPECT_TRUE(channel_->SetSink(kSsrc + 2, &renderer2_));
}
+
// Setup an additional stream just to send video. Defer add recv stream.
// This is required if you want to test unsignalled recv of video rtp packets.
void SetUpSecondStreamWithNoRecv() {
@@ -1604,7 +1612,17 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
EXPECT_TRUE(
channel_->SetVideoSend(kSsrc + 2, nullptr, frame_forwarder_2_.get()));
}
- virtual void TearDown() { channel_.reset(); }
+
+ void TearDown() override {
+ channel_->SetInterface(nullptr);
+ channel_.reset();
+ }
+
+ void ResetTest() {
+ TearDown();
+ SetUp();
+ }
+
bool SetDefaultCodec() { return SetOneCodec(DefaultCodec()); }
bool SetOneCodec(const cricket::VideoCodec& codec) {
@@ -1644,20 +1662,13 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
return network_interface_.NumRtpPackets(ssrc);
}
int NumSentSsrcs() { return network_interface_.NumSentSsrcs(); }
- const rtc::CopyOnWriteBuffer* GetRtpPacket(int index) {
+ rtc::CopyOnWriteBuffer GetRtpPacket(int index) {
return network_interface_.GetRtpPacket(index);
}
- static int GetPayloadType(const rtc::CopyOnWriteBuffer* p) {
- webrtc::RTPHeader header;
- EXPECT_TRUE(ParseRtpPacket(p, &header));
- return header.payloadType;
- }
-
- static bool ParseRtpPacket(const rtc::CopyOnWriteBuffer* p,
- webrtc::RTPHeader* header) {
- std::unique_ptr<webrtc::RtpHeaderParser> parser(
- webrtc::RtpHeaderParser::CreateForTest());
- return parser->Parse(p->cdata(), p->size(), header);
+ static int GetPayloadType(rtc::CopyOnWriteBuffer p) {
+ webrtc::RtpPacket header;
+ EXPECT_TRUE(header.Parse(std::move(p)));
+ return header.PayloadType();
}
// Tests that we can send and receive frames.
@@ -1668,8 +1679,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
EXPECT_EQ(0, renderer_.num_rendered_frames());
SendFrame();
EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
- std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
- EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ EXPECT_EQ(codec.id, GetPayloadType(GetRtpPacket(0)));
}
void SendReceiveManyAndGetStats(const cricket::VideoCodec& codec,
@@ -1685,8 +1695,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
EXPECT_FRAME_WAIT(frame + i * fps, kVideoWidth, kVideoHeight, kTimeout);
}
}
- std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
- EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ EXPECT_EQ(codec.id, GetPayloadType(GetRtpPacket(0)));
}
cricket::VideoSenderInfo GetSenderStats(size_t i) {
@@ -1732,6 +1741,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
webrtc::RtcEventLogNull event_log_;
webrtc::FieldTrialBasedConfig field_trials_;
+ std::unique_ptr<webrtc::test::ScopedFieldTrials> override_field_trials_;
std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory_;
std::unique_ptr<webrtc::Call> call_;
std::unique_ptr<webrtc::VideoBitrateAllocatorFactory>
@@ -1786,9 +1796,11 @@ TEST_F(WebRtcVideoChannelBaseTest, OverridesRecvBufferSize) {
// Set field trial to override the default recv buffer size, and then re-run
// setup where the interface is created and configured.
const int kCustomRecvBufferSize = 123456;
- webrtc::test::ScopedFieldTrials field_trial(
+ RTC_DCHECK(!override_field_trials_);
+ override_field_trials_ = std::make_unique<webrtc::test::ScopedFieldTrials>(
"WebRTC-IncreasedReceivebuffers/123456/");
- SetUp();
+
+ ResetTest();
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
EXPECT_TRUE(SetSend(true));
@@ -1802,9 +1814,10 @@ TEST_F(WebRtcVideoChannelBaseTest, OverridesRecvBufferSizeWithSuffix) {
// Set field trial to override the default recv buffer size, and then re-run
// setup where the interface is created and configured.
const int kCustomRecvBufferSize = 123456;
- webrtc::test::ScopedFieldTrials field_trial(
+ RTC_DCHECK(!override_field_trials_);
+ override_field_trials_ = std::make_unique<webrtc::test::ScopedFieldTrials>(
"WebRTC-IncreasedReceivebuffers/123456_Dogfood/");
- SetUp();
+ ResetTest();
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
EXPECT_TRUE(SetSend(true));
@@ -1819,24 +1832,50 @@ TEST_F(WebRtcVideoChannelBaseTest, InvalidRecvBufferSize) {
// then re-run setup where the interface is created and configured. The
// default value should still be used.
+ const char* prev_field_trials = webrtc::field_trial::GetFieldTrialString();
+
+ std::string field_trial_string;
for (std::string group : {" ", "NotANumber", "-1", "0"}) {
- std::string field_trial_string = "WebRTC-IncreasedReceivebuffers/";
- field_trial_string += group;
- field_trial_string += "/";
- webrtc::test::ScopedFieldTrials field_trial(field_trial_string);
+ std::string trial_string = "WebRTC-IncreasedReceivebuffers/";
+ trial_string += group;
+ trial_string += "/";
+
+ // Apologies to the reader: this is a bit of a mess.
+ // TODO(bugs.webrtc.org/12854): This test needs to be rewritten so that it
+ // does not use ResetTest or change global field trials in a loop.
+ TearDown();
+ // This is a hack to appease TSan. Because of the way the test is written,
+ // active state within Call, including running task queues, may race with
+ // the test changing the global field trial variable.
+ // This particular hack pauses the transport controller TQ while we
+ // change the field trial.
+ rtc::TaskQueue* tq = call_->GetTransportControllerSend()->GetWorkerQueue();
+ rtc::Event waiting, resume;
+ tq->PostTask([&waiting, &resume]() {
+ waiting.Set();
+ resume.Wait(rtc::Event::kForever);
+ });
+
+ waiting.Wait(rtc::Event::kForever);
+ field_trial_string = std::move(trial_string);
+ webrtc::field_trial::InitFieldTrialsFromString(field_trial_string.c_str());
+
SetUp();
+ resume.Set();
+
+ // OK, now the test can carry on.
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
EXPECT_TRUE(SetSend(true));
EXPECT_EQ(64 * 1024, network_interface_.sendbuf_size());
EXPECT_EQ(256 * 1024, network_interface_.recvbuf_size());
}
+
+ webrtc::field_trial::InitFieldTrialsFromString(prev_field_trials);
}
// Test that stats work properly for a 1-1 call.
TEST_F(WebRtcVideoChannelBaseTest, GetStats) {
- SetUp();
-
const int kDurationSec = 3;
const int kFps = 10;
SendReceiveManyAndGetStats(DefaultCodec(), kDurationSec, kFps);
@@ -1893,8 +1932,6 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStats) {
// Test that stats work properly for a conf call with multiple recv streams.
TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) {
- SetUp();
-
cricket::FakeVideoRenderer renderer1, renderer2;
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
cricket::VideoSendParameters parameters;
@@ -2023,15 +2060,14 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrc) {
EXPECT_TRUE(SetSend(true));
SendFrame();
EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
- webrtc::RTPHeader header;
- std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
- EXPECT_TRUE(ParseRtpPacket(p.get(), &header));
- EXPECT_EQ(kSsrc, header.ssrc);
+ webrtc::RtpPacket header;
+ EXPECT_TRUE(header.Parse(GetRtpPacket(0)));
+ EXPECT_EQ(kSsrc, header.Ssrc());
// Packets are being paced out, so these can mismatch between the first and
// second call to NumRtpPackets until pending packets are paced out.
- EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(header.ssrc), kTimeout);
- EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(header.ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(header.Ssrc()), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(header.Ssrc()), kTimeout);
EXPECT_EQ(1, NumSentSsrcs());
EXPECT_EQ(0, NumRtpPackets(kSsrc - 1));
EXPECT_EQ(0, NumRtpBytes(kSsrc - 1));
@@ -2048,14 +2084,13 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrcAfterSetCodecs) {
EXPECT_TRUE(SetSend(true));
EXPECT_TRUE(WaitAndSendFrame(0));
EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
- webrtc::RTPHeader header;
- std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
- EXPECT_TRUE(ParseRtpPacket(p.get(), &header));
- EXPECT_EQ(999u, header.ssrc);
+ webrtc::RtpPacket header;
+ EXPECT_TRUE(header.Parse(GetRtpPacket(0)));
+ EXPECT_EQ(999u, header.Ssrc());
// Packets are being paced out, so these can mismatch between the first and
// second call to NumRtpPackets until pending packets are paced out.
- EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(header.ssrc), kTimeout);
- EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(header.ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(header.Ssrc()), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(header.Ssrc()), kTimeout);
EXPECT_EQ(1, NumSentSsrcs());
EXPECT_EQ(0, NumRtpPackets(kSsrc));
EXPECT_EQ(0, NumRtpBytes(kSsrc));
@@ -2087,12 +2122,10 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) {
SendFrame();
EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
EXPECT_GT(NumRtpPackets(), 0);
- webrtc::RTPHeader header;
+ webrtc::RtpPacket header;
size_t last_packet = NumRtpPackets() - 1;
- std::unique_ptr<const rtc::CopyOnWriteBuffer> p(
- GetRtpPacket(static_cast<int>(last_packet)));
- EXPECT_TRUE(ParseRtpPacket(p.get(), &header));
- EXPECT_EQ(kSsrc, header.ssrc);
+ EXPECT_TRUE(header.Parse(GetRtpPacket(static_cast<int>(last_packet))));
+ EXPECT_EQ(kSsrc, header.Ssrc());
// Remove the send stream that was added during Setup.
EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
@@ -2107,9 +2140,8 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) {
EXPECT_TRUE_WAIT(NumRtpPackets() > rtp_packets, kTimeout);
last_packet = NumRtpPackets() - 1;
- p.reset(GetRtpPacket(static_cast<int>(last_packet)));
- EXPECT_TRUE(ParseRtpPacket(p.get(), &header));
- EXPECT_EQ(789u, header.ssrc);
+ EXPECT_TRUE(header.Parse(GetRtpPacket(static_cast<int>(last_packet))));
+ EXPECT_EQ(789u, header.Ssrc());
}
// Tests the behavior of incoming streams in a conference scenario.
@@ -2137,8 +2169,7 @@ TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) {
EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, kVideoWidth, kVideoHeight,
kTimeout);
- std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
- EXPECT_EQ(DefaultCodec().id, GetPayloadType(p.get()));
+ EXPECT_EQ(DefaultCodec().id, GetPayloadType(GetRtpPacket(0)));
EXPECT_EQ(kVideoWidth, renderer1.width());
EXPECT_EQ(kVideoHeight, renderer1.height());
EXPECT_EQ(kVideoWidth, renderer2.width());
@@ -2513,6 +2544,16 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest {
ASSERT_TRUE(channel_->SetSendParameters(send_parameters_));
}
+ void TearDown() override {
+ channel_->SetInterface(nullptr);
+ channel_ = nullptr;
+ }
+
+ void ResetTest() {
+ TearDown();
+ SetUp();
+ }
+
cricket::VideoCodec GetEngineCodec(const std::string& name) {
for (const cricket::VideoCodec& engine_codec : engine_.send_codecs()) {
if (absl::EqualsIgnoreCase(name, engine_codec.name))
@@ -2525,6 +2566,16 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest {
cricket::VideoCodec DefaultCodec() { return GetEngineCodec("VP8"); }
+ // After receiving and processing the packet, enough time is advanced that
+ // the unsignalled receive stream cooldown is no longer in effect.
+ void ReceivePacketAndAdvanceTime(rtc::CopyOnWriteBuffer packet,
+ int64_t packet_time_us) {
+ channel_->OnPacketReceived(packet, packet_time_us);
+ rtc::Thread::Current()->ProcessMessages(0);
+ fake_clock_.AdvanceTime(
+ webrtc::TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs));
+ }
+
protected:
FakeVideoSendStream* AddSendStream() {
return AddSendStream(StreamParams::CreateLegacy(++last_ssrc_));
@@ -2934,7 +2985,7 @@ TEST_F(WebRtcVideoChannelTest, RecvAbsoluteSendTimeHeaderExtensions) {
}
TEST_F(WebRtcVideoChannelTest, FiltersExtensionsPicksTransportSeqNum) {
- webrtc::test::ScopedFieldTrials override_field_trials_(
+ webrtc::test::ScopedFieldTrials override_field_trials(
"WebRTC-FilterAbsSendTimeExtension/Enabled/");
// Enable three redundant extensions.
std::vector<std::string> extensions;
@@ -3165,7 +3216,7 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationIsEnabledByFieldTrial) {
RTC_DCHECK(!override_field_trials_);
override_field_trials_ = std::make_unique<webrtc::test::ScopedFieldTrials>(
"WebRTC-RtcpLossNotification/Enabled/");
- SetUp();
+ ResetTest();
TestLossNotificationState(true);
}
@@ -3173,7 +3224,7 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationCanBeEnabledAndDisabled) {
RTC_DCHECK(!override_field_trials_);
override_field_trials_ = std::make_unique<webrtc::test::ScopedFieldTrials>(
"WebRTC-RtcpLossNotification/Enabled/");
- SetUp();
+ ResetTest();
AssignDefaultCodec();
VerifyCodecHasDefaultFeedbackParams(default_codec_, true);
@@ -4113,10 +4164,10 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithSsrc) {
const std::vector<FakeFlexfecReceiveStream*>& streams =
fake_call_->GetFlexfecReceiveStreams();
ASSERT_EQ(1U, streams.size());
- const FakeFlexfecReceiveStream* stream = streams.front();
+ const auto* stream = streams.front();
const webrtc::FlexfecReceiveStream::Config& config = stream->GetConfig();
EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.payload_type);
- EXPECT_EQ(kFlexfecSsrc, config.remote_ssrc);
+ EXPECT_EQ(kFlexfecSsrc, config.rtp.remote_ssrc);
ASSERT_EQ(1U, config.protected_media_ssrcs.size());
EXPECT_EQ(kSsrcs1[0], config.protected_media_ssrcs[0]);
@@ -4229,7 +4280,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, DuplicateFlexfecCodecIsDropped) {
const std::vector<FakeFlexfecReceiveStream*>& streams =
fake_call_->GetFlexfecReceiveStreams();
ASSERT_EQ(1U, streams.size());
- const FakeFlexfecReceiveStream* stream = streams.front();
+ const auto* stream = streams.front();
const webrtc::FlexfecReceiveStream::Config& config = stream->GetConfig();
EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.payload_type);
}
@@ -4305,7 +4356,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) {
flexfec_stream->GetConfig();
EXPECT_EQ(GetEngineCodec("flexfec-03").id,
flexfec_stream_config.payload_type);
- EXPECT_EQ(kFlexfecSsrc, flexfec_stream_config.remote_ssrc);
+ EXPECT_EQ(kFlexfecSsrc, flexfec_stream_config.rtp.remote_ssrc);
ASSERT_EQ(1U, flexfec_stream_config.protected_media_ssrcs.size());
EXPECT_EQ(kSsrcs1[0], flexfec_stream_config.protected_media_ssrcs[0]);
const std::vector<FakeVideoReceiveStream*>& video_streams =
@@ -4314,17 +4365,17 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) {
const webrtc::VideoReceiveStream::Config& video_stream_config =
video_stream->GetConfig();
EXPECT_EQ(video_stream_config.rtp.local_ssrc,
- flexfec_stream_config.local_ssrc);
+ flexfec_stream_config.rtp.local_ssrc);
EXPECT_EQ(video_stream_config.rtp.rtcp_mode, flexfec_stream_config.rtcp_mode);
EXPECT_EQ(video_stream_config.rtcp_send_transport,
flexfec_stream_config.rtcp_send_transport);
// TODO(brandtr): Update this EXPECT when we set |transport_cc| in a
// spec-compliant way.
EXPECT_EQ(video_stream_config.rtp.transport_cc,
- flexfec_stream_config.transport_cc);
+ flexfec_stream_config.rtp.transport_cc);
EXPECT_EQ(video_stream_config.rtp.rtcp_mode, flexfec_stream_config.rtcp_mode);
EXPECT_EQ(video_stream_config.rtp.extensions,
- flexfec_stream_config.rtp_header_extensions);
+ flexfec_stream_config.rtp.extensions);
}
// We should not send FlexFEC, even if we advertise it, unless the right
@@ -5075,7 +5126,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvParamsWithoutFecDisablesFec) {
ASSERT_EQ(1U, streams.size());
const FakeFlexfecReceiveStream* stream = streams.front();
EXPECT_EQ(GetEngineCodec("flexfec-03").id, stream->GetConfig().payload_type);
- EXPECT_EQ(kFlexfecSsrc, stream->GetConfig().remote_ssrc);
+ EXPECT_EQ(kFlexfecSsrc, stream->rtp_config().remote_ssrc);
ASSERT_EQ(1U, stream->GetConfig().protected_media_ssrcs.size());
EXPECT_EQ(kSsrcs1[0], stream->GetConfig().protected_media_ssrcs[0]);
@@ -5128,7 +5179,7 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
const FakeFlexfecReceiveStream* stream_with_recv_params = streams.front();
EXPECT_EQ(GetEngineCodec("flexfec-03").id,
stream_with_recv_params->GetConfig().payload_type);
- EXPECT_EQ(kFlexfecSsrc, stream_with_recv_params->GetConfig().remote_ssrc);
+ EXPECT_EQ(kFlexfecSsrc, stream_with_recv_params->GetConfig().rtp.remote_ssrc);
EXPECT_EQ(1U,
stream_with_recv_params->GetConfig().protected_media_ssrcs.size());
EXPECT_EQ(kSsrcs1[0],
@@ -5142,7 +5193,7 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
const FakeFlexfecReceiveStream* stream_with_send_params = streams.front();
EXPECT_EQ(GetEngineCodec("flexfec-03").id,
stream_with_send_params->GetConfig().payload_type);
- EXPECT_EQ(kFlexfecSsrc, stream_with_send_params->GetConfig().remote_ssrc);
+ EXPECT_EQ(kFlexfecSsrc, stream_with_send_params->GetConfig().rtp.remote_ssrc);
EXPECT_EQ(1U,
stream_with_send_params->GetConfig().protected_media_ssrcs.size());
EXPECT_EQ(kSsrcs1[0],
@@ -5248,6 +5299,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
channel->SetInterface(network_interface.get());
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
+ channel->SetInterface(nullptr);
// Default value when DSCP is enabled is also DSCP_DEFAULT, until it is set
// through rtp parameters.
@@ -5277,6 +5329,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
EXPECT_TRUE(static_cast<webrtc::Transport*>(channel.get())
->SendRtcp(kData, sizeof(kData)));
EXPECT_EQ(rtc::DSCP_CS1, network_interface->options().dscp);
+ channel->SetInterface(nullptr);
// Verify that setting the option to false resets the
// DiffServCodePoint.
@@ -5287,6 +5340,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
video_bitrate_allocator_factory_.get())));
channel->SetInterface(network_interface.get());
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
+ channel->SetInterface(nullptr);
}
// This test verifies that the RTCP reduced size mode is properly applied to
@@ -5561,9 +5615,11 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) {
substream.rtcp_packet_type_counts.fir_packets = 14;
substream.rtcp_packet_type_counts.nack_packets = 15;
substream.rtcp_packet_type_counts.pli_packets = 16;
- substream.rtcp_stats.packets_lost = 17;
- substream.rtcp_stats.fraction_lost = 18;
+ webrtc::RTCPReportBlock report_block;
+ report_block.packets_lost = 17;
+ report_block.fraction_lost = 18;
webrtc::ReportBlockData report_block_data;
+ report_block_data.SetReportBlock(report_block, 0);
report_block_data.AddRoundTripTimeSample(19);
substream.report_block_data = report_block_data;
substream.encode_frame_rate = 20.0;
@@ -5597,9 +5653,12 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) {
static_cast<int>(2 * substream.rtp_stats.transmitted.packets));
EXPECT_EQ(sender.retransmitted_packets_sent,
2u * substream.rtp_stats.retransmitted.packets);
- EXPECT_EQ(sender.packets_lost, 2 * substream.rtcp_stats.packets_lost);
+ EXPECT_EQ(sender.packets_lost,
+ 2 * substream.report_block_data->report_block().packets_lost);
EXPECT_EQ(sender.fraction_lost,
- static_cast<float>(substream.rtcp_stats.fraction_lost) / (1 << 8));
+ static_cast<float>(
+ substream.report_block_data->report_block().fraction_lost) /
+ (1 << 8));
EXPECT_EQ(sender.rtt_ms, 0);
EXPECT_EQ(sender.codec_name, DefaultCodec().name);
EXPECT_EQ(sender.codec_payload_type, DefaultCodec().id);
@@ -5679,9 +5738,11 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) {
substream.rtcp_packet_type_counts.fir_packets = 14;
substream.rtcp_packet_type_counts.nack_packets = 15;
substream.rtcp_packet_type_counts.pli_packets = 16;
- substream.rtcp_stats.packets_lost = 17;
- substream.rtcp_stats.fraction_lost = 18;
+ webrtc::RTCPReportBlock report_block;
+ report_block.packets_lost = 17;
+ report_block.fraction_lost = 18;
webrtc::ReportBlockData report_block_data;
+ report_block_data.SetReportBlock(report_block, 0);
report_block_data.AddRoundTripTimeSample(19);
substream.report_block_data = report_block_data;
substream.encode_frame_rate = 20.0;
@@ -5715,9 +5776,12 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) {
static_cast<int>(substream.rtp_stats.transmitted.packets));
EXPECT_EQ(sender.retransmitted_packets_sent,
substream.rtp_stats.retransmitted.packets);
- EXPECT_EQ(sender.packets_lost, substream.rtcp_stats.packets_lost);
+ EXPECT_EQ(sender.packets_lost,
+ substream.report_block_data->report_block().packets_lost);
EXPECT_EQ(sender.fraction_lost,
- static_cast<float>(substream.rtcp_stats.fraction_lost) / (1 << 8));
+ static_cast<float>(
+ substream.report_block_data->report_block().fraction_lost) /
+ (1 << 8));
EXPECT_EQ(sender.rtt_ms, 0);
EXPECT_EQ(sender.codec_name, DefaultCodec().name);
EXPECT_EQ(sender.codec_payload_type, DefaultCodec().id);
@@ -6244,8 +6308,7 @@ TEST_F(WebRtcVideoChannelTest, DefaultReceiveStreamReconfiguresToUseRtx) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], ssrcs[0]);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size())
<< "No default receive stream created.";
@@ -6406,8 +6469,7 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kIncomingUnsignalledSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
// The stream should now be created with the appropriate sync label.
EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
@@ -6422,16 +6484,14 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) {
// Until the demuxer criteria has been updated, we ignore in-flight ssrcs of
// the recently removed unsignaled receive stream.
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
EXPECT_EQ(0u, fake_call_->GetVideoReceiveStreams().size());
// After the demuxer criteria has been updated, we should proceed to create
// unsignalled receive streams. This time when a default video receive stream
// is created it won't have a sync_group.
channel_->OnDemuxerCriteriaUpdateComplete();
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
EXPECT_TRUE(
fake_call_->GetVideoReceiveStreams()[0]->GetConfig().sync_group.empty());
@@ -6448,8 +6508,7 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kIncomingUnsignalledSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
// Default receive stream created.
const auto& receivers1 = fake_call_->GetVideoReceiveStreams();
@@ -6499,7 +6558,7 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc1);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
{
// Receive a packet for kSsrc2.
@@ -6508,9 +6567,8 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc2);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
// No unsignaled ssrc for kSsrc2 should have been created, but kSsrc1 should
// arrive since it already has a stream.
@@ -6532,7 +6590,7 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc1);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
{
// Receive a packet for kSsrc2.
@@ -6541,9 +6599,8 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc2);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
// An unsignalled ssrc for kSsrc2 should be created and the packet counter
// should increase for both ssrcs.
@@ -6584,7 +6641,7 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc1);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
{
// Receive a packet for kSsrc2.
@@ -6593,9 +6650,8 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc2);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
// No unsignaled ssrc for kSsrc1 should have been created, but the packet
// count for kSsrc2 should increase.
@@ -6616,7 +6672,7 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc1);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
{
// Receive a packet for kSsrc2.
@@ -6625,9 +6681,8 @@ TEST_F(WebRtcVideoChannelTest,
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc2);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
// An unsignalled ssrc for kSsrc1 should be created and the packet counter
// should increase for both ssrcs.
@@ -6664,9 +6719,8 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 1u);
// Signal that the demuxer knows about the first update: the removal.
@@ -6681,9 +6735,8 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u);
// Remove the kSsrc again while previous demuxer updates are still pending.
@@ -6699,9 +6752,8 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 0u);
EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u);
@@ -6717,9 +6769,8 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
}
- rtc::Thread::Current()->ProcessMessages(0);
EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 0u);
EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u);
@@ -6735,11 +6786,72 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
+ }
+ EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u);
+ EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 3u);
+}
+
+TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) {
+ const uint32_t kSsrc1 = 1;
+ const uint32_t kSsrc2 = 2;
+
+ // Send packets for kSsrc1, creating an unsignalled receive stream.
+ {
+ // Receive a packet for kSsrc1.
+ const size_t kDataLength = 12;
+ uint8_t data[kDataLength];
+ memset(data, 0, sizeof(data));
+ rtc::SetBE32(&data[8], kSsrc1);
+ rtc::CopyOnWriteBuffer packet(data, kDataLength);
channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
}
rtc::Thread::Current()->ProcessMessages(0);
+ fake_clock_.AdvanceTime(
+ webrtc::TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs - 1));
+
+ // We now have an unsignalled receive stream for kSsrc1.
EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u);
- EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 3u);
+ EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc1), 1u);
+ EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc2), 0u);
+
+ {
+ // Receive a packet for kSsrc2.
+ const size_t kDataLength = 12;
+ uint8_t data[kDataLength];
+ memset(data, 0, sizeof(data));
+ rtc::SetBE32(&data[8], kSsrc2);
+ rtc::CopyOnWriteBuffer packet(data, kDataLength);
+ channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ }
+ rtc::Thread::Current()->ProcessMessages(0);
+
+ // Not enough time has passed to replace the unsignalled receive stream, so
+ // the packet for kSsrc2 should be ignored.
+ EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u);
+ EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc1), 1u);
+ EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc2), 0u);
+
+ // After 500 ms, kSsrc2 should trigger a new unsignalled receive stream that
+ // replaces the old one.
+ fake_clock_.AdvanceTime(webrtc::TimeDelta::Millis(1));
+ {
+ // Receive a packet for kSsrc2.
+ const size_t kDataLength = 12;
+ uint8_t data[kDataLength];
+ memset(data, 0, sizeof(data));
+ rtc::SetBE32(&data[8], kSsrc2);
+ rtc::CopyOnWriteBuffer packet(data, kDataLength);
+ channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+ }
+ rtc::Thread::Current()->ProcessMessages(0);
+
+ // The old unsignalled receive stream was destroyed and replaced, so we still
+ // only have one unsignalled receive stream. But the packet counter for kSsrc2
+ // has now increased.
+ EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u);
+ EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc1), 1u);
+ EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc2), 1u);
}
// Test BaseMinimumPlayoutDelayMs on receive streams.
@@ -6775,8 +6887,7 @@ TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMsUnsignaledRecvStream) {
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kIncomingUnsignalledSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
recv_stream = fake_call_->GetVideoReceiveStream(kIncomingUnsignalledSsrc);
EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 200);
@@ -6813,8 +6924,7 @@ void WebRtcVideoChannelTest::TestReceiveUnsignaledSsrcPacket(
rtc::Set8(data, 1, payload_type);
rtc::SetBE32(&data[8], kIncomingUnsignalledSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
if (expect_created_receive_stream) {
EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size())
@@ -6901,8 +7011,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) {
rtpHeader.ssrc = kIncomingUnsignalledSsrc + 1;
cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
rtc::CopyOnWriteBuffer packet(data, sizeof(data));
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
// VP8 packet should create default receive stream.
ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
FakeVideoReceiveStream* recv_stream = fake_call_->GetVideoReceiveStreams()[0];
@@ -6923,8 +7032,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) {
rtpHeader.ssrc = kIncomingUnsignalledSsrc + 2;
cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
rtc::CopyOnWriteBuffer packet2(data, sizeof(data));
- channel_->OnPacketReceived(packet2, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet2, /* packet_time_us */ -1);
// VP9 packet should replace the default receive SSRC.
ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
recv_stream = fake_call_->GetVideoReceiveStreams()[0];
@@ -6946,8 +7054,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) {
rtpHeader.ssrc = kIncomingUnsignalledSsrc + 3;
cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
rtc::CopyOnWriteBuffer packet3(data, sizeof(data));
- channel_->OnPacketReceived(packet3, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet3, /* packet_time_us */ -1);
// H264 packet should replace the default receive SSRC.
ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
recv_stream = fake_call_->GetVideoReceiveStreams()[0];
@@ -6986,8 +7093,7 @@ TEST_F(WebRtcVideoChannelTest,
rtp_header.ssrc = kSsrcs3[0];
cricket::SetRtpHeader(data, sizeof(data), rtp_header);
rtc::CopyOnWriteBuffer packet(data, sizeof(data));
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
// Default receive stream should be created.
ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size());
FakeVideoReceiveStream* recv_stream0 =
@@ -7005,8 +7111,7 @@ TEST_F(WebRtcVideoChannelTest,
rtp_header.ssrc = kSsrcs3[1];
cricket::SetRtpHeader(data, sizeof(data), rtp_header);
packet.SetData(data, sizeof(data));
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
// New default receive stream should be created, but old stream should remain.
ASSERT_EQ(2u, fake_call_->GetVideoReceiveStreams().size());
EXPECT_EQ(recv_stream0, fake_call_->GetVideoReceiveStreams()[0]);
@@ -8619,8 +8724,7 @@ TEST_F(WebRtcVideoChannelTest,
rtpHeader.ssrc = kIncomingUnsignalledSsrc;
cricket::SetRtpHeader(data, sizeof(data), rtpHeader);
rtc::CopyOnWriteBuffer packet(data, sizeof(data));
- channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
- rtc::Thread::Current()->ProcessMessages(0);
+ ReceivePacketAndAdvanceTime(packet, /* packet_time_us */ -1);
// The |ssrc| member should still be unset.
rtp_parameters = channel_->GetDefaultRtpReceiveParameters();
diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc
index e952394569..578eab4f59 100644
--- a/media/engine/webrtc_voice_engine.cc
+++ b/media/engine/webrtc_voice_engine.cc
@@ -50,7 +50,6 @@
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/third_party/base64/base64.h"
-#include "rtc_base/thread.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/metrics.h"
@@ -241,6 +240,49 @@ struct AdaptivePtimeConfig {
}
};
+// TODO(tommi): Constructing a receive stream could be made simpler.
+// Move some of this boilerplate code into the config structs themselves.
+webrtc::AudioReceiveStream::Config BuildReceiveStreamConfig(
+ uint32_t remote_ssrc,
+ uint32_t local_ssrc,
+ bool use_transport_cc,
+ bool use_nack,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<webrtc::RtpExtension>& extensions,
+ webrtc::Transport* rtcp_send_transport,
+ const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
+ const std::map<int, webrtc::SdpAudioFormat>& decoder_map,
+ absl::optional<webrtc::AudioCodecPairId> codec_pair_id,
+ size_t jitter_buffer_max_packets,
+ bool jitter_buffer_fast_accelerate,
+ int jitter_buffer_min_delay_ms,
+ bool jitter_buffer_enable_rtx_handling,
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor,
+ const webrtc::CryptoOptions& crypto_options,
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {
+ webrtc::AudioReceiveStream::Config config;
+ config.rtp.remote_ssrc = remote_ssrc;
+ config.rtp.local_ssrc = local_ssrc;
+ config.rtp.transport_cc = use_transport_cc;
+ config.rtp.nack.rtp_history_ms = use_nack ? kNackRtpHistoryMs : 0;
+ if (!stream_ids.empty()) {
+ config.sync_group = stream_ids[0];
+ }
+ config.rtp.extensions = extensions;
+ config.rtcp_send_transport = rtcp_send_transport;
+ config.decoder_factory = decoder_factory;
+ config.decoder_map = decoder_map;
+ config.codec_pair_id = codec_pair_id;
+ config.jitter_buffer_max_packets = jitter_buffer_max_packets;
+ config.jitter_buffer_fast_accelerate = jitter_buffer_fast_accelerate;
+ config.jitter_buffer_min_delay_ms = jitter_buffer_min_delay_ms;
+ config.jitter_buffer_enable_rtx_handling = jitter_buffer_enable_rtx_handling;
+ config.frame_decryptor = std::move(frame_decryptor);
+ config.crypto_options = crypto_options;
+ config.frame_transformer = std::move(frame_transformer);
+ return config;
+}
+
} // namespace
WebRtcVoiceEngine::WebRtcVoiceEngine(
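
For context, a hypothetical call-site sketch for the BuildReceiveStreamConfig helper introduced above; the wrapping function, argument values, and decoder map are placeholders and do not come from this patch.

// Assumes the includes already present in webrtc_voice_engine.cc plus
// <map> and "api/audio_codecs/builtin_audio_decoder_factory.h".
webrtc::AudioReceiveStream* CreateReceiveStreamSketch(
    webrtc::Call* call,
    webrtc::Transport* rtcp_send_transport,
    uint32_t remote_ssrc,
    uint32_t local_ssrc) {
  std::map<int, webrtc::SdpAudioFormat> decoder_map = {
      {111, webrtc::SdpAudioFormat("opus", 48000, 2)}};
  webrtc::AudioReceiveStream::Config config = BuildReceiveStreamConfig(
      remote_ssrc, local_ssrc, /*use_transport_cc=*/true, /*use_nack=*/true,
      /*stream_ids=*/{}, /*extensions=*/{}, rtcp_send_transport,
      webrtc::CreateBuiltinAudioDecoderFactory(), decoder_map,
      /*codec_pair_id=*/absl::nullopt,
      /*jitter_buffer_max_packets=*/200,
      /*jitter_buffer_fast_accelerate=*/false,
      /*jitter_buffer_min_delay_ms=*/0,
      /*jitter_buffer_enable_rtx_handling=*/false,
      /*frame_decryptor=*/nullptr, webrtc::CryptoOptions(),
      /*frame_transformer=*/nullptr);
  return call->CreateAudioReceiveStream(config);
}
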
@@ -330,7 +372,7 @@ void WebRtcVoiceEngine::Init() {
config.audio_device_module = adm_;
if (audio_frame_processor_)
config.async_audio_processing_factory =
- new rtc::RefCountedObject<webrtc::AsyncAudioProcessing::Factory>(
+ rtc::make_ref_counted<webrtc::AsyncAudioProcessing::Factory>(
*audio_frame_processor_, *task_queue_factory_);
audio_state_ = webrtc::AudioState::Create(config);
}
@@ -377,7 +419,7 @@ VoiceMediaChannel* WebRtcVoiceEngine::CreateMediaChannel(
const MediaConfig& config,
const AudioOptions& options,
const webrtc::CryptoOptions& crypto_options) {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ RTC_DCHECK_RUN_ON(call->worker_thread());
return new WebRtcVoiceMediaChannel(this, config, options, crypto_options,
call);
}
@@ -626,19 +668,6 @@ WebRtcVoiceEngine::GetRtpHeaderExtensions() const {
return result;
}
-void WebRtcVoiceEngine::RegisterChannel(WebRtcVoiceMediaChannel* channel) {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- RTC_DCHECK(channel);
- channels_.push_back(channel);
-}
-
-void WebRtcVoiceEngine::UnregisterChannel(WebRtcVoiceMediaChannel* channel) {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- auto it = absl::c_find(channels_, channel);
- RTC_DCHECK(it != channels_.end());
- channels_.erase(it);
-}
-
bool WebRtcVoiceEngine::StartAecDump(webrtc::FileWrapper file,
int64_t max_size_bytes) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
@@ -1175,48 +1204,11 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
public:
- WebRtcAudioReceiveStream(
- uint32_t remote_ssrc,
- uint32_t local_ssrc,
- bool use_transport_cc,
- bool use_nack,
- const std::vector<std::string>& stream_ids,
- const std::vector<webrtc::RtpExtension>& extensions,
- webrtc::Call* call,
- webrtc::Transport* rtcp_send_transport,
- const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
- const std::map<int, webrtc::SdpAudioFormat>& decoder_map,
- absl::optional<webrtc::AudioCodecPairId> codec_pair_id,
- size_t jitter_buffer_max_packets,
- bool jitter_buffer_fast_accelerate,
- int jitter_buffer_min_delay_ms,
- bool jitter_buffer_enable_rtx_handling,
- rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor,
- const webrtc::CryptoOptions& crypto_options,
- rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
- : call_(call), config_() {
+ WebRtcAudioReceiveStream(webrtc::AudioReceiveStream::Config config,
+ webrtc::Call* call)
+ : call_(call), stream_(call_->CreateAudioReceiveStream(config)) {
RTC_DCHECK(call);
- config_.rtp.remote_ssrc = remote_ssrc;
- config_.rtp.local_ssrc = local_ssrc;
- config_.rtp.transport_cc = use_transport_cc;
- config_.rtp.nack.rtp_history_ms = use_nack ? kNackRtpHistoryMs : 0;
- config_.rtp.extensions = extensions;
- config_.rtcp_send_transport = rtcp_send_transport;
- config_.jitter_buffer_max_packets = jitter_buffer_max_packets;
- config_.jitter_buffer_fast_accelerate = jitter_buffer_fast_accelerate;
- config_.jitter_buffer_min_delay_ms = jitter_buffer_min_delay_ms;
- config_.jitter_buffer_enable_rtx_handling =
- jitter_buffer_enable_rtx_handling;
- if (!stream_ids.empty()) {
- config_.sync_group = stream_ids[0];
- }
- config_.decoder_factory = decoder_factory;
- config_.decoder_map = decoder_map;
- config_.codec_pair_id = codec_pair_id;
- config_.frame_decryptor = frame_decryptor;
- config_.crypto_options = crypto_options;
- config_.frame_transformer = std::move(frame_transformer);
- RecreateAudioReceiveStream();
+ RTC_DCHECK(stream_);
}
WebRtcAudioReceiveStream() = delete;
@@ -1228,63 +1220,37 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
call_->DestroyAudioReceiveStream(stream_);
}
- void SetFrameDecryptor(
- rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- config_.frame_decryptor = frame_decryptor;
- RecreateAudioReceiveStream();
+ webrtc::AudioReceiveStream& stream() {
+ RTC_DCHECK(stream_);
+ return *stream_;
}
- void SetLocalSsrc(uint32_t local_ssrc) {
+ void SetFrameDecryptor(
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- if (local_ssrc != config_.rtp.local_ssrc) {
- config_.rtp.local_ssrc = local_ssrc;
- RecreateAudioReceiveStream();
- }
+ stream_->SetFrameDecryptor(std::move(frame_decryptor));
}
- void SetUseTransportCcAndRecreateStream(bool use_transport_cc,
- bool use_nack) {
+ void SetUseTransportCc(bool use_transport_cc, bool use_nack) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- config_.rtp.transport_cc = use_transport_cc;
- config_.rtp.nack.rtp_history_ms = use_nack ? kNackRtpHistoryMs : 0;
- ReconfigureAudioReceiveStream();
+ stream_->SetUseTransportCcAndNackHistory(use_transport_cc,
+ use_nack ? kNackRtpHistoryMs : 0);
}
- void SetRtpExtensionsAndRecreateStream(
- const std::vector<webrtc::RtpExtension>& extensions) {
+ void SetRtpExtensions(const std::vector<webrtc::RtpExtension>& extensions) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- config_.rtp.extensions = extensions;
- RecreateAudioReceiveStream();
+ stream_->SetRtpExtensions(extensions);
}
// Set a new payload type -> decoder map.
void SetDecoderMap(const std::map<int, webrtc::SdpAudioFormat>& decoder_map) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- config_.decoder_map = decoder_map;
- ReconfigureAudioReceiveStream();
- }
-
- void MaybeRecreateAudioReceiveStream(
- const std::vector<std::string>& stream_ids) {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- std::string sync_group;
- if (!stream_ids.empty()) {
- sync_group = stream_ids[0];
- }
- if (config_.sync_group != sync_group) {
- RTC_LOG(LS_INFO) << "Recreating AudioReceiveStream for SSRC="
- << config_.rtp.remote_ssrc
- << " because of sync group change.";
- config_.sync_group = sync_group;
- RecreateAudioReceiveStream();
- }
+ stream_->SetDecoderMap(decoder_map);
}
webrtc::AudioReceiveStream::Stats GetStats(
bool get_and_clear_legacy_stats) const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- RTC_DCHECK(stream_);
return stream_->GetStats(get_and_clear_legacy_stats);
}
@@ -1298,13 +1264,11 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
void SetOutputVolume(double volume) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- output_volume_ = volume;
stream_->SetGain(volume);
}
void SetPlayout(bool playout) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- RTC_DCHECK(stream_);
if (playout) {
stream_->Start();
} else {
@@ -1314,79 +1278,47 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
bool SetBaseMinimumPlayoutDelayMs(int delay_ms) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- RTC_DCHECK(stream_);
- if (stream_->SetBaseMinimumPlayoutDelayMs(delay_ms)) {
- // Memorize only valid delay because during stream recreation it will be
- // passed to the constructor and it must be valid value.
- config_.jitter_buffer_min_delay_ms = delay_ms;
+ if (stream_->SetBaseMinimumPlayoutDelayMs(delay_ms))
return true;
- } else {
- RTC_LOG(LS_ERROR) << "Failed to SetBaseMinimumPlayoutDelayMs"
- " on AudioReceiveStream on SSRC="
- << config_.rtp.remote_ssrc
- << " with delay_ms=" << delay_ms;
- return false;
- }
+
+ RTC_LOG(LS_ERROR) << "Failed to SetBaseMinimumPlayoutDelayMs"
+ " on AudioReceiveStream on SSRC="
+ << stream_->rtp_config().remote_ssrc
+ << " with delay_ms=" << delay_ms;
+ return false;
}
int GetBaseMinimumPlayoutDelayMs() const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- RTC_DCHECK(stream_);
return stream_->GetBaseMinimumPlayoutDelayMs();
}
std::vector<webrtc::RtpSource> GetSources() {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- RTC_DCHECK(stream_);
return stream_->GetSources();
}
webrtc::RtpParameters GetRtpParameters() const {
webrtc::RtpParameters rtp_parameters;
rtp_parameters.encodings.emplace_back();
- rtp_parameters.encodings[0].ssrc = config_.rtp.remote_ssrc;
- rtp_parameters.header_extensions = config_.rtp.extensions;
-
+ const auto& config = stream_->rtp_config();
+ rtp_parameters.encodings[0].ssrc = config.remote_ssrc;
+ rtp_parameters.header_extensions = config.extensions;
return rtp_parameters;
}
void SetDepacketizerToDecoderFrameTransformer(
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- config_.frame_transformer = std::move(frame_transformer);
- ReconfigureAudioReceiveStream();
+ stream_->SetDepacketizerToDecoderFrameTransformer(frame_transformer);
}
private:
- void RecreateAudioReceiveStream() {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- bool was_running = false;
- if (stream_) {
- was_running = stream_->IsRunning();
- call_->DestroyAudioReceiveStream(stream_);
- }
- stream_ = call_->CreateAudioReceiveStream(config_);
- RTC_CHECK(stream_);
- stream_->SetGain(output_volume_);
- if (was_running)
- SetPlayout(was_running);
- stream_->SetSink(raw_audio_sink_.get());
- }
-
- void ReconfigureAudioReceiveStream() {
- RTC_DCHECK_RUN_ON(&worker_thread_checker_);
- RTC_DCHECK(stream_);
- stream_->Reconfigure(config_);
- }
-
webrtc::SequenceChecker worker_thread_checker_;
webrtc::Call* call_ = nullptr;
- webrtc::AudioReceiveStream::Config config_;
- // The stream is owned by WebRtcAudioReceiveStream and may be reallocated if
- // configuration changes.
- webrtc::AudioReceiveStream* stream_ = nullptr;
- float output_volume_ = 1.0;
- std::unique_ptr<webrtc::AudioSinkInterface> raw_audio_sink_;
+ webrtc::AudioReceiveStream* const stream_ = nullptr;
+ std::unique_ptr<webrtc::AudioSinkInterface> raw_audio_sink_
+ RTC_GUARDED_BY(worker_thread_checker_);
};
WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(
@@ -1395,19 +1327,17 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(
const AudioOptions& options,
const webrtc::CryptoOptions& crypto_options,
webrtc::Call* call)
- : VoiceMediaChannel(config),
- worker_thread_(rtc::Thread::Current()),
+ : VoiceMediaChannel(config, call->network_thread()),
+ worker_thread_(call->worker_thread()),
engine_(engine),
call_(call),
audio_config_(config.audio),
crypto_options_(crypto_options),
audio_red_for_opus_trial_enabled_(
IsEnabled(call->trials(), "WebRTC-Audio-Red-For-Opus")) {
- RTC_DCHECK_RUN_ON(worker_thread_);
network_thread_checker_.Detach();
RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel";
RTC_DCHECK(call);
- engine->RegisterChannel(this);
SetOptions(options);
}
@@ -1423,7 +1353,6 @@ WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() {
while (!recv_streams_.empty()) {
RemoveRecvStream(recv_streams_.begin()->first);
}
- engine()->UnregisterChannel(this);
}
bool WebRtcVoiceMediaChannel::SetSendParameters(
@@ -1494,7 +1423,7 @@ bool WebRtcVoiceMediaChannel::SetRecvParameters(
if (recv_rtp_extensions_ != filtered_extensions) {
recv_rtp_extensions_.swap(filtered_extensions);
for (auto& it : recv_streams_) {
- it.second->SetRtpExtensionsAndRecreateStream(recv_rtp_extensions_);
+ it.second->SetRtpExtensions(recv_rtp_extensions_);
}
}
return true;
@@ -1859,8 +1788,8 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
recv_transport_cc_enabled_ = send_codec_spec_->transport_cc_enabled;
recv_nack_enabled_ = send_codec_spec_->nack_enabled;
for (auto& kv : recv_streams_) {
- kv.second->SetUseTransportCcAndRecreateStream(recv_transport_cc_enabled_,
- recv_nack_enabled_);
+ kv.second->SetUseTransportCc(recv_transport_cc_enabled_,
+ recv_nack_enabled_);
}
}
@@ -1956,10 +1885,8 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
// same SSRC in order to send receiver reports.
if (send_streams_.size() == 1) {
receiver_reports_ssrc_ = ssrc;
- for (const auto& kv : recv_streams_) {
- // TODO(solenberg): Allow applications to set the RTCP SSRC of receive
- // streams instead, so we can avoid reconfiguring the streams here.
- kv.second->SetLocalSsrc(ssrc);
+ for (auto& kv : recv_streams_) {
+ call_->OnLocalSsrcUpdated(kv.second->stream(), ssrc);
}
}
@@ -2012,9 +1939,12 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
const uint32_t ssrc = sp.first_ssrc();
// If this stream was previously received unsignaled, we promote it, possibly
- // recreating the AudioReceiveStream, if stream ids have changed.
+ // updating the sync group if stream ids have changed.
if (MaybeDeregisterUnsignaledRecvStream(ssrc)) {
- recv_streams_[ssrc]->MaybeRecreateAudioReceiveStream(sp.stream_ids());
+ auto stream_ids = sp.stream_ids();
+ std::string sync_group = stream_ids.empty() ? std::string() : stream_ids[0];
+ call_->OnUpdateSyncGroup(recv_streams_[ssrc]->stream(),
+ std::move(sync_group));
return true;
}
@@ -2024,16 +1954,18 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
}
// Create a new channel for receiving audio data.
+ auto config = BuildReceiveStreamConfig(
+ ssrc, receiver_reports_ssrc_, recv_transport_cc_enabled_,
+ recv_nack_enabled_, sp.stream_ids(), recv_rtp_extensions_, this,
+ engine()->decoder_factory_, decoder_map_, codec_pair_id_,
+ engine()->audio_jitter_buffer_max_packets_,
+ engine()->audio_jitter_buffer_fast_accelerate_,
+ engine()->audio_jitter_buffer_min_delay_ms_,
+ engine()->audio_jitter_buffer_enable_rtx_handling_,
+ unsignaled_frame_decryptor_, crypto_options_, nullptr);
+
recv_streams_.insert(std::make_pair(
- ssrc, new WebRtcAudioReceiveStream(
- ssrc, receiver_reports_ssrc_, recv_transport_cc_enabled_,
- recv_nack_enabled_, sp.stream_ids(), recv_rtp_extensions_,
- call_, this, engine()->decoder_factory_, decoder_map_,
- codec_pair_id_, engine()->audio_jitter_buffer_max_packets_,
- engine()->audio_jitter_buffer_fast_accelerate_,
- engine()->audio_jitter_buffer_min_delay_ms_,
- engine()->audio_jitter_buffer_enable_rtx_handling_,
- unsignaled_frame_decryptor_, crypto_options_, nullptr)));
+ ssrc, new WebRtcAudioReceiveStream(std::move(config), call_)));
recv_streams_[ssrc]->SetPlayout(playout_);
return true;
@@ -2571,6 +2503,18 @@ void WebRtcVoiceMediaChannel::SetDepacketizerToDecoderFrameTransformer(
std::move(frame_transformer));
}
+bool WebRtcVoiceMediaChannel::SendRtp(const uint8_t* data,
+ size_t len,
+ const webrtc::PacketOptions& options) {
+ MediaChannel::SendRtp(data, len, options);
+ return true;
+}
+
+bool WebRtcVoiceMediaChannel::SendRtcp(const uint8_t* data, size_t len) {
+ MediaChannel::SendRtcp(data, len);
+ return true;
+}
+
bool WebRtcVoiceMediaChannel::MaybeDeregisterUnsignaledRecvStream(
uint32_t ssrc) {
RTC_DCHECK_RUN_ON(worker_thread_);
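// Illustrative sketch (not part of the patch) of the receive-stream lifecycle
// after this change: the webrtc::AudioReceiveStream::Config is built once and
// the live stream is then adjusted through the setters used above, instead of
// being destroyed and recreated on every parameter change. The SSRCs, payload
// types and sync group below are placeholder values, and the calls are assumed
// to run on the worker thread.
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "call/audio_receive_stream.h"
#include "call/call.h"

void SketchAudioReceiveStreamLifecycle(webrtc::Call* call,
                                       webrtc::Transport* rtcp_send_transport) {
  webrtc::AudioReceiveStream::Config config;
  config.rtp.remote_ssrc = 11111;
  config.rtp.local_ssrc = 22222;
  config.rtcp_send_transport = rtcp_send_transport;
  config.decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
  config.decoder_map = {{111, {"opus", 48000, 2}}};
  webrtc::AudioReceiveStream* stream = call->CreateAudioReceiveStream(config);

  // Later changes mutate the existing stream; no recreation is needed.
  stream->SetDecoderMap({{111, {"opus", 48000, 2}}, {0, {"PCMU", 8000, 1}}});
  stream->SetUseTransportCcAndNackHistory(/*use_transport_cc=*/true,
                                          /*history_ms=*/5000);
  call->OnLocalSsrcUpdated(*stream, /*local_ssrc=*/33333);
  call->OnUpdateSyncGroup(*stream, /*sync_group=*/"stream_id");

  call->DestroyAudioReceiveStream(stream);
}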
diff --git a/media/engine/webrtc_voice_engine.h b/media/engine/webrtc_voice_engine.h
index 5a1cb57ff6..147688b0e0 100644
--- a/media/engine/webrtc_voice_engine.h
+++ b/media/engine/webrtc_voice_engine.h
@@ -38,8 +38,6 @@ class AudioFrameProcessor;
namespace cricket {
-class AudioDeviceModule;
-class AudioMixer;
class AudioSource;
class WebRtcVoiceMediaChannel;
@@ -80,12 +78,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface {
std::vector<webrtc::RtpHeaderExtensionCapability> GetRtpHeaderExtensions()
const override;
- // For tracking WebRtc channels. Needed because we have to pause them
- // all when switching devices.
- // May only be called by WebRtcVoiceMediaChannel.
- void RegisterChannel(WebRtcVoiceMediaChannel* channel);
- void UnregisterChannel(WebRtcVoiceMediaChannel* channel);
-
// Starts AEC dump using an existing file. A maximum file size in bytes can be
// specified. When the maximum file size is reached, logging is stopped and
// the file is closed. If max_size_bytes is set to <= 0, no limit will be
@@ -129,7 +121,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface {
rtc::scoped_refptr<webrtc::AudioState> audio_state_;
std::vector<AudioCodec> send_codecs_;
std::vector<AudioCodec> recv_codecs_;
- std::vector<WebRtcVoiceMediaChannel*> channels_;
bool is_dumping_aec_ = false;
bool initialized_ = false;
@@ -248,29 +239,9 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
// implements Transport interface
bool SendRtp(const uint8_t* data,
size_t len,
- const webrtc::PacketOptions& options) override {
- rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
- rtc::PacketOptions rtc_options;
- rtc_options.packet_id = options.packet_id;
- if (DscpEnabled()) {
- rtc_options.dscp = PreferredDscp();
- }
- rtc_options.info_signaled_after_sent.included_in_feedback =
- options.included_in_feedback;
- rtc_options.info_signaled_after_sent.included_in_allocation =
- options.included_in_allocation;
- return VoiceMediaChannel::SendPacket(&packet, rtc_options);
- }
-
- bool SendRtcp(const uint8_t* data, size_t len) override {
- rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
- rtc::PacketOptions rtc_options;
- if (DscpEnabled()) {
- rtc_options.dscp = PreferredDscp();
- }
-
- return VoiceMediaChannel::SendRtcp(&packet, rtc_options);
- }
+ const webrtc::PacketOptions& options) override;
+
+ bool SendRtcp(const uint8_t* data, size_t len) override;
private:
bool SetOptions(const AudioOptions& options);
diff --git a/media/engine/webrtc_voice_engine_unittest.cc b/media/engine/webrtc_voice_engine_unittest.cc
index 3286837d81..c570b1a03a 100644
--- a/media/engine/webrtc_voice_engine_unittest.cc
+++ b/media/engine/webrtc_voice_engine_unittest.cc
@@ -2845,7 +2845,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_NoRecreate) {
EXPECT_EQ(audio_receive_stream_id, streams.front()->id());
}
-TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Recreate) {
+TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Updates) {
EXPECT_TRUE(SetupChannel());
// Spawn unsignaled stream with SSRC=1.
@@ -2854,17 +2854,26 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Recreate) {
EXPECT_TRUE(
GetRecvStream(1).VerifyLastPacket(kPcmuFrame, sizeof(kPcmuFrame)));
- // Verify that the underlying stream object in Call *is* recreated when a
+ // Verify that the underlying stream object in Call gets updated when a
   // stream with SSRC=1 is added with changed stream parameters.
const auto& streams = call_.GetAudioReceiveStreams();
EXPECT_EQ(1u, streams.size());
+ // The sync_group id should be empty.
+ EXPECT_TRUE(streams.front()->GetConfig().sync_group.empty());
+
+ const std::string new_stream_id("stream_id");
int audio_receive_stream_id = streams.front()->id();
cricket::StreamParams stream_params;
stream_params.ssrcs.push_back(1);
- stream_params.set_stream_ids({"stream_id"});
+ stream_params.set_stream_ids({new_stream_id});
+
EXPECT_TRUE(channel_->AddRecvStream(stream_params));
EXPECT_EQ(1u, streams.size());
- EXPECT_NE(audio_receive_stream_id, streams.front()->id());
+ // The audio receive stream should not have been recreated.
+ EXPECT_EQ(audio_receive_stream_id, streams.front()->id());
+
+ // The sync_group id should now match with the new stream params.
+ EXPECT_EQ(new_stream_id, streams.front()->GetConfig().sync_group);
}
// Test that AddRecvStream creates new stream.
@@ -3203,6 +3212,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
channel->SetInterface(&network_interface);
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
+ channel->SetInterface(nullptr);
config.enable_dscp = true;
channel.reset(static_cast<cricket::WebRtcVoiceMediaChannel*>(
@@ -3229,6 +3239,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
const uint8_t kData[10] = {0};
EXPECT_TRUE(channel->SendRtcp(kData, sizeof(kData)));
EXPECT_EQ(rtc::DSCP_CS1, network_interface.options().dscp);
+ channel->SetInterface(nullptr);
// Verify that setting the option to false resets the
// DiffServCodePoint.
diff --git a/media/sctp/OWNERS.webrtc b/media/sctp/OWNERS.webrtc
index a32f041ac8..da2f0178a8 100644
--- a/media/sctp/OWNERS.webrtc
+++ b/media/sctp/OWNERS.webrtc
@@ -1 +1,3 @@
+boivie@webrtc.org
deadbeef@webrtc.org
+orphis@webrtc.org
diff --git a/media/sctp/dcsctp_transport.cc b/media/sctp/dcsctp_transport.cc
new file mode 100644
index 0000000000..90fb0e8aca
--- /dev/null
+++ b/media/sctp/dcsctp_transport.cc
@@ -0,0 +1,532 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/sctp/dcsctp_transport.h"
+
+#include <cstdint>
+#include <limits>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "media/base/media_channel.h"
+#include "net/dcsctp/public/dcsctp_socket_factory.h"
+#include "net/dcsctp/public/packet_observer.h"
+#include "net/dcsctp/public/types.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/trace_event.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+namespace {
+
+enum class WebrtcPPID : dcsctp::PPID::UnderlyingType {
+ // https://www.rfc-editor.org/rfc/rfc8832.html#section-8.1
+ kDCEP = 50,
+ // https://www.rfc-editor.org/rfc/rfc8831.html#section-8
+ kString = 51,
+ kBinaryPartial = 52, // Deprecated
+ kBinary = 53,
+ kStringPartial = 54, // Deprecated
+ kStringEmpty = 56,
+ kBinaryEmpty = 57,
+};
+
+WebrtcPPID ToPPID(DataMessageType message_type, size_t size) {
+ switch (message_type) {
+ case webrtc::DataMessageType::kControl:
+ return WebrtcPPID::kDCEP;
+ case webrtc::DataMessageType::kText:
+ return size > 0 ? WebrtcPPID::kString : WebrtcPPID::kStringEmpty;
+ case webrtc::DataMessageType::kBinary:
+ return size > 0 ? WebrtcPPID::kBinary : WebrtcPPID::kBinaryEmpty;
+ }
+}
+
+absl::optional<DataMessageType> ToDataMessageType(dcsctp::PPID ppid) {
+ switch (static_cast<WebrtcPPID>(ppid.value())) {
+ case WebrtcPPID::kDCEP:
+ return webrtc::DataMessageType::kControl;
+ case WebrtcPPID::kString:
+ case WebrtcPPID::kStringPartial:
+ case WebrtcPPID::kStringEmpty:
+ return webrtc::DataMessageType::kText;
+ case WebrtcPPID::kBinary:
+ case WebrtcPPID::kBinaryPartial:
+ case WebrtcPPID::kBinaryEmpty:
+ return webrtc::DataMessageType::kBinary;
+ }
+ return absl::nullopt;
+}
+
+bool IsEmptyPPID(dcsctp::PPID ppid) {
+ WebrtcPPID webrtc_ppid = static_cast<WebrtcPPID>(ppid.value());
+ return webrtc_ppid == WebrtcPPID::kStringEmpty ||
+ webrtc_ppid == WebrtcPPID::kBinaryEmpty;
+}
+
+// Prints all sent and received packets to the log at LS_VERBOSE severity.
+class TextPcapPacketObserver : public dcsctp::PacketObserver {
+ public:
+ explicit TextPcapPacketObserver(absl::string_view name) : name_(name) {}
+
+ void OnSentPacket(dcsctp::TimeMs now, rtc::ArrayView<const uint8_t> payload) {
+ PrintPacket("O ", now, payload);
+ }
+
+ void OnReceivedPacket(dcsctp::TimeMs now,
+ rtc::ArrayView<const uint8_t> payload) {
+ PrintPacket("I ", now, payload);
+ }
+
+ private:
+ void PrintPacket(absl::string_view prefix,
+ dcsctp::TimeMs now,
+ rtc::ArrayView<const uint8_t> payload) {
+ rtc::StringBuilder s;
+ s << "\n" << prefix;
+ int64_t remaining = *now % (24 * 60 * 60 * 1000);
+ int hours = remaining / (60 * 60 * 1000);
+ remaining = remaining % (60 * 60 * 1000);
+ int minutes = remaining / (60 * 1000);
+ remaining = remaining % (60 * 1000);
+ int seconds = remaining / 1000;
+ int ms = remaining % 1000;
+ s.AppendFormat("%02d:%02d:%02d.%03d", hours, minutes, seconds, ms);
+ s << " 0000";
+ for (uint8_t byte : payload) {
+ s.AppendFormat(" %02x", byte);
+ }
+ s << " # SCTP_PACKET " << name_;
+ RTC_LOG(LS_VERBOSE) << s.str();
+ }
+
+ const std::string name_;
+};
+
+} // namespace
+
+DcSctpTransport::DcSctpTransport(rtc::Thread* network_thread,
+ rtc::PacketTransportInternal* transport,
+ Clock* clock)
+ : network_thread_(network_thread),
+ transport_(transport),
+ clock_(clock),
+ random_(clock_->TimeInMicroseconds()),
+ task_queue_timeout_factory_(
+ *network_thread,
+ [this]() { return TimeMillis(); },
+ [this](dcsctp::TimeoutID timeout_id) {
+ socket_->HandleTimeout(timeout_id);
+ }) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ static int instance_count = 0;
+ rtc::StringBuilder sb;
+ sb << debug_name_ << instance_count++;
+ debug_name_ = sb.Release();
+ ConnectTransportSignals();
+}
+
+DcSctpTransport::~DcSctpTransport() {
+ if (socket_) {
+ socket_->Close();
+ }
+}
+
+void DcSctpTransport::SetDtlsTransport(
+ rtc::PacketTransportInternal* transport) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ DisconnectTransportSignals();
+ transport_ = transport;
+ ConnectTransportSignals();
+ MaybeConnectSocket();
+}
+
+bool DcSctpTransport::Start(int local_sctp_port,
+ int remote_sctp_port,
+ int max_message_size) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_DCHECK(max_message_size > 0);
+
+ RTC_LOG(LS_INFO) << debug_name_ << "->Start(local=" << local_sctp_port
+ << ", remote=" << remote_sctp_port
+ << ", max_message_size=" << max_message_size << ")";
+
+ if (!socket_) {
+ dcsctp::DcSctpOptions options;
+ options.local_port = local_sctp_port;
+ options.remote_port = remote_sctp_port;
+ options.max_message_size = max_message_size;
+
+ std::unique_ptr<dcsctp::PacketObserver> packet_observer;
+ if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE)) {
+ packet_observer = std::make_unique<TextPcapPacketObserver>(debug_name_);
+ }
+
+ dcsctp::DcSctpSocketFactory factory;
+ socket_ =
+ factory.Create(debug_name_, *this, std::move(packet_observer), options);
+ } else {
+ if (local_sctp_port != socket_->options().local_port ||
+ remote_sctp_port != socket_->options().remote_port) {
+ RTC_LOG(LS_ERROR)
+ << debug_name_ << "->Start(local=" << local_sctp_port
+ << ", remote=" << remote_sctp_port
+ << "): Can't change ports on already started transport.";
+ return false;
+ }
+ socket_->SetMaxMessageSize(max_message_size);
+ }
+
+ MaybeConnectSocket();
+
+ return true;
+}
+
+bool DcSctpTransport::OpenStream(int sid) {
+ RTC_LOG(LS_INFO) << debug_name_ << "->OpenStream(" << sid << ").";
+ if (!socket_) {
+ RTC_LOG(LS_ERROR) << debug_name_ << "->OpenStream(sid=" << sid
+ << "): Transport is not started.";
+ return false;
+ }
+ return true;
+}
+
+bool DcSctpTransport::ResetStream(int sid) {
+ RTC_LOG(LS_INFO) << debug_name_ << "->ResetStream(" << sid << ").";
+ if (!socket_) {
+    RTC_LOG(LS_ERROR) << debug_name_ << "->ResetStream(sid=" << sid
+ << "): Transport is not started.";
+ return false;
+ }
+ dcsctp::StreamID streams[1] = {dcsctp::StreamID(static_cast<uint16_t>(sid))};
+ socket_->ResetStreams(streams);
+ return true;
+}
+
+bool DcSctpTransport::SendData(int sid,
+ const SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ RTC_LOG(LS_VERBOSE) << debug_name_ << "->SendData(sid=" << sid
+ << ", type=" << static_cast<int>(params.type)
+ << ", length=" << payload.size() << ").";
+
+ if (!socket_) {
+ RTC_LOG(LS_ERROR) << debug_name_
+ << "->SendData(...): Transport is not started.";
+ *result = cricket::SDR_ERROR;
+ return false;
+ }
+
+ auto max_message_size = socket_->options().max_message_size;
+ if (max_message_size > 0 && payload.size() > max_message_size) {
+ RTC_LOG(LS_WARNING) << debug_name_
+ << "->SendData(...): "
+ "Trying to send packet bigger "
+ "than the max message size: "
+ << payload.size() << " vs max of " << max_message_size;
+ *result = cricket::SDR_ERROR;
+ return false;
+ }
+
+ std::vector<uint8_t> message_payload(payload.cdata(),
+ payload.cdata() + payload.size());
+ if (message_payload.empty()) {
+ // https://www.rfc-editor.org/rfc/rfc8831.html#section-6.6
+ // SCTP does not support the sending of empty user messages. Therefore, if
+ // an empty message has to be sent, the appropriate PPID (WebRTC String
+ // Empty or WebRTC Binary Empty) is used, and the SCTP user message of one
+ // zero byte is sent.
+ message_payload.push_back('\0');
+ }
+
+ dcsctp::DcSctpMessage message(
+ dcsctp::StreamID(static_cast<uint16_t>(sid)),
+ dcsctp::PPID(static_cast<uint16_t>(ToPPID(params.type, payload.size()))),
+ std::move(message_payload));
+
+ dcsctp::SendOptions send_options;
+ send_options.unordered = dcsctp::IsUnordered(!params.ordered);
+ if (params.max_rtx_ms.has_value()) {
+ RTC_DCHECK(*params.max_rtx_ms >= 0 &&
+ *params.max_rtx_ms <= std::numeric_limits<uint16_t>::max());
+ send_options.lifetime = dcsctp::DurationMs(*params.max_rtx_ms);
+ }
+ if (params.max_rtx_count.has_value()) {
+ RTC_DCHECK(*params.max_rtx_count >= 0 &&
+ *params.max_rtx_count <= std::numeric_limits<uint16_t>::max());
+ send_options.max_retransmissions = *params.max_rtx_count;
+ }
+
+ auto error = socket_->Send(std::move(message), send_options);
+ switch (error) {
+ case dcsctp::SendStatus::kSuccess:
+ *result = cricket::SDR_SUCCESS;
+ break;
+ case dcsctp::SendStatus::kErrorResourceExhaustion:
+ *result = cricket::SDR_BLOCK;
+ ready_to_send_data_ = false;
+ break;
+ default:
+ RTC_LOG(LS_ERROR) << debug_name_
+ << "->SendData(...): send() failed with error "
+ << dcsctp::ToString(error) << ".";
+ *result = cricket::SDR_ERROR;
+ }
+
+ return *result == cricket::SDR_SUCCESS;
+}
+
+bool DcSctpTransport::ReadyToSendData() {
+ return ready_to_send_data_;
+}
+
+int DcSctpTransport::max_message_size() const {
+ if (!socket_) {
+ RTC_LOG(LS_ERROR) << debug_name_
+ << "->max_message_size(...): Transport is not started.";
+ return 0;
+ }
+ return socket_->options().max_message_size;
+}
+
+absl::optional<int> DcSctpTransport::max_outbound_streams() const {
+ if (!socket_)
+ return absl::nullopt;
+ return socket_->options().announced_maximum_outgoing_streams;
+}
+
+absl::optional<int> DcSctpTransport::max_inbound_streams() const {
+ if (!socket_)
+ return absl::nullopt;
+ return socket_->options().announced_maximum_incoming_streams;
+}
+
+void DcSctpTransport::set_debug_name_for_testing(const char* debug_name) {
+ debug_name_ = debug_name;
+}
+
+void DcSctpTransport::SendPacket(rtc::ArrayView<const uint8_t> data) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_DCHECK(socket_);
+
+ if (data.size() > (socket_->options().mtu)) {
+ RTC_LOG(LS_ERROR) << debug_name_
+ << "->SendPacket(...): "
+ "SCTP seems to have made a packet that is bigger "
+ "than its official MTU: "
+ << data.size() << " vs max of " << socket_->options().mtu;
+ return;
+ }
+ TRACE_EVENT0("webrtc", "DcSctpTransport::SendPacket");
+
+ if (!transport_ || !transport_->writable())
+ return;
+
+ RTC_LOG(LS_VERBOSE) << debug_name_ << "->SendPacket(length=" << data.size()
+ << ")";
+
+ auto result =
+ transport_->SendPacket(reinterpret_cast<const char*>(data.data()),
+ data.size(), rtc::PacketOptions(), 0);
+
+ if (result < 0) {
+ RTC_LOG(LS_WARNING) << debug_name_ << "->SendPacket(length=" << data.size()
+ << ") failed with error: " << transport_->GetError()
+ << ".";
+ }
+}
+
+std::unique_ptr<dcsctp::Timeout> DcSctpTransport::CreateTimeout() {
+ return task_queue_timeout_factory_.CreateTimeout();
+}
+
+dcsctp::TimeMs DcSctpTransport::TimeMillis() {
+ return dcsctp::TimeMs(clock_->TimeInMilliseconds());
+}
+
+uint32_t DcSctpTransport::GetRandomInt(uint32_t low, uint32_t high) {
+ return random_.Rand(low, high);
+}
+
+void DcSctpTransport::OnTotalBufferedAmountLow() {
+ if (!ready_to_send_data_) {
+ ready_to_send_data_ = true;
+ SignalReadyToSendData();
+ }
+}
+
+void DcSctpTransport::OnMessageReceived(dcsctp::DcSctpMessage message) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnMessageReceived(sid="
+ << message.stream_id().value()
+ << ", ppid=" << message.ppid().value()
+ << ", length=" << message.payload().size() << ").";
+ cricket::ReceiveDataParams receive_data_params;
+ receive_data_params.sid = message.stream_id().value();
+ auto type = ToDataMessageType(message.ppid());
+ if (!type.has_value()) {
+ RTC_LOG(LS_VERBOSE) << debug_name_
+ << "->OnMessageReceived(): Received an unknown PPID "
+ << message.ppid().value()
+ << " on an SCTP packet. Dropping.";
+    return;
+  }
+ receive_data_params.type = *type;
+ // No seq_num available from dcSCTP
+ receive_data_params.seq_num = 0;
+ receive_buffer_.Clear();
+ if (!IsEmptyPPID(message.ppid()))
+ receive_buffer_.AppendData(message.payload().data(),
+ message.payload().size());
+
+ SignalDataReceived(receive_data_params, receive_buffer_);
+}
+
+void DcSctpTransport::OnError(dcsctp::ErrorKind error,
+ absl::string_view message) {
+ RTC_LOG(LS_ERROR) << debug_name_
+ << "->OnError(error=" << dcsctp::ToString(error)
+ << ", message=" << message << ").";
+}
+
+void DcSctpTransport::OnAborted(dcsctp::ErrorKind error,
+ absl::string_view message) {
+ RTC_LOG(LS_ERROR) << debug_name_
+ << "->OnAborted(error=" << dcsctp::ToString(error)
+ << ", message=" << message << ").";
+ ready_to_send_data_ = false;
+}
+
+void DcSctpTransport::OnConnected() {
+ RTC_LOG(LS_INFO) << debug_name_ << "->OnConnected().";
+ ready_to_send_data_ = true;
+ SignalReadyToSendData();
+ SignalAssociationChangeCommunicationUp();
+}
+
+void DcSctpTransport::OnClosed() {
+ RTC_LOG(LS_INFO) << debug_name_ << "->OnClosed().";
+ ready_to_send_data_ = false;
+}
+
+void DcSctpTransport::OnConnectionRestarted() {
+ RTC_LOG(LS_INFO) << debug_name_ << "->OnConnectionRestarted().";
+}
+
+void DcSctpTransport::OnStreamsResetFailed(
+ rtc::ArrayView<const dcsctp::StreamID> outgoing_streams,
+ absl::string_view reason) {
+ // TODO(orphis): Need a test to check for correct behavior
+ for (auto& stream_id : outgoing_streams) {
+ RTC_LOG(LS_WARNING)
+ << debug_name_
+ << "->OnStreamsResetFailed(...): Outgoing stream reset failed"
+ << ", sid=" << stream_id.value() << ", reason: " << reason << ".";
+ }
+}
+
+void DcSctpTransport::OnStreamsResetPerformed(
+ rtc::ArrayView<const dcsctp::StreamID> outgoing_streams) {
+ for (auto& stream_id : outgoing_streams) {
+ RTC_LOG(LS_INFO) << debug_name_
+ << "->OnStreamsResetPerformed(...): Outgoing stream reset"
+ << ", sid=" << stream_id.value();
+ SignalClosingProcedureComplete(stream_id.value());
+ }
+}
+
+void DcSctpTransport::OnIncomingStreamsReset(
+ rtc::ArrayView<const dcsctp::StreamID> incoming_streams) {
+ for (auto& stream_id : incoming_streams) {
+ RTC_LOG(LS_INFO) << debug_name_
+ << "->OnIncomingStreamsReset(...): Incoming stream reset"
+ << ", sid=" << stream_id.value();
+ SignalClosingProcedureStartedRemotely(stream_id.value());
+ SignalClosingProcedureComplete(stream_id.value());
+ }
+}
+
+void DcSctpTransport::ConnectTransportSignals() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (!transport_) {
+ return;
+ }
+ transport_->SignalWritableState.connect(
+ this, &DcSctpTransport::OnTransportWritableState);
+ transport_->SignalReadPacket.connect(this,
+ &DcSctpTransport::OnTransportReadPacket);
+ transport_->SignalClosed.connect(this, &DcSctpTransport::OnTransportClosed);
+}
+
+void DcSctpTransport::DisconnectTransportSignals() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (!transport_) {
+ return;
+ }
+ transport_->SignalWritableState.disconnect(this);
+ transport_->SignalReadPacket.disconnect(this);
+ transport_->SignalClosed.disconnect(this);
+}
+
+void DcSctpTransport::OnTransportWritableState(
+ rtc::PacketTransportInternal* transport) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_DCHECK_EQ(transport_, transport);
+
+ RTC_LOG(LS_VERBOSE) << debug_name_
+ << "->OnTransportWritableState(), writable="
+ << transport->writable();
+
+ MaybeConnectSocket();
+}
+
+void DcSctpTransport::OnTransportReadPacket(
+ rtc::PacketTransportInternal* transport,
+ const char* data,
+ size_t length,
+ const int64_t& /* packet_time_us */,
+ int flags) {
+ if (flags) {
+ // We are only interested in SCTP packets.
+ return;
+ }
+
+ RTC_LOG(LS_VERBOSE) << debug_name_
+ << "->OnTransportReadPacket(), length=" << length;
+ if (socket_) {
+ socket_->ReceivePacket(rtc::ArrayView<const uint8_t>(
+ reinterpret_cast<const uint8_t*>(data), length));
+ }
+}
+
+void DcSctpTransport::OnTransportClosed(
+ rtc::PacketTransportInternal* transport) {
+ RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnTransportClosed().";
+ SignalClosedAbruptly();
+}
+
+void DcSctpTransport::MaybeConnectSocket() {
+ if (transport_ && transport_->writable() && socket_ &&
+ socket_->state() == dcsctp::SocketState::kClosed) {
+ socket_->Connect();
+ }
+}
+} // namespace webrtc
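// Standalone sketch of the empty-message convention implemented above
// (an illustration mirroring ToPPID()/IsEmptyPPID(), not WebRTC code): an
// empty data channel message travels as a single zero byte tagged with an
// "empty" PPID (56 for string, 57 for binary), and the receiver restores the
// empty payload whenever the PPID marks an empty message.
#include <cstdint>
#include <cstdio>
#include <vector>

enum class Ppid : uint32_t {
  kString = 51,
  kBinary = 53,
  kStringEmpty = 56,
  kBinaryEmpty = 57,
};

struct WireMessage {
  Ppid ppid;
  std::vector<uint8_t> payload;
};

WireMessage ToWire(bool binary, const std::vector<uint8_t>& payload) {
  if (payload.empty())
    // SCTP cannot carry empty user messages, so send one placeholder byte.
    return {binary ? Ppid::kBinaryEmpty : Ppid::kStringEmpty, {0}};
  return {binary ? Ppid::kBinary : Ppid::kString, payload};
}

std::vector<uint8_t> FromWire(const WireMessage& message) {
  if (message.ppid == Ppid::kStringEmpty || message.ppid == Ppid::kBinaryEmpty)
    return {};  // The placeholder byte is dropped; the real message is empty.
  return message.payload;
}

int main() {
  WireMessage wire = ToWire(/*binary=*/false, {});
  std::printf("wire bytes=%zu, restored bytes=%zu\n", wire.payload.size(),
              FromWire(wire).size());
  return 0;
}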
diff --git a/media/sctp/dcsctp_transport.h b/media/sctp/dcsctp_transport.h
new file mode 100644
index 0000000000..15933383b5
--- /dev/null
+++ b/media/sctp/dcsctp_transport.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_SCTP_DCSCTP_TRANSPORT_H_
+#define MEDIA_SCTP_DCSCTP_TRANSPORT_H_
+
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/timer/task_queue_timeout.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/random.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+class DcSctpTransport : public cricket::SctpTransportInternal,
+ public dcsctp::DcSctpSocketCallbacks,
+ public sigslot::has_slots<> {
+ public:
+ DcSctpTransport(rtc::Thread* network_thread,
+ rtc::PacketTransportInternal* transport,
+ Clock* clock);
+ ~DcSctpTransport() override;
+
+ // cricket::SctpTransportInternal
+ void SetDtlsTransport(rtc::PacketTransportInternal* transport) override;
+ bool Start(int local_sctp_port,
+ int remote_sctp_port,
+ int max_message_size) override;
+ bool OpenStream(int sid) override;
+ bool ResetStream(int sid) override;
+ bool SendData(int sid,
+ const SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result = nullptr) override;
+ bool ReadyToSendData() override;
+ int max_message_size() const override;
+ absl::optional<int> max_outbound_streams() const override;
+ absl::optional<int> max_inbound_streams() const override;
+ void set_debug_name_for_testing(const char* debug_name) override;
+
+ private:
+ // dcsctp::DcSctpSocketCallbacks
+ void SendPacket(rtc::ArrayView<const uint8_t> data) override;
+ std::unique_ptr<dcsctp::Timeout> CreateTimeout() override;
+ dcsctp::TimeMs TimeMillis() override;
+ uint32_t GetRandomInt(uint32_t low, uint32_t high) override;
+ void OnTotalBufferedAmountLow() override;
+ void OnMessageReceived(dcsctp::DcSctpMessage message) override;
+ void OnError(dcsctp::ErrorKind error, absl::string_view message) override;
+ void OnAborted(dcsctp::ErrorKind error, absl::string_view message) override;
+ void OnConnected() override;
+ void OnClosed() override;
+ void OnConnectionRestarted() override;
+ void OnStreamsResetFailed(
+ rtc::ArrayView<const dcsctp::StreamID> outgoing_streams,
+ absl::string_view reason) override;
+ void OnStreamsResetPerformed(
+ rtc::ArrayView<const dcsctp::StreamID> outgoing_streams) override;
+ void OnIncomingStreamsReset(
+ rtc::ArrayView<const dcsctp::StreamID> incoming_streams) override;
+
+ // Transport callbacks
+ void ConnectTransportSignals();
+ void DisconnectTransportSignals();
+ void OnTransportWritableState(rtc::PacketTransportInternal* transport);
+ void OnTransportReadPacket(rtc::PacketTransportInternal* transport,
+ const char* data,
+ size_t length,
+ const int64_t& /* packet_time_us */,
+ int flags);
+ void OnTransportClosed(rtc::PacketTransportInternal* transport);
+
+ void MaybeConnectSocket();
+
+ rtc::Thread* network_thread_;
+ rtc::PacketTransportInternal* transport_;
+ Clock* clock_;
+ Random random_;
+
+ dcsctp::TaskQueueTimeoutFactory task_queue_timeout_factory_;
+ std::unique_ptr<dcsctp::DcSctpSocketInterface> socket_;
+ std::string debug_name_ = "DcSctpTransport";
+ rtc::CopyOnWriteBuffer receive_buffer_;
+
+ bool ready_to_send_data_ = false;
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_SCTP_DCSCTP_TRANSPORT_H_
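// Hedged usage sketch for the interface declared above; `network_thread` and
// `dtls_transport` are assumed to exist in the embedding code, every call is
// assumed to run on the network thread, and sending only succeeds once the
// DTLS transport is writable and the SCTP association has connected.
#include <memory>

#include "api/transport/data_channel_transport_interface.h"
#include "media/sctp/dcsctp_transport.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "system_wrappers/include/clock.h"

void SendHelloOverDcSctp(rtc::Thread* network_thread,
                         rtc::PacketTransportInternal* dtls_transport) {
  auto sctp = std::make_unique<webrtc::DcSctpTransport>(
      network_thread, dtls_transport, webrtc::Clock::GetRealTimeClock());
  sctp->Start(/*local_sctp_port=*/5000, /*remote_sctp_port=*/5000,
              /*max_message_size=*/256 * 1024);
  sctp->OpenStream(/*sid=*/1);

  webrtc::SendDataParams params;
  params.ordered = true;
  cricket::SendDataResult result;
  sctp->SendData(/*sid=*/1, params, rtc::CopyOnWriteBuffer("hello", 5),
                 &result);
}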
diff --git a/media/sctp/sctp_transport_factory.cc b/media/sctp/sctp_transport_factory.cc
index 40480e797a..5097d423d9 100644
--- a/media/sctp/sctp_transport_factory.cc
+++ b/media/sctp/sctp_transport_factory.cc
@@ -12,6 +12,12 @@
#include "rtc_base/system/unused.h"
+#ifdef WEBRTC_HAVE_DCSCTP
+#include "media/sctp/dcsctp_transport.h" // nogncheck
+#include "system_wrappers/include/clock.h" // nogncheck
+#include "system_wrappers/include/field_trial.h" // nogncheck
+#endif
+
#ifdef WEBRTC_HAVE_USRSCTP
#include "media/sctp/usrsctp_transport.h" // nogncheck
#endif
@@ -19,14 +25,24 @@
namespace cricket {
SctpTransportFactory::SctpTransportFactory(rtc::Thread* network_thread)
- : network_thread_(network_thread) {
+ : network_thread_(network_thread), use_dcsctp_("Enabled", false) {
RTC_UNUSED(network_thread_);
+#ifdef WEBRTC_HAVE_DCSCTP
+ webrtc::ParseFieldTrial({&use_dcsctp_}, webrtc::field_trial::FindFullName(
+ "WebRTC-DataChannel-Dcsctp"));
+#endif
}
std::unique_ptr<SctpTransportInternal>
SctpTransportFactory::CreateSctpTransport(
rtc::PacketTransportInternal* transport) {
std::unique_ptr<SctpTransportInternal> result;
+#ifdef WEBRTC_HAVE_DCSCTP
+ if (use_dcsctp_.Get()) {
+ result = std::unique_ptr<SctpTransportInternal>(new webrtc::DcSctpTransport(
+ network_thread_, transport, webrtc::Clock::GetRealTimeClock()));
+ }
+#endif
#ifdef WEBRTC_HAVE_USRSCTP
if (!result) {
result = std::unique_ptr<SctpTransportInternal>(
diff --git a/media/sctp/sctp_transport_factory.h b/media/sctp/sctp_transport_factory.h
index 4fff214129..ed7c2163d7 100644
--- a/media/sctp/sctp_transport_factory.h
+++ b/media/sctp/sctp_transport_factory.h
@@ -15,6 +15,7 @@
#include "api/transport/sctp_transport_factory_interface.h"
#include "media/sctp/sctp_transport_internal.h"
+#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/thread.h"
namespace cricket {
@@ -28,6 +29,7 @@ class SctpTransportFactory : public webrtc::SctpTransportFactoryInterface {
private:
rtc::Thread* network_thread_;
+ webrtc::FieldTrialFlag use_dcsctp_;
};
} // namespace cricket
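// Sketch of the field-trial driven backend selection wired up above; it
// assumes a build with WEBRTC_HAVE_DCSCTP and existing `network_thread` and
// `dtls_transport` objects. The factory reads the trial in its constructor,
// so the trial string has to be installed first; without it (or without
// dcSCTP in the build) the factory falls back to UsrsctpTransport when
// WEBRTC_HAVE_USRSCTP is defined.
#include <memory>

#include "media/sctp/sctp_transport_factory.h"
#include "p2p/base/packet_transport_internal.h"
#include "system_wrappers/include/field_trial.h"

std::unique_ptr<cricket::SctpTransportInternal> CreateDcSctpBackedTransport(
    rtc::Thread* network_thread,
    rtc::PacketTransportInternal* dtls_transport) {
  // The field-trial lookup keeps a pointer to this string, so it must outlive
  // all lookups; a static literal is the simplest way to guarantee that.
  static const char kFieldTrials[] = "WebRTC-DataChannel-Dcsctp/Enabled/";
  webrtc::field_trial::InitFieldTrialsFromString(kFieldTrials);

  cricket::SctpTransportFactory factory(network_thread);
  return factory.CreateSctpTransport(dtls_transport);
}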
diff --git a/media/sctp/sctp_transport_internal.h b/media/sctp/sctp_transport_internal.h
index dc8ac4558d..96c35ffb93 100644
--- a/media/sctp/sctp_transport_internal.h
+++ b/media/sctp/sctp_transport_internal.h
@@ -18,6 +18,7 @@
#include <string>
#include <vector>
+#include "api/transport/data_channel_transport_interface.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/thread.h"
// For SendDataParams/ReceiveDataParams.
@@ -101,7 +102,8 @@ class SctpTransportInternal {
// usrsctp that will then post the network interface).
   // Returns true iff the data was successfully queued somewhere on the
   // send-queue/network. The |sid| argument is used as the SCTP sid.
- virtual bool SendData(const SendDataParams& params,
+ virtual bool SendData(int sid,
+ const webrtc::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result = nullptr) = 0;
diff --git a/media/sctp/usrsctp_transport.cc b/media/sctp/usrsctp_transport.cc
index fc226bf7ff..d43c017207 100644
--- a/media/sctp/usrsctp_transport.cc
+++ b/media/sctp/usrsctp_transport.cc
@@ -74,24 +74,25 @@ static constexpr size_t kSctpMtu = 1191;
ABSL_CONST_INIT int g_usrsctp_usage_count = 0;
ABSL_CONST_INIT bool g_usrsctp_initialized_ = false;
ABSL_CONST_INIT webrtc::GlobalMutex g_usrsctp_lock_(absl::kConstInit);
+ABSL_CONST_INIT char kZero[] = {'\0'};
// DataMessageType is used for the SCTP "Payload Protocol Identifier", as
// defined in http://tools.ietf.org/html/rfc4960#section-14.4
//
// For the list of IANA approved values see:
+// https://tools.ietf.org/html/rfc8831 Sec. 8
// http://www.iana.org/assignments/sctp-parameters/sctp-parameters.xml
// The value is not used by SCTP itself. It indicates the protocol running
// on top of SCTP.
enum {
PPID_NONE = 0, // No protocol is specified.
- // Matches the PPIDs in mozilla source and
- // https://datatracker.ietf.org/doc/draft-ietf-rtcweb-data-protocol Sec. 9
- // They're not yet assigned by IANA.
PPID_CONTROL = 50,
- PPID_BINARY_PARTIAL = 52,
+ PPID_TEXT_LAST = 51,
+ PPID_BINARY_PARTIAL = 52, // Deprecated
PPID_BINARY_LAST = 53,
- PPID_TEXT_PARTIAL = 54,
- PPID_TEXT_LAST = 51
+ PPID_TEXT_PARTIAL = 54, // Deprecated
+ PPID_TEXT_EMPTY = 56,
+ PPID_BINARY_EMPTY = 57,
};
// Should only be modified by UsrSctpWrapper.
@@ -128,44 +129,41 @@ void DebugSctpPrintf(const char* format, ...) {
}
// Get the PPID to use for the terminating fragment of this type.
-uint32_t GetPpid(cricket::DataMessageType type) {
+uint32_t GetPpid(webrtc::DataMessageType type, size_t size) {
switch (type) {
- default:
- case cricket::DMT_NONE:
- return PPID_NONE;
- case cricket::DMT_CONTROL:
+ case webrtc::DataMessageType::kControl:
return PPID_CONTROL;
- case cricket::DMT_BINARY:
- return PPID_BINARY_LAST;
- case cricket::DMT_TEXT:
- return PPID_TEXT_LAST;
+ case webrtc::DataMessageType::kBinary:
+ return size > 0 ? PPID_BINARY_LAST : PPID_BINARY_EMPTY;
+ case webrtc::DataMessageType::kText:
+ return size > 0 ? PPID_TEXT_LAST : PPID_TEXT_EMPTY;
}
}
-bool GetDataMediaType(uint32_t ppid, cricket::DataMessageType* dest) {
+bool GetDataMediaType(uint32_t ppid, webrtc::DataMessageType* dest) {
RTC_DCHECK(dest != NULL);
switch (ppid) {
case PPID_BINARY_PARTIAL:
case PPID_BINARY_LAST:
- *dest = cricket::DMT_BINARY;
+ case PPID_BINARY_EMPTY:
+ *dest = webrtc::DataMessageType::kBinary;
return true;
case PPID_TEXT_PARTIAL:
case PPID_TEXT_LAST:
- *dest = cricket::DMT_TEXT;
+ case PPID_TEXT_EMPTY:
+ *dest = webrtc::DataMessageType::kText;
return true;
case PPID_CONTROL:
- *dest = cricket::DMT_CONTROL;
- return true;
-
- case PPID_NONE:
- *dest = cricket::DMT_NONE;
+ *dest = webrtc::DataMessageType::kControl;
return true;
-
- default:
- return false;
}
+ return false;
+}
+
+bool IsEmptyPPID(uint32_t ppid) {
+ return ppid == PPID_BINARY_EMPTY || ppid == PPID_TEXT_EMPTY;
}
// Log the packet in text2pcap format, if log level is at LS_VERBOSE.
@@ -205,11 +203,13 @@ void VerboseLogPacket(const void* data, size_t length, int direction) {
// Creates the sctp_sendv_spa struct used for setting flags in the
// sctp_sendv() call.
-sctp_sendv_spa CreateSctpSendParams(const cricket::SendDataParams& params) {
+sctp_sendv_spa CreateSctpSendParams(int sid,
+ const webrtc::SendDataParams& params,
+ size_t size) {
struct sctp_sendv_spa spa = {0};
spa.sendv_flags |= SCTP_SEND_SNDINFO_VALID;
- spa.sendv_sndinfo.snd_sid = params.sid;
- spa.sendv_sndinfo.snd_ppid = rtc::HostToNetwork32(GetPpid(params.type));
+ spa.sendv_sndinfo.snd_sid = sid;
+ spa.sendv_sndinfo.snd_ppid = rtc::HostToNetwork32(GetPpid(params.type, size));
// Explicitly marking the EOR flag turns the usrsctp_sendv call below into a
// non atomic operation. This means that the sctp lib might only accept the
// message partially. This is done in order to improve throughput, so that we
@@ -217,18 +217,22 @@ sctp_sendv_spa CreateSctpSendParams(const cricket::SendDataParams& params) {
// example.
spa.sendv_sndinfo.snd_flags |= SCTP_EOR;
- // Ordered implies reliable.
if (!params.ordered) {
spa.sendv_sndinfo.snd_flags |= SCTP_UNORDERED;
- if (params.max_rtx_count >= 0 || params.max_rtx_ms == 0) {
- spa.sendv_flags |= SCTP_SEND_PRINFO_VALID;
- spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_RTX;
- spa.sendv_prinfo.pr_value = params.max_rtx_count;
- } else {
- spa.sendv_flags |= SCTP_SEND_PRINFO_VALID;
- spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_TTL;
- spa.sendv_prinfo.pr_value = params.max_rtx_ms;
- }
+ }
+ if (params.max_rtx_count.has_value()) {
+ RTC_DCHECK(*params.max_rtx_count >= 0 &&
+ *params.max_rtx_count <= std::numeric_limits<uint16_t>::max());
+ spa.sendv_flags |= SCTP_SEND_PRINFO_VALID;
+ spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_RTX;
+ spa.sendv_prinfo.pr_value = *params.max_rtx_count;
+ }
+ if (params.max_rtx_ms.has_value()) {
+ RTC_DCHECK(*params.max_rtx_ms >= 0 &&
+ *params.max_rtx_ms <= std::numeric_limits<uint16_t>::max());
+ spa.sendv_flags |= SCTP_SEND_PRINFO_VALID;
+ spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_TTL;
+ spa.sendv_prinfo.pr_value = *params.max_rtx_ms;
}
return spa;
}
@@ -712,7 +716,8 @@ bool UsrsctpTransport::ResetStream(int sid) {
return true;
}
-bool UsrsctpTransport::SendData(const SendDataParams& params,
+bool UsrsctpTransport::SendData(int sid,
+ const webrtc::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result) {
RTC_DCHECK_RUN_ON(network_thread_);
@@ -727,13 +732,13 @@ bool UsrsctpTransport::SendData(const SendDataParams& params,
}
// Do not queue data to send on a closing stream.
- auto it = stream_status_by_sid_.find(params.sid);
+ auto it = stream_status_by_sid_.find(sid);
if (it == stream_status_by_sid_.end() || !it->second.is_open()) {
RTC_LOG(LS_WARNING)
<< debug_name_
<< "->SendData(...): "
"Not sending data because sid is unknown or closing: "
- << params.sid;
+ << sid;
if (result) {
*result = SDR_ERROR;
}
@@ -741,7 +746,7 @@ bool UsrsctpTransport::SendData(const SendDataParams& params,
}
size_t payload_size = payload.size();
- OutgoingMessage message(payload, params);
+ OutgoingMessage message(payload, sid, params);
SendDataResult send_message_result = SendMessageInternal(&message);
if (result) {
*result = send_message_result;
@@ -770,17 +775,17 @@ SendDataResult UsrsctpTransport::SendMessageInternal(OutgoingMessage* message) {
RTC_LOG(LS_WARNING) << debug_name_
<< "->SendMessageInternal(...): "
"Not sending packet with sid="
- << message->send_params().sid
- << " len=" << message->size() << " before Start().";
+ << message->sid() << " len=" << message->size()
+ << " before Start().";
return SDR_ERROR;
}
- if (message->send_params().type != DMT_CONTROL) {
- auto it = stream_status_by_sid_.find(message->send_params().sid);
+ if (message->send_params().type != webrtc::DataMessageType::kControl) {
+ auto it = stream_status_by_sid_.find(message->sid());
if (it == stream_status_by_sid_.end()) {
RTC_LOG(LS_WARNING) << debug_name_
<< "->SendMessageInternal(...): "
"Not sending data because sid is unknown: "
- << message->send_params().sid;
+ << message->sid();
return SDR_ERROR;
}
}
@@ -792,13 +797,23 @@ SendDataResult UsrsctpTransport::SendMessageInternal(OutgoingMessage* message) {
}
// Send data using SCTP.
- sctp_sendv_spa spa = CreateSctpSendParams(message->send_params());
+ sctp_sendv_spa spa = CreateSctpSendParams(
+ message->sid(), message->send_params(), message->size());
+ const void* data = message->data();
+ size_t data_length = message->size();
+ if (message->size() == 0) {
+ // Empty messages are replaced by a single NUL byte on the wire as SCTP
+ // doesn't support empty messages.
+ // The PPID carries the information that the payload needs to be ignored.
+ data = kZero;
+ data_length = 1;
+ }
// Note: this send call is not atomic because the EOR bit is set. This means
// that usrsctp can partially accept this message and it is our duty to buffer
// the rest.
- ssize_t send_res = usrsctp_sendv(
- sock_, message->data(), message->size(), NULL, 0, &spa,
- rtc::checked_cast<socklen_t>(sizeof(spa)), SCTP_SENDV_SPA, 0);
+ ssize_t send_res = usrsctp_sendv(sock_, data, data_length, NULL, 0, &spa,
+ rtc::checked_cast<socklen_t>(sizeof(spa)),
+ SCTP_SENDV_SPA, 0);
if (send_res < 0) {
if (errno == SCTP_EWOULDBLOCK) {
ready_to_send_data_ = false;
@@ -814,8 +829,9 @@ SendDataResult UsrsctpTransport::SendMessageInternal(OutgoingMessage* message) {
}
size_t amount_sent = static_cast<size_t>(send_res);
- RTC_DCHECK_LE(amount_sent, message->size());
- message->Advance(amount_sent);
+ RTC_DCHECK_LE(amount_sent, data_length);
+ if (message->size() != 0)
+ message->Advance(amount_sent);
// Only way out now is success.
return SDR_SUCCESS;
}
@@ -1058,7 +1074,7 @@ bool UsrsctpTransport::SendQueuedStreamResets() {
// https://w3c.github.io/webrtc-pc/#closing-procedure
return stream.second.need_outgoing_reset() &&
(!partial_outgoing_message_.has_value() ||
- partial_outgoing_message_.value().send_params().sid !=
+ partial_outgoing_message_.value().sid() !=
static_cast<int>(stream.first));
};
// Figure out how many streams need to be reset. We need to do this so we can
@@ -1135,7 +1151,7 @@ bool UsrsctpTransport::SendBufferedMessage() {
}
RTC_DCHECK_EQ(0u, partial_outgoing_message_->size());
- int sid = partial_outgoing_message_->send_params().sid;
+ int sid = partial_outgoing_message_->sid();
partial_outgoing_message_.reset();
// Send the queued stream reset if it was pending for this stream.
@@ -1291,7 +1307,7 @@ void UsrsctpTransport::OnDataOrNotificationFromSctp(const void* data,
<< ", eor=" << ((flags & MSG_EOR) ? "y" : "n");
// Validate payload protocol identifier
- DataMessageType type = DMT_NONE;
+ webrtc::DataMessageType type;
if (!GetDataMediaType(ppid, &type)) {
// Unexpected PPID, dropping
RTC_LOG(LS_ERROR) << "Received an unknown PPID " << ppid
@@ -1319,9 +1335,12 @@ void UsrsctpTransport::OnDataOrNotificationFromSctp(const void* data,
// association.
params.seq_num = rcv.rcv_ssn;
- // Append the chunk's data to the message buffer
- partial_incoming_message_.AppendData(reinterpret_cast<const uint8_t*>(data),
- length);
+ // Append the chunk's data to the message buffer unless we have a chunk with a
+ // PPID marking an empty message.
+ // See: https://tools.ietf.org/html/rfc8831#section-6.6
+ if (!IsEmptyPPID(ppid))
+ partial_incoming_message_.AppendData(reinterpret_cast<const uint8_t*>(data),
+ length);
partial_params_ = params;
partial_flags_ = flags;
diff --git a/media/sctp/usrsctp_transport.h b/media/sctp/usrsctp_transport.h
index de018b924e..5dcf57b243 100644
--- a/media/sctp/usrsctp_transport.h
+++ b/media/sctp/usrsctp_transport.h
@@ -81,7 +81,8 @@ class UsrsctpTransport : public SctpTransportInternal,
bool Start(int local_port, int remote_port, int max_message_size) override;
bool OpenStream(int sid) override;
bool ResetStream(int sid) override;
- bool SendData(const SendDataParams& params,
+ bool SendData(int sid,
+ const webrtc::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result = nullptr) override;
bool ReadyToSendData() override;
@@ -113,8 +114,9 @@ class UsrsctpTransport : public SctpTransportInternal,
class OutgoingMessage {
public:
OutgoingMessage(const rtc::CopyOnWriteBuffer& buffer,
- const SendDataParams& send_params)
- : buffer_(buffer), send_params_(send_params) {}
+ int sid,
+ const webrtc::SendDataParams& send_params)
+ : buffer_(buffer), sid_(sid), send_params_(send_params) {}
// Advances the buffer by the incremented amount. Must not advance further
// than the current data size.
@@ -127,11 +129,13 @@ class UsrsctpTransport : public SctpTransportInternal,
const void* data() const { return buffer_.data() + offset_; }
- SendDataParams send_params() const { return send_params_; }
+ int sid() const { return sid_; }
+ webrtc::SendDataParams send_params() const { return send_params_; }
private:
const rtc::CopyOnWriteBuffer buffer_;
- const SendDataParams send_params_;
+ int sid_;
+ const webrtc::SendDataParams send_params_;
size_t offset_ = 0;
};
diff --git a/media/sctp/usrsctp_transport_reliability_unittest.cc b/media/sctp/usrsctp_transport_reliability_unittest.cc
index ddc8419a11..104e320398 100644
--- a/media/sctp/usrsctp_transport_reliability_unittest.cc
+++ b/media/sctp/usrsctp_transport_reliability_unittest.cc
@@ -133,23 +133,19 @@ class SimulatedPacketTransport final : public rtc::PacketTransportInternal {
};
/**
- * A helper class to send specified number of messages
- * over UsrsctpTransport with SCTP reliability settings
- * provided by user. The reliability settings are specified
- * by passing a template instance of SendDataParams.
- * When .sid field inside SendDataParams is specified to
- * negative value it means that actual .sid will be
- * assigned by sender itself, .sid will be assigned from
- * range [cricket::kMinSctpSid; cricket::kMaxSctpSid].
- * The wide range of sids are used to possibly trigger
- * more execution paths inside usrsctp.
+ * A helper class to send a specified number of messages over UsrsctpTransport
+ * with SCTP reliability settings provided by the user. The reliability
+ * settings are specified by passing a template instance of SendDataParams.
+ * The sid is assigned by the sender itself from the range
+ * [cricket::kMinSctpSid; cricket::kMaxSctpSid]. The wide range of sids is used
+ * to possibly trigger more execution paths inside usrsctp.
*/
class SctpDataSender final {
public:
SctpDataSender(rtc::Thread* thread,
cricket::UsrsctpTransport* transport,
uint64_t target_messages_count,
- cricket::SendDataParams send_params,
+ webrtc::SendDataParams send_params,
uint32_t sender_id)
: thread_(thread),
transport_(transport),
@@ -200,14 +196,12 @@ class SctpDataSender final {
<< target_messages_count_;
}
- cricket::SendDataParams params(send_params_);
- if (params.sid < 0) {
- params.sid = cricket::kMinSctpSid +
- (num_messages_sent_ % cricket::kMaxSctpStreams);
- }
+ webrtc::SendDataParams params(send_params_);
+ int sid =
+ cricket::kMinSctpSid + (num_messages_sent_ % cricket::kMaxSctpStreams);
cricket::SendDataResult result;
- transport_->SendData(params, payload_, &result);
+ transport_->SendData(sid, params, payload_, &result);
switch (result) {
case cricket::SDR_BLOCK:
// retry after timeout
@@ -233,7 +227,7 @@ class SctpDataSender final {
rtc::Thread* const thread_;
cricket::UsrsctpTransport* const transport_;
const uint64_t target_messages_count_;
- const cricket::SendDataParams send_params_;
+ const webrtc::SendDataParams send_params_;
const uint32_t sender_id_;
rtc::CopyOnWriteBuffer payload_{std::string(1400, '.').c_str(), 1400};
std::atomic<bool> started_ ATOMIC_VAR_INIT(false);
@@ -329,7 +323,7 @@ class SctpPingPong final {
uint32_t messages_count,
uint8_t packet_loss_percents,
uint16_t avg_send_delay_millis,
- cricket::SendDataParams send_params)
+ webrtc::SendDataParams send_params)
: id_(id),
port1_(port1),
port2_(port2),
@@ -582,7 +576,7 @@ class SctpPingPong final {
const uint32_t messages_count_;
const uint8_t packet_loss_percents_;
const uint16_t avg_send_delay_millis_;
- const cricket::SendDataParams send_params_;
+ const webrtc::SendDataParams send_params_;
RTC_DISALLOW_COPY_AND_ASSIGN(SctpPingPong);
};
@@ -643,12 +637,8 @@ TEST_F(UsrSctpReliabilityTest,
static_assert(wait_timeout > 0,
"Timeout computation must produce positive value");
- cricket::SendDataParams send_params;
- send_params.sid = -1;
+ webrtc::SendDataParams send_params;
send_params.ordered = true;
- send_params.reliable = true;
- send_params.max_rtx_count = 0;
- send_params.max_rtx_ms = 0;
SctpPingPong test(1, kTransport1Port, kTransport2Port, thread1.get(),
thread2.get(), messages_count, packet_loss_percents,
@@ -681,12 +671,8 @@ TEST_F(UsrSctpReliabilityTest,
static_assert(wait_timeout > 0,
"Timeout computation must produce positive value");
- cricket::SendDataParams send_params;
- send_params.sid = -1;
+ webrtc::SendDataParams send_params;
send_params.ordered = true;
- send_params.reliable = true;
- send_params.max_rtx_count = 0;
- send_params.max_rtx_ms = 0;
SctpPingPong test(1, kTransport1Port, kTransport2Port, thread1.get(),
thread2.get(), messages_count, packet_loss_percents,
@@ -720,12 +706,10 @@ TEST_F(UsrSctpReliabilityTest,
static_assert(wait_timeout > 0,
"Timeout computation must produce positive value");
- cricket::SendDataParams send_params;
- send_params.sid = -1;
+ webrtc::SendDataParams send_params;
send_params.ordered = false;
- send_params.reliable = false;
- send_params.max_rtx_count = INT_MAX;
- send_params.max_rtx_ms = INT_MAX;
+ send_params.max_rtx_count = std::numeric_limits<uint16_t>::max();
+ send_params.max_rtx_ms = std::numeric_limits<uint16_t>::max();
SctpPingPong test(1, kTransport1Port, kTransport2Port, thread1.get(),
thread2.get(), messages_count, packet_loss_percents,
@@ -757,12 +741,8 @@ TEST_F(UsrSctpReliabilityTest,
DISABLED_AllMessagesAreDeliveredOverLossyConnectionConcurrentTests) {
ThreadPool pool(16);
- cricket::SendDataParams send_params;
- send_params.sid = -1;
+ webrtc::SendDataParams send_params;
send_params.ordered = true;
- send_params.reliable = true;
- send_params.max_rtx_count = 0;
- send_params.max_rtx_ms = 0;
constexpr uint32_t base_sctp_port = 5000;
 // The constant values below were experimentally chosen
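
For reference, a minimal sketch (not taken from the patch) of the call shape the updated tests now use: the stream id travels as a separate argument and the reliability knobs stay on webrtc::SendDataParams. The fixture names (transport, payload, result, message_index) are assumed.

    webrtc::SendDataParams params;
    params.ordered = true;  // reliability settings stay on the params struct
    // The sender now picks the sid itself, spread over the full stream range.
    int sid = cricket::kMinSctpSid + (message_index % cricket::kMaxSctpStreams);
    cricket::SendDataResult result;
    transport->SendData(sid, params, payload, &result);
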
diff --git a/media/sctp/usrsctp_transport_unittest.cc b/media/sctp/usrsctp_transport_unittest.cc
index f75cb4a25d..59e9c59b3d 100644
--- a/media/sctp/usrsctp_transport_unittest.cc
+++ b/media/sctp/usrsctp_transport_unittest.cc
@@ -185,12 +185,11 @@ class SctpTransportTest : public ::testing::Test, public sigslot::has_slots<> {
const std::string& msg,
SendDataResult* result,
bool ordered = false) {
- SendDataParams params;
- params.sid = sid;
+ webrtc::SendDataParams params;
params.ordered = ordered;
- return chan->SendData(params, rtc::CopyOnWriteBuffer(&msg[0], msg.length()),
- result);
+ return chan->SendData(
+ sid, params, rtc::CopyOnWriteBuffer(&msg[0], msg.length()), result);
}
bool ReceivedData(const SctpFakeDataReceiver* recv,
@@ -599,15 +598,14 @@ TEST_P(SctpTransportTestWithOrdered, SendDataBlocked) {
SetupConnectedTransportsWithTwoStreams();
SendDataResult result;
- SendDataParams params;
- params.sid = 1;
+ webrtc::SendDataParams params;
params.ordered = GetParam();
std::vector<char> buffer(1024 * 64, 0);
for (size_t i = 0; i < 100; ++i) {
transport1()->SendData(
- params, rtc::CopyOnWriteBuffer(&buffer[0], buffer.size()), &result);
+ 1, params, rtc::CopyOnWriteBuffer(&buffer[0], buffer.size()), &result);
if (result == SDR_BLOCK)
break;
}
@@ -626,15 +624,15 @@ TEST_P(SctpTransportTestWithOrdered, SignalReadyToSendDataAfterBlocked) {
fake_dtls1()->SetWritable(false);
// Send messages until we get EWOULDBLOCK.
static const size_t kMaxMessages = 1024;
- SendDataParams params;
- params.sid = 1;
+ webrtc::SendDataParams params;
params.ordered = GetParam();
rtc::CopyOnWriteBuffer buf(1024);
memset(buf.MutableData(), 0, 1024);
SendDataResult result;
size_t message_count = 0;
for (; message_count < kMaxMessages; ++message_count) {
- if (!transport1()->SendData(params, buf, &result) && result == SDR_BLOCK) {
+ if (!transport1()->SendData(1, params, buf, &result) &&
+ result == SDR_BLOCK) {
break;
}
}
diff --git a/modules/audio_coding/BUILD.gn b/modules/audio_coding/BUILD.gn
index c817905de5..28d30d3248 100644
--- a/modules/audio_coding/BUILD.gn
+++ b/modules/audio_coding/BUILD.gn
@@ -123,6 +123,7 @@ rtc_library("red") {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
+ "../../system_wrappers:field_trial",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
@@ -1057,6 +1058,7 @@ rtc_library("neteq_tools_minimal") {
deps = [
":default_neteq_factory",
":neteq",
+ "../../api:array_view",
"../../api:neteq_simulator_api",
"../../api:rtp_headers",
"../../api/audio:audio_frame_api",
@@ -1067,7 +1069,6 @@ rtc_library("neteq_tools_minimal") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers",
- "../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
@@ -1247,7 +1248,6 @@ rtc_library("audio_coding_modules_tests_shared") {
"../../system_wrappers",
"../../test:fileutils",
"../../test:test_support",
- "../rtp_rtcp:rtp_rtcp_format",
"//testing/gtest",
]
absl_deps = [
@@ -1833,7 +1833,7 @@ if (rtc_include_tests) {
]
}
- rtc_executable("isac_fix_test") {
+ rtc_test("isac_fix_test") {
testonly = true
sources = [ "codecs/isac/fix/test/kenny.cc" ]
diff --git a/modules/audio_coding/OWNERS.webrtc b/modules/audio_coding/OWNERS.webrtc
index f7a0e4797e..c27c2a8d2d 100644
--- a/modules/audio_coding/OWNERS.webrtc
+++ b/modules/audio_coding/OWNERS.webrtc
@@ -1,3 +1,4 @@
henrik.lundin@webrtc.org
minyue@webrtc.org
ivoc@webrtc.org
+jakobi@webrtc.org
diff --git a/modules/audio_coding/acm2/acm_receiver.cc b/modules/audio_coding/acm2/acm_receiver.cc
index 0e615cae82..3214ce6f7b 100644
--- a/modules/audio_coding/acm2/acm_receiver.cc
+++ b/modules/audio_coding/acm2/acm_receiver.cc
@@ -146,20 +146,22 @@ int AcmReceiver::GetAudio(int desired_freq_hz,
AudioFrame* audio_frame,
bool* muted) {
RTC_DCHECK(muted);
- // Accessing members, take the lock.
- MutexLock lock(&mutex_);
- if (neteq_->GetAudio(audio_frame, muted) != NetEq::kOK) {
+ int current_sample_rate_hz = 0;
+ if (neteq_->GetAudio(audio_frame, muted, &current_sample_rate_hz) !=
+ NetEq::kOK) {
RTC_LOG(LERROR) << "AcmReceiver::GetAudio - NetEq Failed.";
return -1;
}
- const int current_sample_rate_hz = neteq_->last_output_sample_rate_hz();
+ RTC_DCHECK_NE(current_sample_rate_hz, 0);
// Update if resampling is required.
const bool need_resampling =
(desired_freq_hz != -1) && (current_sample_rate_hz != desired_freq_hz);
+ // Accessing members, take the lock.
+ MutexLock lock(&mutex_);
if (need_resampling && !resampled_last_output_frame_) {
// Prime the resampler with the last frame.
int16_t temp_output[AudioFrame::kMaxDataSizeSamples];
@@ -174,8 +176,8 @@ int AcmReceiver::GetAudio(int desired_freq_hz,
}
}
- // TODO(henrik.lundin) Glitches in the output may appear if the output rate
- // from NetEq changes. See WebRTC issue 3923.
+ // TODO(bugs.webrtc.org/3923) Glitches in the output may appear if the output
+ // rate from NetEq changes.
if (need_resampling) {
// TODO(yujo): handle this more efficiently for muted frames.
int samples_per_channel_int = resampler_.Resample10Msec(
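
A minimal sketch of the NetEq::GetAudio call shape this hunk switches to, assuming `neteq`, `audio_frame`, and `muted` are already set up as in AcmReceiver; the output sample rate now comes back through an out-parameter instead of a follow-up call to last_output_sample_rate_hz():

    int current_sample_rate_hz = 0;
    if (neteq->GetAudio(&audio_frame, &muted, &current_sample_rate_hz) !=
        webrtc::NetEq::kOK) {
      RTC_LOG(LERROR) << "GetAudio failed.";
      return -1;
    }
    RTC_DCHECK_NE(current_sample_rate_hz, 0);  // rate reported in the same call
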
diff --git a/modules/audio_coding/acm2/acm_send_test.cc b/modules/audio_coding/acm2/acm_send_test.cc
index b3e1e1ecb2..6f395d6073 100644
--- a/modules/audio_coding/acm2/acm_send_test.cc
+++ b/modules/audio_coding/acm2/acm_send_test.cc
@@ -140,8 +140,9 @@ int32_t AcmSendTestOldApi::SendData(AudioFrameType frame_type,
std::unique_ptr<Packet> AcmSendTestOldApi::CreatePacket() {
const size_t kRtpHeaderSize = 12;
- size_t allocated_bytes = last_payload_vec_.size() + kRtpHeaderSize;
- uint8_t* packet_memory = new uint8_t[allocated_bytes];
+ rtc::CopyOnWriteBuffer packet_buffer(last_payload_vec_.size() +
+ kRtpHeaderSize);
+ uint8_t* packet_memory = packet_buffer.MutableData();
// Populate the header bytes.
packet_memory[0] = 0x80;
packet_memory[1] = static_cast<uint8_t>(payload_type_);
@@ -162,8 +163,8 @@ std::unique_ptr<Packet> AcmSendTestOldApi::CreatePacket() {
// Copy the payload data.
memcpy(packet_memory + kRtpHeaderSize, &last_payload_vec_[0],
last_payload_vec_.size());
- std::unique_ptr<Packet> packet(
- new Packet(packet_memory, allocated_bytes, clock_.TimeInMilliseconds()));
+ auto packet = std::make_unique<Packet>(std::move(packet_buffer),
+ clock_.TimeInMilliseconds());
RTC_DCHECK(packet);
RTC_DCHECK(packet->valid_header());
return packet;
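
A minimal sketch of the new Packet construction pattern, assuming a payload vector, a clock, and a kRtpHeaderSize constant like the ones in this test; ownership travels with the rtc::CopyOnWriteBuffer instead of a raw new[]'d array:

    rtc::CopyOnWriteBuffer packet_buffer(payload.size() + kRtpHeaderSize);
    uint8_t* packet_memory = packet_buffer.MutableData();
    // ... write the 12-byte RTP header and copy the payload here ...
    auto packet = std::make_unique<webrtc::test::Packet>(
        std::move(packet_buffer), clock.TimeInMilliseconds());
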
diff --git a/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/modules/audio_coding/acm2/audio_coding_module_unittest.cc
index 28899aaf84..5b0577745c 100644
--- a/modules/audio_coding/acm2/audio_coding_module_unittest.cc
+++ b/modules/audio_coding/acm2/audio_coding_module_unittest.cc
@@ -429,15 +429,6 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
AudioCodingModuleMtTestOldApi()
: AudioCodingModuleTestOldApi(),
- send_thread_(CbSendThread, this, "send", rtc::kRealtimePriority),
- insert_packet_thread_(CbInsertPacketThread,
- this,
- "insert_packet",
- rtc::kRealtimePriority),
- pull_audio_thread_(CbPullAudioThread,
- this,
- "pull_audio",
- rtc::kRealtimePriority),
send_count_(0),
insert_packet_count_(0),
pull_audio_count_(0),
@@ -454,17 +445,38 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
void StartThreads() {
quit_.store(false);
- send_thread_.Start();
- insert_packet_thread_.Start();
- pull_audio_thread_.Start();
+
+ const auto attributes =
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime);
+ send_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!quit_.load()) {
+ CbSendImpl();
+ }
+ },
+ "send", attributes);
+ insert_packet_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!quit_.load()) {
+ CbInsertPacketImpl();
+ }
+ },
+ "insert_packet", attributes);
+ pull_audio_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!quit_.load()) {
+ CbPullAudioImpl();
+ }
+ },
+ "pull_audio", attributes);
}
void TearDown() {
AudioCodingModuleTestOldApi::TearDown();
quit_.store(true);
- pull_audio_thread_.Stop();
- send_thread_.Stop();
- insert_packet_thread_.Stop();
+ pull_audio_thread_.Finalize();
+ send_thread_.Finalize();
+ insert_packet_thread_.Finalize();
}
bool RunTest() {
@@ -482,14 +494,6 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
return false;
}
- static void CbSendThread(void* context) {
- AudioCodingModuleMtTestOldApi* fixture =
- reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context);
- while (!fixture->quit_.load()) {
- fixture->CbSendImpl();
- }
- }
-
// The send thread doesn't have to care about the current simulated time,
// since only the AcmReceiver is using the clock.
void CbSendImpl() {
@@ -505,14 +509,6 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
}
}
- static void CbInsertPacketThread(void* context) {
- AudioCodingModuleMtTestOldApi* fixture =
- reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context);
- while (!fixture->quit_.load()) {
- fixture->CbInsertPacketImpl();
- }
- }
-
void CbInsertPacketImpl() {
SleepMs(1);
{
@@ -527,14 +523,6 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi {
InsertPacket();
}
- static void CbPullAudioThread(void* context) {
- AudioCodingModuleMtTestOldApi* fixture =
- reinterpret_cast<AudioCodingModuleMtTestOldApi*>(context);
- while (!fixture->quit_.load()) {
- fixture->CbPullAudioImpl();
- }
- }
-
void CbPullAudioImpl() {
SleepMs(1);
{
@@ -693,14 +681,6 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
AcmReRegisterIsacMtTestOldApi()
: AudioCodingModuleTestOldApi(),
- receive_thread_(CbReceiveThread,
- this,
- "receive",
- rtc::kRealtimePriority),
- codec_registration_thread_(CbCodecRegistrationThread,
- this,
- "codec_registration",
- rtc::kRealtimePriority),
codec_registered_(false),
receive_packet_count_(0),
next_insert_packet_time_ms_(0),
@@ -732,28 +712,34 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
void StartThreads() {
quit_.store(false);
- receive_thread_.Start();
- codec_registration_thread_.Start();
+ const auto attributes =
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime);
+ receive_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!quit_.load() && CbReceiveImpl()) {
+ }
+ },
+ "receive", attributes);
+ codec_registration_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!quit_.load()) {
+ CbCodecRegistrationImpl();
+ }
+ },
+ "codec_registration", attributes);
}
void TearDown() override {
AudioCodingModuleTestOldApi::TearDown();
quit_.store(true);
- receive_thread_.Stop();
- codec_registration_thread_.Stop();
+ receive_thread_.Finalize();
+ codec_registration_thread_.Finalize();
}
bool RunTest() {
return test_complete_.Wait(10 * 60 * 1000); // 10 minutes' timeout.
}
- static void CbReceiveThread(void* context) {
- AcmReRegisterIsacMtTestOldApi* fixture =
- reinterpret_cast<AcmReRegisterIsacMtTestOldApi*>(context);
- while (!fixture->quit_.load() && fixture->CbReceiveImpl()) {
- }
- }
-
bool CbReceiveImpl() {
SleepMs(1);
rtc::Buffer encoded;
@@ -799,14 +785,6 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
return true;
}
- static void CbCodecRegistrationThread(void* context) {
- AcmReRegisterIsacMtTestOldApi* fixture =
- reinterpret_cast<AcmReRegisterIsacMtTestOldApi*>(context);
- while (!fixture->quit_.load()) {
- fixture->CbCodecRegistrationImpl();
- }
- }
-
void CbCodecRegistrationImpl() {
SleepMs(1);
if (HasFatalFailure()) {
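
The same thread-start migration repeats across this file; a minimal sketch of the pattern, assuming a fixture with an std::atomic<bool> quit_ and a hypothetical DoOneIteration() body:

    rtc::PlatformThread worker = rtc::PlatformThread::SpawnJoinable(
        [this] {
          while (!quit_.load()) {
            DoOneIteration();  // hypothetical per-iteration work
          }
        },
        "worker",
        rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
    // Teardown: no Stop() anymore; set the flag and join.
    quit_.store(true);
    worker.Finalize();
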
diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
index 6412be5f72..f1953eaacf 100644
--- a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
+++ b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
@@ -831,6 +831,7 @@ TEST(AudioEncoderOpusTest, OpusDtxFilteringHighEnergyRefreshPackets) {
constexpr size_t kSilenceDurationSamples = kSampleRateHz * 0.2f;
std::array<int16_t, kSilenceDurationSamples> silence;
uint32_t rtp_timestamp = 0;
+ bool last_packet_dtx_frame = false;
bool opus_entered_dtx = false;
bool silence_filled = false;
size_t timestamp_start_silence = 0;
@@ -850,10 +851,13 @@ TEST(AudioEncoderOpusTest, OpusDtxFilteringHighEnergyRefreshPackets) {
}
rtp_timestamp += kInputBlockSizeSamples;
}
- if (info.encoded_bytes < 2 && !opus_entered_dtx) {
+ EXPECT_TRUE(info.encoded_bytes > 0 || last_packet_dtx_frame);
+ last_packet_dtx_frame = info.encoded_bytes > 0 ? info.encoded_bytes <= 2
+ : last_packet_dtx_frame;
+ if (info.encoded_bytes <= 2 && !opus_entered_dtx) {
timestamp_start_silence = rtp_timestamp;
}
- opus_entered_dtx = info.encoded_bytes < 2;
+ opus_entered_dtx = info.encoded_bytes <= 2;
}
EXPECT_TRUE(silence_filled);
@@ -880,6 +884,9 @@ TEST(AudioEncoderOpusTest, OpusDtxFilteringHighEnergyRefreshPackets) {
info = encoder->Encode(rtp_timestamp, silence_frame, &encoded);
rtp_timestamp += kInputBlockSizeSamples;
}
+ EXPECT_TRUE(info.encoded_bytes > 0 || last_packet_dtx_frame);
+ last_packet_dtx_frame = info.encoded_bytes > 0 ? info.encoded_bytes <= 2
+ : last_packet_dtx_frame;
// Tracking the number of non empty packets.
if (increase_noise && info.encoded_bytes > 2) {
number_non_empty_packets_during_increase++;
diff --git a/modules/audio_coding/codecs/opus/opus_interface.cc b/modules/audio_coding/codecs/opus/opus_interface.cc
index 95c3bb9db1..f684452ad5 100644
--- a/modules/audio_coding/codecs/opus/opus_interface.cc
+++ b/modules/audio_coding/codecs/opus/opus_interface.cc
@@ -247,9 +247,15 @@ int WebRtcOpus_Encode(OpusEncInst* inst,
inst, rtc::MakeArrayView(audio_in, samples),
rtc::MakeArrayView(encoded, res))) {
// This packet is a high energy refresh DTX packet. For avoiding an increase
- // of the energy in the DTX region at the decoder, this packet is dropped.
- inst->in_dtx_mode = 0;
- return 0;
+ // of the energy in the DTX region at the decoder, this packet is
+ // substituted by a TOC byte with one empty frame.
+ // The number of frames described in the TOC byte
+ // (https://tools.ietf.org/html/rfc6716#section-3.1) is overwritten to
+ // always indicate one frame (last two bits equal to 0).
+ encoded[0] = encoded[0] & 0b11111100;
+ inst->in_dtx_mode = 1;
+ // The payload is just the TOC byte and has 1 byte as length.
+ return 1;
}
inst->in_dtx_mode = 0;
return res;
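
For orientation, a sketch of how the TOC byte this hunk rewrites is laid out per RFC 6716 §3.1; the variable names are illustrative only, and `encoded` is the output buffer of the surrounding function:

    // TOC byte layout: | config(5) | s(1) | c(2) |
    uint8_t toc = encoded[0];
    int config = toc >> 3;        // mode/bandwidth/frame-size configuration
    int stereo = (toc >> 2) & 1;  // s bit
    int code = toc & 0x03;        // frame-count code; 0 means one frame
    // The patch masks with 0b11111100, forcing `code` to 0, so the 1-byte
    // packet advertises a single (empty) frame instead of being dropped.
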
diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
index 8f8e328b8c..c72768e937 100644
--- a/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
+++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
@@ -17,22 +17,51 @@
#include "rtc_base/byte_order.h"
#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
-// RED packets must be less than 1024 bytes to fit the 10 bit block length.
-static constexpr const int kRedMaxPacketSize = 1 << 10;
-// The typical MTU is 1200 bytes.
-static constexpr const size_t kAudioMaxRtpPacketLen = 1200;
+static constexpr const int kRedMaxPacketSize =
+ 1 << 10; // RED packets must be less than 1024 bytes to fit the 10 bit
+ // block length.
+static constexpr const size_t kAudioMaxRtpPacketLen =
+ 1200; // The typical MTU is 1200 bytes.
+
+static constexpr size_t kRedHeaderLength = 4; // 4 bytes RED header.
+static constexpr size_t kRedLastHeaderLength =
+ 1; // reduced size for last RED header.
+
+static constexpr size_t kRedNumberOfRedundantEncodings =
+ 2; // The level of redundancy we support.
AudioEncoderCopyRed::Config::Config() = default;
AudioEncoderCopyRed::Config::Config(Config&&) = default;
AudioEncoderCopyRed::Config::~Config() = default;
+size_t GetMaxRedundancyFromFieldTrial() {
+ const std::string red_trial =
+ webrtc::field_trial::FindFullName("WebRTC-Audio-Red-For-Opus");
+ size_t redundancy = 0;
+ if (sscanf(red_trial.c_str(), "Enabled-%zu", &redundancy) != 1 ||
+ redundancy < 1 || redundancy > 9) {
+ return kRedNumberOfRedundantEncodings;
+ }
+ return redundancy;
+}
+
AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config)
: speech_encoder_(std::move(config.speech_encoder)),
+ primary_encoded_(0, kAudioMaxRtpPacketLen),
max_packet_length_(kAudioMaxRtpPacketLen),
red_payload_type_(config.payload_type) {
RTC_CHECK(speech_encoder_) << "Speech encoder not provided.";
+
+ auto number_of_redundant_encodings = GetMaxRedundancyFromFieldTrial();
+ for (size_t i = 0; i < number_of_redundant_encodings; i++) {
+ std::pair<EncodedInfo, rtc::Buffer> redundant;
+ redundant.second.EnsureCapacity(kAudioMaxRtpPacketLen);
+ redundant_encodings_.push_front(std::move(redundant));
+ }
}
AudioEncoderCopyRed::~AudioEncoderCopyRed() = default;
@@ -61,104 +90,86 @@ int AudioEncoderCopyRed::GetTargetBitrate() const {
return speech_encoder_->GetTargetBitrate();
}
-size_t AudioEncoderCopyRed::CalculateHeaderLength(size_t encoded_bytes) const {
- size_t header_size = 1;
- size_t bytes_available = max_packet_length_ - encoded_bytes;
- if (secondary_info_.encoded_bytes > 0 &&
- secondary_info_.encoded_bytes < bytes_available) {
- header_size += 4;
- bytes_available -= secondary_info_.encoded_bytes;
- }
- if (tertiary_info_.encoded_bytes > 0 &&
- tertiary_info_.encoded_bytes < bytes_available) {
- header_size += 4;
- }
- return header_size > 1 ? header_size : 0;
-}
-
AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl(
uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) {
- rtc::Buffer primary_encoded;
+ primary_encoded_.Clear();
EncodedInfo info =
- speech_encoder_->Encode(rtp_timestamp, audio, &primary_encoded);
+ speech_encoder_->Encode(rtp_timestamp, audio, &primary_encoded_);
RTC_CHECK(info.redundant.empty()) << "Cannot use nested redundant encoders.";
- RTC_DCHECK_EQ(primary_encoded.size(), info.encoded_bytes);
+ RTC_DCHECK_EQ(primary_encoded_.size(), info.encoded_bytes);
if (info.encoded_bytes == 0 || info.encoded_bytes > kRedMaxPacketSize) {
return info;
}
RTC_DCHECK_GT(max_packet_length_, info.encoded_bytes);
+ size_t header_length_bytes = kRedLastHeaderLength;
+ size_t bytes_available = max_packet_length_ - info.encoded_bytes;
+ auto it = redundant_encodings_.begin();
+
+ // Determine how much redundancy we can fit into our packet by
+ // iterating forward.
+ for (; it != redundant_encodings_.end(); it++) {
+ if (bytes_available < kRedHeaderLength + it->first.encoded_bytes) {
+ break;
+ }
+ if (it->first.encoded_bytes == 0) {
+ break;
+ }
+ bytes_available -= kRedHeaderLength + it->first.encoded_bytes;
+ header_length_bytes += kRedHeaderLength;
+ }
+
// Allocate room for RFC 2198 header if there is redundant data.
// Otherwise this will send the primary payload type without
// wrapping in RED.
- const size_t header_length_bytes = CalculateHeaderLength(info.encoded_bytes);
+ if (header_length_bytes == kRedLastHeaderLength) {
+ header_length_bytes = 0;
+ }
encoded->SetSize(header_length_bytes);
+ // Iterate backwards and append the data.
size_t header_offset = 0;
- size_t bytes_available = max_packet_length_ - info.encoded_bytes;
- if (tertiary_info_.encoded_bytes > 0 &&
- tertiary_info_.encoded_bytes + secondary_info_.encoded_bytes <
- bytes_available) {
- encoded->AppendData(tertiary_encoded_);
+ while (it-- != redundant_encodings_.begin()) {
+ encoded->AppendData(it->second);
const uint32_t timestamp_delta =
- info.encoded_timestamp - tertiary_info_.encoded_timestamp;
-
- encoded->data()[header_offset] = tertiary_info_.payload_type | 0x80;
+ info.encoded_timestamp - it->first.encoded_timestamp;
+ encoded->data()[header_offset] = it->first.payload_type | 0x80;
rtc::SetBE16(static_cast<uint8_t*>(encoded->data()) + header_offset + 1,
- (timestamp_delta << 2) | (tertiary_info_.encoded_bytes >> 8));
- encoded->data()[header_offset + 3] = tertiary_info_.encoded_bytes & 0xff;
- header_offset += 4;
- bytes_available -= tertiary_info_.encoded_bytes;
+ (timestamp_delta << 2) | (it->first.encoded_bytes >> 8));
+ encoded->data()[header_offset + 3] = it->first.encoded_bytes & 0xff;
+ header_offset += kRedHeaderLength;
+ info.redundant.push_back(it->first);
}
- if (secondary_info_.encoded_bytes > 0 &&
- secondary_info_.encoded_bytes < bytes_available) {
- encoded->AppendData(secondary_encoded_);
-
- const uint32_t timestamp_delta =
- info.encoded_timestamp - secondary_info_.encoded_timestamp;
-
- encoded->data()[header_offset] = secondary_info_.payload_type | 0x80;
- rtc::SetBE16(static_cast<uint8_t*>(encoded->data()) + header_offset + 1,
- (timestamp_delta << 2) | (secondary_info_.encoded_bytes >> 8));
- encoded->data()[header_offset + 3] = secondary_info_.encoded_bytes & 0xff;
- header_offset += 4;
- bytes_available -= secondary_info_.encoded_bytes;
+ // |info| will be implicitly cast to an EncodedInfoLeaf struct, effectively
+ // discarding the (empty) vector of redundant information. This is
+ // intentional.
+ if (header_length_bytes > 0) {
+ info.redundant.push_back(info);
+ RTC_DCHECK_EQ(info.speech,
+ info.redundant[info.redundant.size() - 1].speech);
}
- encoded->AppendData(primary_encoded);
+ encoded->AppendData(primary_encoded_);
if (header_length_bytes > 0) {
RTC_DCHECK_EQ(header_offset, header_length_bytes - 1);
encoded->data()[header_offset] = info.payload_type;
}
- // |info| will be implicitly cast to an EncodedInfoLeaf struct, effectively
- // discarding the (empty) vector of redundant information. This is
- // intentional.
- info.redundant.push_back(info);
- RTC_DCHECK_EQ(info.redundant.size(), 1);
- RTC_DCHECK_EQ(info.speech, info.redundant[0].speech);
- if (secondary_info_.encoded_bytes > 0) {
- info.redundant.push_back(secondary_info_);
- RTC_DCHECK_EQ(info.redundant.size(), 2);
+ // Shift the redundant encodings.
+ it = redundant_encodings_.begin();
+ for (auto next = std::next(it); next != redundant_encodings_.end();
+ it++, next = std::next(it)) {
+ next->first = it->first;
+ next->second.SetData(it->second);
}
- if (tertiary_info_.encoded_bytes > 0) {
- info.redundant.push_back(tertiary_info_);
- RTC_DCHECK_EQ(info.redundant.size(),
- 2 + (secondary_info_.encoded_bytes > 0 ? 1 : 0));
- }
-
- // Save secondary to tertiary.
- tertiary_encoded_.SetData(secondary_encoded_);
- tertiary_info_ = secondary_info_;
-
- // Save primary to secondary.
- secondary_encoded_.SetData(primary_encoded);
- secondary_info_ = info;
+ it = redundant_encodings_.begin();
+ it->first = info;
+ it->second.SetData(primary_encoded_);
// Update main EncodedInfo.
if (header_length_bytes > 0) {
@@ -170,8 +181,13 @@ AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl(
void AudioEncoderCopyRed::Reset() {
speech_encoder_->Reset();
- secondary_encoded_.Clear();
- secondary_info_.encoded_bytes = 0;
+ auto number_of_redundant_encodings = redundant_encodings_.size();
+ redundant_encodings_.clear();
+ for (size_t i = 0; i < number_of_redundant_encodings; i++) {
+ std::pair<EncodedInfo, rtc::Buffer> redundant;
+ redundant.second.EnsureCapacity(kAudioMaxRtpPacketLen);
+ redundant_encodings_.push_front(std::move(redundant));
+ }
}
bool AudioEncoderCopyRed::SetFec(bool enable) {
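
A minimal sketch of the RFC 2198 block-header packing that EncodeImpl() above writes for each redundant encoding: a 4-byte header (F=1, 7-bit payload type, 14-bit timestamp offset, 10-bit block length) per redundant block, followed by a 1-byte header (F=0, payload type) for the primary. `WriteRedundantBlockHeader` is a hypothetical helper, not part of the patch:

    void WriteRedundantBlockHeader(uint8_t* header,
                                   uint8_t payload_type,
                                   uint32_t timestamp_delta,
                                   size_t block_length) {
      header[0] = payload_type | 0x80;  // F bit set: more headers follow
      rtc::SetBE16(header + 1, static_cast<uint16_t>((timestamp_delta << 2) |
                                                     (block_length >> 8)));
      header[3] = block_length & 0xff;  // low 8 bits of the 10-bit length
    }

The number of redundant blocks defaults to two and, per GetMaxRedundancyFromFieldTrial() above, can be set between 1 and 9 with a field-trial string such as WebRTC-Audio-Red-For-Opus/Enabled-3/.
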
diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
index 9acb9b842c..d5b1bf6868 100644
--- a/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
+++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
@@ -14,6 +14,7 @@
#include <stddef.h>
#include <stdint.h>
+#include <list>
#include <memory>
#include <utility>
@@ -26,10 +27,12 @@
namespace webrtc {
-// This class implements redundant audio coding. The class object will have an
-// underlying AudioEncoder object that performs the actual encodings. The
-// current class will gather the two latest encodings from the underlying codec
-// into one packet.
+// This class implements redundant audio coding as described in
+// https://tools.ietf.org/html/rfc2198
+// The class object will have an underlying AudioEncoder object that performs
+// the actual encodings. The current class will gather the N latest encodings
+// from the underlying codec into one packet. N defaults to 2 and can be
+// overridden (1-9) via the WebRTC-Audio-Red-For-Opus field trial.
+
class AudioEncoderCopyRed final : public AudioEncoder {
public:
struct Config {
@@ -84,15 +87,11 @@ class AudioEncoderCopyRed final : public AudioEncoder {
rtc::Buffer* encoded) override;
private:
- size_t CalculateHeaderLength(size_t encoded_bytes) const;
-
std::unique_ptr<AudioEncoder> speech_encoder_;
+ rtc::Buffer primary_encoded_;
size_t max_packet_length_;
int red_payload_type_;
- rtc::Buffer secondary_encoded_;
- EncodedInfoLeaf secondary_info_;
- rtc::Buffer tertiary_encoded_;
- EncodedInfoLeaf tertiary_info_;
+ std::list<std::pair<EncodedInfo, rtc::Buffer>> redundant_encodings_;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCopyRed);
};
diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
index 33527997b5..ddd82441db 100644
--- a/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
+++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
@@ -152,7 +152,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckNoOutput) {
Encode();
// First call is a special case, since it does not include a secondary
// payload.
- EXPECT_EQ(1u, encoded_info_.redundant.size());
+ EXPECT_EQ(0u, encoded_info_.redundant.size());
EXPECT_EQ(kEncodedSize, encoded_info_.encoded_bytes);
// Next call to the speech encoder will not produce any output.
@@ -180,7 +180,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadSizes) {
// First call is a special case, since it does not include a secondary
// payload.
Encode();
- EXPECT_EQ(1u, encoded_info_.redundant.size());
+ EXPECT_EQ(0u, encoded_info_.redundant.size());
EXPECT_EQ(1u, encoded_info_.encoded_bytes);
// Second call is also special since it does not include a ternary
@@ -192,9 +192,9 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadSizes) {
for (size_t i = 3; i <= kNumPackets; ++i) {
Encode();
ASSERT_EQ(3u, encoded_info_.redundant.size());
- EXPECT_EQ(i, encoded_info_.redundant[0].encoded_bytes);
+ EXPECT_EQ(i, encoded_info_.redundant[2].encoded_bytes);
EXPECT_EQ(i - 1, encoded_info_.redundant[1].encoded_bytes);
- EXPECT_EQ(i - 2, encoded_info_.redundant[2].encoded_bytes);
+ EXPECT_EQ(i - 2, encoded_info_.redundant[0].encoded_bytes);
EXPECT_EQ(9 + i + (i - 1) + (i - 2), encoded_info_.encoded_bytes);
}
}
@@ -222,8 +222,8 @@ TEST_F(AudioEncoderCopyRedTest, CheckTimestamps) {
Encode();
ASSERT_EQ(2u, encoded_info_.redundant.size());
- EXPECT_EQ(primary_timestamp, encoded_info_.redundant[0].encoded_timestamp);
- EXPECT_EQ(secondary_timestamp, encoded_info_.redundant[1].encoded_timestamp);
+ EXPECT_EQ(primary_timestamp, encoded_info_.redundant[1].encoded_timestamp);
+ EXPECT_EQ(secondary_timestamp, encoded_info_.redundant[0].encoded_timestamp);
EXPECT_EQ(primary_timestamp, encoded_info_.encoded_timestamp);
}
@@ -280,9 +280,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
// First call is a special case, since it does not include a secondary
// payload.
Encode();
- ASSERT_EQ(1u, encoded_info_.redundant.size());
- EXPECT_EQ(primary_payload_type, encoded_info_.redundant[0].payload_type);
- EXPECT_EQ(primary_payload_type, encoded_info_.payload_type);
+ ASSERT_EQ(0u, encoded_info_.redundant.size());
const int secondary_payload_type = red_payload_type_ + 2;
info.payload_type = secondary_payload_type;
@@ -291,8 +289,8 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
Encode();
ASSERT_EQ(2u, encoded_info_.redundant.size());
- EXPECT_EQ(secondary_payload_type, encoded_info_.redundant[0].payload_type);
- EXPECT_EQ(primary_payload_type, encoded_info_.redundant[1].payload_type);
+ EXPECT_EQ(secondary_payload_type, encoded_info_.redundant[1].payload_type);
+ EXPECT_EQ(primary_payload_type, encoded_info_.redundant[0].payload_type);
EXPECT_EQ(red_payload_type_, encoded_info_.payload_type);
}
@@ -316,7 +314,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckRFC2198Header) {
EXPECT_EQ(encoded_[0], primary_payload_type | 0x80);
uint32_t timestamp_delta = encoded_info_.encoded_timestamp -
- encoded_info_.redundant[1].encoded_timestamp;
+ encoded_info_.redundant[0].encoded_timestamp;
// Timestamp delta is encoded as a 14 bit value.
EXPECT_EQ(encoded_[1], timestamp_delta >> 6);
EXPECT_EQ(static_cast<uint8_t>(encoded_[2] >> 2), timestamp_delta & 0x3f);
@@ -335,13 +333,13 @@ TEST_F(AudioEncoderCopyRedTest, CheckRFC2198Header) {
EXPECT_EQ(encoded_[0], primary_payload_type | 0x80);
timestamp_delta = encoded_info_.encoded_timestamp -
- encoded_info_.redundant[2].encoded_timestamp;
+ encoded_info_.redundant[0].encoded_timestamp;
// Timestamp delta is encoded as a 14 bit value.
EXPECT_EQ(encoded_[1], timestamp_delta >> 6);
EXPECT_EQ(static_cast<uint8_t>(encoded_[2] >> 2), timestamp_delta & 0x3f);
// Redundant length is encoded as 10 bit value.
- EXPECT_EQ(encoded_[2] & 0x3u, encoded_info_.redundant[2].encoded_bytes >> 8);
- EXPECT_EQ(encoded_[3], encoded_info_.redundant[2].encoded_bytes & 0xff);
+ EXPECT_EQ(encoded_[2] & 0x3u, encoded_info_.redundant[1].encoded_bytes >> 8);
+ EXPECT_EQ(encoded_[3], encoded_info_.redundant[1].encoded_bytes & 0xff);
EXPECT_EQ(encoded_[4], primary_payload_type | 0x80);
timestamp_delta = encoded_info_.encoded_timestamp -
@@ -350,8 +348,8 @@ TEST_F(AudioEncoderCopyRedTest, CheckRFC2198Header) {
EXPECT_EQ(encoded_[5], timestamp_delta >> 6);
EXPECT_EQ(static_cast<uint8_t>(encoded_[6] >> 2), timestamp_delta & 0x3f);
// Redundant length is encoded as 10 bit value.
- EXPECT_EQ(encoded_[6] & 0x3u, encoded_info_.redundant[2].encoded_bytes >> 8);
- EXPECT_EQ(encoded_[7], encoded_info_.redundant[2].encoded_bytes & 0xff);
+ EXPECT_EQ(encoded_[6] & 0x3u, encoded_info_.redundant[1].encoded_bytes >> 8);
+ EXPECT_EQ(encoded_[7], encoded_info_.redundant[1].encoded_bytes & 0xff);
EXPECT_EQ(encoded_[8], primary_payload_type);
}
diff --git a/modules/audio_coding/g3doc/index.md b/modules/audio_coding/g3doc/index.md
index 0be22f7d80..bf50c155fc 100644
--- a/modules/audio_coding/g3doc/index.md
+++ b/modules/audio_coding/g3doc/index.md
@@ -1,5 +1,5 @@
-<?% config.freshness.owner = 'minyue' %?> <?% config.freshness.reviewed =
-'2021-04-13' %?>
+<?% config.freshness.owner = 'minyue' %?>
+<?% config.freshness.reviewed = '2021-04-13' %?>
# The WebRTC Audio Coding Module
diff --git a/modules/audio_coding/neteq/decoder_database_unittest.cc b/modules/audio_coding/neteq/decoder_database_unittest.cc
index c1b92b5375..33bee8d6f5 100644
--- a/modules/audio_coding/neteq/decoder_database_unittest.cc
+++ b/modules/audio_coding/neteq/decoder_database_unittest.cc
@@ -27,15 +27,14 @@ using ::testing::Invoke;
namespace webrtc {
TEST(DecoderDatabase, CreateAndDestroy) {
- DecoderDatabase db(new rtc::RefCountedObject<MockAudioDecoderFactory>,
+ DecoderDatabase db(rtc::make_ref_counted<MockAudioDecoderFactory>(),
absl::nullopt);
EXPECT_EQ(0, db.Size());
EXPECT_TRUE(db.Empty());
}
TEST(DecoderDatabase, InsertAndRemove) {
- rtc::scoped_refptr<MockAudioDecoderFactory> factory(
- new rtc::RefCountedObject<MockAudioDecoderFactory>);
+ auto factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
DecoderDatabase db(factory, absl::nullopt);
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
@@ -50,8 +49,7 @@ TEST(DecoderDatabase, InsertAndRemove) {
}
TEST(DecoderDatabase, InsertAndRemoveAll) {
- rtc::scoped_refptr<MockAudioDecoderFactory> factory(
- new rtc::RefCountedObject<MockAudioDecoderFactory>);
+ auto factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
DecoderDatabase db(factory, absl::nullopt);
const std::string kCodecName1 = "Robert\'); DROP TABLE Students;";
const std::string kCodecName2 = "https://xkcd.com/327/";
@@ -67,8 +65,7 @@ TEST(DecoderDatabase, InsertAndRemoveAll) {
}
TEST(DecoderDatabase, GetDecoderInfo) {
- rtc::scoped_refptr<MockAudioDecoderFactory> factory(
- new rtc::RefCountedObject<MockAudioDecoderFactory>);
+ auto factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
auto* decoder = new MockAudioDecoder;
EXPECT_CALL(*factory, MakeAudioDecoderMock(_, _, _))
.WillOnce(Invoke([decoder](const SdpAudioFormat& format,
@@ -103,8 +100,7 @@ TEST(DecoderDatabase, GetDecoder) {
}
TEST(DecoderDatabase, TypeTests) {
- rtc::scoped_refptr<MockAudioDecoderFactory> factory(
- new rtc::RefCountedObject<MockAudioDecoderFactory>);
+ auto factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
DecoderDatabase db(factory, absl::nullopt);
const uint8_t kPayloadTypePcmU = 0;
const uint8_t kPayloadTypeCng = 13;
@@ -140,8 +136,7 @@ TEST(DecoderDatabase, TypeTests) {
TEST(DecoderDatabase, CheckPayloadTypes) {
constexpr int kNumPayloads = 10;
- rtc::scoped_refptr<MockAudioDecoderFactory> factory(
- new rtc::RefCountedObject<MockAudioDecoderFactory>);
+ auto factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
DecoderDatabase db(factory, absl::nullopt);
// Load a number of payloads into the database. Payload types are 0, 1, ...,
// while the decoder type is the same for all payload types (this does not
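
The same ref-counting cleanup recurs throughout the NetEq tests below; a minimal sketch of the before/after, with MockAudioDecoderFactory standing in for any ref-counted type:

    // Before:
    rtc::scoped_refptr<MockAudioDecoderFactory> factory(
        new rtc::RefCountedObject<MockAudioDecoderFactory>);
    // After: the helper constructs the RefCountedObject internally and
    // returns an rtc::scoped_refptr directly.
    auto factory2 = rtc::make_ref_counted<MockAudioDecoderFactory>();
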
diff --git a/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc b/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc
index cb0a3d88f8..2b4ae7e63e 100644
--- a/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc
+++ b/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc
@@ -208,7 +208,7 @@ TestStatistics RunTest(int loss_cadence,
NetEqTest neteq_test(
config, /*decoder_factory=*/
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&dec),
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&dec),
/*codecs=*/decoders, /*text_log=*/nullptr, /*neteq_factory=*/nullptr,
/*input=*/std::move(lossy_input), std::move(output), callbacks);
EXPECT_LE(kRunTimeMs, neteq_test.Run());
diff --git a/modules/audio_coding/neteq/neteq_impl.cc b/modules/audio_coding/neteq/neteq_impl.cc
index d156352a26..e9ddbb92a1 100644
--- a/modules/audio_coding/neteq/neteq_impl.cc
+++ b/modules/audio_coding/neteq/neteq_impl.cc
@@ -258,6 +258,7 @@ void SetAudioFrameActivityAndType(bool vad_enabled,
int NetEqImpl::GetAudio(AudioFrame* audio_frame,
bool* muted,
+ int* current_sample_rate_hz,
absl::optional<Operation> action_override) {
TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
MutexLock lock(&mutex_);
@@ -296,6 +297,11 @@ int NetEqImpl::GetAudio(AudioFrame* audio_frame,
}
}
+ if (current_sample_rate_hz) {
+ *current_sample_rate_hz = delayed_last_output_sample_rate_hz_.value_or(
+ last_output_sample_rate_hz_);
+ }
+
return kOK;
}
@@ -565,19 +571,19 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header,
return kInvalidPointer;
}
- int64_t receive_time_ms = clock_->TimeInMilliseconds();
+ Timestamp receive_time = clock_->CurrentTime();
stats_->ReceivedPacket();
PacketList packet_list;
// Insert packet in a packet list.
- packet_list.push_back([&rtp_header, &payload, &receive_time_ms] {
+ packet_list.push_back([&rtp_header, &payload, &receive_time] {
// Convert to Packet.
Packet packet;
packet.payload_type = rtp_header.payloadType;
packet.sequence_number = rtp_header.sequenceNumber;
packet.timestamp = rtp_header.timestamp;
packet.payload.SetData(payload.data(), payload.size());
- packet.packet_info = RtpPacketInfo(rtp_header, receive_time_ms);
+ packet.packet_info = RtpPacketInfo(rtp_header, receive_time);
// Waiting time will be set upon inserting the packet in the buffer.
RTC_DCHECK(!packet.waiting_time);
return packet;
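
A minimal sketch of the receive-time type change above, assuming a `clock` and an in-scope `packet`/`rtp_header` as in InsertPacketInternal():

    webrtc::Timestamp receive_time = clock->CurrentTime();
    packet.packet_info = webrtc::RtpPacketInfo(rtp_header, receive_time);
    // Downstream code now compares packet_info.receive_time() (a Timestamp)
    // instead of the former receive_time_ms() integer.
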
diff --git a/modules/audio_coding/neteq/neteq_impl.h b/modules/audio_coding/neteq/neteq_impl.h
index e130422a30..88da6dcbd5 100644
--- a/modules/audio_coding/neteq/neteq_impl.h
+++ b/modules/audio_coding/neteq/neteq_impl.h
@@ -133,6 +133,7 @@ class NetEqImpl : public webrtc::NetEq {
int GetAudio(
AudioFrame* audio_frame,
bool* muted,
+ int* current_sample_rate_hz = nullptr,
absl::optional<Operation> action_override = absl::nullopt) override;
void SetCodecs(const std::map<int, SdpAudioFormat>& codecs) override;
diff --git a/modules/audio_coding/neteq/neteq_impl_unittest.cc b/modules/audio_coding/neteq/neteq_impl_unittest.cc
index a073d82295..b3c25cae2d 100644
--- a/modules/audio_coding/neteq/neteq_impl_unittest.cc
+++ b/modules/audio_coding/neteq/neteq_impl_unittest.cc
@@ -303,8 +303,7 @@ TEST_F(NetEqImplTest, InsertPacket) {
fake_packet.sequence_number = kFirstSequenceNumber;
fake_packet.timestamp = kFirstTimestamp;
- rtc::scoped_refptr<MockAudioDecoderFactory> mock_decoder_factory(
- new rtc::RefCountedObject<MockAudioDecoderFactory>);
+ auto mock_decoder_factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
EXPECT_CALL(*mock_decoder_factory, MakeAudioDecoderMock(_, _, _))
.WillOnce(Invoke([&](const SdpAudioFormat& format,
absl::optional<AudioCodecPairId> codec_pair_id,
@@ -487,8 +486,8 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
int16_t next_value_;
} decoder_;
- rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&decoder_);
+ auto decoder_factory =
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&decoder_);
UseNoMocks();
CreateInstance(decoder_factory);
@@ -498,7 +497,7 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
// Insert one packet.
clock_.AdvanceTimeMilliseconds(123456);
- int64_t expected_receive_time_ms = clock_.TimeInMilliseconds();
+ Timestamp expected_receive_time = clock_.CurrentTime();
EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload));
// Pull audio once.
@@ -519,7 +518,7 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
EXPECT_THAT(packet_info.csrcs(), ElementsAre(43, 65, 17));
EXPECT_EQ(packet_info.rtp_timestamp(), rtp_header.timestamp);
EXPECT_FALSE(packet_info.audio_level().has_value());
- EXPECT_EQ(packet_info.receive_time_ms(), expected_receive_time_ms);
+ EXPECT_EQ(packet_info.receive_time(), expected_receive_time);
}
// Start with a simple check that the fake decoder is behaving as expected.
@@ -555,7 +554,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
MockAudioDecoder mock_decoder;
CreateInstance(
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&mock_decoder));
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&mock_decoder));
const uint8_t kPayloadType = 17; // Just an arbitrary number.
const int kSampleRateHz = 8000;
@@ -591,7 +590,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
// Insert one packet.
clock_.AdvanceTimeMilliseconds(123456);
- int64_t expected_receive_time_ms = clock_.TimeInMilliseconds();
+ Timestamp expected_receive_time = clock_.CurrentTime();
EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload));
// Pull audio once.
@@ -611,7 +610,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
EXPECT_THAT(packet_info.csrcs(), IsEmpty());
EXPECT_EQ(packet_info.rtp_timestamp(), rtp_header.timestamp);
EXPECT_EQ(packet_info.audio_level(), rtp_header.extension.audioLevel);
- EXPECT_EQ(packet_info.receive_time_ms(), expected_receive_time_ms);
+ EXPECT_EQ(packet_info.receive_time(), expected_receive_time);
}
// Insert two more packets. The first one is out of order, and is already too
@@ -627,7 +626,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
rtp_header.extension.audioLevel = 2;
payload[0] = 2;
clock_.AdvanceTimeMilliseconds(2000);
- expected_receive_time_ms = clock_.TimeInMilliseconds();
+ expected_receive_time = clock_.CurrentTime();
EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload));
// Expect only the second packet to be decoded (the one with "2" as the first
@@ -657,7 +656,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
EXPECT_THAT(packet_info.csrcs(), IsEmpty());
EXPECT_EQ(packet_info.rtp_timestamp(), rtp_header.timestamp);
EXPECT_EQ(packet_info.audio_level(), rtp_header.extension.audioLevel);
- EXPECT_EQ(packet_info.receive_time_ms(), expected_receive_time_ms);
+ EXPECT_EQ(packet_info.receive_time(), expected_receive_time);
}
EXPECT_CALL(mock_decoder, Die());
@@ -927,7 +926,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
// Create a mock decoder object.
MockAudioDecoder mock_decoder;
CreateInstance(
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&mock_decoder));
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&mock_decoder));
const uint8_t kPayloadType = 17; // Just an arbitrary number.
const int kSampleRateKhz = 48;
@@ -1066,7 +1065,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
::testing::NiceMock<MockAudioDecoder> decoder;
CreateInstance(
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&decoder));
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&decoder));
static const size_t kNetEqMaxFrameSize = 5760; // 120 ms @ 48 kHz.
static const size_t kChannels = 2;
@@ -1193,7 +1192,7 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) {
MockAudioDecoder mock_decoder;
CreateInstance(
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&mock_decoder));
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&mock_decoder));
const uint8_t kPayloadType = 17; // Just an arbitrary number.
const int kSampleRateHz = 8000;
@@ -1252,7 +1251,7 @@ TEST_F(NetEqImplTest, DecodingError) {
MockAudioDecoder mock_decoder;
CreateInstance(
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&mock_decoder));
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&mock_decoder));
const uint8_t kPayloadType = 17; // Just an arbitrary number.
const int kSampleRateHz = 8000;
@@ -1364,7 +1363,7 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
// Create a mock decoder object.
MockAudioDecoder mock_decoder;
CreateInstance(
- new rtc::RefCountedObject<test::AudioDecoderProxyFactory>(&mock_decoder));
+ rtc::make_ref_counted<test::AudioDecoderProxyFactory>(&mock_decoder));
const uint8_t kPayloadType = 17; // Just an arbitrary number.
const int kSampleRateHz = 8000;
@@ -1658,14 +1657,13 @@ class NetEqImplTest120ms : public NetEqImplTest {
void Register120msCodec(AudioDecoder::SpeechType speech_type) {
const uint32_t sampling_freq = kSamplingFreq_;
- decoder_factory_ =
- new rtc::RefCountedObject<test::FunctionAudioDecoderFactory>(
- [sampling_freq, speech_type]() {
- std::unique_ptr<AudioDecoder> decoder =
- std::make_unique<Decoder120ms>(sampling_freq, speech_type);
- RTC_CHECK_EQ(2, decoder->Channels());
- return decoder;
- });
+ decoder_factory_ = rtc::make_ref_counted<test::FunctionAudioDecoderFactory>(
+ [sampling_freq, speech_type]() {
+ std::unique_ptr<AudioDecoder> decoder =
+ std::make_unique<Decoder120ms>(sampling_freq, speech_type);
+ RTC_CHECK_EQ(2, decoder->Channels());
+ return decoder;
+ });
}
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory_;
diff --git a/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
index 5f15babbe3..8f72734d23 100644
--- a/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
+++ b/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
@@ -162,7 +162,7 @@ class NetEqNetworkStatsTest {
NetEqNetworkStatsTest(const SdpAudioFormat& format, MockAudioDecoder* decoder)
: decoder_(decoder),
decoder_factory_(
- new rtc::RefCountedObject<AudioDecoderProxyFactory>(decoder)),
+ rtc::make_ref_counted<AudioDecoderProxyFactory>(decoder)),
samples_per_ms_(format.clockrate_hz / 1000),
frame_size_samples_(kFrameSizeMs * samples_per_ms_),
rtp_generator_(new RtpGenerator(samples_per_ms_)),
diff --git a/modules/audio_coding/neteq/neteq_unittest.cc b/modules/audio_coding/neteq/neteq_unittest.cc
index 1369ead63c..8553307b5a 100644
--- a/modules/audio_coding/neteq/neteq_unittest.cc
+++ b/modules/audio_coding/neteq/neteq_unittest.cc
@@ -1066,7 +1066,7 @@ TEST_F(NetEqDecodingTestFaxMode, TestJitterBufferDelayWithAcceleration) {
expected_target_delay += neteq_->TargetDelayMs() * 2 * kSamples;
// We have two packets in the buffer and kAccelerate operation will
// extract 20 ms of data.
- neteq_->GetAudio(&out_frame_, &muted, NetEq::Operation::kAccelerate);
+ neteq_->GetAudio(&out_frame_, &muted, nullptr, NetEq::Operation::kAccelerate);
// Check jitter buffer delay.
NetEqLifetimeStatistics stats = neteq_->GetLifetimeStatistics();
diff --git a/modules/audio_coding/neteq/red_payload_splitter.cc b/modules/audio_coding/neteq/red_payload_splitter.cc
index 5681464f4d..f5cd9c29e4 100644
--- a/modules/audio_coding/neteq/red_payload_splitter.cc
+++ b/modules/audio_coding/neteq/red_payload_splitter.cc
@@ -139,7 +139,7 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) {
/*rtp_timestamp=*/new_packet.timestamp,
/*audio_level=*/absl::nullopt,
/*absolute_capture_time=*/absl::nullopt,
- /*receive_time_ms=*/red_packet.packet_info.receive_time_ms());
+ /*receive_time=*/red_packet.packet_info.receive_time());
new_packets.push_front(std::move(new_packet));
payload_ptr += payload_length;
}
diff --git a/modules/audio_coding/neteq/red_payload_splitter_unittest.cc b/modules/audio_coding/neteq/red_payload_splitter_unittest.cc
index 5956971b33..1cf616748f 100644
--- a/modules/audio_coding/neteq/red_payload_splitter_unittest.cc
+++ b/modules/audio_coding/neteq/red_payload_splitter_unittest.cc
@@ -298,7 +298,7 @@ TEST(RedPayloadSplitter, CheckRedPayloads) {
// easier to just register the payload types and let the actual implementation
// do its job.
DecoderDatabase decoder_database(
- new rtc::RefCountedObject<MockAudioDecoderFactory>, absl::nullopt);
+ rtc::make_ref_counted<MockAudioDecoderFactory>(), absl::nullopt);
decoder_database.RegisterPayload(0, SdpAudioFormat("cn", 8000, 1));
decoder_database.RegisterPayload(1, SdpAudioFormat("pcmu", 8000, 1));
decoder_database.RegisterPayload(2,
@@ -333,7 +333,7 @@ TEST(RedPayloadSplitter, CheckRedPayloadsRecursiveRed) {
// easier to just register the payload types and let the actual implementation
// do its job.
DecoderDatabase decoder_database(
- new rtc::RefCountedObject<MockAudioDecoderFactory>, absl::nullopt);
+ rtc::make_ref_counted<MockAudioDecoderFactory>(), absl::nullopt);
decoder_database.RegisterPayload(kRedPayloadType,
SdpAudioFormat("red", 8000, 1));
diff --git a/modules/audio_coding/neteq/test/result_sink.cc b/modules/audio_coding/neteq/test/result_sink.cc
index bb2a59bcfe..b70016180e 100644
--- a/modules/audio_coding/neteq/test/result_sink.cc
+++ b/modules/audio_coding/neteq/test/result_sink.cc
@@ -47,15 +47,6 @@ void Convert(const webrtc::NetEqNetworkStatistics& stats_raw,
stats->set_max_waiting_time_ms(stats_raw.max_waiting_time_ms);
}
-void Convert(const webrtc::RtcpStatistics& stats_raw,
- webrtc::neteq_unittest::RtcpStatistics* stats) {
- stats->set_fraction_lost(stats_raw.fraction_lost);
- stats->set_cumulative_lost(stats_raw.packets_lost);
- stats->set_extended_max_sequence_number(
- stats_raw.extended_highest_sequence_number);
- stats->set_jitter(stats_raw.jitter);
-}
-
void AddMessage(FILE* file,
rtc::MessageDigest* digest,
const std::string& message) {
@@ -99,19 +90,6 @@ void ResultSink::AddResult(const NetEqNetworkStatistics& stats_raw) {
#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
-void ResultSink::AddResult(const RtcpStatistics& stats_raw) {
-#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- neteq_unittest::RtcpStatistics stats;
- Convert(stats_raw, &stats);
-
- std::string stats_string;
- ASSERT_TRUE(stats.SerializeToString(&stats_string));
- AddMessage(output_fp_, digest_.get(), stats_string);
-#else
- FAIL() << "Writing to reference file requires Proto Buffer.";
-#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
-}
-
void ResultSink::VerifyChecksum(const std::string& checksum) {
std::vector<char> buffer;
buffer.resize(digest_->Size());
diff --git a/modules/audio_coding/neteq/test/result_sink.h b/modules/audio_coding/neteq/test/result_sink.h
index 357b635b08..dcde02d450 100644
--- a/modules/audio_coding/neteq/test/result_sink.h
+++ b/modules/audio_coding/neteq/test/result_sink.h
@@ -16,7 +16,6 @@
#include <string>
#include "api/neteq/neteq.h"
-#include "modules/rtp_rtcp/include/rtcp_statistics.h"
#include "rtc_base/message_digest.h"
namespace webrtc {
@@ -30,7 +29,6 @@ class ResultSink {
void AddResult(const T* test_results, size_t length);
void AddResult(const NetEqNetworkStatistics& stats);
- void AddResult(const RtcpStatistics& stats);
void VerifyChecksum(const std::string& ref_check_sum);
diff --git a/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc b/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc
index 6b325b6c5c..6cbba20e5f 100644
--- a/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc
+++ b/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc
@@ -37,14 +37,15 @@ ConstantPcmPacketSource::ConstantPcmPacketSource(size_t payload_len_samples,
std::unique_ptr<Packet> ConstantPcmPacketSource::NextPacket() {
RTC_CHECK_GT(packet_len_bytes_, kHeaderLenBytes);
- uint8_t* packet_memory = new uint8_t[packet_len_bytes_];
+ rtc::CopyOnWriteBuffer packet_buffer(packet_len_bytes_);
+ uint8_t* packet_memory = packet_buffer.MutableData();
// Fill the payload part of the packet memory with the pre-encoded value.
for (unsigned i = 0; i < 2 * payload_len_samples_; ++i)
packet_memory[kHeaderLenBytes + i] = encoded_sample_[i % 2];
WriteHeader(packet_memory);
// |packet| assumes ownership of |packet_memory|.
- std::unique_ptr<Packet> packet(
- new Packet(packet_memory, packet_len_bytes_, next_arrival_time_ms_));
+ auto packet =
+ std::make_unique<Packet>(std::move(packet_buffer), next_arrival_time_ms_);
next_arrival_time_ms_ += payload_len_samples_ / samples_per_ms_;
return packet;
}
diff --git a/modules/audio_coding/neteq/tools/neteq_test.cc b/modules/audio_coding/neteq/tools/neteq_test.cc
index 0988d2c8e5..22f5ad6931 100644
--- a/modules/audio_coding/neteq/tools/neteq_test.cc
+++ b/modules/audio_coding/neteq/tools/neteq_test.cc
@@ -172,7 +172,7 @@ NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() {
}
AudioFrame out_frame;
bool muted;
- int error = neteq_->GetAudio(&out_frame, &muted,
+ int error = neteq_->GetAudio(&out_frame, &muted, nullptr,
ActionToOperations(next_action_));
next_action_ = absl::nullopt;
RTC_CHECK(!muted) << "The code does not handle enable_muted_state";
diff --git a/modules/audio_coding/neteq/tools/neteq_test_factory.cc b/modules/audio_coding/neteq/tools/neteq_test_factory.cc
index f8ec36bd25..1a0ea156f1 100644
--- a/modules/audio_coding/neteq/tools/neteq_test_factory.cc
+++ b/modules/audio_coding/neteq/tools/neteq_test_factory.cc
@@ -285,7 +285,7 @@ std::unique_ptr<NetEqTest> NetEqTestFactory::InitializeTest(
// Note that capture-by-copy implies that the lambda captures the value of
// decoder_factory before it's reassigned on the left-hand side.
- decoder_factory = new rtc::RefCountedObject<FunctionAudioDecoderFactory>(
+ decoder_factory = rtc::make_ref_counted<FunctionAudioDecoderFactory>(
[decoder_factory, config](
const SdpAudioFormat& format,
absl::optional<AudioCodecPairId> codec_pair_id) {
diff --git a/modules/audio_coding/neteq/tools/packet.cc b/modules/audio_coding/neteq/tools/packet.cc
index 48959e4f62..e540173f43 100644
--- a/modules/audio_coding/neteq/tools/packet.cc
+++ b/modules/audio_coding/neteq/tools/packet.cc
@@ -10,30 +10,22 @@
#include "modules/audio_coding/neteq/tools/packet.h"
-#include <string.h>
-
-#include <memory>
-
-#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "api/array_view.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
namespace webrtc {
namespace test {
-using webrtc::RtpUtility::RtpHeaderParser;
-
-Packet::Packet(uint8_t* packet_memory,
- size_t allocated_bytes,
+Packet::Packet(rtc::CopyOnWriteBuffer packet,
size_t virtual_packet_length_bytes,
double time_ms,
- const RtpUtility::RtpHeaderParser& parser,
- const RtpHeaderExtensionMap* extension_map /*= nullptr*/)
- : payload_memory_(packet_memory),
- packet_length_bytes_(allocated_bytes),
+ const RtpHeaderExtensionMap* extension_map)
+ : packet_(std::move(packet)),
virtual_packet_length_bytes_(virtual_packet_length_bytes),
- virtual_payload_length_bytes_(0),
time_ms_(time_ms),
- valid_header_(ParseHeader(parser, extension_map)) {}
+ valid_header_(ParseHeader(extension_map)) {}
Packet::Packet(const RTPHeader& header,
size_t virtual_packet_length_bytes,
@@ -45,23 +37,6 @@ Packet::Packet(const RTPHeader& header,
time_ms_(time_ms),
valid_header_(true) {}
-Packet::Packet(uint8_t* packet_memory, size_t allocated_bytes, double time_ms)
- : Packet(packet_memory,
- allocated_bytes,
- allocated_bytes,
- time_ms,
- RtpUtility::RtpHeaderParser(packet_memory, allocated_bytes)) {}
-
-Packet::Packet(uint8_t* packet_memory,
- size_t allocated_bytes,
- size_t virtual_packet_length_bytes,
- double time_ms)
- : Packet(packet_memory,
- allocated_bytes,
- virtual_packet_length_bytes,
- time_ms,
- RtpUtility::RtpHeaderParser(packet_memory, allocated_bytes)) {}
-
Packet::~Packet() = default;
bool Packet::ExtractRedHeaders(std::list<RTPHeader*>* headers) const {
@@ -77,9 +52,8 @@ bool Packet::ExtractRedHeaders(std::list<RTPHeader*>* headers) const {
// +-+-+-+-+-+-+-+-+
//
- RTC_DCHECK(payload_);
- const uint8_t* payload_ptr = payload_;
- const uint8_t* payload_end_ptr = payload_ptr + payload_length_bytes_;
+ const uint8_t* payload_ptr = payload();
+ const uint8_t* payload_end_ptr = payload_ptr + payload_length_bytes();
// Find all RED headers with the extension bit set to 1. That is, all headers
// but the last one.
@@ -111,27 +85,43 @@ void Packet::DeleteRedHeaders(std::list<RTPHeader*>* headers) {
}
}
-bool Packet::ParseHeader(const RtpHeaderParser& parser,
- const RtpHeaderExtensionMap* extension_map) {
- bool valid_header = parser.Parse(&header_, extension_map);
-
- // Special case for dummy packets that have padding marked in the RTP header.
- // This causes the RTP header parser to report failure, but is fine in this
- // context.
- const bool header_only_with_padding =
- (header_.headerLength == packet_length_bytes_ &&
- header_.paddingLength > 0);
- if (!valid_header && !header_only_with_padding) {
- return false;
+bool Packet::ParseHeader(const RtpHeaderExtensionMap* extension_map) {
+ // Use RtpPacketReceived instead of RtpPacket because the former already has
+ // a converter into the legacy RTPHeader.
+ webrtc::RtpPacketReceived rtp_packet(extension_map);
+
+ // Dummy packets have padding marked in the RTP header but do not actually
+ // carry the padding bytes in the payload, so handle padding manually here.
+ // The regular RTP packet parser reports a failure for such packets, which is
+ // fine in this context.
+ bool padding = (packet_[0] & 0b0010'0000);
+ size_t padding_size = 0;
+ if (padding) {
+ // Clear the padding bit to prevent a parse failure when the RTP payload is
+ // omitted.
+ rtc::CopyOnWriteBuffer packet(packet_);
+ packet.MutableData()[0] &= ~0b0010'0000;
+ if (!rtp_packet.Parse(std::move(packet))) {
+ return false;
+ }
+ if (rtp_packet.payload_size() > 0) {
+ padding_size = rtp_packet.data()[rtp_packet.size() - 1];
+ }
+ if (padding_size > rtp_packet.payload_size()) {
+ return false;
+ }
+ } else {
+ if (!rtp_packet.Parse(packet_)) {
+ return false;
+ }
}
- RTC_DCHECK_LE(header_.headerLength, packet_length_bytes_);
- payload_ = &payload_memory_[header_.headerLength];
- RTC_DCHECK_GE(packet_length_bytes_, header_.headerLength);
- payload_length_bytes_ = packet_length_bytes_ - header_.headerLength;
- RTC_CHECK_GE(virtual_packet_length_bytes_, packet_length_bytes_);
- RTC_DCHECK_GE(virtual_packet_length_bytes_, header_.headerLength);
+ rtp_payload_ = rtc::MakeArrayView(packet_.data() + rtp_packet.headers_size(),
+ rtp_packet.payload_size() - padding_size);
+ rtp_packet.GetHeader(&header_);
+
+ RTC_CHECK_GE(virtual_packet_length_bytes_, rtp_packet.size());
+ RTC_DCHECK_GE(virtual_packet_length_bytes_, rtp_packet.headers_size());
virtual_payload_length_bytes_ =
- virtual_packet_length_bytes_ - header_.headerLength;
+ virtual_packet_length_bytes_ - rtp_packet.headers_size();
return true;
}
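
A minimal standalone sketch of the padding workaround introduced above, reusing only calls that appear in this hunk (Parse, MutableData, payload_size, data, size); the include path for RtpPacketReceived is an assumption rather than something taken from this patch.

#include <cstddef>
#include <utility>

#include "modules/rtp_rtcp/source/rtp_packet_received.h"  // assumed path
#include "rtc_base/copy_on_write_buffer.h"

// Parses `packet`, tolerating header-only packets that have the RTP padding
// bit set but no longer carry the padding bytes (RTP dump "light" files).
bool ParseWithPaddingWorkaround(const rtc::CopyOnWriteBuffer& packet,
                                webrtc::RtpPacketReceived* rtp_packet) {
  const bool padding = (packet[0] & 0b0010'0000) != 0;
  if (!padding) {
    return rtp_packet->Parse(packet);
  }
  // Clear the padding bit so the parser accepts the truncated packet.
  rtc::CopyOnWriteBuffer copy(packet);
  copy.MutableData()[0] &= ~0b0010'0000;
  if (!rtp_packet->Parse(std::move(copy))) {
    return false;
  }
  // When a payload is present, its last byte holds the padding length.
  size_t padding_size = 0;
  if (rtp_packet->payload_size() > 0) {
    padding_size = rtp_packet->data()[rtp_packet->size() - 1];
  }
  return padding_size <= rtp_packet->payload_size();
}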
diff --git a/modules/audio_coding/neteq/tools/packet.h b/modules/audio_coding/neteq/tools/packet.h
index f4189aae10..ef118d9f0b 100644
--- a/modules/audio_coding/neteq/tools/packet.h
+++ b/modules/audio_coding/neteq/tools/packet.h
@@ -12,62 +12,46 @@
#define MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_H_
#include <list>
-#include <memory>
-#include "api/rtp_headers.h" // NOLINT(build/include)
+#include "api/array_view.h"
+#include "api/rtp_headers.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "rtc_base/constructor_magic.h"
+#include "rtc_base/copy_on_write_buffer.h"
namespace webrtc {
-
-namespace RtpUtility {
-class RtpHeaderParser;
-} // namespace RtpUtility
-
namespace test {
// Class for handling RTP packets in test applications.
class Packet {
public:
// Creates a packet, with the packet payload (including header bytes) in
- // |packet_memory|. The length of |packet_memory| is |allocated_bytes|.
- // The new object assumes ownership of |packet_memory| and will delete it
- // when the Packet object is deleted. The |time_ms| is an extra time
- // associated with this packet, typically used to denote arrival time.
- // The first bytes in |packet_memory| will be parsed using |parser|.
- // |virtual_packet_length_bytes| is typically used when reading RTP dump files
+ // `packet`. The `time_ms` is an extra time associated with this packet,
+ // typically used to denote arrival time.
+ // `virtual_packet_length_bytes` is typically used when reading RTP dump files
// that only contain the RTP headers, and no payload (a.k.a RTP dummy files or
- // RTP light). The |virtual_packet_length_bytes| tells what size the packet
- // had on wire, including the now discarded payload, whereas |allocated_bytes|
- // is the length of the remaining payload (typically only the RTP header).
- Packet(uint8_t* packet_memory,
- size_t allocated_bytes,
+ // RTP light). The `virtual_packet_length_bytes` tells what size the packet
+ // had on the wire, including the now discarded payload.
+ Packet(rtc::CopyOnWriteBuffer packet,
size_t virtual_packet_length_bytes,
double time_ms,
- const RtpUtility::RtpHeaderParser& parser,
const RtpHeaderExtensionMap* extension_map = nullptr);
+ Packet(rtc::CopyOnWriteBuffer packet,
+ double time_ms,
+ const RtpHeaderExtensionMap* extension_map = nullptr)
+ : Packet(packet, packet.size(), time_ms, extension_map) {}
+
// Same as above, but creates the packet from an already parsed RTPHeader.
// This is typically used when reading RTP dump files that only contain the
- // RTP headers, and no payload. The |virtual_packet_length_bytes| tells what
+ // RTP headers, and no payload. The `virtual_packet_length_bytes` tells what
// size the packet had on wire, including the now discarded payload,
- // The |virtual_payload_length_bytes| tells the size of the payload.
+ // The `virtual_payload_length_bytes` tells the size of the payload.
Packet(const RTPHeader& header,
size_t virtual_packet_length_bytes,
size_t virtual_payload_length_bytes,
double time_ms);
- // The following constructors are the same as the first two, but without a
- // parser. Note that when the object is constructed using any of these
- // methods, the header will be parsed using a default RtpHeaderParser object.
- // In particular, RTP header extensions won't be parsed.
- Packet(uint8_t* packet_memory, size_t allocated_bytes, double time_ms);
-
- Packet(uint8_t* packet_memory,
- size_t allocated_bytes,
- size_t virtual_packet_length_bytes,
- double time_ms);
-
virtual ~Packet();
// Parses the first bytes of the RTP payload, interpreting them as RED headers
@@ -80,11 +64,11 @@ class Packet {
// itself.
static void DeleteRedHeaders(std::list<RTPHeader*>* headers);
- const uint8_t* payload() const { return payload_; }
+ const uint8_t* payload() const { return rtp_payload_.data(); }
- size_t packet_length_bytes() const { return packet_length_bytes_; }
+ size_t packet_length_bytes() const { return packet_.size(); }
- size_t payload_length_bytes() const { return payload_length_bytes_; }
+ size_t payload_length_bytes() const { return rtp_payload_.size(); }
size_t virtual_packet_length_bytes() const {
return virtual_packet_length_bytes_;
@@ -100,21 +84,17 @@ class Packet {
bool valid_header() const { return valid_header_; }
private:
- bool ParseHeader(const webrtc::RtpUtility::RtpHeaderParser& parser,
- const RtpHeaderExtensionMap* extension_map);
+ bool ParseHeader(const RtpHeaderExtensionMap* extension_map);
void CopyToHeader(RTPHeader* destination) const;
RTPHeader header_;
- const std::unique_ptr<uint8_t[]> payload_memory_;
- const uint8_t* payload_ = nullptr; // First byte after header.
- const size_t packet_length_bytes_ = 0; // Total length of packet.
- size_t payload_length_bytes_ = 0; // Length of the payload, after RTP header.
- // Zero for dummy RTP packets.
+ const rtc::CopyOnWriteBuffer packet_;
+ rtc::ArrayView<const uint8_t> rtp_payload_; // Empty for dummy RTP packets.
// Virtual lengths are used when parsing RTP header files (dummy RTP files).
const size_t virtual_packet_length_bytes_;
size_t virtual_payload_length_bytes_ = 0;
const double time_ms_; // Used to denote a packet's arrival time.
- const bool valid_header_; // Set by the RtpHeaderParser.
+ const bool valid_header_;
RTC_DISALLOW_COPY_AND_ASSIGN(Packet);
};
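
For orientation, a hypothetical call site for the two remaining constructors; `data`, `len`, and the on-wire length are placeholder parameters, and no extension map is passed.

#include <cstddef>
#include <cstdint>

#include "modules/audio_coding/neteq/tools/packet.h"
#include "rtc_base/copy_on_write_buffer.h"

void ExamplePacketConstruction(const uint8_t* data, size_t len,
                               size_t original_length_on_wire,
                               double arrival_time_ms) {
  // Full packet: CopyOnWriteBuffer copies `data`, so there is no manual
  // ownership hand-over or delete[] anymore.
  webrtc::test::Packet full_packet(rtc::CopyOnWriteBuffer(data, len),
                                   arrival_time_ms);

  // Header-only dump ("RTP light"): pass the original on-wire size so the
  // virtual payload length can still be derived.
  webrtc::test::Packet dummy_packet(rtc::CopyOnWriteBuffer(data, len),
                                    original_length_on_wire, arrival_time_ms);
}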
diff --git a/modules/audio_coding/neteq/tools/packet_unittest.cc b/modules/audio_coding/neteq/tools/packet_unittest.cc
index 7f3d6630c3..7cc9a48ee6 100644
--- a/modules/audio_coding/neteq/tools/packet_unittest.cc
+++ b/modules/audio_coding/neteq/tools/packet_unittest.cc
@@ -42,16 +42,15 @@ void MakeRtpHeader(int payload_type,
TEST(TestPacket, RegularPacket) {
const size_t kPacketLengthBytes = 100;
- uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
const uint8_t kPayloadType = 17;
const uint16_t kSequenceNumber = 4711;
const uint32_t kTimestamp = 47114711;
const uint32_t kSsrc = 0x12345678;
MakeRtpHeader(kPayloadType, kSequenceNumber, kTimestamp, kSsrc,
- packet_memory);
+ packet_memory.MutableData());
const double kPacketTime = 1.0;
- // Hand over ownership of |packet_memory| to |packet|.
- Packet packet(packet_memory, kPacketLengthBytes, kPacketTime);
+ Packet packet(std::move(packet_memory), kPacketTime);
ASSERT_TRUE(packet.valid_header());
EXPECT_EQ(kPayloadType, packet.header().payloadType);
EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
@@ -70,16 +69,44 @@ TEST(TestPacket, RegularPacket) {
TEST(TestPacket, DummyPacket) {
const size_t kPacketLengthBytes = kHeaderLengthBytes; // Only RTP header.
const size_t kVirtualPacketLengthBytes = 100;
- uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
const uint8_t kPayloadType = 17;
const uint16_t kSequenceNumber = 4711;
const uint32_t kTimestamp = 47114711;
const uint32_t kSsrc = 0x12345678;
MakeRtpHeader(kPayloadType, kSequenceNumber, kTimestamp, kSsrc,
- packet_memory);
+ packet_memory.MutableData());
const double kPacketTime = 1.0;
- // Hand over ownership of |packet_memory| to |packet|.
- Packet packet(packet_memory, kPacketLengthBytes, kVirtualPacketLengthBytes,
+ Packet packet(std::move(packet_memory), kVirtualPacketLengthBytes,
+ kPacketTime);
+ ASSERT_TRUE(packet.valid_header());
+ EXPECT_EQ(kPayloadType, packet.header().payloadType);
+ EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
+ EXPECT_EQ(kTimestamp, packet.header().timestamp);
+ EXPECT_EQ(kSsrc, packet.header().ssrc);
+ EXPECT_EQ(0, packet.header().numCSRCs);
+ EXPECT_EQ(kPacketLengthBytes, packet.packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.payload_length_bytes());
+ EXPECT_EQ(kVirtualPacketLengthBytes, packet.virtual_packet_length_bytes());
+ EXPECT_EQ(kVirtualPacketLengthBytes - kHeaderLengthBytes,
+ packet.virtual_payload_length_bytes());
+ EXPECT_EQ(kPacketTime, packet.time_ms());
+}
+
+TEST(TestPacket, DummyPaddingPacket) {
+ const size_t kPacketLengthBytes = kHeaderLengthBytes; // Only RTP header.
+ const size_t kVirtualPacketLengthBytes = 100;
+ rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
+ const uint8_t kPayloadType = 17;
+ const uint16_t kSequenceNumber = 4711;
+ const uint32_t kTimestamp = 47114711;
+ const uint32_t kSsrc = 0x12345678;
+ MakeRtpHeader(kPayloadType, kSequenceNumber, kTimestamp, kSsrc,
+ packet_memory.MutableData());
+ packet_memory.MutableData()[0] |= 0b0010'0000; // Set the padding bit.
+ const double kPacketTime = 1.0;
+ Packet packet(std::move(packet_memory), kVirtualPacketLengthBytes,
kPacketTime);
ASSERT_TRUE(packet.valid_header());
EXPECT_EQ(kPayloadType, packet.header().payloadType);
@@ -133,19 +160,19 @@ int MakeRedHeader(int payload_type,
TEST(TestPacket, RED) {
const size_t kPacketLengthBytes = 100;
- uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
const uint8_t kRedPayloadType = 17;
const uint16_t kSequenceNumber = 4711;
const uint32_t kTimestamp = 47114711;
const uint32_t kSsrc = 0x12345678;
MakeRtpHeader(kRedPayloadType, kSequenceNumber, kTimestamp, kSsrc,
- packet_memory);
+ packet_memory.MutableData());
// Create four RED headers.
// Payload types are just the same as the block index; the offset is 100 times
// the block index.
const int kRedBlocks = 4;
- uint8_t* payload_ptr =
- &packet_memory[kHeaderLengthBytes]; // First byte after header.
+ uint8_t* payload_ptr = packet_memory.MutableData() +
+ kHeaderLengthBytes; // First byte after header.
for (int i = 0; i < kRedBlocks; ++i) {
int payload_type = i;
// Offset value is not used for the last block.
diff --git a/modules/audio_coding/neteq/tools/rtp_file_source.cc b/modules/audio_coding/neteq/tools/rtp_file_source.cc
index 78523308e3..16b225e5df 100644
--- a/modules/audio_coding/neteq/tools/rtp_file_source.cc
+++ b/modules/audio_coding/neteq/tools/rtp_file_source.cc
@@ -62,12 +62,9 @@ std::unique_ptr<Packet> RtpFileSource::NextPacket() {
// Read the next one.
continue;
}
- std::unique_ptr<uint8_t[]> packet_memory(new uint8_t[temp_packet.length]);
- memcpy(packet_memory.get(), temp_packet.data, temp_packet.length);
- RtpUtility::RtpHeaderParser parser(packet_memory.get(), temp_packet.length);
auto packet = std::make_unique<Packet>(
- packet_memory.release(), temp_packet.length,
- temp_packet.original_length, temp_packet.time_ms, parser,
+ rtc::CopyOnWriteBuffer(temp_packet.data, temp_packet.length),
+ temp_packet.original_length, temp_packet.time_ms,
&rtp_header_extension_map_);
if (!packet->valid_header()) {
continue;
diff --git a/modules/audio_device/audio_device_data_observer.cc b/modules/audio_device/audio_device_data_observer.cc
index 89265a288f..be78fd16d7 100644
--- a/modules/audio_device/audio_device_data_observer.cc
+++ b/modules/audio_device/audio_device_data_observer.cc
@@ -301,9 +301,8 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceWithDataObserver(
rtc::scoped_refptr<AudioDeviceModule> impl,
std::unique_ptr<AudioDeviceDataObserver> observer) {
- rtc::scoped_refptr<ADMWrapper> audio_device(
- new rtc::RefCountedObject<ADMWrapper>(impl, observer.get(),
- std::move(observer)));
+ auto audio_device = rtc::make_ref_counted<ADMWrapper>(impl, observer.get(),
+ std::move(observer));
if (!audio_device->IsValid()) {
return nullptr;
@@ -315,8 +314,8 @@ rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceWithDataObserver(
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceWithDataObserver(
rtc::scoped_refptr<AudioDeviceModule> impl,
AudioDeviceDataObserver* legacy_observer) {
- rtc::scoped_refptr<ADMWrapper> audio_device(
- new rtc::RefCountedObject<ADMWrapper>(impl, legacy_observer, nullptr));
+ auto audio_device =
+ rtc::make_ref_counted<ADMWrapper>(impl, legacy_observer, nullptr);
if (!audio_device->IsValid()) {
return nullptr;
@@ -329,10 +328,8 @@ rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceWithDataObserver(
AudioDeviceModule::AudioLayer audio_layer,
TaskQueueFactory* task_queue_factory,
std::unique_ptr<AudioDeviceDataObserver> observer) {
- rtc::scoped_refptr<ADMWrapper> audio_device(
- new rtc::RefCountedObject<ADMWrapper>(audio_layer, task_queue_factory,
- observer.get(),
- std::move(observer)));
+ auto audio_device = rtc::make_ref_counted<ADMWrapper>(
+ audio_layer, task_queue_factory, observer.get(), std::move(observer));
if (!audio_device->IsValid()) {
return nullptr;
@@ -345,9 +342,8 @@ rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceWithDataObserver(
AudioDeviceModule::AudioLayer audio_layer,
TaskQueueFactory* task_queue_factory,
AudioDeviceDataObserver* legacy_observer) {
- rtc::scoped_refptr<ADMWrapper> audio_device(
- new rtc::RefCountedObject<ADMWrapper>(audio_layer, task_queue_factory,
- legacy_observer, nullptr));
+ auto audio_device = rtc::make_ref_counted<ADMWrapper>(
+ audio_layer, task_queue_factory, legacy_observer, nullptr);
if (!audio_device->IsValid()) {
return nullptr;
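
The hunks above are part of a repo-wide move from `new rtc::RefCountedObject<T>(...)` to `rtc::make_ref_counted<T>(...)`. A sketch of the pattern with a made-up class; `ExampleAdm` is not part of this patch and the header locations are assumptions.

#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"           // assumed: rtc::RefCountInterface
#include "rtc_base/ref_counted_object.h"  // assumed: rtc::make_ref_counted

class ExampleAdm : public rtc::RefCountInterface {
 public:
  explicit ExampleAdm(bool valid) : valid_(valid) {}
  bool IsValid() const { return valid_; }

 private:
  const bool valid_;
};

rtc::scoped_refptr<ExampleAdm> CreateExampleAdm(bool valid) {
  // Before: rtc::scoped_refptr<ExampleAdm> adm(
  //             new rtc::RefCountedObject<ExampleAdm>(valid));
  auto adm = rtc::make_ref_counted<ExampleAdm>(valid);
  if (!adm->IsValid())
    return nullptr;
  return adm;
}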
diff --git a/modules/audio_device/audio_device_impl.cc b/modules/audio_device/audio_device_impl.cc
index 0e8bd28c50..84460ff83f 100644
--- a/modules/audio_device/audio_device_impl.cc
+++ b/modules/audio_device/audio_device_impl.cc
@@ -92,27 +92,26 @@ rtc::scoped_refptr<AudioDeviceModuleForTest> AudioDeviceModule::CreateForTest(
}
// Create the generic reference counted (platform independent) implementation.
- rtc::scoped_refptr<AudioDeviceModuleImpl> audioDevice(
- new rtc::RefCountedObject<AudioDeviceModuleImpl>(audio_layer,
- task_queue_factory));
+ auto audio_device = rtc::make_ref_counted<AudioDeviceModuleImpl>(
+ audio_layer, task_queue_factory);
// Ensure that the current platform is supported.
- if (audioDevice->CheckPlatform() == -1) {
+ if (audio_device->CheckPlatform() == -1) {
return nullptr;
}
// Create the platform-dependent implementation.
- if (audioDevice->CreatePlatformSpecificObjects() == -1) {
+ if (audio_device->CreatePlatformSpecificObjects() == -1) {
return nullptr;
}
// Ensure that the generic audio buffer can communicate with the platform
// specific parts.
- if (audioDevice->AttachAudioBuffer() == -1) {
+ if (audio_device->AttachAudioBuffer() == -1) {
return nullptr;
}
- return audioDevice;
+ return audio_device;
}
AudioDeviceModuleImpl::AudioDeviceModuleImpl(
diff --git a/modules/audio_device/dummy/file_audio_device.cc b/modules/audio_device/dummy/file_audio_device.cc
index c68e7bba1a..e345a16c44 100644
--- a/modules/audio_device/dummy/file_audio_device.cc
+++ b/modules/audio_device/dummy/file_audio_device.cc
@@ -216,10 +216,13 @@ int32_t FileAudioDevice::StartPlayout() {
}
}
- _ptrThreadPlay.reset(new rtc::PlatformThread(
- PlayThreadFunc, this, "webrtc_audio_module_play_thread",
- rtc::kRealtimePriority));
- _ptrThreadPlay->Start();
+ _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (PlayThreadProcess()) {
+ }
+ },
+ "webrtc_audio_module_play_thread",
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
RTC_LOG(LS_INFO) << "Started playout capture to output file: "
<< _outputFilename;
@@ -233,10 +236,8 @@ int32_t FileAudioDevice::StopPlayout() {
}
// stop playout thread first
- if (_ptrThreadPlay) {
- _ptrThreadPlay->Stop();
- _ptrThreadPlay.reset();
- }
+ if (!_ptrThreadPlay.empty())
+ _ptrThreadPlay.Finalize();
MutexLock lock(&mutex_);
@@ -276,11 +277,13 @@ int32_t FileAudioDevice::StartRecording() {
}
}
- _ptrThreadRec.reset(new rtc::PlatformThread(
- RecThreadFunc, this, "webrtc_audio_module_capture_thread",
- rtc::kRealtimePriority));
-
- _ptrThreadRec->Start();
+ _ptrThreadRec = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (RecThreadProcess()) {
+ }
+ },
+ "webrtc_audio_module_capture_thread",
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
RTC_LOG(LS_INFO) << "Started recording from input file: " << _inputFilename;
@@ -293,10 +296,8 @@ int32_t FileAudioDevice::StopRecording() {
_recording = false;
}
- if (_ptrThreadRec) {
- _ptrThreadRec->Stop();
- _ptrThreadRec.reset();
- }
+ if (!_ptrThreadRec.empty())
+ _ptrThreadRec.Finalize();
MutexLock lock(&mutex_);
_recordingFramesLeft = 0;
@@ -439,18 +440,6 @@ void FileAudioDevice::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
_ptrAudioBuffer->SetPlayoutChannels(0);
}
-void FileAudioDevice::PlayThreadFunc(void* pThis) {
- FileAudioDevice* device = static_cast<FileAudioDevice*>(pThis);
- while (device->PlayThreadProcess()) {
- }
-}
-
-void FileAudioDevice::RecThreadFunc(void* pThis) {
- FileAudioDevice* device = static_cast<FileAudioDevice*>(pThis);
- while (device->RecThreadProcess()) {
- }
-}
-
bool FileAudioDevice::PlayThreadProcess() {
if (!_playing) {
return false;
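
The same thread migration repeats across the audio device backends in this patch: a static trampoline function plus Start()/Stop() on a heap-allocated rtc::PlatformThread becomes a lambda handed to SpawnJoinable on a plain member, joined via Finalize(). A minimal sketch with a made-up Worker class; only the rtc:: calls are taken from the hunks above, and the busy loop stands in for the real blocking audio processing.

#include <atomic>

#include "rtc_base/platform_thread.h"

class Worker {
 public:
  void Start() {
    running_.store(true);
    thread_ = rtc::PlatformThread::SpawnJoinable(
        [this] {
          while (Process()) {
          }
        },
        "example_worker_thread",
        rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
  }

  void Stop() {
    running_.store(false);
    // A default-constructed PlatformThread is empty(); Finalize() joins.
    if (!thread_.empty())
      thread_.Finalize();
  }

 private:
  bool Process() { return running_.load(); }

  std::atomic<bool> running_{false};
  rtc::PlatformThread thread_;  // Plain member, no unique_ptr or reset().
};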
diff --git a/modules/audio_device/dummy/file_audio_device.h b/modules/audio_device/dummy/file_audio_device.h
index ecb3f2f533..f4a6b76586 100644
--- a/modules/audio_device/dummy/file_audio_device.h
+++ b/modules/audio_device/dummy/file_audio_device.h
@@ -17,14 +17,11 @@
#include <string>
#include "modules/audio_device/audio_device_generic.h"
+#include "rtc_base/platform_thread.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/system/file_wrapper.h"
#include "rtc_base/time_utils.h"
-namespace rtc {
-class PlatformThread;
-} // namespace rtc
-
namespace webrtc {
// This is a fake audio device which plays audio from a file as its microphone
@@ -145,9 +142,8 @@ class FileAudioDevice : public AudioDeviceGeneric {
size_t _recordingFramesIn10MS;
size_t _playoutFramesIn10MS;
- // TODO(pbos): Make plain members instead of pointers and stop resetting them.
- std::unique_ptr<rtc::PlatformThread> _ptrThreadRec;
- std::unique_ptr<rtc::PlatformThread> _ptrThreadPlay;
+ rtc::PlatformThread _ptrThreadRec;
+ rtc::PlatformThread _ptrThreadPlay;
bool _playing;
bool _recording;
diff --git a/modules/audio_device/g3doc/audio_device_module.md b/modules/audio_device/g3doc/audio_device_module.md
index 65072af0a4..3aa1a59d08 100644
--- a/modules/audio_device/g3doc/audio_device_module.md
+++ b/modules/audio_device/g3doc/audio_device_module.md
@@ -1,8 +1,7 @@
# Audio Device Module (ADM)
<?% config.freshness.owner = 'henrika' %?>
-<?% config.freshness.reviewed =
-'2021-04-12' %?>
+<?% config.freshness.reviewed = '2021-04-12' %?>
## Overview
diff --git a/modules/audio_device/include/mock_audio_device.h b/modules/audio_device/include/mock_audio_device.h
index 0ca19de156..8483aa3da8 100644
--- a/modules/audio_device/include/mock_audio_device.h
+++ b/modules/audio_device/include/mock_audio_device.h
@@ -23,11 +23,10 @@ namespace test {
class MockAudioDeviceModule : public AudioDeviceModule {
public:
static rtc::scoped_refptr<MockAudioDeviceModule> CreateNice() {
- return new rtc::RefCountedObject<
- ::testing::NiceMock<MockAudioDeviceModule>>();
+ return rtc::make_ref_counted<::testing::NiceMock<MockAudioDeviceModule>>();
}
static rtc::scoped_refptr<MockAudioDeviceModule> CreateStrict() {
- return new rtc::RefCountedObject<
+ return rtc::make_ref_counted<
::testing::StrictMock<MockAudioDeviceModule>>();
}
diff --git a/modules/audio_device/include/test_audio_device.cc b/modules/audio_device/include/test_audio_device.cc
index 46bf216540..8351e8a405 100644
--- a/modules/audio_device/include/test_audio_device.cc
+++ b/modules/audio_device/include/test_audio_device.cc
@@ -447,7 +447,7 @@ rtc::scoped_refptr<TestAudioDeviceModule> TestAudioDeviceModule::Create(
std::unique_ptr<TestAudioDeviceModule::Capturer> capturer,
std::unique_ptr<TestAudioDeviceModule::Renderer> renderer,
float speed) {
- return new rtc::RefCountedObject<TestAudioDeviceModuleImpl>(
+ return rtc::make_ref_counted<TestAudioDeviceModuleImpl>(
task_queue_factory, std::move(capturer), std::move(renderer), speed);
}
diff --git a/modules/audio_device/linux/audio_device_alsa_linux.cc b/modules/audio_device/linux/audio_device_alsa_linux.cc
index 1cc5761b7c..9e6bd168fc 100644
--- a/modules/audio_device/linux/audio_device_alsa_linux.cc
+++ b/modules/audio_device/linux/audio_device_alsa_linux.cc
@@ -178,26 +178,13 @@ int32_t AudioDeviceLinuxALSA::Terminate() {
_mixerManager.Close();
// RECORDING
- if (_ptrThreadRec) {
- rtc::PlatformThread* tmpThread = _ptrThreadRec.release();
- mutex_.Unlock();
-
- tmpThread->Stop();
- delete tmpThread;
-
- mutex_.Lock();
- }
+ mutex_.Unlock();
+ _ptrThreadRec.Finalize();
// PLAYOUT
- if (_ptrThreadPlay) {
- rtc::PlatformThread* tmpThread = _ptrThreadPlay.release();
- mutex_.Unlock();
-
- tmpThread->Stop();
- delete tmpThread;
+ _ptrThreadPlay.Finalize();
+ mutex_.Lock();
- mutex_.Lock();
- }
#if defined(WEBRTC_USE_X11)
if (_XDisplay) {
XCloseDisplay(_XDisplay);
@@ -1040,11 +1027,13 @@ int32_t AudioDeviceLinuxALSA::StartRecording() {
return -1;
}
// RECORDING
- _ptrThreadRec.reset(new rtc::PlatformThread(
- RecThreadFunc, this, "webrtc_audio_module_capture_thread",
- rtc::kRealtimePriority));
-
- _ptrThreadRec->Start();
+ _ptrThreadRec = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (RecThreadProcess()) {
+ }
+ },
+ "webrtc_audio_module_capture_thread",
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
errVal = LATE(snd_pcm_prepare)(_handleRecord);
if (errVal < 0) {
@@ -1088,10 +1077,7 @@ int32_t AudioDeviceLinuxALSA::StopRecordingLocked() {
_recIsInitialized = false;
_recording = false;
- if (_ptrThreadRec) {
- _ptrThreadRec->Stop();
- _ptrThreadRec.reset();
- }
+ _ptrThreadRec.Finalize();
_recordingFramesLeft = 0;
if (_recordingBuffer) {
@@ -1158,10 +1144,13 @@ int32_t AudioDeviceLinuxALSA::StartPlayout() {
}
// PLAYOUT
- _ptrThreadPlay.reset(new rtc::PlatformThread(
- PlayThreadFunc, this, "webrtc_audio_module_play_thread",
- rtc::kRealtimePriority));
- _ptrThreadPlay->Start();
+ _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (PlayThreadProcess()) {
+ }
+ },
+ "webrtc_audio_module_play_thread",
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
if (errVal < 0) {
@@ -1191,10 +1180,7 @@ int32_t AudioDeviceLinuxALSA::StopPlayoutLocked() {
_playing = false;
// stop playout thread first
- if (_ptrThreadPlay) {
- _ptrThreadPlay->Stop();
- _ptrThreadPlay.reset();
- }
+ _ptrThreadPlay.Finalize();
_playoutFramesLeft = 0;
delete[] _playoutBuffer;
@@ -1469,18 +1455,6 @@ int32_t AudioDeviceLinuxALSA::ErrorRecovery(int32_t error,
// Thread Methods
// ============================================================================
-void AudioDeviceLinuxALSA::PlayThreadFunc(void* pThis) {
- AudioDeviceLinuxALSA* device = static_cast<AudioDeviceLinuxALSA*>(pThis);
- while (device->PlayThreadProcess()) {
- }
-}
-
-void AudioDeviceLinuxALSA::RecThreadFunc(void* pThis) {
- AudioDeviceLinuxALSA* device = static_cast<AudioDeviceLinuxALSA*>(pThis);
- while (device->RecThreadProcess()) {
- }
-}
-
bool AudioDeviceLinuxALSA::PlayThreadProcess() {
if (!_playing)
return false;
diff --git a/modules/audio_device/linux/audio_device_alsa_linux.h b/modules/audio_device/linux/audio_device_alsa_linux.h
index 410afcf42c..1f4a231640 100644
--- a/modules/audio_device/linux/audio_device_alsa_linux.h
+++ b/modules/audio_device/linux/audio_device_alsa_linux.h
@@ -155,10 +155,8 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric {
Mutex mutex_;
- // TODO(pbos): Make plain members and start/stop instead of resetting these
- // pointers. A thread can be reused.
- std::unique_ptr<rtc::PlatformThread> _ptrThreadRec;
- std::unique_ptr<rtc::PlatformThread> _ptrThreadPlay;
+ rtc::PlatformThread _ptrThreadRec;
+ rtc::PlatformThread _ptrThreadPlay;
AudioMixerManagerLinuxALSA _mixerManager;
diff --git a/modules/audio_device/linux/audio_device_pulse_linux.cc b/modules/audio_device/linux/audio_device_pulse_linux.cc
index 3256a05803..7742420fc2 100644
--- a/modules/audio_device/linux/audio_device_pulse_linux.cc
+++ b/modules/audio_device/linux/audio_device_pulse_linux.cc
@@ -15,6 +15,7 @@
#include "modules/audio_device/linux/latebindingsymboltable_linux.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
WebRTCPulseSymbolTable* GetPulseSymbolTable() {
static WebRTCPulseSymbolTable* pulse_symbol_table =
@@ -158,18 +159,22 @@ AudioDeviceGeneric::InitStatus AudioDeviceLinuxPulse::Init() {
#endif
// RECORDING
- _ptrThreadRec.reset(new rtc::PlatformThread(RecThreadFunc, this,
- "webrtc_audio_module_rec_thread",
- rtc::kRealtimePriority));
-
- _ptrThreadRec->Start();
+ const auto attributes =
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime);
+ _ptrThreadRec = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (RecThreadProcess()) {
+ }
+ },
+ "webrtc_audio_module_rec_thread", attributes);
// PLAYOUT
- _ptrThreadPlay.reset(new rtc::PlatformThread(
- PlayThreadFunc, this, "webrtc_audio_module_play_thread",
- rtc::kRealtimePriority));
- _ptrThreadPlay->Start();
-
+ _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (PlayThreadProcess()) {
+ }
+ },
+ "webrtc_audio_module_play_thread", attributes);
_initialized = true;
return InitStatus::OK;
@@ -187,22 +192,12 @@ int32_t AudioDeviceLinuxPulse::Terminate() {
_mixerManager.Close();
// RECORDING
- if (_ptrThreadRec) {
- rtc::PlatformThread* tmpThread = _ptrThreadRec.release();
-
- _timeEventRec.Set();
- tmpThread->Stop();
- delete tmpThread;
- }
+ _timeEventRec.Set();
+ _ptrThreadRec.Finalize();
// PLAYOUT
- if (_ptrThreadPlay) {
- rtc::PlatformThread* tmpThread = _ptrThreadPlay.release();
-
- _timeEventPlay.Set();
- tmpThread->Stop();
- delete tmpThread;
- }
+ _timeEventPlay.Set();
+ _ptrThreadPlay.Finalize();
// Terminate PulseAudio
if (TerminatePulseAudio() < 0) {
@@ -1981,18 +1976,6 @@ int32_t AudioDeviceLinuxPulse::ProcessRecordedData(int8_t* bufferData,
return 0;
}
-void AudioDeviceLinuxPulse::PlayThreadFunc(void* pThis) {
- AudioDeviceLinuxPulse* device = static_cast<AudioDeviceLinuxPulse*>(pThis);
- while (device->PlayThreadProcess()) {
- }
-}
-
-void AudioDeviceLinuxPulse::RecThreadFunc(void* pThis) {
- AudioDeviceLinuxPulse* device = static_cast<AudioDeviceLinuxPulse*>(pThis);
- while (device->RecThreadProcess()) {
- }
-}
-
bool AudioDeviceLinuxPulse::PlayThreadProcess() {
if (!_timeEventPlay.Wait(1000)) {
return true;
diff --git a/modules/audio_device/linux/audio_device_pulse_linux.h b/modules/audio_device/linux/audio_device_pulse_linux.h
index cfad6b1c15..0cf89ef011 100644
--- a/modules/audio_device/linux/audio_device_pulse_linux.h
+++ b/modules/audio_device/linux/audio_device_pulse_linux.h
@@ -268,9 +268,8 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric {
rtc::Event _recStartEvent;
rtc::Event _playStartEvent;
- // TODO(pbos): Remove unique_ptr and use directly without resetting.
- std::unique_ptr<rtc::PlatformThread> _ptrThreadPlay;
- std::unique_ptr<rtc::PlatformThread> _ptrThreadRec;
+ rtc::PlatformThread _ptrThreadPlay;
+ rtc::PlatformThread _ptrThreadRec;
AudioMixerManagerLinuxPulse _mixerManager;
diff --git a/modules/audio_device/mac/audio_device_mac.cc b/modules/audio_device/mac/audio_device_mac.cc
index f1ee4251fa..2088b017a0 100644
--- a/modules/audio_device/mac/audio_device_mac.cc
+++ b/modules/audio_device/mac/audio_device_mac.cc
@@ -166,8 +166,8 @@ AudioDeviceMac::~AudioDeviceMac() {
Terminate();
}
- RTC_DCHECK(!capture_worker_thread_.get());
- RTC_DCHECK(!render_worker_thread_.get());
+ RTC_DCHECK(capture_worker_thread_.empty());
+ RTC_DCHECK(render_worker_thread_.empty());
if (_paRenderBuffer) {
delete _paRenderBuffer;
@@ -1308,11 +1308,14 @@ int32_t AudioDeviceMac::StartRecording() {
return -1;
}
- RTC_DCHECK(!capture_worker_thread_.get());
- capture_worker_thread_.reset(new rtc::PlatformThread(
- RunCapture, this, "CaptureWorkerThread", rtc::kRealtimePriority));
- RTC_DCHECK(capture_worker_thread_.get());
- capture_worker_thread_->Start();
+ RTC_DCHECK(capture_worker_thread_.empty());
+ capture_worker_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (CaptureWorkerThread()) {
+ }
+ },
+ "CaptureWorkerThread",
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
OSStatus err = noErr;
if (_twoDevices) {
@@ -1394,10 +1397,9 @@ int32_t AudioDeviceMac::StopRecording() {
// Setting this signal will allow the worker thread to be stopped.
AtomicSet32(&_captureDeviceIsAlive, 0);
- if (capture_worker_thread_.get()) {
+ if (!capture_worker_thread_.empty()) {
mutex_.Unlock();
- capture_worker_thread_->Stop();
- capture_worker_thread_.reset();
+ capture_worker_thread_.Finalize();
mutex_.Lock();
}
@@ -1443,10 +1445,14 @@ int32_t AudioDeviceMac::StartPlayout() {
return 0;
}
- RTC_DCHECK(!render_worker_thread_.get());
- render_worker_thread_.reset(new rtc::PlatformThread(
- RunRender, this, "RenderWorkerThread", rtc::kRealtimePriority));
- render_worker_thread_->Start();
+ RTC_DCHECK(render_worker_thread_.empty());
+ render_worker_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (RenderWorkerThread()) {
+ }
+ },
+ "RenderWorkerThread",
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
if (_twoDevices || !_recording) {
OSStatus err = noErr;
@@ -1504,10 +1510,9 @@ int32_t AudioDeviceMac::StopPlayout() {
// Setting this signal will allow the worker thread to be stopped.
AtomicSet32(&_renderDeviceIsAlive, 0);
- if (render_worker_thread_.get()) {
+ if (!render_worker_thread_.empty()) {
mutex_.Unlock();
- render_worker_thread_->Stop();
- render_worker_thread_.reset();
+ render_worker_thread_.Finalize();
mutex_.Lock();
}
@@ -2369,12 +2374,6 @@ OSStatus AudioDeviceMac::implInConverterProc(UInt32* numberDataPackets,
return 0;
}
-void AudioDeviceMac::RunRender(void* ptrThis) {
- AudioDeviceMac* device = static_cast<AudioDeviceMac*>(ptrThis);
- while (device->RenderWorkerThread()) {
- }
-}
-
bool AudioDeviceMac::RenderWorkerThread() {
PaRingBufferSize numSamples =
ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;
@@ -2440,12 +2439,6 @@ bool AudioDeviceMac::RenderWorkerThread() {
return true;
}
-void AudioDeviceMac::RunCapture(void* ptrThis) {
- AudioDeviceMac* device = static_cast<AudioDeviceMac*>(ptrThis);
- while (device->CaptureWorkerThread()) {
- }
-}
-
bool AudioDeviceMac::CaptureWorkerThread() {
OSStatus err = noErr;
UInt32 noRecSamples =
diff --git a/modules/audio_device/mac/audio_device_mac.h b/modules/audio_device/mac/audio_device_mac.h
index 985db9da52..f9504b64b5 100644
--- a/modules/audio_device/mac/audio_device_mac.h
+++ b/modules/audio_device/mac/audio_device_mac.h
@@ -21,15 +21,12 @@
#include "modules/audio_device/mac/audio_mixer_manager_mac.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
struct PaUtilRingBuffer;
-namespace rtc {
-class PlatformThread;
-} // namespace rtc
-
namespace webrtc {
const uint32_t N_REC_SAMPLES_PER_SEC = 48000;
@@ -271,13 +268,11 @@ class AudioDeviceMac : public AudioDeviceGeneric {
rtc::Event _stopEventRec;
rtc::Event _stopEvent;
- // TODO(pbos): Replace with direct members, just start/stop, no need to
- // recreate the thread.
// Only valid/running between calls to StartRecording and StopRecording.
- std::unique_ptr<rtc::PlatformThread> capture_worker_thread_;
+ rtc::PlatformThread capture_worker_thread_;
// Only valid/running between calls to StartPlayout and StopPlayout.
- std::unique_ptr<rtc::PlatformThread> render_worker_thread_;
+ rtc::PlatformThread render_worker_thread_;
AudioMixerManagerMac _mixerManager;
diff --git a/modules/audio_device/win/audio_device_module_win.cc b/modules/audio_device/win/audio_device_module_win.cc
index 9c10e71e47..8cc4b7fc36 100644
--- a/modules/audio_device/win/audio_device_module_win.cc
+++ b/modules/audio_device/win/audio_device_module_win.cc
@@ -514,7 +514,7 @@ CreateWindowsCoreAudioAudioDeviceModuleFromInputAndOutput(
std::unique_ptr<AudioOutput> audio_output,
TaskQueueFactory* task_queue_factory) {
RTC_DLOG(INFO) << __FUNCTION__;
- return new rtc::RefCountedObject<WindowsAudioDeviceModule>(
+ return rtc::make_ref_counted<WindowsAudioDeviceModule>(
std::move(audio_input), std::move(audio_output), task_queue_factory);
}
diff --git a/modules/audio_device/win/core_audio_base_win.cc b/modules/audio_device/win/core_audio_base_win.cc
index 672e482478..7d93fcb14a 100644
--- a/modules/audio_device/win/core_audio_base_win.cc
+++ b/modules/audio_device/win/core_audio_base_win.cc
@@ -9,15 +9,16 @@
*/
#include "modules/audio_device/win/core_audio_base_win.h"
-#include "modules/audio_device/audio_device_buffer.h"
#include <memory>
#include <string>
+#include "modules/audio_device/audio_device_buffer.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/platform_thread.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/win/scoped_com_initializer.h"
#include "rtc_base/win/windows_version.h"
@@ -118,11 +119,6 @@ const char* SessionDisconnectReasonToString(
}
}
-void Run(void* obj) {
- RTC_DCHECK(obj);
- reinterpret_cast<CoreAudioBase*>(obj)->ThreadRun();
-}
-
// Returns true if the selected audio device supports low latency, i.e, if it
// is possible to initialize the engine using periods less than the default
// period (10ms).
@@ -552,24 +548,19 @@ bool CoreAudioBase::Start() {
// Audio thread should be alive during internal restart since the restart
// callback is triggered on that thread and it also makes the restart
// sequence less complex.
- RTC_DCHECK(audio_thread_);
+ RTC_DCHECK(!audio_thread_.empty());
}
// Start an audio thread but only if one does not already exist (which is the
// case during restart).
- if (!audio_thread_) {
- audio_thread_ = std::make_unique<rtc::PlatformThread>(
- Run, this, IsInput() ? "wasapi_capture_thread" : "wasapi_render_thread",
- rtc::kRealtimePriority);
- RTC_DCHECK(audio_thread_);
- audio_thread_->Start();
- if (!audio_thread_->IsRunning()) {
- StopThread();
- RTC_LOG(LS_ERROR) << "Failed to start audio thread";
- return false;
- }
- RTC_DLOG(INFO) << "Started thread with name: " << audio_thread_->name()
- << " and id: " << audio_thread_->GetThreadRef();
+ if (audio_thread_.empty()) {
+ const absl::string_view name =
+ IsInput() ? "wasapi_capture_thread" : "wasapi_render_thread";
+ audio_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] { ThreadRun(); }, name,
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime));
+ RTC_DLOG(INFO) << "Started thread with name: " << name
+ << " and handle: " << *audio_thread_.GetHandle();
}
// Start streaming data between the endpoint buffer and the audio engine.
@@ -696,14 +687,11 @@ bool CoreAudioBase::Restart() {
void CoreAudioBase::StopThread() {
RTC_DLOG(INFO) << __FUNCTION__;
RTC_DCHECK(!IsRestarting());
- if (audio_thread_) {
- if (audio_thread_->IsRunning()) {
- RTC_DLOG(INFO) << "Sets stop_event...";
- SetEvent(stop_event_.Get());
- RTC_DLOG(INFO) << "PlatformThread::Stop...";
- audio_thread_->Stop();
- }
- audio_thread_.reset();
+ if (!audio_thread_.empty()) {
+ RTC_DLOG(INFO) << "Sets stop_event...";
+ SetEvent(stop_event_.Get());
+ RTC_DLOG(INFO) << "PlatformThread::Finalize...";
+ audio_thread_.Finalize();
// Ensure that we don't quit the main thread loop immediately next
// time Start() is called.
@@ -716,7 +704,7 @@ bool CoreAudioBase::HandleRestartEvent() {
RTC_DLOG(INFO) << __FUNCTION__ << "[" << DirectionToString(direction())
<< "]";
RTC_DCHECK_RUN_ON(&thread_checker_audio_);
- RTC_DCHECK(audio_thread_);
+ RTC_DCHECK(!audio_thread_.empty());
RTC_DCHECK(IsRestarting());
// Let each client (input and/or output) take care of its own restart
// sequence since each side might need unique actions.
diff --git a/modules/audio_device/win/core_audio_base_win.h b/modules/audio_device/win/core_audio_base_win.h
index 2a57636640..afcc6a684d 100644
--- a/modules/audio_device/win/core_audio_base_win.h
+++ b/modules/audio_device/win/core_audio_base_win.h
@@ -158,7 +158,7 @@ class CoreAudioBase : public IAudioSessionEvents {
// Set when restart process starts and cleared when restart stops
// successfully. Accessed atomically.
std::atomic<bool> is_restarting_;
- std::unique_ptr<rtc::PlatformThread> audio_thread_;
+ rtc::PlatformThread audio_thread_;
Microsoft::WRL::ComPtr<IAudioSessionControl> audio_session_control_;
void StopThread();
diff --git a/modules/audio_device/win/core_audio_utility_win.cc b/modules/audio_device/win/core_audio_utility_win.cc
index f17ee99143..289abe9d26 100644
--- a/modules/audio_device/win/core_audio_utility_win.cc
+++ b/modules/audio_device/win/core_audio_utility_win.cc
@@ -323,7 +323,7 @@ ComPtr<IMMDevice> CreateDeviceInternal(const std::string& device_id,
// Verify that the audio endpoint device is active, i.e., that the audio
// adapter that connects to the endpoint device is present and enabled.
- if (SUCCEEDED(error.Error()) && !audio_endpoint_device.Get() &&
+ if (SUCCEEDED(error.Error()) && audio_endpoint_device.Get() &&
!IsDeviceActive(audio_endpoint_device.Get())) {
RTC_LOG(LS_WARNING) << "Selected endpoint device is not active";
audio_endpoint_device.Reset();
diff --git a/modules/audio_mixer/BUILD.gn b/modules/audio_mixer/BUILD.gn
index 739d62d705..d51be4af04 100644
--- a/modules/audio_mixer/BUILD.gn
+++ b/modules/audio_mixer/BUILD.gn
@@ -39,6 +39,7 @@ rtc_library("audio_mixer_impl") {
deps = [
":audio_frame_manipulator",
"../../api:array_view",
+ "../../api:rtp_packet_info",
"../../api:scoped_refptr",
"../../api/audio:audio_frame_api",
"../../api/audio:audio_mixer_api",
@@ -46,6 +47,7 @@ rtc_library("audio_mixer_impl") {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base:safe_conversions",
"../../rtc_base/synchronization:mutex",
"../../system_wrappers",
"../../system_wrappers:metrics",
@@ -104,13 +106,15 @@ if (rtc_include_tests) {
"audio_mixer_impl_unittest.cc",
"frame_combiner_unittest.cc",
]
-
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
deps = [
":audio_frame_manipulator",
":audio_mixer_impl",
":audio_mixer_test_utils",
"../../api:array_view",
+ "../../api:rtp_packet_info",
"../../api/audio:audio_mixer_api",
+ "../../api/units:timestamp",
"../../audio/utility:audio_frame_operations",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
diff --git a/modules/audio_mixer/OWNERS.webrtc b/modules/audio_mixer/OWNERS.webrtc
index b33d599697..5edc304ab3 100644
--- a/modules/audio_mixer/OWNERS.webrtc
+++ b/modules/audio_mixer/OWNERS.webrtc
@@ -1,2 +1,2 @@
-aleloi@webrtc.org
+alessiob@webrtc.org
henrik.lundin@webrtc.org
diff --git a/modules/audio_mixer/audio_mixer_impl.cc b/modules/audio_mixer/audio_mixer_impl.cc
index 04a8bcf723..8cebc38779 100644
--- a/modules/audio_mixer/audio_mixer_impl.cc
+++ b/modules/audio_mixer/audio_mixer_impl.cc
@@ -126,30 +126,33 @@ struct AudioMixerImpl::HelperContainers {
AudioMixerImpl::AudioMixerImpl(
std::unique_ptr<OutputRateCalculator> output_rate_calculator,
- bool use_limiter)
- : output_rate_calculator_(std::move(output_rate_calculator)),
+ bool use_limiter,
+ int max_sources_to_mix)
+ : max_sources_to_mix_(max_sources_to_mix),
+ output_rate_calculator_(std::move(output_rate_calculator)),
audio_source_list_(),
helper_containers_(std::make_unique<HelperContainers>()),
frame_combiner_(use_limiter) {
- const int kTypicalMaxNumberOfMixedStreams = 3;
- audio_source_list_.reserve(kTypicalMaxNumberOfMixedStreams);
- helper_containers_->resize(kTypicalMaxNumberOfMixedStreams);
+ RTC_CHECK_GE(max_sources_to_mix, 1) << "At least one source must be mixed";
+ audio_source_list_.reserve(max_sources_to_mix);
+ helper_containers_->resize(max_sources_to_mix);
}
AudioMixerImpl::~AudioMixerImpl() {}
-rtc::scoped_refptr<AudioMixerImpl> AudioMixerImpl::Create() {
+rtc::scoped_refptr<AudioMixerImpl> AudioMixerImpl::Create(
+ int max_sources_to_mix) {
return Create(std::unique_ptr<DefaultOutputRateCalculator>(
new DefaultOutputRateCalculator()),
- true);
+ /*use_limiter=*/true, max_sources_to_mix);
}
rtc::scoped_refptr<AudioMixerImpl> AudioMixerImpl::Create(
std::unique_ptr<OutputRateCalculator> output_rate_calculator,
- bool use_limiter) {
- return rtc::scoped_refptr<AudioMixerImpl>(
- new rtc::RefCountedObject<AudioMixerImpl>(
- std::move(output_rate_calculator), use_limiter));
+ bool use_limiter,
+ int max_sources_to_mix) {
+ return rtc::make_ref_counted<AudioMixerImpl>(
+ std::move(output_rate_calculator), use_limiter, max_sources_to_mix);
}
void AudioMixerImpl::Mix(size_t number_of_channels,
@@ -219,7 +222,7 @@ rtc::ArrayView<AudioFrame* const> AudioMixerImpl::GetAudioFromSources(
std::sort(audio_source_mixing_data_view.begin(),
audio_source_mixing_data_view.end(), ShouldMixBefore);
- int max_audio_frame_counter = kMaximumAmountOfMixedAudioSources;
+ int max_audio_frame_counter = max_sources_to_mix_;
int ramp_list_lengh = 0;
int audio_to_mix_count = 0;
// Go through list in order and put unmuted frames in result list.
diff --git a/modules/audio_mixer/audio_mixer_impl.h b/modules/audio_mixer/audio_mixer_impl.h
index 0a13082725..737fcbdc43 100644
--- a/modules/audio_mixer/audio_mixer_impl.h
+++ b/modules/audio_mixer/audio_mixer_impl.h
@@ -35,13 +35,16 @@ class AudioMixerImpl : public AudioMixer {
// AudioProcessing only accepts 10 ms frames.
static const int kFrameDurationInMs = 10;
- enum : int { kMaximumAmountOfMixedAudioSources = 3 };
- static rtc::scoped_refptr<AudioMixerImpl> Create();
+ static const int kDefaultNumberOfMixedAudioSources = 3;
+
+ static rtc::scoped_refptr<AudioMixerImpl> Create(
+ int max_sources_to_mix = kDefaultNumberOfMixedAudioSources);
static rtc::scoped_refptr<AudioMixerImpl> Create(
std::unique_ptr<OutputRateCalculator> output_rate_calculator,
- bool use_limiter);
+ bool use_limiter,
+ int max_sources_to_mix = kDefaultNumberOfMixedAudioSources);
~AudioMixerImpl() override;
@@ -60,7 +63,8 @@ class AudioMixerImpl : public AudioMixer {
protected:
AudioMixerImpl(std::unique_ptr<OutputRateCalculator> output_rate_calculator,
- bool use_limiter);
+ bool use_limiter,
+ int max_sources_to_mix);
private:
struct HelperContainers;
@@ -76,6 +80,8 @@ class AudioMixerImpl : public AudioMixer {
// checks that mixing is done sequentially.
mutable Mutex mutex_;
+ const int max_sources_to_mix_;
+
std::unique_ptr<OutputRateCalculator> output_rate_calculator_;
// List of all audio sources.
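
AudioMixerImpl::Create() now accepts an optional cap on the number of sources mixed into each frame, defaulting to kDefaultNumberOfMixedAudioSources (3). A small usage sketch; the value 8 is illustrative only.

#include "api/scoped_refptr.h"
#include "modules/audio_mixer/audio_mixer_impl.h"

void ExampleCreateMixers() {
  // Default behaviour: at most 3 sources contribute to each mixed frame.
  rtc::scoped_refptr<webrtc::AudioMixerImpl> default_mixer =
      webrtc::AudioMixerImpl::Create();

  // Larger conference: mix up to the 8 highest-priority sources.
  rtc::scoped_refptr<webrtc::AudioMixerImpl> bigger_mixer =
      webrtc::AudioMixerImpl::Create(/*max_sources_to_mix=*/8);
}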
diff --git a/modules/audio_mixer/audio_mixer_impl_unittest.cc b/modules/audio_mixer/audio_mixer_impl_unittest.cc
index c2f02fbfbd..61aa74e0a1 100644
--- a/modules/audio_mixer/audio_mixer_impl_unittest.cc
+++ b/modules/audio_mixer/audio_mixer_impl_unittest.cc
@@ -12,12 +12,18 @@
#include <string.h>
+#include <cstdint>
#include <limits>
#include <memory>
#include <string>
#include <utility>
+#include <vector>
+#include "absl/types/optional.h"
#include "api/audio/audio_mixer.h"
+#include "api/rtp_packet_info.h"
+#include "api/rtp_packet_infos.h"
+#include "api/units/timestamp.h"
#include "modules/audio_mixer/default_output_rate_calculator.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
@@ -29,6 +35,7 @@ using ::testing::_;
using ::testing::Exactly;
using ::testing::Invoke;
using ::testing::Return;
+using ::testing::UnorderedElementsAre;
namespace webrtc {
@@ -87,6 +94,10 @@ class MockMixerAudioSource : public ::testing::NiceMock<AudioMixer::Source> {
fake_audio_frame_info_ = audio_frame_info;
}
+ void set_packet_infos(const RtpPacketInfos& packet_infos) {
+ packet_infos_ = packet_infos;
+ }
+
private:
AudioFrameInfo FakeAudioFrameWithInfo(int sample_rate_hz,
AudioFrame* audio_frame) {
@@ -94,11 +105,13 @@ class MockMixerAudioSource : public ::testing::NiceMock<AudioMixer::Source> {
audio_frame->sample_rate_hz_ = sample_rate_hz;
audio_frame->samples_per_channel_ =
rtc::CheckedDivExact(sample_rate_hz, 100);
+ audio_frame->packet_infos_ = packet_infos_;
return fake_info();
}
AudioFrame fake_frame_;
AudioFrameInfo fake_audio_frame_info_;
+ RtpPacketInfos packet_infos_;
};
class CustomRateCalculator : public OutputRateCalculator {
@@ -160,7 +173,7 @@ void MixMonoAtGivenNativeRate(int native_sample_rate,
TEST(AudioMixer, LargestEnergyVadActiveMixed) {
constexpr int kAudioSources =
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources + 3;
+ AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 3;
const auto mixer = AudioMixerImpl::Create();
@@ -191,7 +204,7 @@ TEST(AudioMixer, LargestEnergyVadActiveMixed) {
mixer->GetAudioSourceMixabilityStatusForTest(&participants[i]);
if (i == kAudioSources - 1 ||
i < kAudioSources - 1 -
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources) {
+ AudioMixerImpl::kDefaultNumberOfMixedAudioSources) {
EXPECT_FALSE(is_mixed)
<< "Mixing status of AudioSource #" << i << " wrong.";
} else {
@@ -322,7 +335,7 @@ TEST(AudioMixer, ParticipantNumberOfChannels) {
// another participant with higher energy is added.
TEST(AudioMixer, RampedOutSourcesShouldNotBeMarkedMixed) {
constexpr int kAudioSources =
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources + 1;
+ AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1;
const auto mixer = AudioMixerImpl::Create();
MockMixerAudioSource participants[kAudioSources];
@@ -399,7 +412,7 @@ TEST(AudioMixer, ConstructFromOtherThread) {
TEST(AudioMixer, MutedShouldMixAfterUnmuted) {
constexpr int kAudioSources =
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources + 1;
+ AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1;
std::vector<AudioFrame> frames(kAudioSources);
for (auto& frame : frames) {
@@ -417,7 +430,7 @@ TEST(AudioMixer, MutedShouldMixAfterUnmuted) {
TEST(AudioMixer, PassiveShouldMixAfterNormal) {
constexpr int kAudioSources =
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources + 1;
+ AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1;
std::vector<AudioFrame> frames(kAudioSources);
for (auto& frame : frames) {
@@ -435,7 +448,7 @@ TEST(AudioMixer, PassiveShouldMixAfterNormal) {
TEST(AudioMixer, ActiveShouldMixBeforeLoud) {
constexpr int kAudioSources =
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources + 1;
+ AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1;
std::vector<AudioFrame> frames(kAudioSources);
for (auto& frame : frames) {
@@ -454,9 +467,52 @@ TEST(AudioMixer, ActiveShouldMixBeforeLoud) {
MixAndCompare(frames, frame_info, expected_status);
}
+TEST(AudioMixer, ShouldMixUpToSpecifiedNumberOfSourcesToMix) {
+ constexpr int kAudioSources = 5;
+ constexpr int kSourcesToMix = 2;
+
+ std::vector<AudioFrame> frames(kAudioSources);
+ for (auto& frame : frames) {
+ ResetFrame(&frame);
+ }
+
+ std::vector<AudioMixer::Source::AudioFrameInfo> frame_info(
+ kAudioSources, AudioMixer::Source::AudioFrameInfo::kNormal);
+ // Mark the last kSourcesToMix sources as kVadActive so that they are mixed.
+ const std::vector<AudioFrame::VADActivity> kVadActivities = {
+ AudioFrame::kVadUnknown, AudioFrame::kVadPassive, AudioFrame::kVadPassive,
+ AudioFrame::kVadActive, AudioFrame::kVadActive};
+ // Populate VAD and frame for all sources.
+ for (int i = 0; i < kAudioSources; i++) {
+ frames[i].vad_activity_ = kVadActivities[i];
+ }
+
+ std::vector<MockMixerAudioSource> participants(kAudioSources);
+ for (int i = 0; i < kAudioSources; ++i) {
+ participants[i].fake_frame()->CopyFrom(frames[i]);
+ participants[i].set_fake_info(frame_info[i]);
+ }
+
+ const auto mixer = AudioMixerImpl::Create(kSourcesToMix);
+ for (int i = 0; i < kAudioSources; ++i) {
+ EXPECT_TRUE(mixer->AddSource(&participants[i]));
+ EXPECT_CALL(participants[i], GetAudioFrameWithInfo(kDefaultSampleRateHz, _))
+ .Times(Exactly(1));
+ }
+
+ mixer->Mix(1, &frame_for_mixing);
+
+ std::vector<bool> expected_status = {false, false, false, true, true};
+ for (int i = 0; i < kAudioSources; ++i) {
+ EXPECT_EQ(expected_status[i],
+ mixer->GetAudioSourceMixabilityStatusForTest(&participants[i]))
+ << "Wrong mix status for source #" << i << " is wrong";
+ }
+}
+
TEST(AudioMixer, UnmutedShouldMixBeforeLoud) {
constexpr int kAudioSources =
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources + 1;
+ AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1;
std::vector<AudioFrame> frames(kAudioSources);
for (auto& frame : frames) {
@@ -595,6 +651,100 @@ TEST(AudioMixer, MultipleChannelsManyParticipants) {
}
}
+TEST(AudioMixer, ShouldIncludeRtpPacketInfoFromAllMixedSources) {
+ const uint32_t kSsrc0 = 10;
+ const uint32_t kSsrc1 = 11;
+ const uint32_t kSsrc2 = 12;
+ const uint32_t kCsrc0 = 20;
+ const uint32_t kCsrc1 = 21;
+ const uint32_t kCsrc2 = 22;
+ const uint32_t kCsrc3 = 23;
+ const int kAudioLevel0 = 10;
+ const int kAudioLevel1 = 40;
+ const absl::optional<uint32_t> kAudioLevel2 = absl::nullopt;
+ const uint32_t kRtpTimestamp0 = 300;
+ const uint32_t kRtpTimestamp1 = 400;
+ const Timestamp kReceiveTime0 = Timestamp::Millis(10);
+ const Timestamp kReceiveTime1 = Timestamp::Millis(20);
+
+ const RtpPacketInfo kPacketInfo0(kSsrc0, {kCsrc0, kCsrc1}, kRtpTimestamp0,
+ kAudioLevel0, absl::nullopt, kReceiveTime0);
+ const RtpPacketInfo kPacketInfo1(kSsrc1, {kCsrc2}, kRtpTimestamp1,
+ kAudioLevel1, absl::nullopt, kReceiveTime1);
+ const RtpPacketInfo kPacketInfo2(kSsrc2, {kCsrc3}, kRtpTimestamp1,
+ kAudioLevel2, absl::nullopt, kReceiveTime1);
+
+ const auto mixer = AudioMixerImpl::Create();
+
+ MockMixerAudioSource source;
+ source.set_packet_infos(RtpPacketInfos({kPacketInfo0}));
+ mixer->AddSource(&source);
+ ResetFrame(source.fake_frame());
+ mixer->Mix(1, &frame_for_mixing);
+
+ MockMixerAudioSource other_source;
+ other_source.set_packet_infos(RtpPacketInfos({kPacketInfo1, kPacketInfo2}));
+ ResetFrame(other_source.fake_frame());
+ mixer->AddSource(&other_source);
+
+ mixer->Mix(/*number_of_channels=*/1, &frame_for_mixing);
+
+ EXPECT_THAT(frame_for_mixing.packet_infos_,
+ UnorderedElementsAre(kPacketInfo0, kPacketInfo1, kPacketInfo2));
+}
+
+TEST(AudioMixer, MixerShouldIncludeRtpPacketInfoFromMixedSourcesOnly) {
+ const uint32_t kSsrc0 = 10;
+ const uint32_t kSsrc1 = 11;
+ const uint32_t kSsrc2 = 21;
+ const uint32_t kCsrc0 = 30;
+ const uint32_t kCsrc1 = 31;
+ const uint32_t kCsrc2 = 32;
+ const uint32_t kCsrc3 = 33;
+ const int kAudioLevel0 = 10;
+ const absl::optional<uint32_t> kAudioLevelMissing = absl::nullopt;
+ const uint32_t kRtpTimestamp0 = 300;
+ const uint32_t kRtpTimestamp1 = 400;
+ const Timestamp kReceiveTime0 = Timestamp::Millis(10);
+ const Timestamp kReceiveTime1 = Timestamp::Millis(20);
+
+ const RtpPacketInfo kPacketInfo0(kSsrc0, {kCsrc0, kCsrc1}, kRtpTimestamp0,
+ kAudioLevel0, absl::nullopt, kReceiveTime0);
+ const RtpPacketInfo kPacketInfo1(kSsrc1, {kCsrc2}, kRtpTimestamp1,
+ kAudioLevelMissing, absl::nullopt,
+ kReceiveTime1);
+ const RtpPacketInfo kPacketInfo2(kSsrc2, {kCsrc3}, kRtpTimestamp1,
+ kAudioLevelMissing, absl::nullopt,
+ kReceiveTime1);
+
+ const auto mixer = AudioMixerImpl::Create(/*max_sources_to_mix=*/2);
+
+ MockMixerAudioSource source1;
+ source1.set_packet_infos(RtpPacketInfos({kPacketInfo0}));
+ mixer->AddSource(&source1);
+ ResetFrame(source1.fake_frame());
+ mixer->Mix(1, &frame_for_mixing);
+
+ MockMixerAudioSource source2;
+ source2.set_packet_infos(RtpPacketInfos({kPacketInfo1}));
+ ResetFrame(source2.fake_frame());
+ mixer->AddSource(&source2);
+
+ // The mixer prioritizes kVadActive over kVadPassive.
+ // We limit the number of sources to mix to 2 and set the third source's VAD
+ // activity to kVadPassive so that it will not be added to the mix.
+ MockMixerAudioSource source3;
+ source3.set_packet_infos(RtpPacketInfos({kPacketInfo2}));
+ ResetFrame(source3.fake_frame());
+ source3.fake_frame()->vad_activity_ = AudioFrame::kVadPassive;
+ mixer->AddSource(&source3);
+
+ mixer->Mix(/*number_of_channels=*/1, &frame_for_mixing);
+
+ EXPECT_THAT(frame_for_mixing.packet_infos_,
+ UnorderedElementsAre(kPacketInfo0, kPacketInfo1));
+}
+
class HighOutputRateCalculator : public OutputRateCalculator {
public:
static const int kDefaultFrequency = 76000;
diff --git a/modules/audio_mixer/frame_combiner.cc b/modules/audio_mixer/frame_combiner.cc
index fb6f72af75..e31eea595f 100644
--- a/modules/audio_mixer/frame_combiner.cc
+++ b/modules/audio_mixer/frame_combiner.cc
@@ -16,8 +16,12 @@
#include <iterator>
#include <memory>
#include <string>
+#include <utility>
+#include <vector>
#include "api/array_view.h"
+#include "api/rtp_packet_info.h"
+#include "api/rtp_packet_infos.h"
#include "common_audio/include/audio_util.h"
#include "modules/audio_mixer/audio_frame_manipulator.h"
#include "modules/audio_mixer/audio_mixer_impl.h"
@@ -26,6 +30,7 @@
#include "modules/audio_processing/logging/apm_data_dumper.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_conversions.h"
#include "system_wrappers/include/metrics.h"
namespace webrtc {
@@ -53,11 +58,23 @@ void SetAudioFrameFields(rtc::ArrayView<const AudioFrame* const> mix_list,
if (mix_list.empty()) {
audio_frame_for_mixing->elapsed_time_ms_ = -1;
- } else if (mix_list.size() == 1) {
+ } else {
audio_frame_for_mixing->timestamp_ = mix_list[0]->timestamp_;
audio_frame_for_mixing->elapsed_time_ms_ = mix_list[0]->elapsed_time_ms_;
audio_frame_for_mixing->ntp_time_ms_ = mix_list[0]->ntp_time_ms_;
- audio_frame_for_mixing->packet_infos_ = mix_list[0]->packet_infos_;
+ std::vector<RtpPacketInfo> packet_infos;
+ for (const auto& frame : mix_list) {
+ audio_frame_for_mixing->timestamp_ =
+ std::min(audio_frame_for_mixing->timestamp_, frame->timestamp_);
+ audio_frame_for_mixing->ntp_time_ms_ =
+ std::min(audio_frame_for_mixing->ntp_time_ms_, frame->ntp_time_ms_);
+ audio_frame_for_mixing->elapsed_time_ms_ = std::max(
+ audio_frame_for_mixing->elapsed_time_ms_, frame->elapsed_time_ms_);
+ packet_infos.insert(packet_infos.end(), frame->packet_infos_.begin(),
+ frame->packet_infos_.end());
+ }
+ audio_frame_for_mixing->packet_infos_ =
+ RtpPacketInfos(std::move(packet_infos));
}
}
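
A standalone sketch of the merge policy the hunk above now applies when more than one frame is mixed: earliest RTP and NTP timestamps, longest elapsed time, and the union of packet infos. The AudioFrame include path is an assumption; the field names match the hunk.

#include <algorithm>
#include <utility>
#include <vector>

#include "api/audio/audio_frame.h"  // assumed path
#include "api/rtp_packet_info.h"
#include "api/rtp_packet_infos.h"

void MergeMixedFrameMetadata(
    const std::vector<const webrtc::AudioFrame*>& mix_list,
    webrtc::AudioFrame* out) {
  if (mix_list.empty()) {
    out->elapsed_time_ms_ = -1;
    return;
  }
  out->timestamp_ = mix_list[0]->timestamp_;
  out->elapsed_time_ms_ = mix_list[0]->elapsed_time_ms_;
  out->ntp_time_ms_ = mix_list[0]->ntp_time_ms_;
  std::vector<webrtc::RtpPacketInfo> packet_infos;
  for (const auto* frame : mix_list) {
    out->timestamp_ = std::min(out->timestamp_, frame->timestamp_);
    out->ntp_time_ms_ = std::min(out->ntp_time_ms_, frame->ntp_time_ms_);
    out->elapsed_time_ms_ =
        std::max(out->elapsed_time_ms_, frame->elapsed_time_ms_);
    packet_infos.insert(packet_infos.end(), frame->packet_infos_.begin(),
                        frame->packet_infos_.end());
  }
  out->packet_infos_ = webrtc::RtpPacketInfos(std::move(packet_infos));
}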
@@ -207,10 +224,10 @@ void FrameCombiner::LogMixingStats(
uma_logging_counter_ = 0;
RTC_HISTOGRAM_COUNTS_100("WebRTC.Audio.AudioMixer.NumIncomingStreams",
static_cast<int>(number_of_streams));
- RTC_HISTOGRAM_ENUMERATION(
- "WebRTC.Audio.AudioMixer.NumIncomingActiveStreams",
- static_cast<int>(mix_list.size()),
- AudioMixerImpl::kMaximumAmountOfMixedAudioSources);
+ RTC_HISTOGRAM_COUNTS_LINEAR(
+ "WebRTC.Audio.AudioMixer.NumIncomingActiveStreams2",
+ rtc::dchecked_cast<int>(mix_list.size()), /*min=*/1, /*max=*/16,
+ /*bucket_count=*/16);
using NativeRate = AudioProcessing::NativeRate;
static constexpr NativeRate native_rates[] = {
diff --git a/modules/audio_mixer/frame_combiner_unittest.cc b/modules/audio_mixer/frame_combiner_unittest.cc
index 4b189a052e..fa1fef325c 100644
--- a/modules/audio_mixer/frame_combiner_unittest.cc
+++ b/modules/audio_mixer/frame_combiner_unittest.cc
@@ -15,8 +15,12 @@
#include <numeric>
#include <string>
#include <type_traits>
+#include <vector>
+#include "absl/types/optional.h"
#include "api/array_view.h"
+#include "api/rtp_packet_info.h"
+#include "api/rtp_packet_infos.h"
#include "audio/utility/audio_frame_operations.h"
#include "modules/audio_mixer/gain_change_calculator.h"
#include "modules/audio_mixer/sine_wave_generator.h"
@@ -28,7 +32,13 @@
namespace webrtc {
namespace {
+
+using ::testing::ElementsAreArray;
+using ::testing::IsEmpty;
+using ::testing::UnorderedElementsAreArray;
+
using LimiterType = FrameCombiner::LimiterType;
+
struct FrameCombinerConfig {
bool use_limiter;
int sample_rate_hz;
@@ -57,9 +67,24 @@ std::string ProduceDebugText(const FrameCombinerConfig& config) {
AudioFrame frame1;
AudioFrame frame2;
-AudioFrame audio_frame_for_mixing;
void SetUpFrames(int sample_rate_hz, int number_of_channels) {
+ RtpPacketInfo packet_info1(
+ /*ssrc=*/1001, /*csrcs=*/{}, /*rtp_timestamp=*/1000,
+ /*audio_level=*/absl::nullopt, /*absolute_capture_time=*/absl::nullopt,
+ /*receive_time_ms=*/1);
+ RtpPacketInfo packet_info2(
+ /*ssrc=*/4004, /*csrcs=*/{}, /*rtp_timestamp=*/1234,
+ /*audio_level=*/absl::nullopt, /*absolute_capture_time=*/absl::nullopt,
+ /*receive_time_ms=*/2);
+ RtpPacketInfo packet_info3(
+ /*ssrc=*/7007, /*csrcs=*/{}, /*rtp_timestamp=*/1333,
+ /*audio_level=*/absl::nullopt, /*absolute_capture_time=*/absl::nullopt,
+ /*receive_time_ms=*/2);
+
+ frame1.packet_infos_ = RtpPacketInfos({packet_info1});
+ frame2.packet_infos_ = RtpPacketInfos({packet_info2, packet_info3});
+
for (auto* frame : {&frame1, &frame2}) {
frame->UpdateFrame(0, nullptr, rtc::CheckedDivExact(sample_rate_hz, 100),
sample_rate_hz, AudioFrame::kNormalSpeech,
@@ -81,6 +106,7 @@ TEST(FrameCombiner, BasicApiCallsLimiter) {
ProduceDebugText(rate, number_of_channels, number_of_frames));
const std::vector<AudioFrame*> frames_to_combine(
all_frames.begin(), all_frames.begin() + number_of_frames);
+ AudioFrame audio_frame_for_mixing;
combiner.Combine(frames_to_combine, number_of_channels, rate,
frames_to_combine.size(), &audio_frame_for_mixing);
}
@@ -88,6 +114,35 @@ TEST(FrameCombiner, BasicApiCallsLimiter) {
}
}
+// The RtpPacketInfos field of the mixed packet should contain the union of the
+// RtpPacketInfos from the frames that were actually mixed.
+TEST(FrameCombiner, ContainsAllRtpPacketInfos) {
+ static constexpr int kSampleRateHz = 48000;
+ static constexpr int kNumChannels = 1;
+ FrameCombiner combiner(true);
+ const std::vector<AudioFrame*> all_frames = {&frame1, &frame2};
+ SetUpFrames(kSampleRateHz, kNumChannels);
+
+ for (const int number_of_frames : {0, 1, 2}) {
+ SCOPED_TRACE(
+ ProduceDebugText(kSampleRateHz, kNumChannels, number_of_frames));
+ const std::vector<AudioFrame*> frames_to_combine(
+ all_frames.begin(), all_frames.begin() + number_of_frames);
+
+ std::vector<RtpPacketInfo> packet_infos;
+ for (const auto& frame : frames_to_combine) {
+ packet_infos.insert(packet_infos.end(), frame->packet_infos_.begin(),
+ frame->packet_infos_.end());
+ }
+
+ AudioFrame audio_frame_for_mixing;
+ combiner.Combine(frames_to_combine, kNumChannels, kSampleRateHz,
+ frames_to_combine.size(), &audio_frame_for_mixing);
+ EXPECT_THAT(audio_frame_for_mixing.packet_infos_,
+ UnorderedElementsAreArray(packet_infos));
+ }
+}
+
// There are DCHECKs in place to check for invalid parameters.
TEST(FrameCombinerDeathTest, DebugBuildCrashesWithManyChannels) {
FrameCombiner combiner(true);
@@ -105,6 +160,7 @@ TEST(FrameCombinerDeathTest, DebugBuildCrashesWithManyChannels) {
ProduceDebugText(rate, number_of_channels, number_of_frames));
const std::vector<AudioFrame*> frames_to_combine(
all_frames.begin(), all_frames.begin() + number_of_frames);
+ AudioFrame audio_frame_for_mixing;
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
EXPECT_DEATH(
combiner.Combine(frames_to_combine, number_of_channels, rate,
@@ -134,6 +190,7 @@ TEST(FrameCombinerDeathTest, DebugBuildCrashesWithHighRate) {
ProduceDebugText(rate, number_of_channels, number_of_frames));
const std::vector<AudioFrame*> frames_to_combine(
all_frames.begin(), all_frames.begin() + number_of_frames);
+ AudioFrame audio_frame_for_mixing;
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
EXPECT_DEATH(
combiner.Combine(frames_to_combine, number_of_channels, rate,
@@ -161,6 +218,7 @@ TEST(FrameCombiner, BasicApiCallsNoLimiter) {
ProduceDebugText(rate, number_of_channels, number_of_frames));
const std::vector<AudioFrame*> frames_to_combine(
all_frames.begin(), all_frames.begin() + number_of_frames);
+ AudioFrame audio_frame_for_mixing;
combiner.Combine(frames_to_combine, number_of_channels, rate,
frames_to_combine.size(), &audio_frame_for_mixing);
}
@@ -174,10 +232,11 @@ TEST(FrameCombiner, CombiningZeroFramesShouldProduceSilence) {
for (const int number_of_channels : {1, 2}) {
SCOPED_TRACE(ProduceDebugText(rate, number_of_channels, 0));
+ AudioFrame audio_frame_for_mixing;
+
const std::vector<AudioFrame*> frames_to_combine;
combiner.Combine(frames_to_combine, number_of_channels, rate,
frames_to_combine.size(), &audio_frame_for_mixing);
-
const int16_t* audio_frame_for_mixing_data =
audio_frame_for_mixing.data();
const std::vector<int16_t> mixed_data(
@@ -186,6 +245,7 @@ TEST(FrameCombiner, CombiningZeroFramesShouldProduceSilence) {
const std::vector<int16_t> expected(number_of_channels * rate / 100, 0);
EXPECT_EQ(mixed_data, expected);
+ EXPECT_THAT(audio_frame_for_mixing.packet_infos_, IsEmpty());
}
}
}
@@ -196,6 +256,8 @@ TEST(FrameCombiner, CombiningOneFrameShouldNotChangeFrame) {
for (const int number_of_channels : {1, 2, 4, 8, 10}) {
SCOPED_TRACE(ProduceDebugText(rate, number_of_channels, 1));
+ AudioFrame audio_frame_for_mixing;
+
SetUpFrames(rate, number_of_channels);
int16_t* frame1_data = frame1.mutable_data();
std::iota(frame1_data, frame1_data + number_of_channels * rate / 100, 0);
@@ -212,6 +274,8 @@ TEST(FrameCombiner, CombiningOneFrameShouldNotChangeFrame) {
std::vector<int16_t> expected(number_of_channels * rate / 100);
std::iota(expected.begin(), expected.end(), 0);
EXPECT_EQ(mixed_data, expected);
+ EXPECT_THAT(audio_frame_for_mixing.packet_infos_,
+ ElementsAreArray(frame1.packet_infos_));
}
}
}
@@ -255,6 +319,7 @@ TEST(FrameCombiner, GainCurveIsSmoothForAlternatingNumberOfStreams) {
// Ensures limiter is on if 'use_limiter'.
constexpr size_t number_of_streams = 2;
+ AudioFrame audio_frame_for_mixing;
combiner.Combine(frames_to_combine, config.number_of_channels,
config.sample_rate_hz, number_of_streams,
&audio_frame_for_mixing);
diff --git a/modules/audio_mixer/g3doc/index.md b/modules/audio_mixer/g3doc/index.md
new file mode 100644
index 0000000000..285530e95a
--- /dev/null
+++ b/modules/audio_mixer/g3doc/index.md
@@ -0,0 +1,54 @@
+<?% config.freshness.owner = 'alessiob' %?>
+<?% config.freshness.reviewed = '2021-04-21' %?>
+
+# The WebRTC Audio Mixer Module
+
+The WebRTC audio mixer module is responsible for mixing multiple incoming audio
+streams (sources) into a single audio stream (mix). It works on 10 ms frames
+and supports sample rates up to 48 kHz and up to 8 audio channels. The API is
+defined in
+[`api/audio/audio_mixer.h`](https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/audio/audio_mixer.h)
+and it includes the definition of
+[`AudioMixer::Source`](https://source.chromium.org/search?q=symbol:AudioMixer::Source%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h),
+which describes an incoming audio stream, and the definition of
+[`AudioMixer`](https://source.chromium.org/search?q=symbol:AudioMixer%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h),
+which operates on a collection of
+[`AudioMixer::Source`](https://source.chromium.org/search?q=symbol:AudioMixer::Source%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h)
+objects to produce a mix.
+
+## AudioMixer::Source
+
+Each source has its own characteristics (e.g., sample rate, number of
+channels, muted state) and is identified by an SSRC[^1].
+[`AudioMixer::Source::GetAudioFrameWithInfo()`](https://source.chromium.org/search?q=symbol:AudioMixer::Source::GetAudioFrameWithInfo%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h)
+is used to retrieve the next 10 ms chunk of audio to be mixed.
+
+[^1]: A synchronization source (SSRC) is the source of a stream of RTP packets,
+ identified by a 32-bit numeric SSRC identifier carried in the RTP header
+ so as not to be dependent upon the network address (see
+ [RFC 3550](https://tools.ietf.org/html/rfc3550#section-3)).
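+
+A minimal sketch of a source implementation is shown below; the class name
+`SilentSource`, its namespace, and the fixed SSRC value are illustrative only
+and are not part of the API.
+
+```c++
+#include "api/audio/audio_frame.h"
+#include "api/audio/audio_mixer.h"
+
+namespace example {
+
+// Hypothetical source that always hands back a muted (zeroed) 10 ms frame.
+class SilentSource : public webrtc::AudioMixer::Source {
+ public:
+  AudioFrameInfo GetAudioFrameWithInfo(
+      int sample_rate_hz, webrtc::AudioFrame* audio_frame) override {
+    // Passing nullptr as data marks the frame as muted (read as all zeros).
+    audio_frame->UpdateFrame(/*timestamp=*/0, /*data=*/nullptr,
+                             /*samples_per_channel=*/sample_rate_hz / 100,
+                             sample_rate_hz, webrtc::AudioFrame::kNormalSpeech,
+                             webrtc::AudioFrame::kVadPassive,
+                             /*num_channels=*/1);
+    return AudioFrameInfo::kMuted;
+  }
+  int Ssrc() const override { return 1001; }
+  int PreferredSampleRate() const override { return 48000; }
+};
+
+}  // namespace example
+```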
+
+## AudioMixer
+
+The interface allows adding and removing sources, and the
+[`AudioMixer::Mix()`](https://source.chromium.org/search?q=symbol:AudioMixer::Mix%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h)
+method generates a mix with the desired number of channels.
+
+## WebRTC implementation
+
+The interface is implemented in different parts of WebRTC:
+
+* [`AudioMixer::Source`](https://source.chromium.org/search?q=symbol:AudioMixer::Source%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h):
+ [`audio/audio_receive_stream.h`](https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/audio/audio_receive_stream.h)
+* [`AudioMixer`](https://source.chromium.org/search?q=symbol:AudioMixer%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h):
+ [`modules/audio_mixer/audio_mixer_impl.h`](https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/modules/audio_mixer/audio_mixer_impl.h)
+
+[`AudioMixer`](https://source.chromium.org/search?q=symbol:AudioMixer%20file:third_party%2Fwebrtc%2Fapi%2Faudio%2Faudio_mixer.h)
+is thread-safe. The output sample rate of the generated mix is chosen
+automatically based on the sample rates of the sources, whereas the number of
+output channels is defined by the caller[^2]. Samples from the non-muted
+sources are summed and a limiter then applies soft-clipping when needed.
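+
+A usage sketch is shown below, reusing the hypothetical `SilentSource` from the
+sketch above; the function name `MixOneFrame` is illustrative only.
+
+```c++
+#include "api/audio/audio_frame.h"
+#include "api/scoped_refptr.h"
+#include "modules/audio_mixer/audio_mixer_impl.h"
+
+void MixOneFrame(example::SilentSource* source) {
+  rtc::scoped_refptr<webrtc::AudioMixerImpl> mixer =
+      webrtc::AudioMixerImpl::Create();
+  mixer->AddSource(source);
+
+  // One call produces one 10 ms mixed frame. The output sample rate follows
+  // the registered sources; the channel count is chosen by the caller.
+  webrtc::AudioFrame mixed;
+  mixer->Mix(/*number_of_channels=*/1, &mixed);
+
+  mixer->RemoveSource(source);
+}
+```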
+
+[^2]: [`audio/utility/channel_mixer.h`](https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/audio/utility/channel_mixer.h)
+ is used to mix channels in the non-trivial cases, i.e., when the number of
+ channels for a source or the mix is greater than 3.
diff --git a/modules/audio_processing/aec3/aec_state.h b/modules/audio_processing/aec3/aec_state.h
index 125ae83a2b..e2f70a4c68 100644
--- a/modules/audio_processing/aec3/aec_state.h
+++ b/modules/audio_processing/aec3/aec_state.h
@@ -75,6 +75,12 @@ class AecState {
return erle_estimator_.Erle(onset_compensated);
}
+ // Returns the non-capped ERLE.
+ rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> ErleUnbounded()
+ const {
+ return erle_estimator_.ErleUnbounded();
+ }
+
// Returns the fullband ERLE estimate in log2 units.
float FullBandErleLog2() const { return erle_estimator_.FullbandErleLog2(); }
diff --git a/modules/audio_processing/aec3/echo_canceller3.cc b/modules/audio_processing/aec3/echo_canceller3.cc
index 35a2cff7ea..181b649f6d 100644
--- a/modules/audio_processing/aec3/echo_canceller3.cc
+++ b/modules/audio_processing/aec3/echo_canceller3.cc
@@ -49,7 +49,11 @@ void RetrieveFieldTrialValue(const char* trial_name,
ParseFieldTrial({&field_trial_param}, field_trial_str);
float field_trial_value = static_cast<float>(field_trial_param.Get());
- if (field_trial_value >= min && field_trial_value <= max) {
+ if (field_trial_value >= min && field_trial_value <= max &&
+ field_trial_value != *value_to_update) {
+ RTC_LOG(LS_INFO) << "Key " << trial_name
+ << " changing AEC3 parameter value from "
+ << *value_to_update << " to " << field_trial_value;
*value_to_update = field_trial_value;
}
}
@@ -65,7 +69,11 @@ void RetrieveFieldTrialValue(const char* trial_name,
ParseFieldTrial({&field_trial_param}, field_trial_str);
float field_trial_value = field_trial_param.Get();
- if (field_trial_value >= min && field_trial_value <= max) {
+ if (field_trial_value >= min && field_trial_value <= max &&
+ field_trial_value != *value_to_update) {
+ RTC_LOG(LS_INFO) << "Key " << trial_name
+ << " changing AEC3 parameter value from "
+ << *value_to_update << " to " << field_trial_value;
*value_to_update = field_trial_value;
}
}
@@ -572,6 +580,12 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) {
RetrieveFieldTrialValue("WebRTC-Aec3SuppressorEpStrengthDefaultLenOverride",
-1.f, 1.f, &adjusted_cfg.ep_strength.default_len);
+ // Field trial-based overrides of individual delay estimator parameters.
+ RetrieveFieldTrialValue("WebRTC-Aec3DelayEstimateSmoothingOverride", 0.f, 1.f,
+ &adjusted_cfg.delay.delay_estimate_smoothing);
+ RetrieveFieldTrialValue(
+ "WebRTC-Aec3DelayEstimateSmoothingDelayFoundOverride", 0.f, 1.f,
+ &adjusted_cfg.delay.delay_estimate_smoothing_delay_found);
return adjusted_cfg;
}
@@ -731,6 +745,10 @@ EchoCanceller3::EchoCanceller3(const EchoCanceller3Config& config,
std::vector<std::vector<rtc::ArrayView<float>>>(
1, std::vector<rtc::ArrayView<float>>(num_capture_channels_));
}
+
+ RTC_LOG(LS_INFO) << "AEC3 created with sample rate: " << sample_rate_hz_
+ << " Hz, num render channels: " << num_render_channels_
+ << ", num capture channels: " << num_capture_channels_;
}
EchoCanceller3::~EchoCanceller3() = default;
diff --git a/modules/audio_processing/aec3/echo_path_delay_estimator.cc b/modules/audio_processing/aec3/echo_path_delay_estimator.cc
index 2c987f9341..8a78834143 100644
--- a/modules/audio_processing/aec3/echo_path_delay_estimator.cc
+++ b/modules/audio_processing/aec3/echo_path_delay_estimator.cc
@@ -42,6 +42,7 @@ EchoPathDelayEstimator::EchoPathDelayEstimator(
? config.render_levels.poor_excitation_render_limit_ds8
: config.render_levels.poor_excitation_render_limit,
config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold),
matched_filter_lag_aggregator_(data_dumper_,
matched_filter_.GetMaxFilterLag(),
@@ -71,7 +72,8 @@ absl::optional<DelayEstimate> EchoPathDelayEstimator::EstimateDelay(
data_dumper_->DumpWav("aec3_capture_decimator_output",
downsampled_capture.size(), downsampled_capture.data(),
16000 / down_sampling_factor_, 1);
- matched_filter_.Update(render_buffer, downsampled_capture);
+ matched_filter_.Update(render_buffer, downsampled_capture,
+ matched_filter_lag_aggregator_.ReliableDelayFound());
absl::optional<DelayEstimate> aggregated_matched_filter_lag =
matched_filter_lag_aggregator_.Aggregate(
diff --git a/modules/audio_processing/aec3/echo_remover.cc b/modules/audio_processing/aec3/echo_remover.cc
index 6c177c9a10..2bfaa951d8 100644
--- a/modules/audio_processing/aec3/echo_remover.cc
+++ b/modules/audio_processing/aec3/echo_remover.cc
@@ -172,6 +172,7 @@ class EchoRemoverImpl final : public EchoRemover {
std::vector<std::array<float, kFftLengthBy2Plus1>> Y2_heap_;
std::vector<std::array<float, kFftLengthBy2Plus1>> E2_heap_;
std::vector<std::array<float, kFftLengthBy2Plus1>> R2_heap_;
+ std::vector<std::array<float, kFftLengthBy2Plus1>> R2_unbounded_heap_;
std::vector<std::array<float, kFftLengthBy2Plus1>> S2_linear_heap_;
std::vector<FftData> Y_heap_;
std::vector<FftData> E_heap_;
@@ -218,6 +219,7 @@ EchoRemoverImpl::EchoRemoverImpl(const EchoCanceller3Config& config,
Y2_heap_(NumChannelsOnHeap(num_capture_channels_)),
E2_heap_(NumChannelsOnHeap(num_capture_channels_)),
R2_heap_(NumChannelsOnHeap(num_capture_channels_)),
+ R2_unbounded_heap_(NumChannelsOnHeap(num_capture_channels_)),
S2_linear_heap_(NumChannelsOnHeap(num_capture_channels_)),
Y_heap_(NumChannelsOnHeap(num_capture_channels_)),
E_heap_(NumChannelsOnHeap(num_capture_channels_)),
@@ -265,6 +267,8 @@ void EchoRemoverImpl::ProcessCapture(
std::array<std::array<float, kFftLengthBy2Plus1>, kMaxNumChannelsOnStack>
R2_stack;
std::array<std::array<float, kFftLengthBy2Plus1>, kMaxNumChannelsOnStack>
+ R2_unbounded_stack;
+ std::array<std::array<float, kFftLengthBy2Plus1>, kMaxNumChannelsOnStack>
S2_linear_stack;
std::array<FftData, kMaxNumChannelsOnStack> Y_stack;
std::array<FftData, kMaxNumChannelsOnStack> E_stack;
@@ -280,6 +284,8 @@ void EchoRemoverImpl::ProcessCapture(
E2_stack.data(), num_capture_channels_);
rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2(
R2_stack.data(), num_capture_channels_);
+ rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2_unbounded(
+ R2_unbounded_stack.data(), num_capture_channels_);
rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> S2_linear(
S2_linear_stack.data(), num_capture_channels_);
rtc::ArrayView<FftData> Y(Y_stack.data(), num_capture_channels_);
@@ -301,6 +307,8 @@ void EchoRemoverImpl::ProcessCapture(
E2_heap_.data(), num_capture_channels_);
R2 = rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>>(
R2_heap_.data(), num_capture_channels_);
+ R2_unbounded = rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>>(
+ R2_unbounded_heap_.data(), num_capture_channels_);
S2_linear = rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>>(
S2_linear_heap_.data(), num_capture_channels_);
Y = rtc::ArrayView<FftData>(Y_heap_.data(), num_capture_channels_);
@@ -406,8 +414,8 @@ void EchoRemoverImpl::ProcessCapture(
if (capture_output_used_) {
// Estimate the residual echo power.
residual_echo_estimator_.Estimate(aec_state_, *render_buffer, S2_linear, Y2,
- suppression_gain_.IsDominantNearend(),
- R2);
+ suppression_gain_.IsDominantNearend(), R2,
+ R2_unbounded);
// Suppressor nearend estimate.
if (aec_state_.UsableLinearEstimate()) {
@@ -430,7 +438,7 @@ void EchoRemoverImpl::ProcessCapture(
// Compute preferred gains.
float high_bands_gain;
- suppression_gain_.GetGain(nearend_spectrum, echo_spectrum, R2,
+ suppression_gain_.GetGain(nearend_spectrum, echo_spectrum, R2, R2_unbounded,
cng_.NoiseSpectrum(), render_signal_analyzer_,
aec_state_, x, clock_drift, &high_bands_gain, &G);
diff --git a/modules/audio_processing/aec3/erle_estimator.h b/modules/audio_processing/aec3/erle_estimator.h
index cae896e82c..55797592a9 100644
--- a/modules/audio_processing/aec3/erle_estimator.h
+++ b/modules/audio_processing/aec3/erle_estimator.h
@@ -62,6 +62,18 @@ class ErleEstimator {
: subband_erle_estimator_.Erle(onset_compensated);
}
+ // Returns the non-capped subband ERLE.
+ rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> ErleUnbounded()
+ const {
+ // The unbounded ERLE is only used with the subband ERLE estimator, where
+ // the ERLE is often capped at low values. When the signal-dependent ERLE
+ // estimator is used, the capped ERLE is returned.
+ return !signal_dependent_erle_estimator_
+ ? subband_erle_estimator_.ErleUnbounded()
+ : signal_dependent_erle_estimator_->Erle(
+ /*onset_compensated=*/false);
+ }
+
// Returns the subband ERLE that are estimated during onsets (only used for
// testing).
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> ErleDuringOnsets()
diff --git a/modules/audio_processing/aec3/erle_estimator_unittest.cc b/modules/audio_processing/aec3/erle_estimator_unittest.cc
index 6df71424bc..e38f2386f7 100644
--- a/modules/audio_processing/aec3/erle_estimator_unittest.cc
+++ b/modules/audio_processing/aec3/erle_estimator_unittest.cc
@@ -50,6 +50,16 @@ void VerifyErle(
EXPECT_NEAR(kTrueErle, erle_time_domain, 0.5);
}
+void VerifyErleGreaterOrEqual(
+ rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> erle1,
+ rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> erle2) {
+ for (size_t ch = 0; ch < erle1.size(); ++ch) {
+ for (size_t i = 0; i < kFftLengthBy2Plus1; ++i) {
+ EXPECT_GE(erle1[ch][i], erle2[ch][i]);
+ }
+ }
+}
+
void FormFarendTimeFrame(std::vector<std::vector<std::vector<float>>>* x) {
const std::array<float, kBlockSize> frame = {
7459.88, 17209.6, 17383, 20768.9, 16816.7, 18386.3, 4492.83, 9675.85,
@@ -156,9 +166,10 @@ TEST_P(ErleEstimatorMultiChannel, VerifyErleIncreaseAndHold) {
kNumBands, std::vector<std::vector<float>>(
num_render_channels, std::vector<float>(kBlockSize, 0.f)));
std::vector<std::vector<std::array<float, kFftLengthBy2Plus1>>>
- filter_frequency_response(
- config.filter.refined.length_blocks,
- std::vector<std::array<float, kFftLengthBy2Plus1>>(num_capture_channels));
+ filter_frequency_response(
+ config.filter.refined.length_blocks,
+ std::vector<std::array<float, kFftLengthBy2Plus1>>(
+ num_capture_channels));
std::unique_ptr<RenderDelayBuffer> render_delay_buffer(
RenderDelayBuffer::Create(config, kSampleRateHz, num_render_channels));
@@ -181,6 +192,10 @@ TEST_P(ErleEstimatorMultiChannel, VerifyErleIncreaseAndHold) {
VerifyErle(estimator.Erle(/*onset_compensated=*/true),
std::pow(2.f, estimator.FullbandErleLog2()), config.erle.max_l,
config.erle.max_h);
+ VerifyErleGreaterOrEqual(estimator.Erle(/*onset_compensated=*/false),
+ estimator.Erle(/*onset_compensated=*/true));
+ VerifyErleGreaterOrEqual(estimator.ErleUnbounded(),
+ estimator.Erle(/*onset_compensated=*/false));
FormNearendFrame(&x, &X2, E2, Y2);
// Verifies that the ERLE is not immediately decreased during nearend
@@ -194,6 +209,10 @@ TEST_P(ErleEstimatorMultiChannel, VerifyErleIncreaseAndHold) {
VerifyErle(estimator.Erle(/*onset_compensated=*/true),
std::pow(2.f, estimator.FullbandErleLog2()), config.erle.max_l,
config.erle.max_h);
+ VerifyErleGreaterOrEqual(estimator.Erle(/*onset_compensated=*/false),
+ estimator.Erle(/*onset_compensated=*/true));
+ VerifyErleGreaterOrEqual(estimator.ErleUnbounded(),
+ estimator.Erle(/*onset_compensated=*/false));
}
TEST_P(ErleEstimatorMultiChannel, VerifyErleTrackingOnOnsets) {
@@ -212,9 +231,10 @@ TEST_P(ErleEstimatorMultiChannel, VerifyErleTrackingOnOnsets) {
kNumBands, std::vector<std::vector<float>>(
num_render_channels, std::vector<float>(kBlockSize, 0.f)));
std::vector<std::vector<std::array<float, kFftLengthBy2Plus1>>>
- filter_frequency_response(
- config.filter.refined.length_blocks,
- std::vector<std::array<float, kFftLengthBy2Plus1>>(num_capture_channels));
+ filter_frequency_response(
+ config.filter.refined.length_blocks,
+ std::vector<std::array<float, kFftLengthBy2Plus1>>(
+ num_capture_channels));
std::unique_ptr<RenderDelayBuffer> render_delay_buffer(
RenderDelayBuffer::Create(config, kSampleRateHz, num_render_channels));
diff --git a/modules/audio_processing/aec3/matched_filter.cc b/modules/audio_processing/aec3/matched_filter.cc
index 64b2d4e697..1721e9c983 100644
--- a/modules/audio_processing/aec3/matched_filter.cc
+++ b/modules/audio_processing/aec3/matched_filter.cc
@@ -307,7 +307,8 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper,
int num_matched_filters,
size_t alignment_shift_sub_blocks,
float excitation_limit,
- float smoothing,
+ float smoothing_fast,
+ float smoothing_slow,
float matching_filter_threshold)
: data_dumper_(data_dumper),
optimization_(optimization),
@@ -319,7 +320,8 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper,
lag_estimates_(num_matched_filters),
filters_offsets_(num_matched_filters, 0),
excitation_limit_(excitation_limit),
- smoothing_(smoothing),
+ smoothing_fast_(smoothing_fast),
+ smoothing_slow_(smoothing_slow),
matching_filter_threshold_(matching_filter_threshold) {
RTC_DCHECK(data_dumper);
RTC_DCHECK_LT(0, window_size_sub_blocks);
@@ -340,10 +342,14 @@ void MatchedFilter::Reset() {
}
void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer,
- rtc::ArrayView<const float> capture) {
+ rtc::ArrayView<const float> capture,
+ bool use_slow_smoothing) {
RTC_DCHECK_EQ(sub_block_size_, capture.size());
auto& y = capture;
+ const float smoothing =
+ use_slow_smoothing ? smoothing_slow_ : smoothing_fast_;
+
const float x2_sum_threshold =
filters_[0].size() * excitation_limit_ * excitation_limit_;
@@ -360,25 +366,25 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer,
switch (optimization_) {
#if defined(WEBRTC_ARCH_X86_FAMILY)
case Aec3Optimization::kSse2:
- aec3::MatchedFilterCore_SSE2(x_start_index, x2_sum_threshold,
- smoothing_, render_buffer.buffer, y,
- filters_[n], &filters_updated, &error_sum);
+ aec3::MatchedFilterCore_SSE2(x_start_index, x2_sum_threshold, smoothing,
+ render_buffer.buffer, y, filters_[n],
+ &filters_updated, &error_sum);
break;
case Aec3Optimization::kAvx2:
- aec3::MatchedFilterCore_AVX2(x_start_index, x2_sum_threshold,
- smoothing_, render_buffer.buffer, y,
- filters_[n], &filters_updated, &error_sum);
+ aec3::MatchedFilterCore_AVX2(x_start_index, x2_sum_threshold, smoothing,
+ render_buffer.buffer, y, filters_[n],
+ &filters_updated, &error_sum);
break;
#endif
#if defined(WEBRTC_HAS_NEON)
case Aec3Optimization::kNeon:
- aec3::MatchedFilterCore_NEON(x_start_index, x2_sum_threshold,
- smoothing_, render_buffer.buffer, y,
- filters_[n], &filters_updated, &error_sum);
+ aec3::MatchedFilterCore_NEON(x_start_index, x2_sum_threshold, smoothing,
+ render_buffer.buffer, y, filters_[n],
+ &filters_updated, &error_sum);
break;
#endif
default:
- aec3::MatchedFilterCore(x_start_index, x2_sum_threshold, smoothing_,
+ aec3::MatchedFilterCore(x_start_index, x2_sum_threshold, smoothing,
render_buffer.buffer, y, filters_[n],
&filters_updated, &error_sum);
}
diff --git a/modules/audio_processing/aec3/matched_filter.h b/modules/audio_processing/aec3/matched_filter.h
index fa44eb27fd..c6410ab4ee 100644
--- a/modules/audio_processing/aec3/matched_filter.h
+++ b/modules/audio_processing/aec3/matched_filter.h
@@ -100,7 +100,8 @@ class MatchedFilter {
int num_matched_filters,
size_t alignment_shift_sub_blocks,
float excitation_limit,
- float smoothing,
+ float smoothing_fast,
+ float smoothing_slow,
float matching_filter_threshold);
MatchedFilter() = delete;
@@ -111,7 +112,8 @@ class MatchedFilter {
// Updates the correlation with the values in the capture buffer.
void Update(const DownsampledRenderBuffer& render_buffer,
- rtc::ArrayView<const float> capture);
+ rtc::ArrayView<const float> capture,
+ bool use_slow_smoothing);
// Resets the matched filter.
void Reset();
@@ -140,7 +142,8 @@ class MatchedFilter {
std::vector<LagEstimate> lag_estimates_;
std::vector<size_t> filters_offsets_;
const float excitation_limit_;
- const float smoothing_;
+ const float smoothing_fast_;
+ const float smoothing_slow_;
const float matching_filter_threshold_;
};
diff --git a/modules/audio_processing/aec3/matched_filter_lag_aggregator.h b/modules/audio_processing/aec3/matched_filter_lag_aggregator.h
index d48011e477..612bd5d942 100644
--- a/modules/audio_processing/aec3/matched_filter_lag_aggregator.h
+++ b/modules/audio_processing/aec3/matched_filter_lag_aggregator.h
@@ -45,6 +45,9 @@ class MatchedFilterLagAggregator {
absl::optional<DelayEstimate> Aggregate(
rtc::ArrayView<const MatchedFilter::LagEstimate> lag_estimates);
+ // Returns whether a reliable delay estimate has been found.
+ bool ReliableDelayFound() const { return significant_candidate_found_; }
+
private:
ApmDataDumper* const data_dumper_;
std::vector<int> histogram_;
diff --git a/modules/audio_processing/aec3/matched_filter_unittest.cc b/modules/audio_processing/aec3/matched_filter_unittest.cc
index 137275fd74..37b51fa624 100644
--- a/modules/audio_processing/aec3/matched_filter_unittest.cc
+++ b/modules/audio_processing/aec3/matched_filter_unittest.cc
@@ -206,6 +206,7 @@ TEST(MatchedFilter, LagEstimation) {
kWindowSizeSubBlocks, kNumMatchedFilters,
kAlignmentShiftSubBlocks, 150,
config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold);
std::unique_ptr<RenderDelayBuffer> render_delay_buffer(
@@ -231,7 +232,7 @@ TEST(MatchedFilter, LagEstimation) {
downsampled_capture_data.data(), sub_block_size);
capture_decimator.Decimate(capture[0], downsampled_capture);
filter.Update(render_delay_buffer->GetDownsampledRenderBuffer(),
- downsampled_capture);
+ downsampled_capture, false);
}
// Obtain the lag estimates.
@@ -318,6 +319,7 @@ TEST(MatchedFilter, LagNotReliableForUncorrelatedRenderAndCapture) {
kWindowSizeSubBlocks, kNumMatchedFilters,
kAlignmentShiftSubBlocks, 150,
config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold);
// Analyze the correlation between render and capture.
@@ -325,7 +327,8 @@ TEST(MatchedFilter, LagNotReliableForUncorrelatedRenderAndCapture) {
RandomizeSampleVector(&random_generator, render[0][0]);
RandomizeSampleVector(&random_generator, capture);
render_delay_buffer->Insert(render);
- filter.Update(render_delay_buffer->GetDownsampledRenderBuffer(), capture);
+ filter.Update(render_delay_buffer->GetDownsampledRenderBuffer(), capture,
+ false);
}
// Obtain the lag estimates.
@@ -361,6 +364,7 @@ TEST(MatchedFilter, LagNotUpdatedForLowLevelRender) {
kWindowSizeSubBlocks, kNumMatchedFilters,
kAlignmentShiftSubBlocks, 150,
config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold);
std::unique_ptr<RenderDelayBuffer> render_delay_buffer(
RenderDelayBuffer::Create(EchoCanceller3Config(), kSampleRateHz,
@@ -379,7 +383,7 @@ TEST(MatchedFilter, LagNotUpdatedForLowLevelRender) {
sub_block_size);
capture_decimator.Decimate(capture[0], downsampled_capture);
filter.Update(render_delay_buffer->GetDownsampledRenderBuffer(),
- downsampled_capture);
+ downsampled_capture, false);
}
// Obtain the lag estimates.
@@ -407,6 +411,7 @@ TEST(MatchedFilter, NumberOfLagEstimates) {
MatchedFilter filter(&data_dumper, DetectOptimization(), sub_block_size,
32, num_matched_filters, 1, 150,
config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold);
EXPECT_EQ(num_matched_filters, filter.GetLagEstimates().size());
}
@@ -421,6 +426,7 @@ TEST(MatchedFilterDeathTest, ZeroWindowSize) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 16, 0, 1, 1,
150, config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold),
"");
}
@@ -430,6 +436,7 @@ TEST(MatchedFilterDeathTest, NullDataDumper) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(nullptr, DetectOptimization(), 16, 1, 1, 1, 150,
config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold),
"");
}
@@ -441,6 +448,7 @@ TEST(MatchedFilterDeathTest, DISABLED_BlockSizeMultipleOf4) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 15, 1, 1, 1,
150, config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold),
"");
}
@@ -453,6 +461,7 @@ TEST(MatchedFilterDeathTest, DISABLED_SubBlockSizeAddsUpToBlockSize) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 12, 1, 1, 1,
150, config.delay.delay_estimate_smoothing,
+ config.delay.delay_estimate_smoothing_delay_found,
config.delay.delay_candidate_detection_threshold),
"");
}
diff --git a/modules/audio_processing/aec3/residual_echo_estimator.cc b/modules/audio_processing/aec3/residual_echo_estimator.cc
index 0688429d47..15bebecb5f 100644
--- a/modules/audio_processing/aec3/residual_echo_estimator.cc
+++ b/modules/audio_processing/aec3/residual_echo_estimator.cc
@@ -177,7 +177,8 @@ void ResidualEchoEstimator::Estimate(
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> S2_linear,
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> Y2,
bool dominant_nearend,
- rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2) {
+ rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2,
+ rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2_unbounded) {
RTC_DCHECK_EQ(R2.size(), Y2.size());
RTC_DCHECK_EQ(R2.size(), S2_linear.size());
@@ -193,14 +194,18 @@ void ResidualEchoEstimator::Estimate(
if (aec_state.SaturatedEcho()) {
for (size_t ch = 0; ch < num_capture_channels; ++ch) {
std::copy(Y2[ch].begin(), Y2[ch].end(), R2[ch].begin());
+ std::copy(Y2[ch].begin(), Y2[ch].end(), R2_unbounded[ch].begin());
}
} else {
const bool onset_compensated =
erle_onset_compensation_in_dominant_nearend_ || !dominant_nearend;
LinearEstimate(S2_linear, aec_state.Erle(onset_compensated), R2);
+ LinearEstimate(S2_linear, aec_state.ErleUnbounded(), R2_unbounded);
}
- AddReverb(ReverbType::kLinear, aec_state, render_buffer, R2);
+ UpdateReverb(ReverbType::kLinear, aec_state, render_buffer);
+ AddReverb(R2);
+ AddReverb(R2_unbounded);
} else {
const float echo_path_gain =
GetEchoPathGain(aec_state, /*gain_for_early_reflections=*/true);
@@ -210,6 +215,7 @@ void ResidualEchoEstimator::Estimate(
if (aec_state.SaturatedEcho()) {
for (size_t ch = 0; ch < num_capture_channels; ++ch) {
std::copy(Y2[ch].begin(), Y2[ch].end(), R2[ch].begin());
+ std::copy(Y2[ch].begin(), Y2[ch].end(), R2_unbounded[ch].begin());
}
} else {
// Estimate the echo generating signal power.
@@ -229,11 +235,14 @@ void ResidualEchoEstimator::Estimate(
}
NonLinearEstimate(echo_path_gain, X2, R2);
+ NonLinearEstimate(echo_path_gain, X2, R2_unbounded);
}
if (config_.echo_model.model_reverb_in_nonlinear_mode &&
!aec_state.TransparentModeActive()) {
- AddReverb(ReverbType::kNonLinear, aec_state, render_buffer, R2);
+ UpdateReverb(ReverbType::kNonLinear, aec_state, render_buffer);
+ AddReverb(R2);
+ AddReverb(R2_unbounded);
}
}
@@ -244,6 +253,7 @@ void ResidualEchoEstimator::Estimate(
for (size_t ch = 0; ch < num_capture_channels; ++ch) {
for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) {
R2[ch][k] *= residual_scaling[k];
+ R2_unbounded[ch][k] *= residual_scaling[k];
}
}
}
@@ -292,14 +302,10 @@ void ResidualEchoEstimator::UpdateRenderNoisePower(
}
}
-// Adds the estimated power of the reverb to the residual echo power.
-void ResidualEchoEstimator::AddReverb(
- ReverbType reverb_type,
- const AecState& aec_state,
- const RenderBuffer& render_buffer,
- rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2) {
- const size_t num_capture_channels = R2.size();
-
+// Updates the reverb estimation.
+void ResidualEchoEstimator::UpdateReverb(ReverbType reverb_type,
+ const AecState& aec_state,
+ const RenderBuffer& render_buffer) {
// Choose reverb partition based on what type of echo power model is used.
const size_t first_reverb_partition =
reverb_type == ReverbType::kLinear
@@ -334,6 +340,11 @@ void ResidualEchoEstimator::AddReverb(
echo_reverb_.UpdateReverbNoFreqShaping(render_power, echo_path_gain,
aec_state.ReverbDecay());
}
+}
+// Adds the estimated power of the reverb to the residual echo power.
+void ResidualEchoEstimator::AddReverb(
+ rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2) const {
+ const size_t num_capture_channels = R2.size();
// Add the reverb power.
rtc::ArrayView<const float, kFftLengthBy2Plus1> reverb_power =
diff --git a/modules/audio_processing/aec3/residual_echo_estimator.h b/modules/audio_processing/aec3/residual_echo_estimator.h
index 9e977766cb..c071854c4a 100644
--- a/modules/audio_processing/aec3/residual_echo_estimator.h
+++ b/modules/audio_processing/aec3/residual_echo_estimator.h
@@ -40,7 +40,8 @@ class ResidualEchoEstimator {
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> S2_linear,
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> Y2,
bool dominant_nearend,
- rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2);
+ rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2,
+ rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2_unbounded);
private:
enum class ReverbType { kLinear, kNonLinear };
@@ -52,12 +53,15 @@ class ResidualEchoEstimator {
// render signal.
void UpdateRenderNoisePower(const RenderBuffer& render_buffer);
+ // Updates the reverb estimation.
+ void UpdateReverb(ReverbType reverb_type,
+ const AecState& aec_state,
+ const RenderBuffer& render_buffer);
+
// Adds the estimated unmodelled echo power to the residual echo power
// estimate.
- void AddReverb(ReverbType reverb_type,
- const AecState& aec_state,
- const RenderBuffer& render_buffer,
- rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2);
+ void AddReverb(
+ rtc::ArrayView<std::array<float, kFftLengthBy2Plus1>> R2) const;
// Gets the echo path gain to apply.
float GetEchoPathGain(const AecState& aec_state,
diff --git a/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc b/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc
index e80838b5f6..3d760b7dda 100644
--- a/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc
+++ b/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc
@@ -48,6 +48,8 @@ TEST_P(ResidualEchoEstimatorMultiChannel, BasicTest) {
num_capture_channels);
std::vector<std::array<float, kFftLengthBy2Plus1>> Y2(num_capture_channels);
std::vector<std::array<float, kFftLengthBy2Plus1>> R2(num_capture_channels);
+ std::vector<std::array<float, kFftLengthBy2Plus1>> R2_unbounded(
+ num_capture_channels);
std::vector<std::vector<std::vector<float>>> x(
kNumBands, std::vector<std::vector<float>>(
num_render_channels, std::vector<float>(kBlockSize, 0.f)));
@@ -100,7 +102,8 @@ TEST_P(ResidualEchoEstimatorMultiChannel, BasicTest) {
output);
estimator.Estimate(aec_state, *render_delay_buffer->GetRenderBuffer(),
- S2_linear, Y2, /*dominant_nearend=*/false, R2);
+ S2_linear, Y2, /*dominant_nearend=*/false, R2,
+ R2_unbounded);
}
}
diff --git a/modules/audio_processing/aec3/reverb_model_estimator.cc b/modules/audio_processing/aec3/reverb_model_estimator.cc
index 717431103f..00ae466409 100644
--- a/modules/audio_processing/aec3/reverb_model_estimator.cc
+++ b/modules/audio_processing/aec3/reverb_model_estimator.cc
@@ -9,6 +9,7 @@
*/
#include "modules/audio_processing/aec3/reverb_model_estimator.h"
+#include <memory>
namespace webrtc {
diff --git a/modules/audio_processing/aec3/reverb_model_estimator.h b/modules/audio_processing/aec3/reverb_model_estimator.h
index 3b9971abae..e4e9540673 100644
--- a/modules/audio_processing/aec3/reverb_model_estimator.h
+++ b/modules/audio_processing/aec3/reverb_model_estimator.h
@@ -12,6 +12,7 @@
#define MODULES_AUDIO_PROCESSING_AEC3_REVERB_MODEL_ESTIMATOR_H_
#include <array>
+#include <memory>
#include <vector>
#include "absl/types/optional.h"
diff --git a/modules/audio_processing/aec3/subband_erle_estimator.cc b/modules/audio_processing/aec3/subband_erle_estimator.cc
index 1e957f23ac..dc7f92fd99 100644
--- a/modules/audio_processing/aec3/subband_erle_estimator.cc
+++ b/modules/audio_processing/aec3/subband_erle_estimator.cc
@@ -49,6 +49,7 @@ SubbandErleEstimator::SubbandErleEstimator(const EchoCanceller3Config& config,
accum_spectra_(num_capture_channels),
erle_(num_capture_channels),
erle_onset_compensated_(num_capture_channels),
+ erle_unbounded_(num_capture_channels),
erle_during_onsets_(num_capture_channels),
coming_onset_(num_capture_channels),
hold_counters_(num_capture_channels) {
@@ -62,6 +63,7 @@ void SubbandErleEstimator::Reset() {
for (size_t ch = 0; ch < num_capture_channels; ++ch) {
erle_[ch].fill(min_erle_);
erle_onset_compensated_[ch].fill(min_erle_);
+ erle_unbounded_[ch].fill(min_erle_);
erle_during_onsets_[ch].fill(min_erle_);
coming_onset_[ch].fill(true);
hold_counters_[ch].fill(0);
@@ -90,6 +92,10 @@ void SubbandErleEstimator::Update(
auto& erle_oc = erle_onset_compensated_[ch];
erle_oc[0] = erle_oc[1];
erle_oc[kFftLengthBy2] = erle_oc[kFftLengthBy2 - 1];
+
+ auto& erle_u = erle_unbounded_[ch];
+ erle_u[0] = erle_u[1];
+ erle_u[kFftLengthBy2] = erle_u[kFftLengthBy2 - 1];
}
}
@@ -163,6 +169,11 @@ void SubbandErleEstimator::UpdateBands(
update_erle_band(erle_onset_compensated_[ch][k], new_erle[k],
low_render_energy, min_erle_, max_erle_[k]);
}
+
+ // Virtually unbounded ERLE.
+ constexpr float kUnboundedErleMax = 100000.0f;
+ update_erle_band(erle_unbounded_[ch][k], new_erle[k], low_render_energy,
+ min_erle_, kUnboundedErleMax);
}
}
}
diff --git a/modules/audio_processing/aec3/subband_erle_estimator.h b/modules/audio_processing/aec3/subband_erle_estimator.h
index ffed6a57a5..8bf9c4d645 100644
--- a/modules/audio_processing/aec3/subband_erle_estimator.h
+++ b/modules/audio_processing/aec3/subband_erle_estimator.h
@@ -47,6 +47,12 @@ class SubbandErleEstimator {
: erle_;
}
+ // Returns the non-capped ERLE estimate.
+ rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> ErleUnbounded()
+ const {
+ return erle_unbounded_;
+ }
+
// Returns the ERLE estimate at onsets (only used for testing).
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> ErleDuringOnsets()
const {
@@ -88,6 +94,7 @@ class SubbandErleEstimator {
std::vector<std::array<float, kFftLengthBy2Plus1>> erle_;
// ERLE lowered during render onsets.
std::vector<std::array<float, kFftLengthBy2Plus1>> erle_onset_compensated_;
+ std::vector<std::array<float, kFftLengthBy2Plus1>> erle_unbounded_;
// Estimation of ERLE during render onsets.
std::vector<std::array<float, kFftLengthBy2Plus1>> erle_during_onsets_;
std::vector<std::array<bool, kFftLengthBy2Plus1>> coming_onset_;
diff --git a/modules/audio_processing/aec3/subtractor.cc b/modules/audio_processing/aec3/subtractor.cc
index d10e4ffc52..2eae686752 100644
--- a/modules/audio_processing/aec3/subtractor.cc
+++ b/modules/audio_processing/aec3/subtractor.cc
@@ -91,7 +91,20 @@ Subtractor::Subtractor(const EchoCanceller3Config& config,
std::vector<float>(GetTimeDomainLength(std::max(
config_.filter.refined_initial.length_blocks,
config_.filter.refined.length_blocks)),
- 0.f)) {
+ 0.f)),
+ coarse_impulse_responses_(0) {
+ // Set up the storing of coarse impulse responses if data dumping is
+ // available.
+ if (ApmDataDumper::IsAvailable()) {
+ coarse_impulse_responses_.resize(num_capture_channels_);
+ const size_t filter_size = GetTimeDomainLength(
+ std::max(config_.filter.coarse_initial.length_blocks,
+ config_.filter.coarse.length_blocks));
+ for (std::vector<float>& impulse_response : coarse_impulse_responses_) {
+ impulse_response.resize(filter_size, 0.f);
+ }
+ }
+
for (size_t ch = 0; ch < num_capture_channels_; ++ch) {
refined_filters_[ch] = std::make_unique<AdaptiveFirFilter>(
config_.filter.refined.length_blocks,
@@ -285,7 +298,14 @@ void Subtractor::Process(const RenderBuffer& render_buffer,
config_.filter.coarse_reset_hangover_blocks;
}
- coarse_filter_[ch]->Adapt(render_buffer, G);
+ if (ApmDataDumper::IsAvailable()) {
+ RTC_DCHECK_LT(ch, coarse_impulse_responses_.size());
+ coarse_filter_[ch]->Adapt(render_buffer, G,
+ &coarse_impulse_responses_[ch]);
+ } else {
+ coarse_filter_[ch]->Adapt(render_buffer, G);
+ }
+
if (ch == 0) {
data_dumper_->DumpRaw("aec3_subtractor_G_coarse", G.re);
data_dumper_->DumpRaw("aec3_subtractor_G_coarse", G.im);
diff --git a/modules/audio_processing/aec3/subtractor.h b/modules/audio_processing/aec3/subtractor.h
index 560f6568eb..767e4aad46 100644
--- a/modules/audio_processing/aec3/subtractor.h
+++ b/modules/audio_processing/aec3/subtractor.h
@@ -78,6 +78,15 @@ class Subtractor {
refined_impulse_responses_[0].data(),
GetTimeDomainLength(
refined_filters_[0]->max_filter_size_partitions())));
+ if (ApmDataDumper::IsAvailable()) {
+ RTC_DCHECK_GT(coarse_impulse_responses_.size(), 0);
+ data_dumper_->DumpRaw(
+ "aec3_subtractor_h_coarse",
+ rtc::ArrayView<const float>(
+ coarse_impulse_responses_[0].data(),
+ GetTimeDomainLength(
+ coarse_filter_[0]->max_filter_size_partitions())));
+ }
refined_filters_[0]->DumpFilter("aec3_subtractor_H_refined");
coarse_filter_[0]->DumpFilter("aec3_subtractor_H_coarse");
@@ -132,6 +141,7 @@ class Subtractor {
std::vector<std::vector<std::array<float, kFftLengthBy2Plus1>>>
refined_frequency_responses_;
std::vector<std::vector<float>> refined_impulse_responses_;
+ std::vector<std::vector<float>> coarse_impulse_responses_;
};
} // namespace webrtc
diff --git a/modules/audio_processing/aec3/suppression_gain.cc b/modules/audio_processing/aec3/suppression_gain.cc
index 5b01c52908..6405d71c2d 100644
--- a/modules/audio_processing/aec3/suppression_gain.cc
+++ b/modules/audio_processing/aec3/suppression_gain.cc
@@ -23,10 +23,15 @@
#include "modules/audio_processing/logging/apm_data_dumper.h"
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
namespace {
+bool UseUnboundedEchoSpectrum() {
+ return field_trial::IsEnabled("WebRTC-Aec3UseUnboundedEchoSpectrum");
+}
+
void LimitLowFrequencyGains(std::array<float, kFftLengthBy2Plus1>* gain) {
// Limit the low frequency gains to avoid the impact of the high-pass filter
// on the lower-frequency gain influencing the overall achieved gain.
@@ -230,16 +235,20 @@ void SuppressionGain::GetMinGain(
min_gain[k] = std::min(min_gain[k], 1.f);
}
- const bool is_nearend_state = dominant_nearend_detector_->IsNearendState();
- for (size_t k = 0; k < 6; ++k) {
- const auto& dec = is_nearend_state ? nearend_params_.max_dec_factor_lf
- : normal_params_.max_dec_factor_lf;
-
- // Make sure the gains of the low frequencies do not decrease too
- // quickly after strong nearend.
- if (last_nearend[k] > last_echo[k]) {
- min_gain[k] = std::max(min_gain[k], last_gain_[k] * dec);
- min_gain[k] = std::min(min_gain[k], 1.f);
+ if (!initial_state_ ||
+ config_.suppressor.lf_smoothing_during_initial_phase) {
+ const float& dec = dominant_nearend_detector_->IsNearendState()
+ ? nearend_params_.max_dec_factor_lf
+ : normal_params_.max_dec_factor_lf;
+
+ for (int k = 0; k <= config_.suppressor.last_lf_smoothing_band; ++k) {
+ // Make sure the gains of the low frequencies do not decrease too
+ // quickly after strong nearend.
+ if (last_nearend[k] > last_echo[k] ||
+ k <= config_.suppressor.last_permanent_lf_smoothing_band) {
+ min_gain[k] = std::max(min_gain[k], last_gain_[k] * dec);
+ min_gain[k] = std::min(min_gain[k], 1.f);
+ }
}
}
} else {
@@ -333,8 +342,13 @@ SuppressionGain::SuppressionGain(const EchoCanceller3Config& config,
num_capture_channels_,
aec3::MovingAverage(kFftLengthBy2Plus1,
config.suppressor.nearend_average_blocks)),
- nearend_params_(config_.suppressor.nearend_tuning),
- normal_params_(config_.suppressor.normal_tuning) {
+ nearend_params_(config_.suppressor.last_lf_band,
+ config_.suppressor.first_hf_band,
+ config_.suppressor.nearend_tuning),
+ normal_params_(config_.suppressor.last_lf_band,
+ config_.suppressor.first_hf_band,
+ config_.suppressor.normal_tuning),
+ use_unbounded_echo_spectrum_(UseUnboundedEchoSpectrum()) {
RTC_DCHECK_LT(0, state_change_duration_blocks_);
last_gain_.fill(1.f);
if (config_.suppressor.use_subband_nearend_detection) {
@@ -356,6 +370,8 @@ void SuppressionGain::GetGain(
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>>
residual_echo_spectrum,
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>>
+ residual_echo_spectrum_unbounded,
+ rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>>
comfort_noise_spectrum,
const RenderSignalAnalyzer& render_signal_analyzer,
const AecState& aec_state,
@@ -366,8 +382,13 @@ void SuppressionGain::GetGain(
RTC_DCHECK(high_bands_gain);
RTC_DCHECK(low_band_gain);
+ // Choose residual echo spectrum for the dominant nearend detector.
+ const auto echo = use_unbounded_echo_spectrum_
+ ? residual_echo_spectrum_unbounded
+ : residual_echo_spectrum;
+
// Update the nearend state selection.
- dominant_nearend_detector_->Update(nearend_spectrum, residual_echo_spectrum,
+ dominant_nearend_detector_->Update(nearend_spectrum, echo,
comfort_noise_spectrum, initial_state_);
// Compute gain for the lower band.
@@ -383,6 +404,9 @@ void SuppressionGain::GetGain(
*high_bands_gain =
UpperBandsGain(echo_spectrum, comfort_noise_spectrum, narrow_peak_band,
aec_state.SaturatedEcho(), render, *low_band_gain);
+
+ data_dumper_->DumpRaw("aec3_dominant_nearend",
+ dominant_nearend_detector_->IsNearendState());
}
void SuppressionGain::SetInitialState(bool state) {
@@ -419,23 +443,23 @@ bool SuppressionGain::LowNoiseRenderDetector::Detect(
}
SuppressionGain::GainParameters::GainParameters(
+ int last_lf_band,
+ int first_hf_band,
const EchoCanceller3Config::Suppressor::Tuning& tuning)
: max_inc_factor(tuning.max_inc_factor),
max_dec_factor_lf(tuning.max_dec_factor_lf) {
// Compute per-band masking thresholds.
- constexpr size_t kLastLfBand = 5;
- constexpr size_t kFirstHfBand = 8;
- RTC_DCHECK_LT(kLastLfBand, kFirstHfBand);
+ RTC_DCHECK_LT(last_lf_band, first_hf_band);
auto& lf = tuning.mask_lf;
auto& hf = tuning.mask_hf;
RTC_DCHECK_LT(lf.enr_transparent, lf.enr_suppress);
RTC_DCHECK_LT(hf.enr_transparent, hf.enr_suppress);
- for (size_t k = 0; k < kFftLengthBy2Plus1; k++) {
+ for (int k = 0; k < static_cast<int>(kFftLengthBy2Plus1); k++) {
float a;
- if (k <= kLastLfBand) {
+ if (k <= last_lf_band) {
a = 0.f;
- } else if (k < kFirstHfBand) {
- a = (k - kLastLfBand) / static_cast<float>(kFirstHfBand - kLastLfBand);
+ } else if (k < first_hf_band) {
+ a = (k - last_lf_band) / static_cast<float>(first_hf_band - last_lf_band);
} else {
a = 1.f;
}
diff --git a/modules/audio_processing/aec3/suppression_gain.h b/modules/audio_processing/aec3/suppression_gain.h
index d049baeaaf..7c4a1c9f7d 100644
--- a/modules/audio_processing/aec3/suppression_gain.h
+++ b/modules/audio_processing/aec3/suppression_gain.h
@@ -43,6 +43,8 @@ class SuppressionGain {
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>>
residual_echo_spectrum,
rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>>
+ residual_echo_spectrum_unbounded,
+ rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>>
comfort_noise_spectrum,
const RenderSignalAnalyzer& render_signal_analyzer,
const AecState& aec_state,
@@ -103,6 +105,8 @@ class SuppressionGain {
struct GainParameters {
explicit GainParameters(
+ int last_lf_band,
+ int first_hf_band,
const EchoCanceller3Config::Suppressor::Tuning& tuning);
const float max_inc_factor;
const float max_dec_factor_lf;
@@ -126,6 +130,9 @@ class SuppressionGain {
std::vector<aec3::MovingAverage> nearend_smoothers_;
const GainParameters nearend_params_;
const GainParameters normal_params_;
+ // Determines if the dominant nearend detector uses the unbounded residual
+ // echo spectrum.
+ const bool use_unbounded_echo_spectrum_;
std::unique_ptr<NearendDetector> dominant_nearend_detector_;
RTC_DISALLOW_COPY_AND_ASSIGN(SuppressionGain);
diff --git a/modules/audio_processing/aec3/suppression_gain_unittest.cc b/modules/audio_processing/aec3/suppression_gain_unittest.cc
index 26bfc24ebb..999b0f27ab 100644
--- a/modules/audio_processing/aec3/suppression_gain_unittest.cc
+++ b/modules/audio_processing/aec3/suppression_gain_unittest.cc
@@ -26,29 +26,30 @@ namespace aec3 {
// Verifies that the check for non-null output gains works.
TEST(SuppressionGainDeathTest, NullOutputGains) {
- std::vector<std::array<float, kFftLengthBy2Plus1>> E2(1, {0.f});
- std::vector<std::array<float, kFftLengthBy2Plus1>> R2(1, {0.f});
+ std::vector<std::array<float, kFftLengthBy2Plus1>> E2(1, {0.0f});
+ std::vector<std::array<float, kFftLengthBy2Plus1>> R2(1, {0.0f});
+ std::vector<std::array<float, kFftLengthBy2Plus1>> R2_unbounded(1, {0.0f});
std::vector<std::array<float, kFftLengthBy2Plus1>> S2(1);
- std::vector<std::array<float, kFftLengthBy2Plus1>> N2(1, {0.f});
+ std::vector<std::array<float, kFftLengthBy2Plus1>> N2(1, {0.0f});
for (auto& S2_k : S2) {
- S2_k.fill(.1f);
+ S2_k.fill(0.1f);
}
FftData E;
FftData Y;
- E.re.fill(0.f);
- E.im.fill(0.f);
- Y.re.fill(0.f);
- Y.im.fill(0.f);
+ E.re.fill(0.0f);
+ E.im.fill(0.0f);
+ Y.re.fill(0.0f);
+ Y.im.fill(0.0f);
float high_bands_gain;
AecState aec_state(EchoCanceller3Config{}, 1);
EXPECT_DEATH(
SuppressionGain(EchoCanceller3Config{}, DetectOptimization(), 16000, 1)
- .GetGain(E2, S2, R2, N2,
+ .GetGain(E2, S2, R2, R2_unbounded, N2,
RenderSignalAnalyzer((EchoCanceller3Config{})), aec_state,
std::vector<std::vector<std::vector<float>>>(
3, std::vector<std::vector<float>>(
- 1, std::vector<float>(kBlockSize, 0.f))),
+ 1, std::vector<float>(kBlockSize, 0.0f))),
false, &high_bands_gain, nullptr),
"");
}
@@ -67,15 +68,17 @@ TEST(SuppressionGain, BasicGainComputation) {
float high_bands_gain;
std::vector<std::array<float, kFftLengthBy2Plus1>> E2(kNumCaptureChannels);
std::vector<std::array<float, kFftLengthBy2Plus1>> S2(kNumCaptureChannels,
- {0.f});
+ {0.0f});
std::vector<std::array<float, kFftLengthBy2Plus1>> Y2(kNumCaptureChannels);
std::vector<std::array<float, kFftLengthBy2Plus1>> R2(kNumCaptureChannels);
+ std::vector<std::array<float, kFftLengthBy2Plus1>> R2_unbounded(
+ kNumCaptureChannels);
std::vector<std::array<float, kFftLengthBy2Plus1>> N2(kNumCaptureChannels);
std::array<float, kFftLengthBy2Plus1> g;
std::vector<SubtractorOutput> output(kNumCaptureChannels);
std::vector<std::vector<std::vector<float>>> x(
kNumBands, std::vector<std::vector<float>>(
- kNumRenderChannels, std::vector<float>(kBlockSize, 0.f)));
+ kNumRenderChannels, std::vector<float>(kBlockSize, 0.0f)));
EchoCanceller3Config config;
AecState aec_state(config, kNumCaptureChannels);
ApmDataDumper data_dumper(42);
@@ -89,8 +92,9 @@ TEST(SuppressionGain, BasicGainComputation) {
for (size_t ch = 0; ch < kNumCaptureChannels; ++ch) {
E2[ch].fill(10.f);
Y2[ch].fill(10.f);
- R2[ch].fill(.1f);
- N2[ch].fill(100.f);
+ R2[ch].fill(0.1f);
+ R2_unbounded[ch].fill(0.1f);
+ N2[ch].fill(100.0f);
}
for (auto& subtractor_output : output) {
subtractor_output.Reset();
@@ -107,17 +111,18 @@ TEST(SuppressionGain, BasicGainComputation) {
aec_state.Update(delay_estimate, subtractor.FilterFrequencyResponses(),
subtractor.FilterImpulseResponses(),
*render_delay_buffer->GetRenderBuffer(), E2, Y2, output);
- suppression_gain.GetGain(E2, S2, R2, N2, analyzer, aec_state, x, false,
- &high_bands_gain, &g);
+ suppression_gain.GetGain(E2, S2, R2, R2_unbounded, N2, analyzer, aec_state,
+ x, false, &high_bands_gain, &g);
}
std::for_each(g.begin(), g.end(),
- [](float a) { EXPECT_NEAR(1.f, a, 0.001); });
+ [](float a) { EXPECT_NEAR(1.0f, a, 0.001f); });
// Ensure that a strong nearend is detected to mask any echoes.
for (size_t ch = 0; ch < kNumCaptureChannels; ++ch) {
E2[ch].fill(100.f);
Y2[ch].fill(100.f);
R2[ch].fill(0.1f);
+ R2_unbounded[ch].fill(0.1f);
S2[ch].fill(0.1f);
N2[ch].fill(0.f);
}
@@ -126,22 +131,23 @@ TEST(SuppressionGain, BasicGainComputation) {
aec_state.Update(delay_estimate, subtractor.FilterFrequencyResponses(),
subtractor.FilterImpulseResponses(),
*render_delay_buffer->GetRenderBuffer(), E2, Y2, output);
- suppression_gain.GetGain(E2, S2, R2, N2, analyzer, aec_state, x, false,
- &high_bands_gain, &g);
+ suppression_gain.GetGain(E2, S2, R2, R2_unbounded, N2, analyzer, aec_state,
+ x, false, &high_bands_gain, &g);
}
std::for_each(g.begin(), g.end(),
- [](float a) { EXPECT_NEAR(1.f, a, 0.001); });
+ [](float a) { EXPECT_NEAR(1.0f, a, 0.001f); });
// Add a strong echo to one of the channels and ensure that it is suppressed.
- E2[1].fill(1000000000.f);
- R2[1].fill(10000000000000.f);
+ E2[1].fill(1000000000.0f);
+ R2[1].fill(10000000000000.0f);
+ R2_unbounded[1].fill(10000000000000.0f);
for (int k = 0; k < 10; ++k) {
- suppression_gain.GetGain(E2, S2, R2, N2, analyzer, aec_state, x, false,
- &high_bands_gain, &g);
+ suppression_gain.GetGain(E2, S2, R2, R2_unbounded, N2, analyzer, aec_state,
+ x, false, &high_bands_gain, &g);
}
std::for_each(g.begin(), g.end(),
- [](float a) { EXPECT_NEAR(0.f, a, 0.001); });
+ [](float a) { EXPECT_NEAR(0.0f, a, 0.001f); });
}
} // namespace aec3
diff --git a/modules/audio_processing/aec3/transparent_mode.cc b/modules/audio_processing/aec3/transparent_mode.cc
index 7cfa3e8eae..489f53f4f1 100644
--- a/modules/audio_processing/aec3/transparent_mode.cc
+++ b/modules/audio_processing/aec3/transparent_mode.cc
@@ -11,6 +11,7 @@
#include "modules/audio_processing/aec3/transparent_mode.h"
#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
#include "system_wrappers/include/field_trial.h"
namespace webrtc {
@@ -228,11 +229,14 @@ class LegacyTransparentModeImpl : public TransparentMode {
std::unique_ptr<TransparentMode> TransparentMode::Create(
const EchoCanceller3Config& config) {
if (config.ep_strength.bounded_erl || DeactivateTransparentMode()) {
+ RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: Disabled";
return nullptr;
}
if (ActivateTransparentModeHmm()) {
+ RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: HMM";
return std::make_unique<TransparentModeImpl>();
}
+ RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: Legacy";
return std::make_unique<LegacyTransparentModeImpl>(config);
}
diff --git a/modules/audio_processing/agc/BUILD.gn b/modules/audio_processing/agc/BUILD.gn
index 5ad6644323..4bb8c5494b 100644
--- a/modules/audio_processing/agc/BUILD.gn
+++ b/modules/audio_processing/agc/BUILD.gn
@@ -19,11 +19,14 @@ rtc_library("agc") {
]
configs += [ "..:apm_debug_dump" ]
deps = [
+ ":clipping_predictor",
+ ":clipping_predictor_evaluator",
":gain_control_interface",
":gain_map",
":level_estimation",
"..:apm_logging",
"..:audio_buffer",
+ "..:audio_frame_view",
"../../../common_audio",
"../../../common_audio:common_audio_c",
"../../../rtc_base:checks",
@@ -38,6 +41,49 @@ rtc_library("agc") {
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
+rtc_library("clipping_predictor") {
+ sources = [
+ "clipping_predictor.cc",
+ "clipping_predictor.h",
+ ]
+ deps = [
+ ":clipping_predictor_level_buffer",
+ ":gain_map",
+ "..:api",
+ "..:audio_frame_view",
+ "../../../common_audio",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:safe_minmax",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("clipping_predictor_evaluator") {
+ sources = [
+ "clipping_predictor_evaluator.cc",
+ "clipping_predictor_evaluator.h",
+ ]
+ deps = [
+ "../../../rtc_base:checks",
+ "../../../rtc_base:logging",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("clipping_predictor_level_buffer") {
+ sources = [
+ "clipping_predictor_level_buffer.cc",
+ "clipping_predictor_level_buffer.h",
+ ]
+ deps = [
+ "../../../rtc_base:checks",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
rtc_library("level_estimation") {
sources = [
"agc.cc",
@@ -96,6 +142,9 @@ if (rtc_include_tests) {
testonly = true
sources = [
"agc_manager_direct_unittest.cc",
+ "clipping_predictor_evaluator_unittest.cc",
+ "clipping_predictor_level_buffer_unittest.cc",
+ "clipping_predictor_unittest.cc",
"loudness_histogram_unittest.cc",
"mock_agc.h",
]
@@ -103,13 +152,20 @@ if (rtc_include_tests) {
deps = [
":agc",
+ ":clipping_predictor",
+ ":clipping_predictor_evaluator",
+ ":clipping_predictor_level_buffer",
":gain_control_interface",
":level_estimation",
"..:mocks",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../rtc_base:safe_conversions",
"../../../test:field_trial",
"../../../test:fileutils",
"../../../test:test_support",
"//testing/gtest",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/modules/audio_processing/agc/agc_manager_direct.cc b/modules/audio_processing/agc/agc_manager_direct.cc
index 2454d1bbb1..817678801e 100644
--- a/modules/audio_processing/agc/agc_manager_direct.cc
+++ b/modules/audio_processing/agc/agc_manager_direct.cc
@@ -16,6 +16,7 @@
#include "common_audio/include/audio_util.h"
#include "modules/audio_processing/agc/gain_control.h"
#include "modules/audio_processing/agc/gain_map_internal.h"
+#include "modules/audio_processing/include/audio_frame_view.h"
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
@@ -27,33 +28,33 @@ namespace webrtc {
namespace {
-// Amount the microphone level is lowered with every clipping event.
-const int kClippedLevelStep = 15;
-// Proportion of clipped samples required to declare a clipping event.
-const float kClippedRatioThreshold = 0.1f;
-// Time in frames to wait after a clipping event before checking again.
-const int kClippedWaitFrames = 300;
-
// Amount of error we tolerate in the microphone level (presumably due to OS
// quantization) before we assume the user has manually adjusted the microphone.
-const int kLevelQuantizationSlack = 25;
+constexpr int kLevelQuantizationSlack = 25;
-const int kDefaultCompressionGain = 7;
-const int kMaxCompressionGain = 12;
-const int kMinCompressionGain = 2;
+constexpr int kDefaultCompressionGain = 7;
+constexpr int kMaxCompressionGain = 12;
+constexpr int kMinCompressionGain = 2;
// Controls the rate of compression changes towards the target.
-const float kCompressionGainStep = 0.05f;
+constexpr float kCompressionGainStep = 0.05f;
-const int kMaxMicLevel = 255;
+constexpr int kMaxMicLevel = 255;
static_assert(kGainMapSize > kMaxMicLevel, "gain map too small");
-const int kMinMicLevel = 12;
+constexpr int kMinMicLevel = 12;
// Prevent very large microphone level changes.
-const int kMaxResidualGainChange = 15;
+constexpr int kMaxResidualGainChange = 15;
// Maximum additional gain allowed to compensate for microphone level
// restrictions from clipping events.
-const int kSurplusCompressionGain = 6;
+constexpr int kSurplusCompressionGain = 6;
+
+// History size for the clipping predictor evaluator (unit: number of 10 ms
+// frames).
+constexpr int kClippingPredictorEvaluatorHistorySize = 32;
+
+using ClippingPredictorConfig = AudioProcessing::Config::GainController1::
+ AnalogGainController::ClippingPredictor;
// Returns whether a fall-back solution to choose the maximum level should be
// chosen.
@@ -132,6 +133,33 @@ float ComputeClippedRatio(const float* const* audio,
return static_cast<float>(num_clipped) / (samples_per_channel);
}
+void LogClippingPredictorMetrics(const ClippingPredictorEvaluator& evaluator) {
+ RTC_LOG(LS_INFO) << "Clipping predictor metrics: TP "
+ << evaluator.true_positives() << " TN "
+ << evaluator.true_negatives() << " FP "
+ << evaluator.false_positives() << " FN "
+ << evaluator.false_negatives();
+ const float precision_denominator =
+ evaluator.true_positives() + evaluator.false_positives();
+ const float recall_denominator =
+ evaluator.true_positives() + evaluator.false_negatives();
+ if (precision_denominator > 0 && recall_denominator > 0) {
+ const float precision = evaluator.true_positives() / precision_denominator;
+ const float recall = evaluator.true_positives() / recall_denominator;
+ RTC_LOG(LS_INFO) << "Clipping predictor metrics: P " << precision << " R "
+ << recall;
+ const float f1_score_denominator = precision + recall;
+ if (f1_score_denominator > 0.0f) {
+ const float f1_score = 2 * precision * recall / f1_score_denominator;
+ RTC_LOG(LS_INFO) << "Clipping predictor metrics: F1 " << f1_score;
+ RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc.ClippingPredictor.F1Score",
+ std::round(f1_score * 100.0f), /*min=*/0,
+ /*max=*/100,
+ /*bucket_count=*/50);
+ }
+ }
+}
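+
+// Worked example of the metrics above (illustrative numbers, not measured
+// data): with TP = 8, TN = 80, FP = 2 and FN = 4, precision = 8 / (8 + 2) =
+// 0.8, recall = 8 / (8 + 4) = 0.667, and F1 = 2 * 0.8 * 0.667 / (0.8 + 0.667)
+// = 0.727, which is logged to the histogram as round(0.727 * 100) = 73.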
+
} // namespace
MonoAgc::MonoAgc(ApmDataDumper* data_dumper,
@@ -182,19 +210,19 @@ void MonoAgc::Process(const int16_t* audio,
}
}
-void MonoAgc::HandleClipping() {
+void MonoAgc::HandleClipping(int clipped_level_step) {
// Always decrease the maximum level, even if the current level is below
// threshold.
- SetMaxLevel(std::max(clipped_level_min_, max_level_ - kClippedLevelStep));
+ SetMaxLevel(std::max(clipped_level_min_, max_level_ - clipped_level_step));
if (log_to_histograms_) {
RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.AgcClippingAdjustmentAllowed",
- level_ - kClippedLevelStep >= clipped_level_min_);
+ level_ - clipped_level_step >= clipped_level_min_);
}
if (level_ > clipped_level_min_) {
// Don't try to adjust the level if we're already below the limit. As
// a consequence, if the user has brought the level above the limit, we
// will still not react until the postproc updates the level.
- SetLevel(std::max(clipped_level_min_, level_ - kClippedLevelStep));
+ SetLevel(std::max(clipped_level_min_, level_ - clipped_level_step));
// Reset the AGCs for all channels since the level has changed.
agc_->Reset();
}
@@ -401,35 +429,58 @@ void MonoAgc::UpdateCompressor() {
int AgcManagerDirect::instance_counter_ = 0;
-AgcManagerDirect::AgcManagerDirect(Agc* agc,
- int startup_min_level,
- int clipped_level_min,
- int sample_rate_hz)
+AgcManagerDirect::AgcManagerDirect(
+ Agc* agc,
+ int startup_min_level,
+ int clipped_level_min,
+ int sample_rate_hz,
+ int clipped_level_step,
+ float clipped_ratio_threshold,
+ int clipped_wait_frames,
+ const ClippingPredictorConfig& clipping_config)
: AgcManagerDirect(/*num_capture_channels*/ 1,
startup_min_level,
clipped_level_min,
/*disable_digital_adaptive*/ false,
- sample_rate_hz) {
+ sample_rate_hz,
+ clipped_level_step,
+ clipped_ratio_threshold,
+ clipped_wait_frames,
+ clipping_config) {
RTC_DCHECK(channel_agcs_[0]);
RTC_DCHECK(agc);
channel_agcs_[0]->set_agc(agc);
}
-AgcManagerDirect::AgcManagerDirect(int num_capture_channels,
- int startup_min_level,
- int clipped_level_min,
- bool disable_digital_adaptive,
- int sample_rate_hz)
+AgcManagerDirect::AgcManagerDirect(
+ int num_capture_channels,
+ int startup_min_level,
+ int clipped_level_min,
+ bool disable_digital_adaptive,
+ int sample_rate_hz,
+ int clipped_level_step,
+ float clipped_ratio_threshold,
+ int clipped_wait_frames,
+ const ClippingPredictorConfig& clipping_config)
: data_dumper_(
new ApmDataDumper(rtc::AtomicOps::Increment(&instance_counter_))),
use_min_channel_level_(!UseMaxAnalogChannelLevel()),
sample_rate_hz_(sample_rate_hz),
num_capture_channels_(num_capture_channels),
disable_digital_adaptive_(disable_digital_adaptive),
- frames_since_clipped_(kClippedWaitFrames),
+ frames_since_clipped_(clipped_wait_frames),
capture_output_used_(true),
+ clipped_level_step_(clipped_level_step),
+ clipped_ratio_threshold_(clipped_ratio_threshold),
+ clipped_wait_frames_(clipped_wait_frames),
channel_agcs_(num_capture_channels),
- new_compressions_to_set_(num_capture_channels) {
+ new_compressions_to_set_(num_capture_channels),
+ clipping_predictor_(
+ CreateClippingPredictor(num_capture_channels, clipping_config)),
+ use_clipping_predictor_step_(!!clipping_predictor_ &&
+ clipping_config.use_predicted_step),
+ clipping_predictor_evaluator_(kClippingPredictorEvaluatorHistorySize),
+ clipping_predictor_log_counter_(0) {
const int min_mic_level = GetMinMicLevel();
for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) {
ApmDataDumper* data_dumper_ch = ch == 0 ? data_dumper_.get() : nullptr;
@@ -438,7 +489,12 @@ AgcManagerDirect::AgcManagerDirect(int num_capture_channels,
data_dumper_ch, startup_min_level, clipped_level_min,
disable_digital_adaptive_, min_mic_level);
}
- RTC_DCHECK_LT(0, channel_agcs_.size());
+ RTC_DCHECK(!channel_agcs_.empty());
+ RTC_DCHECK_GT(clipped_level_step, 0);
+ RTC_DCHECK_LE(clipped_level_step, 255);
+ RTC_DCHECK_GT(clipped_ratio_threshold, 0.f);
+ RTC_DCHECK_LT(clipped_ratio_threshold, 1.f);
+ RTC_DCHECK_GT(clipped_wait_frames, 0);
channel_agcs_[0]->ActivateLogging();
}
@@ -453,6 +509,8 @@ void AgcManagerDirect::Initialize() {
capture_output_used_ = true;
AggregateChannelLevels();
+ clipping_predictor_evaluator_.Reset();
+ clipping_predictor_log_counter_ = 0;
}
void AgcManagerDirect::SetupDigitalGainControl(
@@ -489,7 +547,13 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* audio,
return;
}
- if (frames_since_clipped_ < kClippedWaitFrames) {
+ if (!!clipping_predictor_) {
+ AudioFrameView<const float> frame = AudioFrameView<const float>(
+ audio, num_capture_channels_, static_cast<int>(samples_per_channel));
+ clipping_predictor_->Analyze(frame);
+ }
+
+ if (frames_since_clipped_ < clipped_wait_frames_) {
++frames_since_clipped_;
return;
}
@@ -505,14 +569,54 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* audio,
// gain is increased, through SetMaxLevel().
float clipped_ratio =
ComputeClippedRatio(audio, num_capture_channels_, samples_per_channel);
-
- if (clipped_ratio > kClippedRatioThreshold) {
- RTC_DLOG(LS_INFO) << "[agc] Clipping detected. clipped_ratio="
- << clipped_ratio;
+ const bool clipping_detected = clipped_ratio > clipped_ratio_threshold_;
+ bool clipping_predicted = false;
+ int predicted_step = 0;
+ if (!!clipping_predictor_) {
+ for (int channel = 0; channel < num_capture_channels_; ++channel) {
+ const auto step = clipping_predictor_->EstimateClippedLevelStep(
+ channel, stream_analog_level_, clipped_level_step_,
+ channel_agcs_[channel]->min_mic_level(), kMaxMicLevel);
+ if (use_clipping_predictor_step_ && step.has_value()) {
+ predicted_step = std::max(predicted_step, step.value());
+ clipping_predicted = true;
+ }
+ }
+ // Clipping prediction evaluation.
+ absl::optional<int> prediction_interval =
+ clipping_predictor_evaluator_.Observe(clipping_detected,
+ clipping_predicted);
+ if (prediction_interval.has_value()) {
+ RTC_HISTOGRAM_COUNTS_LINEAR(
+ "WebRTC.Audio.Agc.ClippingPredictor.PredictionInterval",
+ prediction_interval.value(), /*min=*/0,
+ /*max=*/49, /*bucket_count=*/50);
+ }
+ constexpr int kNumFramesIn30Seconds = 3000;
+ clipping_predictor_log_counter_++;
+ if (clipping_predictor_log_counter_ == kNumFramesIn30Seconds) {
+ LogClippingPredictorMetrics(clipping_predictor_evaluator_);
+ clipping_predictor_log_counter_ = 0;
+ }
+ }
+ if (clipping_detected || clipping_predicted) {
+ int step = clipped_level_step_;
+ if (clipping_detected) {
+ RTC_DLOG(LS_INFO) << "[agc] Clipping detected. clipped_ratio="
+ << clipped_ratio;
+ }
+ if (clipping_predicted) {
+ step = std::max(predicted_step, clipped_level_step_);
+ RTC_DLOG(LS_INFO) << "[agc] Clipping predicted. step=" << step;
+ }
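+ // Example: with `clipped_level_step_` = 15 and a predicted step of 20, the
+ // applied step is max(20, 15) = 20; a predicted step of 5 would leave the
+ // configured step of 15 in place.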
for (auto& state_ch : channel_agcs_) {
- state_ch->HandleClipping();
+ state_ch->HandleClipping(step);
}
frames_since_clipped_ = 0;
+ if (!!clipping_predictor_) {
+ clipping_predictor_->Reset();
+ clipping_predictor_evaluator_.Reset();
+ }
}
AggregateChannelLevels();
}
diff --git a/modules/audio_processing/agc/agc_manager_direct.h b/modules/audio_processing/agc/agc_manager_direct.h
index f9417cffff..7ac96a661c 100644
--- a/modules/audio_processing/agc/agc_manager_direct.h
+++ b/modules/audio_processing/agc/agc_manager_direct.h
@@ -15,6 +15,8 @@
#include "absl/types/optional.h"
#include "modules/audio_processing/agc/agc.h"
+#include "modules/audio_processing/agc/clipping_predictor.h"
+#include "modules/audio_processing/agc/clipping_predictor_evaluator.h"
#include "modules/audio_processing/audio_buffer.h"
#include "modules/audio_processing/logging/apm_data_dumper.h"
#include "rtc_base/gtest_prod_util.h"
@@ -34,12 +36,23 @@ class AgcManagerDirect final {
// AgcManagerDirect will configure GainControl internally. The user is
// responsible for processing the audio using it after the call to Process.
// The operating range of startup_min_level is [12, 255] and any input value
- // outside that range will be clamped.
- AgcManagerDirect(int num_capture_channels,
- int startup_min_level,
- int clipped_level_min,
- bool disable_digital_adaptive,
- int sample_rate_hz);
+ // outside that range will be clamped. `clipped_level_step` is the amount
+ // the microphone level is lowered with every clipping event, limited to
+ // (0, 255]. `clipped_ratio_threshold` is the proportion of clipped
+ // samples required to declare a clipping event, limited to (0.f, 1.f).
+ // `clipped_wait_frames` is the time in frames to wait after a clipping event
+ // before checking again, limited to values higher than 0.
+ AgcManagerDirect(
+ int num_capture_channels,
+ int startup_min_level,
+ int clipped_level_min,
+ bool disable_digital_adaptive,
+ int sample_rate_hz,
+ int clipped_level_step,
+ float clipped_ratio_threshold,
+ int clipped_wait_frames,
+ const AudioProcessing::Config::GainController1::AnalogGainController::
+ ClippingPredictor& clipping_config);
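+
+ // A minimal construction sketch (the values below simply mirror those used
+ // in the unit tests and are not recommended defaults):
+ //   AgcManagerDirect manager(
+ //       /*num_capture_channels=*/1, /*startup_min_level=*/128,
+ //       /*clipped_level_min=*/165, /*disable_digital_adaptive=*/true,
+ //       /*sample_rate_hz=*/32000, /*clipped_level_step=*/15,
+ //       /*clipped_ratio_threshold=*/0.1f, /*clipped_wait_frames=*/300,
+ //       AudioProcessing::Config::GainController1::AnalogGainController::
+ //           ClippingPredictor());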
~AgcManagerDirect();
AgcManagerDirect(const AgcManagerDirect&) = delete;
@@ -64,6 +77,14 @@ class AgcManagerDirect final {
// If available, returns a new compression gain for the digital gain control.
absl::optional<int> GetDigitalComressionGain();
+ // Returns true if clipping prediction is enabled.
+ bool clipping_predictor_enabled() const { return !!clipping_predictor_; }
+
+ // Returns true if clipping prediction is used to adjust the analog gain.
+ bool use_clipping_predictor_step() const {
+ return use_clipping_predictor_step_;
+ }
+
private:
friend class AgcManagerDirectTest;
@@ -81,13 +102,28 @@ class AgcManagerDirect final {
AgcMinMicLevelExperimentEnabled50);
FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
AgcMinMicLevelExperimentEnabledAboveStartupLevel);
+ FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+ ClippingParametersVerified);
+ FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+ DisableClippingPredictorDoesNotLowerVolume);
+ FRIEND_TEST_ALL_PREFIXES(
+ AgcManagerDirectStandaloneTest,
+ EnableClippingPredictorWithUnusedPredictedStepDoesNotLowerVolume);
+ FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+ EnableClippingPredictorLowersVolume);
// Dependency injection for testing. Don't delete |agc| as the memory is owned
// by the manager.
- AgcManagerDirect(Agc* agc,
- int startup_min_level,
- int clipped_level_min,
- int sample_rate_hz);
+ AgcManagerDirect(
+ Agc* agc,
+ int startup_min_level,
+ int clipped_level_min,
+ int sample_rate_hz,
+ int clipped_level_step,
+ float clipped_ratio_threshold,
+ int clipped_wait_frames,
+ const AudioProcessing::Config::GainController1::AnalogGainController::
+ ClippingPredictor& clipping_config);
void AnalyzePreProcess(const float* const* audio, size_t samples_per_channel);
@@ -105,8 +141,17 @@ class AgcManagerDirect final {
bool capture_output_used_;
int channel_controlling_gain_ = 0;
+ const int clipped_level_step_;
+ const float clipped_ratio_threshold_;
+ const int clipped_wait_frames_;
+
std::vector<std::unique_ptr<MonoAgc>> channel_agcs_;
std::vector<absl::optional<int>> new_compressions_to_set_;
+
+ const std::unique_ptr<ClippingPredictor> clipping_predictor_;
+ const bool use_clipping_predictor_step_;
+ ClippingPredictorEvaluator clipping_predictor_evaluator_;
+ int clipping_predictor_log_counter_;
};
class MonoAgc {
@@ -123,7 +168,7 @@ class MonoAgc {
void Initialize();
void HandleCaptureOutputUsedChange(bool capture_output_used);
- void HandleClipping();
+ void HandleClipping(int clipped_level_step);
void Process(const int16_t* audio,
size_t samples_per_channel,
diff --git a/modules/audio_processing/agc/agc_manager_direct_unittest.cc b/modules/audio_processing/agc/agc_manager_direct_unittest.cc
index 1954ed4b21..bb284f9abc 100644
--- a/modules/audio_processing/agc/agc_manager_direct_unittest.cc
+++ b/modules/audio_processing/agc/agc_manager_direct_unittest.cc
@@ -26,13 +26,19 @@ using ::testing::SetArgPointee;
namespace webrtc {
namespace {
-const int kSampleRateHz = 32000;
-const int kNumChannels = 1;
-const int kSamplesPerChannel = kSampleRateHz / 100;
-const int kInitialVolume = 128;
+constexpr int kSampleRateHz = 32000;
+constexpr int kNumChannels = 1;
+constexpr int kSamplesPerChannel = kSampleRateHz / 100;
+constexpr int kInitialVolume = 128;
constexpr int kClippedMin = 165; // Arbitrary, but different from the default.
-const float kAboveClippedThreshold = 0.2f;
-const int kMinMicLevel = 12;
+constexpr float kAboveClippedThreshold = 0.2f;
+constexpr int kMinMicLevel = 12;
+constexpr int kClippedLevelStep = 15;
+constexpr float kClippedRatioThreshold = 0.1f;
+constexpr int kClippedWaitFrames = 300;
+
+using ClippingPredictorConfig = AudioProcessing::Config::GainController1::
+ AnalogGainController::ClippingPredictor;
class MockGainControl : public GainControl {
public:
@@ -57,10 +63,53 @@ class MockGainControl : public GainControl {
};
std::unique_ptr<AgcManagerDirect> CreateAgcManagerDirect(
- int startup_min_level) {
+ int startup_min_level,
+ int clipped_level_step,
+ float clipped_ratio_threshold,
+ int clipped_wait_frames) {
return std::make_unique<AgcManagerDirect>(
/*num_capture_channels=*/1, startup_min_level, kClippedMin,
- /*disable_digital_adaptive=*/true, kSampleRateHz);
+ /*disable_digital_adaptive=*/true, kSampleRateHz, clipped_level_step,
+ clipped_ratio_threshold, clipped_wait_frames, ClippingPredictorConfig());
+}
+
+std::unique_ptr<AgcManagerDirect> CreateAgcManagerDirect(
+ int startup_min_level,
+ int clipped_level_step,
+ float clipped_ratio_threshold,
+ int clipped_wait_frames,
+ const ClippingPredictorConfig& clipping_cfg) {
+ return std::make_unique<AgcManagerDirect>(
+ /*num_capture_channels=*/1, startup_min_level, kClippedMin,
+ /*disable_digital_adaptive=*/true, kSampleRateHz, clipped_level_step,
+ clipped_ratio_threshold, clipped_wait_frames, clipping_cfg);
+}
+
+void CallPreProcessAudioBuffer(int num_calls,
+ float peak_ratio,
+ AgcManagerDirect& manager) {
+ RTC_DCHECK_GE(1.f, peak_ratio);
+ AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz,
+ 1);
+ const int num_channels = audio_buffer.num_channels();
+ const int num_frames = audio_buffer.num_frames();
+ for (int ch = 0; ch < num_channels; ++ch) {
+ for (int i = 0; i < num_frames; i += 2) {
+ audio_buffer.channels()[ch][i] = peak_ratio * 32767.f;
+ audio_buffer.channels()[ch][i + 1] = 0.0f;
+ }
+ }
+ for (int n = 0; n < num_calls / 2; ++n) {
+ manager.AnalyzePreProcess(&audio_buffer);
+ }
+ for (int ch = 0; ch < num_channels; ++ch) {
+ for (int i = 0; i < num_frames; ++i) {
+ audio_buffer.channels()[ch][i] = peak_ratio * 32767.f;
+ }
+ }
+ for (int n = 0; n < num_calls - num_calls / 2; ++n) {
+ manager.AnalyzePreProcess(&audio_buffer);
+ }
}
} // namespace
@@ -69,7 +118,14 @@ class AgcManagerDirectTest : public ::testing::Test {
protected:
AgcManagerDirectTest()
: agc_(new MockAgc),
- manager_(agc_, kInitialVolume, kClippedMin, kSampleRateHz),
+ manager_(agc_,
+ kInitialVolume,
+ kClippedMin,
+ kSampleRateHz,
+ kClippedLevelStep,
+ kClippedRatioThreshold,
+ kClippedWaitFrames,
+ ClippingPredictorConfig()),
audio(kNumChannels),
audio_data(kNumChannels * kSamplesPerChannel, 0.f) {
ExpectInitialize();
@@ -124,12 +180,32 @@ class AgcManagerDirectTest : public ::testing::Test {
audio[ch][k] = 32767.f;
}
}
-
for (int i = 0; i < num_calls; ++i) {
manager_.AnalyzePreProcess(audio.data(), kSamplesPerChannel);
}
}
+ void CallPreProcForChangingAudio(int num_calls, float peak_ratio) {
+ RTC_DCHECK_GE(1.f, peak_ratio);
+ std::fill(audio_data.begin(), audio_data.end(), 0.f);
+ for (size_t ch = 0; ch < kNumChannels; ++ch) {
+ for (size_t k = 0; k < kSamplesPerChannel; k += 2) {
+ audio[ch][k] = peak_ratio * 32767.f;
+ }
+ }
+ for (int i = 0; i < num_calls / 2; ++i) {
+ manager_.AnalyzePreProcess(audio.data(), kSamplesPerChannel);
+ }
+ for (size_t ch = 0; ch < kNumChannels; ++ch) {
+ for (size_t k = 0; k < kSamplesPerChannel; ++k) {
+ audio[ch][k] = peak_ratio * 32767.f;
+ }
+ }
+ for (int i = 0; i < num_calls - num_calls / 2; ++i) {
+ manager_.AnalyzePreProcess(audio.data(), kSamplesPerChannel);
+ }
+ }
+
MockAgc* agc_;
MockGainControl gctrl_;
AgcManagerDirect manager_;
@@ -696,6 +772,25 @@ TEST_F(AgcManagerDirectTest, TakesNoActionOnZeroMicVolume) {
EXPECT_EQ(0, manager_.stream_analog_level());
}
+TEST_F(AgcManagerDirectTest, ClippingDetectionLowersVolume) {
+ SetVolumeAndProcess(255);
+ EXPECT_EQ(255, manager_.stream_analog_level());
+ CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/0.99f);
+ EXPECT_EQ(255, manager_.stream_analog_level());
+ CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/1.0f);
+ EXPECT_EQ(240, manager_.stream_analog_level());
+}
+
+TEST_F(AgcManagerDirectTest, DisabledClippingPredictorDoesNotLowerVolume) {
+ SetVolumeAndProcess(255);
+ EXPECT_FALSE(manager_.clipping_predictor_enabled());
+ EXPECT_EQ(255, manager_.stream_analog_level());
+ CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/0.99f);
+ EXPECT_EQ(255, manager_.stream_analog_level());
+ CallPreProcForChangingAudio(/*num_calls=*/100, /*peak_ratio=*/0.99f);
+ EXPECT_EQ(255, manager_.stream_analog_level());
+}
+
TEST(AgcManagerDirectStandaloneTest, DisableDigitalDisablesDigital) {
auto agc = std::unique_ptr<Agc>(new ::testing::NiceMock<MockAgc>());
MockGainControl gctrl;
@@ -705,14 +800,16 @@ TEST(AgcManagerDirectStandaloneTest, DisableDigitalDisablesDigital) {
EXPECT_CALL(gctrl, enable_limiter(false));
std::unique_ptr<AgcManagerDirect> manager =
- CreateAgcManagerDirect(kInitialVolume);
+ CreateAgcManagerDirect(kInitialVolume, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
manager->Initialize();
manager->SetupDigitalGainControl(&gctrl);
}
TEST(AgcManagerDirectStandaloneTest, AgcMinMicLevelExperiment) {
std::unique_ptr<AgcManagerDirect> manager =
- CreateAgcManagerDirect(kInitialVolume);
+ CreateAgcManagerDirect(kInitialVolume, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel);
EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), kInitialVolume);
}
@@ -721,7 +818,8 @@ TEST(AgcManagerDirectStandaloneTest, AgcMinMicLevelExperimentDisabled) {
test::ScopedFieldTrials field_trial(
"WebRTC-Audio-AgcMinMicLevelExperiment/Disabled/");
std::unique_ptr<AgcManagerDirect> manager =
- CreateAgcManagerDirect(kInitialVolume);
+ CreateAgcManagerDirect(kInitialVolume, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel);
EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), kInitialVolume);
}
@@ -732,7 +830,8 @@ TEST(AgcManagerDirectStandaloneTest, AgcMinMicLevelExperimentOutOfRangeAbove) {
test::ScopedFieldTrials field_trial(
"WebRTC-Audio-AgcMinMicLevelExperiment/Enabled-256/");
std::unique_ptr<AgcManagerDirect> manager =
- CreateAgcManagerDirect(kInitialVolume);
+ CreateAgcManagerDirect(kInitialVolume, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel);
EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), kInitialVolume);
}
@@ -743,7 +842,8 @@ TEST(AgcManagerDirectStandaloneTest, AgcMinMicLevelExperimentOutOfRangeBelow) {
test::ScopedFieldTrials field_trial(
"WebRTC-Audio-AgcMinMicLevelExperiment/Enabled--1/");
std::unique_ptr<AgcManagerDirect> manager =
- CreateAgcManagerDirect(kInitialVolume);
+ CreateAgcManagerDirect(kInitialVolume, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel);
EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), kInitialVolume);
}
@@ -755,7 +855,8 @@ TEST(AgcManagerDirectStandaloneTest, AgcMinMicLevelExperimentEnabled50) {
test::ScopedFieldTrials field_trial(
"WebRTC-Audio-AgcMinMicLevelExperiment/Enabled-50/");
std::unique_ptr<AgcManagerDirect> manager =
- CreateAgcManagerDirect(kInitialVolume);
+ CreateAgcManagerDirect(kInitialVolume, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), 50);
EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), kInitialVolume);
}
@@ -768,9 +869,130 @@ TEST(AgcManagerDirectStandaloneTest,
test::ScopedFieldTrials field_trial(
"WebRTC-Audio-AgcMinMicLevelExperiment/Enabled-50/");
std::unique_ptr<AgcManagerDirect> manager =
- CreateAgcManagerDirect(/*startup_min_level=*/30);
+ CreateAgcManagerDirect(/*startup_min_level=*/30, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), 50);
EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), 50);
}
+// TODO(bugs.webrtc.org/12774): Test the behavior of `clipped_level_step`.
+// TODO(bugs.webrtc.org/12774): Test the behavior of `clipped_ratio_threshold`.
+// TODO(bugs.webrtc.org/12774): Test the behavior of `clipped_wait_frames`.
+// Verifies that configurable clipping parameters are initialized as intended.
+TEST(AgcManagerDirectStandaloneTest, ClippingParametersVerified) {
+ std::unique_ptr<AgcManagerDirect> manager =
+ CreateAgcManagerDirect(kInitialVolume, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames);
+ manager->Initialize();
+ EXPECT_EQ(manager->clipped_level_step_, kClippedLevelStep);
+ EXPECT_EQ(manager->clipped_ratio_threshold_, kClippedRatioThreshold);
+ EXPECT_EQ(manager->clipped_wait_frames_, kClippedWaitFrames);
+ std::unique_ptr<AgcManagerDirect> manager_custom =
+ CreateAgcManagerDirect(kInitialVolume,
+ /*clipped_level_step=*/10,
+ /*clipped_ratio_threshold=*/0.2f,
+ /*clipped_wait_frames=*/50);
+ manager_custom->Initialize();
+ EXPECT_EQ(manager_custom->clipped_level_step_, 10);
+ EXPECT_EQ(manager_custom->clipped_ratio_threshold_, 0.2f);
+ EXPECT_EQ(manager_custom->clipped_wait_frames_, 50);
+}
+
+TEST(AgcManagerDirectStandaloneTest,
+ DisableClippingPredictorDisablesClippingPredictor) {
+ ClippingPredictorConfig default_config;
+ EXPECT_FALSE(default_config.enabled);
+ std::unique_ptr<AgcManagerDirect> manager = CreateAgcManagerDirect(
+ kInitialVolume, kClippedLevelStep, kClippedRatioThreshold,
+ kClippedWaitFrames, default_config);
+ manager->Initialize();
+ EXPECT_FALSE(manager->clipping_predictor_enabled());
+ EXPECT_FALSE(manager->use_clipping_predictor_step());
+}
+
+TEST(AgcManagerDirectStandaloneTest, ClippingPredictorDisabledByDefault) {
+ constexpr ClippingPredictorConfig kDefaultConfig;
+ EXPECT_FALSE(kDefaultConfig.enabled);
+}
+
+TEST(AgcManagerDirectStandaloneTest,
+ EnableClippingPredictorEnablesClippingPredictor) {
+ // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ ClippingPredictorConfig config;
+ config.enabled = true;
+ config.use_predicted_step = true;
+ std::unique_ptr<AgcManagerDirect> manager = CreateAgcManagerDirect(
+ kInitialVolume, kClippedLevelStep, kClippedRatioThreshold,
+ kClippedWaitFrames, config);
+ manager->Initialize();
+ EXPECT_TRUE(manager->clipping_predictor_enabled());
+ EXPECT_TRUE(manager->use_clipping_predictor_step());
+}
+
+TEST(AgcManagerDirectStandaloneTest,
+ DisableClippingPredictorDoesNotLowerVolume) {
+ // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ constexpr ClippingPredictorConfig kConfig{/*enabled=*/false};
+ AgcManagerDirect manager(new ::testing::NiceMock<MockAgc>(), kInitialVolume,
+ kClippedMin, kSampleRateHz, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames, kConfig);
+ manager.Initialize();
+ manager.set_stream_analog_level(/*level=*/255);
+ EXPECT_FALSE(manager.clipping_predictor_enabled());
+ EXPECT_FALSE(manager.use_clipping_predictor_step());
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+ manager.Process(nullptr);
+ CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+ CallPreProcessAudioBuffer(/*num_calls=*/300, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+ CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+}
+
+TEST(AgcManagerDirectStandaloneTest,
+ EnableClippingPredictorWithUnusedPredictedStepDoesNotLowerVolume) {
+ // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ ClippingPredictorConfig config;
+ config.enabled = true;
+ config.use_predicted_step = false;
+ AgcManagerDirect manager(new ::testing::NiceMock<MockAgc>(), kInitialVolume,
+ kClippedMin, kSampleRateHz, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames, config);
+ manager.Initialize();
+ manager.set_stream_analog_level(/*level=*/255);
+ EXPECT_TRUE(manager.clipping_predictor_enabled());
+ EXPECT_FALSE(manager.use_clipping_predictor_step());
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+ manager.Process(nullptr);
+ CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+ CallPreProcessAudioBuffer(/*num_calls=*/300, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+ CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+}
+
+TEST(AgcManagerDirectStandaloneTest, EnableClippingPredictorLowersVolume) {
+ // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ ClippingPredictorConfig config;
+ config.enabled = true;
+ config.use_predicted_step = true;
+ AgcManagerDirect manager(new ::testing::NiceMock<MockAgc>(), kInitialVolume,
+ kClippedMin, kSampleRateHz, kClippedLevelStep,
+ kClippedRatioThreshold, kClippedWaitFrames, config);
+ manager.Initialize();
+ manager.set_stream_analog_level(/*level=*/255);
+ EXPECT_TRUE(manager.clipping_predictor_enabled());
+ EXPECT_TRUE(manager.use_clipping_predictor_step());
+ EXPECT_EQ(manager.stream_analog_level(), 255);
+ manager.Process(nullptr);
+ CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 240);
+ CallPreProcessAudioBuffer(/*num_calls=*/300, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 240);
+ CallPreProcessAudioBuffer(/*num_calls=*/10, /*peak_ratio=*/0.99f, manager);
+ EXPECT_EQ(manager.stream_analog_level(), 225);
+}
+
} // namespace webrtc
diff --git a/modules/audio_processing/agc/clipping_predictor.cc b/modules/audio_processing/agc/clipping_predictor.cc
new file mode 100644
index 0000000000..982bbca2ee
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor.cc
@@ -0,0 +1,383 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_processing/agc/clipping_predictor.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "common_audio/include/audio_util.h"
+#include "modules/audio_processing/agc/clipping_predictor_level_buffer.h"
+#include "modules/audio_processing/agc/gain_map_internal.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_minmax.h"
+
+namespace webrtc {
+namespace {
+
+constexpr int kClippingPredictorMaxGainChange = 15;
+
+// Estimates the new level from the gain error; a copy of the function
+// `LevelFromGainError` in agc_manager_direct.cc.
+int LevelFromGainError(int gain_error,
+ int level,
+ int min_mic_level,
+ int max_mic_level) {
+ RTC_DCHECK_GE(level, 0);
+ RTC_DCHECK_LE(level, max_mic_level);
+ if (gain_error == 0) {
+ return level;
+ }
+ int new_level = level;
+ if (gain_error > 0) {
+ while (kGainMap[new_level] - kGainMap[level] < gain_error &&
+ new_level < max_mic_level) {
+ ++new_level;
+ }
+ } else {
+ while (kGainMap[new_level] - kGainMap[level] > gain_error &&
+ new_level > min_mic_level) {
+ --new_level;
+ }
+ }
+ return new_level;
+}
+
+float ComputeCrestFactor(const ClippingPredictorLevelBuffer::Level& level) {
+ const float crest_factor =
+ FloatS16ToDbfs(level.max) - FloatS16ToDbfs(std::sqrt(level.average));
+ return crest_factor;
+}
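+
+// Informal note on magnitudes: for a full-scale sine the peak sits at roughly
+// 0 dBFS and the RMS about 3 dB lower, giving a crest factor of about 3 dB;
+// speech typically has a considerably larger crest factor, and a sudden drop
+// of the crest factor is what ClippingEventPredictor below looks for.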
+
+// Crest factor-based clipping prediction and clipped level step estimation.
+class ClippingEventPredictor : public ClippingPredictor {
+ public:
+ // ClippingEventPredictor with `num_channels` channels (limited to values
+ // higher than zero); window size `window_length` and reference window size
+ // `reference_window_length` (both referring to the number of frames in the
+ // respective sliding windows and limited to values higher than zero);
+ // reference window delay `reference_window_delay` (delay in frames, limited
+ // to values zero and higher with an additional requirement of
+ // `window_length` < `reference_window_length` + `reference_window_delay`);
+ // and an estimation peak threshold `clipping_threshold` and a crest factor
+ // drop threshold `crest_factor_margin` (both in dB).
+ ClippingEventPredictor(int num_channels,
+ int window_length,
+ int reference_window_length,
+ int reference_window_delay,
+ float clipping_threshold,
+ float crest_factor_margin)
+ : window_length_(window_length),
+ reference_window_length_(reference_window_length),
+ reference_window_delay_(reference_window_delay),
+ clipping_threshold_(clipping_threshold),
+ crest_factor_margin_(crest_factor_margin) {
+ RTC_DCHECK_GT(num_channels, 0);
+ RTC_DCHECK_GT(window_length, 0);
+ RTC_DCHECK_GT(reference_window_length, 0);
+ RTC_DCHECK_GE(reference_window_delay, 0);
+ RTC_DCHECK_GT(reference_window_length + reference_window_delay,
+ window_length);
+ const int buffer_length = GetMinFramesProcessed();
+ RTC_DCHECK_GT(buffer_length, 0);
+ for (int i = 0; i < num_channels; ++i) {
+ ch_buffers_.push_back(
+ std::make_unique<ClippingPredictorLevelBuffer>(buffer_length));
+ }
+ }
+
+ ClippingEventPredictor(const ClippingEventPredictor&) = delete;
+ ClippingEventPredictor& operator=(const ClippingEventPredictor&) = delete;
+ ~ClippingEventPredictor() {}
+
+ void Reset() {
+ const int num_channels = ch_buffers_.size();
+ for (int i = 0; i < num_channels; ++i) {
+ ch_buffers_[i]->Reset();
+ }
+ }
+
+ // Analyzes a frame of audio and stores the framewise metrics in
+ // `ch_buffers_`.
+ void Analyze(const AudioFrameView<const float>& frame) {
+ const int num_channels = frame.num_channels();
+ RTC_DCHECK_EQ(num_channels, ch_buffers_.size());
+ const int samples_per_channel = frame.samples_per_channel();
+ RTC_DCHECK_GT(samples_per_channel, 0);
+ for (int channel = 0; channel < num_channels; ++channel) {
+ float sum_squares = 0.0f;
+ float peak = 0.0f;
+ for (const auto& sample : frame.channel(channel)) {
+ sum_squares += sample * sample;
+ peak = std::max(std::fabs(sample), peak);
+ }
+ ch_buffers_[channel]->Push(
+ {sum_squares / static_cast<float>(samples_per_channel), peak});
+ }
+ }
+
+ // Estimates the analog gain adjustment for channel `channel` using a
+ // sliding window over the frame-wise metrics in `ch_buffers_`. Returns an
+ // estimate for the clipped level step equal to `default_step`
+ // if at least `GetMinFramesProcessed()` frames have been processed since the
+ // last reset and a clipping event is predicted. `level`, `min_mic_level`, and
+ // `max_mic_level` are limited to [0, 255] and `default_step` to [1, 255].
+ absl::optional<int> EstimateClippedLevelStep(int channel,
+ int level,
+ int default_step,
+ int min_mic_level,
+ int max_mic_level) const {
+ RTC_CHECK_GE(channel, 0);
+ RTC_CHECK_LT(channel, ch_buffers_.size());
+ RTC_DCHECK_GE(level, 0);
+ RTC_DCHECK_LE(level, 255);
+ RTC_DCHECK_GT(default_step, 0);
+ RTC_DCHECK_LE(default_step, 255);
+ RTC_DCHECK_GE(min_mic_level, 0);
+ RTC_DCHECK_LE(min_mic_level, 255);
+ RTC_DCHECK_GE(max_mic_level, 0);
+ RTC_DCHECK_LE(max_mic_level, 255);
+ if (level <= min_mic_level) {
+ return absl::nullopt;
+ }
+ if (PredictClippingEvent(channel)) {
+ const int new_level =
+ rtc::SafeClamp(level - default_step, min_mic_level, max_mic_level);
+ const int step = level - new_level;
+ if (step > 0) {
+ return step;
+ }
+ }
+ return absl::nullopt;
+ }
+
+ private:
+ int GetMinFramesProcessed() const {
+ return reference_window_delay_ + reference_window_length_;
+ }
+
+ // Predicts clipping events based on the processed audio frames. Returns
+ // true if a clipping event is likely.
+ bool PredictClippingEvent(int channel) const {
+ const auto metrics =
+ ch_buffers_[channel]->ComputePartialMetrics(0, window_length_);
+ if (!metrics.has_value() ||
+ !(FloatS16ToDbfs(metrics.value().max) > clipping_threshold_)) {
+ return false;
+ }
+ const auto reference_metrics = ch_buffers_[channel]->ComputePartialMetrics(
+ reference_window_delay_, reference_window_length_);
+ if (!reference_metrics.has_value()) {
+ return false;
+ }
+ const float crest_factor = ComputeCrestFactor(metrics.value());
+ const float reference_crest_factor =
+ ComputeCrestFactor(reference_metrics.value());
+ if (crest_factor < reference_crest_factor - crest_factor_margin_) {
+ return true;
+ }
+ return false;
+ }
+
+ std::vector<std::unique_ptr<ClippingPredictorLevelBuffer>> ch_buffers_;
+ const int window_length_;
+ const int reference_window_length_;
+ const int reference_window_delay_;
+ const float clipping_threshold_;
+ const float crest_factor_margin_;
+};
+
+// Performs crest factor-based clipping peak prediction.
+class ClippingPeakPredictor : public ClippingPredictor {
+ public:
+ // Ctor. ClippingPeakPredictor with `num_channels` channels (limited to values
+ // higher than zero); window size `window_length` and reference window size
+ // `reference_window_length` (both referring to the number of frames in the
+ // respective sliding windows and limited to values higher than zero);
+ // reference window delay `reference_window_delay` (delay in frames, limited
+ // to values zero and higher with an additional requirement of
+ // `window_length` < `reference_window_length` + `reference_window_delay`);
+ // and a clipping prediction threshold `clipping_threshold` (in dB). Adaptive
+ // clipped level step estimation is used if `adaptive_step_estimation` is
+ // true.
+ explicit ClippingPeakPredictor(int num_channels,
+ int window_length,
+ int reference_window_length,
+ int reference_window_delay,
+ int clipping_threshold,
+ bool adaptive_step_estimation)
+ : window_length_(window_length),
+ reference_window_length_(reference_window_length),
+ reference_window_delay_(reference_window_delay),
+ clipping_threshold_(clipping_threshold),
+ adaptive_step_estimation_(adaptive_step_estimation) {
+ RTC_DCHECK_GT(num_channels, 0);
+ RTC_DCHECK_GT(window_length, 0);
+ RTC_DCHECK_GT(reference_window_length, 0);
+ RTC_DCHECK_GE(reference_window_delay, 0);
+ RTC_DCHECK_GT(reference_window_length + reference_window_delay,
+ window_length);
+ const int buffer_length = GetMinFramesProcessed();
+ RTC_DCHECK_GT(buffer_length, 0);
+ for (int i = 0; i < num_channels; ++i) {
+ ch_buffers_.push_back(
+ std::make_unique<ClippingPredictorLevelBuffer>(buffer_length));
+ }
+ }
+
+ ClippingPeakPredictor(const ClippingPeakPredictor&) = delete;
+ ClippingPeakPredictor& operator=(const ClippingPeakPredictor&) = delete;
+ ~ClippingPeakPredictor() {}
+
+ void Reset() {
+ const int num_channels = ch_buffers_.size();
+ for (int i = 0; i < num_channels; ++i) {
+ ch_buffers_[i]->Reset();
+ }
+ }
+
+ // Analyzes a frame of audio and stores the framewise metrics in
+ // `ch_buffers_`.
+ void Analyze(const AudioFrameView<const float>& frame) {
+ const int num_channels = frame.num_channels();
+ RTC_DCHECK_EQ(num_channels, ch_buffers_.size());
+ const int samples_per_channel = frame.samples_per_channel();
+ RTC_DCHECK_GT(samples_per_channel, 0);
+ for (int channel = 0; channel < num_channels; ++channel) {
+ float sum_squares = 0.0f;
+ float peak = 0.0f;
+ for (const auto& sample : frame.channel(channel)) {
+ sum_squares += sample * sample;
+ peak = std::max(std::fabs(sample), peak);
+ }
+ ch_buffers_[channel]->Push(
+ {sum_squares / static_cast<float>(samples_per_channel), peak});
+ }
+ }
+
+ // Estimates the analog gain adjustment for channel `channel` using a
+ // sliding window over the frame-wise metrics in `ch_buffers_`. Returns an
+ // estimate for the clipped level step (equal to `default_step` if
+ // `adaptive_step_estimation_` is false) if at
+ // least `GetMinFramesProcessed()` frames have been processed since the last
+ // reset and a clipping event is predicted. `level`, `min_mic_level`, and
+ // `max_mic_level` are limited to [0, 255] and `default_step` to [1, 255].
+ absl::optional<int> EstimateClippedLevelStep(int channel,
+ int level,
+ int default_step,
+ int min_mic_level,
+ int max_mic_level) const {
+ RTC_DCHECK_GE(channel, 0);
+ RTC_DCHECK_LT(channel, ch_buffers_.size());
+ RTC_DCHECK_GE(level, 0);
+ RTC_DCHECK_LE(level, 255);
+ RTC_DCHECK_GT(default_step, 0);
+ RTC_DCHECK_LE(default_step, 255);
+ RTC_DCHECK_GE(min_mic_level, 0);
+ RTC_DCHECK_LE(min_mic_level, 255);
+ RTC_DCHECK_GE(max_mic_level, 0);
+ RTC_DCHECK_LE(max_mic_level, 255);
+ if (level <= min_mic_level) {
+ return absl::nullopt;
+ }
+ absl::optional<float> estimate_db = EstimatePeakValue(channel);
+ if (estimate_db.has_value() && estimate_db.value() > clipping_threshold_) {
+ int step = 0;
+ if (!adaptive_step_estimation_) {
+ step = default_step;
+ } else {
+ const int estimated_gain_change =
+ rtc::SafeClamp(-static_cast<int>(std::ceil(estimate_db.value())),
+ -kClippingPredictorMaxGainChange, 0);
+ step =
+ std::max(level - LevelFromGainError(estimated_gain_change, level,
+ min_mic_level, max_mic_level),
+ default_step);
+ }
+ const int new_level =
+ rtc::SafeClamp(level - step, min_mic_level, max_mic_level);
+ if (level > new_level) {
+ return level - new_level;
+ }
+ }
+ return absl::nullopt;
+ }
+
+ private:
+ int GetMinFramesProcessed() const {
+ return reference_window_delay_ + reference_window_length_;
+ }
+
+ // Predicts clipping sample peaks based on the processed audio frames.
+ // Returns the estimated peak value if clipping is predicted. Otherwise
+ // returns absl::nullopt.
+ absl::optional<float> EstimatePeakValue(int channel) const {
+ const auto reference_metrics = ch_buffers_[channel]->ComputePartialMetrics(
+ reference_window_delay_, reference_window_length_);
+ if (!reference_metrics.has_value()) {
+ return absl::nullopt;
+ }
+ const auto metrics =
+ ch_buffers_[channel]->ComputePartialMetrics(0, window_length_);
+ if (!metrics.has_value() ||
+ !(FloatS16ToDbfs(metrics.value().max) > clipping_threshold_)) {
+ return absl::nullopt;
+ }
+ const float reference_crest_factor =
+ ComputeCrestFactor(reference_metrics.value());
+ const float& mean_squares = metrics.value().average;
+ const float projected_peak =
+ reference_crest_factor + FloatS16ToDbfs(std::sqrt(mean_squares));
+ return projected_peak;
+ }
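+
+ // Worked example for the projection above (illustrative numbers): with a
+ // reference-window crest factor of 12 dB and a current-window RMS level of
+ // -10 dBFS, the projected peak is 12 + (-10) = +2 dBFS; the caller compares
+ // this estimate against `clipping_threshold_`.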
+
+ std::vector<std::unique_ptr<ClippingPredictorLevelBuffer>> ch_buffers_;
+ const int window_length_;
+ const int reference_window_length_;
+ const int reference_window_delay_;
+ const int clipping_threshold_;
+ const bool adaptive_step_estimation_;
+};
+
+} // namespace
+
+std::unique_ptr<ClippingPredictor> CreateClippingPredictor(
+ int num_channels,
+ const AudioProcessing::Config::GainController1::AnalogGainController::
+ ClippingPredictor& config) {
+ if (!config.enabled) {
+ RTC_LOG(LS_INFO) << "[agc] Clipping prediction disabled.";
+ return nullptr;
+ }
+ RTC_LOG(LS_INFO) << "[agc] Clipping prediction enabled.";
+ using ClippingPredictorMode = AudioProcessing::Config::GainController1::
+ AnalogGainController::ClippingPredictor::Mode;
+ switch (config.mode) {
+ case ClippingPredictorMode::kClippingEventPrediction:
+ return std::make_unique<ClippingEventPredictor>(
+ num_channels, config.window_length, config.reference_window_length,
+ config.reference_window_delay, config.clipping_threshold,
+ config.crest_factor_margin);
+ case ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction:
+ return std::make_unique<ClippingPeakPredictor>(
+ num_channels, config.window_length, config.reference_window_length,
+ config.reference_window_delay, config.clipping_threshold,
+ /*adaptive_step_estimation=*/true);
+ case ClippingPredictorMode::kFixedStepClippingPeakPrediction:
+ return std::make_unique<ClippingPeakPredictor>(
+ num_channels, config.window_length, config.reference_window_length,
+ config.reference_window_delay, config.clipping_threshold,
+ /*adaptive_step_estimation=*/false);
+ }
+ RTC_NOTREACHED();
+}
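+
+// A minimal usage sketch (illustrative only; in this change the predictor is
+// created and driven by AgcManagerDirect). `frame` and `analog_level` are
+// placeholders:
+//
+//   AudioProcessing::Config::GainController1::AnalogGainController::
+//       ClippingPredictor config;
+//   config.enabled = true;
+//   config.mode = AudioProcessing::Config::GainController1::
+//       AnalogGainController::ClippingPredictor::Mode::kClippingEventPrediction;
+//   auto predictor = CreateClippingPredictor(/*num_channels=*/1, config);
+//   // For each 10 ms capture frame:
+//   predictor->Analyze(frame);
+//   absl::optional<int> step = predictor->EstimateClippedLevelStep(
+//       /*channel=*/0, /*level=*/analog_level, /*default_step=*/15,
+//       /*min_mic_level=*/12, /*max_mic_level=*/255);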
+
+} // namespace webrtc
diff --git a/modules/audio_processing/agc/clipping_predictor.h b/modules/audio_processing/agc/clipping_predictor.h
new file mode 100644
index 0000000000..ee2b6ef1e7
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_H_
+#define MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "modules/audio_processing/include/audio_frame_view.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+
+// Frame-wise clipping prediction and clipped level step estimation. Analyzes
+// 10 ms multi-channel frames and estimates an analog mic level decrease step
+// to possibly avoid clipping when predicted. `Analyze()` and
+// `EstimateClippedLevelStep()` can be called in any order.
+class ClippingPredictor {
+ public:
+ virtual ~ClippingPredictor() = default;
+
+ virtual void Reset() = 0;
+
+ // Analyzes a 10 ms multi-channel audio frame.
+ virtual void Analyze(const AudioFrameView<const float>& frame) = 0;
+
+ // Predicts if clipping is going to occur for the specified `channel` in the
+ // near future and, if so, returns a recommended analog mic level decrease
+ // step. Returns absl::nullopt if clipping is not predicted.
+ // `level` is the current analog mic level, `default_step` is the amount the
+ // mic level is lowered by the analog controller with every clipping event and
+ // `min_mic_level` and `max_mic_level` define the range of allowed analog mic
+ // levels.
+ virtual absl::optional<int> EstimateClippedLevelStep(
+ int channel,
+ int level,
+ int default_step,
+ int min_mic_level,
+ int max_mic_level) const = 0;
+
+};
+
+// Creates a ClippingPredictor based on the provided `config`. When enabled,
+// the following must hold for `config`:
+// `window_length < reference_window_length + reference_window_delay`.
+// Returns `nullptr` if `config.enabled` is false.
+std::unique_ptr<ClippingPredictor> CreateClippingPredictor(
+ int num_channels,
+ const AudioProcessing::Config::GainController1::AnalogGainController::
+ ClippingPredictor& config);
+
+} // namespace webrtc
+
+#endif // MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_H_
diff --git a/modules/audio_processing/agc/clipping_predictor_evaluator.cc b/modules/audio_processing/agc/clipping_predictor_evaluator.cc
new file mode 100644
index 0000000000..2a4ea922cf
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor_evaluator.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_processing/agc/clipping_predictor_evaluator.h"
+
+#include <algorithm>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+// Returns the index of the oldest item in a non-empty ring buffer with the
+// given `size`, `tail` index and `capacity`.
+int OldestExpectedDetectionIndex(int size, int tail, int capacity) {
+ RTC_DCHECK_GT(size, 0);
+ return tail - size + (tail < size ? capacity : 0);
+}
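+
+// Example (illustrative): with `capacity` = 4, `tail` = 1 and `size` = 3, the
+// buffered items live at indices 2, 3 and 0, so the oldest is at index
+// 1 - 3 + 4 = 2.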
+
+} // namespace
+
+ClippingPredictorEvaluator::ClippingPredictorEvaluator(int history_size)
+ : history_size_(history_size),
+ ring_buffer_capacity_(history_size + 1),
+ ring_buffer_(ring_buffer_capacity_),
+ true_positives_(0),
+ true_negatives_(0),
+ false_positives_(0),
+ false_negatives_(0) {
+ RTC_DCHECK_GT(history_size_, 0);
+ Reset();
+}
+
+ClippingPredictorEvaluator::~ClippingPredictorEvaluator() = default;
+
+absl::optional<int> ClippingPredictorEvaluator::Observe(
+ bool clipping_detected,
+ bool clipping_predicted) {
+ RTC_DCHECK_GE(ring_buffer_size_, 0);
+ RTC_DCHECK_LE(ring_buffer_size_, ring_buffer_capacity_);
+ RTC_DCHECK_GE(ring_buffer_tail_, 0);
+ RTC_DCHECK_LT(ring_buffer_tail_, ring_buffer_capacity_);
+
+ DecreaseTimesToLive();
+ if (clipping_predicted) {
+ // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ Push(/*expected_detection=*/{/*ttl=*/history_size_, /*detected=*/false});
+ }
+ // Clipping is expected if there are expected detections regardless of
+ // whether all the expected detections have been previously matched - i.e.,
+ // `ExpectedDetection::detected` is true.
+ const bool clipping_expected = ring_buffer_size_ > 0;
+
+ absl::optional<int> prediction_interval;
+ if (clipping_expected && clipping_detected) {
+ prediction_interval = FindEarliestPredictionInterval();
+ // Add a true positive for each unexpired expected detection.
+ const int num_modified_items = MarkExpectedDetectionAsDetected();
+ true_positives_ += num_modified_items;
+ RTC_DCHECK(prediction_interval.has_value() || num_modified_items == 0);
+ RTC_DCHECK(!prediction_interval.has_value() || num_modified_items > 0);
+ } else if (clipping_expected && !clipping_detected) {
+ // Add a false positive if there is one expected detection that has expired
+ // and that has never been matched before. Note that there is at most one
+ // unmatched expired detection.
+ if (HasExpiredUnmatchedExpectedDetection()) {
+ false_positives_++;
+ }
+ } else if (!clipping_expected && clipping_detected) {
+ false_negatives_++;
+ } else {
+ RTC_DCHECK(!clipping_expected && !clipping_detected);
+ true_negatives_++;
+ }
+ return prediction_interval;
+}
+
+void ClippingPredictorEvaluator::Reset() {
+ // Empty the ring buffer of expected detections.
+ ring_buffer_tail_ = 0;
+ ring_buffer_size_ = 0;
+}
+
+// Cost: O(1).
+void ClippingPredictorEvaluator::Push(ExpectedDetection value) {
+ ring_buffer_[ring_buffer_tail_] = value;
+ ring_buffer_tail_++;
+ if (ring_buffer_tail_ == ring_buffer_capacity_) {
+ ring_buffer_tail_ = 0;
+ }
+ ring_buffer_size_ = std::min(ring_buffer_capacity_, ring_buffer_size_ + 1);
+}
+
+// Cost: O(N).
+void ClippingPredictorEvaluator::DecreaseTimesToLive() {
+ bool expired_found = false;
+ for (int i = ring_buffer_tail_ - ring_buffer_size_; i < ring_buffer_tail_;
+ ++i) {
+ int index = i >= 0 ? i : ring_buffer_capacity_ + i;
+ RTC_DCHECK_GE(index, 0);
+ RTC_DCHECK_LT(index, ring_buffer_.size());
+ RTC_DCHECK_GE(ring_buffer_[index].ttl, 0);
+ if (ring_buffer_[index].ttl == 0) {
+ RTC_DCHECK(!expired_found)
+ << "There must be at most one expired item in the ring buffer.";
+ expired_found = true;
+ RTC_DCHECK_EQ(index, OldestExpectedDetectionIndex(ring_buffer_size_,
+ ring_buffer_tail_,
+ ring_buffer_capacity_))
+ << "The expired item must be the oldest in the ring buffer.";
+ }
+ ring_buffer_[index].ttl--;
+ }
+ if (expired_found) {
+ ring_buffer_size_--;
+ }
+}
+
+// Cost: O(N).
+absl::optional<int> ClippingPredictorEvaluator::FindEarliestPredictionInterval()
+ const {
+ absl::optional<int> prediction_interval;
+ for (int i = ring_buffer_tail_ - ring_buffer_size_; i < ring_buffer_tail_;
+ ++i) {
+ int index = i >= 0 ? i : ring_buffer_capacity_ + i;
+ RTC_DCHECK_GE(index, 0);
+ RTC_DCHECK_LT(index, ring_buffer_.size());
+ if (!ring_buffer_[index].detected) {
+ prediction_interval = std::max(prediction_interval.value_or(0),
+ history_size_ - ring_buffer_[index].ttl);
+ }
+ }
+ return prediction_interval;
+}
+
+// Cost: O(N).
+int ClippingPredictorEvaluator::MarkExpectedDetectionAsDetected() {
+ int num_modified_items = 0;
+ for (int i = ring_buffer_tail_ - ring_buffer_size_; i < ring_buffer_tail_;
+ ++i) {
+ int index = i >= 0 ? i : ring_buffer_capacity_ + i;
+ RTC_DCHECK_GE(index, 0);
+ RTC_DCHECK_LT(index, ring_buffer_.size());
+ if (!ring_buffer_[index].detected) {
+ num_modified_items++;
+ }
+ ring_buffer_[index].detected = true;
+ }
+ return num_modified_items;
+}
+
+// Cost: O(1).
+bool ClippingPredictorEvaluator::HasExpiredUnmatchedExpectedDetection() const {
+ if (ring_buffer_size_ == 0) {
+ return false;
+ }
+ // If an expired item (i.e., one with `ttl` equal to 0) exists, it must be
+ // the oldest.
+ const int oldest_index = OldestExpectedDetectionIndex(
+ ring_buffer_size_, ring_buffer_tail_, ring_buffer_capacity_);
+ RTC_DCHECK_GE(oldest_index, 0);
+ RTC_DCHECK_LT(oldest_index, ring_buffer_.size());
+ return ring_buffer_[oldest_index].ttl == 0 &&
+ !ring_buffer_[oldest_index].detected;
+}
+
+} // namespace webrtc
diff --git a/modules/audio_processing/agc/clipping_predictor_evaluator.h b/modules/audio_processing/agc/clipping_predictor_evaluator.h
new file mode 100644
index 0000000000..e76f25d5e1
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor_evaluator.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_EVALUATOR_H_
+#define MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_EVALUATOR_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// Counts true/false positives/negatives while observing sequences of flag pairs
+// that indicate whether clipping has been detected and/or if clipping is
+// predicted. When a true positive is found measures the time interval between
+// prediction and detection events.
+// From the time a prediction is observed and for a period equal to
+// `history_size` calls to `Observe()`, one or more detections are expected. If
+// the expectation is met, a true positive is added and the time interval
+// between the earliest prediction and the detection is recorded; otherwise,
+// when the deadline is reached, a false positive is added. Note that one
+// detection matches all the expected detections that have not expired - i.e.,
+// one detection counts as multiple true positives.
+// If a detection is observed, but no prediction has been observed over the past
+// `history_size` calls to `Observe()`, then a false negative is added;
+// otherwise, a true negative is added.
+class ClippingPredictorEvaluator {
+ public:
+ // Ctor. `history_size` indicates how long to wait for a call to `Observe()`
+ // having `clipping_detected` set to true from the time clipping is predicted.
+ explicit ClippingPredictorEvaluator(int history_size);
+ ClippingPredictorEvaluator(const ClippingPredictorEvaluator&) = delete;
+ ClippingPredictorEvaluator& operator=(const ClippingPredictorEvaluator&) =
+ delete;
+ ~ClippingPredictorEvaluator();
+
+ // Observes whether clipping has been detected and/or if clipping is
+ // predicted. When predicted, one or more detections are expected in the next
+ // `history_size_` calls of `Observe()`. When true positives are found, returns
+ // the prediction interval between the earliest prediction and the detection.
+ absl::optional<int> Observe(bool clipping_detected, bool clipping_predicted);
+
+ // Removes any expectation recently set after a call to `Observe()` having
+ // `clipping_predicted` set to true.
+ void Reset();
+
+ // Metrics getters.
+ int true_positives() const { return true_positives_; }
+ int true_negatives() const { return true_negatives_; }
+ int false_positives() const { return false_positives_; }
+ int false_negatives() const { return false_negatives_; }
+
+ private:
+ const int history_size_;
+
+ // State of a detection expected to be observed after a prediction.
+ struct ExpectedDetection {
+ // Time to live (TTL); remaining number of `Observe()` calls to match a call
+ // having `clipping_detected` set to true.
+ int ttl;
+ // True if an `Observe()` call having `clipping_detected` set to true has
+ // been observed.
+ bool detected;
+ };
+ // Ring buffer of expected detections.
+ const int ring_buffer_capacity_;
+ std::vector<ExpectedDetection> ring_buffer_;
+ int ring_buffer_tail_;
+ int ring_buffer_size_;
+
+  // Pushes `expected_detection` into `ring_buffer_`.
+ void Push(ExpectedDetection expected_detection);
+  // Decreases the TTLs in `ring_buffer_` and removes expired items.
+ void DecreaseTimesToLive();
+ // Returns the prediction interval for the earliest unexpired expected
+ // detection if any.
+ absl::optional<int> FindEarliestPredictionInterval() const;
+  // Marks all the items in `ring_buffer_` as `detected` and returns the
+  // number of updated items.
+ int MarkExpectedDetectionAsDetected();
+  // Returns true if `ring_buffer_` has an item having `ttl` equal to 0
+  // (expired) and `detected` equal to false (unmatched).
+ bool HasExpiredUnmatchedExpectedDetection() const;
+
+ // Metrics.
+ int true_positives_;
+ int true_negatives_;
+ int false_positives_;
+ int false_negatives_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_EVALUATOR_H_
diff --git a/modules/audio_processing/agc/clipping_predictor_evaluator_unittest.cc b/modules/audio_processing/agc/clipping_predictor_evaluator_unittest.cc
new file mode 100644
index 0000000000..1eb83eae61
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor_evaluator_unittest.cc
@@ -0,0 +1,568 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_processing/agc/clipping_predictor_evaluator.h"
+
+#include <cstdint>
+#include <memory>
+#include <tuple>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/random.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using testing::Eq;
+using testing::Optional;
+
+constexpr bool kDetected = true;
+constexpr bool kNotDetected = false;
+
+constexpr bool kPredicted = true;
+constexpr bool kNotPredicted = false;
+
+int SumTrueFalsePositivesNegatives(
+ const ClippingPredictorEvaluator& evaluator) {
+ return evaluator.true_positives() + evaluator.true_negatives() +
+ evaluator.false_positives() + evaluator.false_negatives();
+}
+
+// Checks the metrics after init - i.e., no call to `Observe()`.
+TEST(ClippingPredictorEvaluatorTest, Init) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+class ClippingPredictorEvaluatorParameterization
+ : public ::testing::TestWithParam<std::tuple<int, int>> {
+ protected:
+ uint64_t seed() const {
+ return rtc::checked_cast<uint64_t>(std::get<0>(GetParam()));
+ }
+ int history_size() const { return std::get<1>(GetParam()); }
+};
+
+// Checks that after each call to `Observe()` at most one metric changes.
+TEST_P(ClippingPredictorEvaluatorParameterization, AtMostOneMetricChanges) {
+ constexpr int kNumCalls = 123;
+ Random random_generator(seed());
+ ClippingPredictorEvaluator evaluator(history_size());
+
+ for (int i = 0; i < kNumCalls; ++i) {
+ SCOPED_TRACE(i);
+ // Read metrics before `Observe()` is called.
+ const int last_tp = evaluator.true_positives();
+ const int last_tn = evaluator.true_negatives();
+ const int last_fp = evaluator.false_positives();
+ const int last_fn = evaluator.false_negatives();
+ // `Observe()` a random observation.
+ bool clipping_detected = random_generator.Rand<bool>();
+ bool clipping_predicted = random_generator.Rand<bool>();
+ evaluator.Observe(clipping_detected, clipping_predicted);
+
+ // Check that at most one metric has changed.
+ int num_changes = 0;
+ num_changes += last_tp == evaluator.true_positives() ? 0 : 1;
+ num_changes += last_tn == evaluator.true_negatives() ? 0 : 1;
+ num_changes += last_fp == evaluator.false_positives() ? 0 : 1;
+ num_changes += last_fn == evaluator.false_negatives() ? 0 : 1;
+ EXPECT_GE(num_changes, 0);
+ EXPECT_LE(num_changes, 1);
+ }
+}
+
+// Checks that after each call to `Observe()` each metric either remains
+// unchanged or grows.
+TEST_P(ClippingPredictorEvaluatorParameterization, MetricsAreWeaklyMonotonic) {
+ constexpr int kNumCalls = 123;
+ Random random_generator(seed());
+ ClippingPredictorEvaluator evaluator(history_size());
+
+ for (int i = 0; i < kNumCalls; ++i) {
+ SCOPED_TRACE(i);
+ // Read metrics before `Observe()` is called.
+ const int last_tp = evaluator.true_positives();
+ const int last_tn = evaluator.true_negatives();
+ const int last_fp = evaluator.false_positives();
+ const int last_fn = evaluator.false_negatives();
+ // `Observe()` a random observation.
+ bool clipping_detected = random_generator.Rand<bool>();
+ bool clipping_predicted = random_generator.Rand<bool>();
+ evaluator.Observe(clipping_detected, clipping_predicted);
+
+ // Check that metrics are weakly monotonic.
+ EXPECT_GE(evaluator.true_positives(), last_tp);
+ EXPECT_GE(evaluator.true_negatives(), last_tn);
+ EXPECT_GE(evaluator.false_positives(), last_fp);
+ EXPECT_GE(evaluator.false_negatives(), last_fn);
+ }
+}
+
+// Checks that after each call to `Observe()` the growth of each metric is
+// bounded.
+TEST_P(ClippingPredictorEvaluatorParameterization, BoundedMetricsGrowth) {
+ constexpr int kNumCalls = 123;
+ Random random_generator(seed());
+ ClippingPredictorEvaluator evaluator(history_size());
+
+ for (int i = 0; i < kNumCalls; ++i) {
+ SCOPED_TRACE(i);
+ // Read metrics before `Observe()` is called.
+ const int last_tp = evaluator.true_positives();
+ const int last_tn = evaluator.true_negatives();
+ const int last_fp = evaluator.false_positives();
+ const int last_fn = evaluator.false_negatives();
+ // `Observe()` a random observation.
+ bool clipping_detected = random_generator.Rand<bool>();
+ bool clipping_predicted = random_generator.Rand<bool>();
+ evaluator.Observe(clipping_detected, clipping_predicted);
+
+ // Check that TPs grow by at most `history_size() + 1`. Such an upper bound
+ // is reached when multiple predictions are matched by a single detection.
+ EXPECT_LE(evaluator.true_positives() - last_tp, history_size() + 1);
+    // Check that TNs, FPs and FNs grow by at most one.
+ EXPECT_LE(evaluator.true_negatives() - last_tn, 1);
+ EXPECT_LE(evaluator.false_positives() - last_fp, 1);
+ EXPECT_LE(evaluator.false_negatives() - last_fn, 1);
+ }
+}
+
+// Checks that `Observe()` returns a prediction interval if and only if one or
+// more true positives are found.
+TEST_P(ClippingPredictorEvaluatorParameterization,
+ PredictionIntervalIfAndOnlyIfTruePositives) {
+ constexpr int kNumCalls = 123;
+ Random random_generator(seed());
+ ClippingPredictorEvaluator evaluator(history_size());
+
+ for (int i = 0; i < kNumCalls; ++i) {
+ SCOPED_TRACE(i);
+ // Read true positives before `Observe()` is called.
+ const int last_tp = evaluator.true_positives();
+ // `Observe()` a random observation.
+ bool clipping_detected = random_generator.Rand<bool>();
+ bool clipping_predicted = random_generator.Rand<bool>();
+ absl::optional<int> prediction_interval =
+ evaluator.Observe(clipping_detected, clipping_predicted);
+
+ // Check that the prediction interval is returned when a true positive is
+ // found.
+ if (evaluator.true_positives() == last_tp) {
+ EXPECT_FALSE(prediction_interval.has_value());
+ } else {
+ EXPECT_TRUE(prediction_interval.has_value());
+ }
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ ClippingPredictorEvaluatorTest,
+ ClippingPredictorEvaluatorParameterization,
+ ::testing::Combine(::testing::Values(4, 8, 15, 16, 23, 42),
+ ::testing::Values(1, 10, 21)));
+
+// Checks that, observing a detection and a prediction after init, produces a
+// true positive.
+TEST(ClippingPredictorEvaluatorTest, OneTruePositiveAfterInit) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kDetected, kPredicted);
+ EXPECT_EQ(evaluator.true_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that, observing a detection but no prediction after init, produces a
+// false negative.
+TEST(ClippingPredictorEvaluatorTest, OneFalseNegativeAfterInit) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_negatives(), 1);
+
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+}
+
+// Checks that, observing no detection but a prediction after init, produces a
+// false positive after expiration.
+TEST(ClippingPredictorEvaluatorTest, OneFalsePositiveAfterInit) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that, observing no detection and no prediction after init, produces a
+// true negative.
+TEST(ClippingPredictorEvaluatorTest, OneTrueNegativeAfterInit) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_negatives(), 1);
+
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that the evaluator detects true negatives when clipping is neither
+// predicted nor detected.
+TEST(ClippingPredictorEvaluatorTest, NeverDetectedAndNotPredicted) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_negatives(), 4);
+
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that the evaluator detects a false negative when clipping is detected
+// but not predicted.
+TEST(ClippingPredictorEvaluatorTest, DetectedButNotPredicted) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_negatives(), 1);
+
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.true_negatives(), 3);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+}
+
+// Checks that, when clipping is predicted but never detected, a false positive
+// is not counted until the observation period expires.
+TEST(ClippingPredictorEvaluatorTest,
+ PredictedOnceAndNeverDetectedBeforeDeadline) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that the evaluator counts a false positive when clipping is predicted
+// but only detected after the observation period expires.
+TEST(ClippingPredictorEvaluatorTest, PredictedOnceButDetectedAfterDeadline) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_positives(), 0);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 1);
+}
+
+// Checks that a prediction followed by a detection counts as true positive.
+TEST(ClippingPredictorEvaluatorTest, PredictedOnceAndThenImmediatelyDetected) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that a prediction followed by a delayed detection counts as true
+// positive if the delay is within the observation period.
+TEST(ClippingPredictorEvaluatorTest, PredictedOnceAndDetectedBeforeDeadline) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that a prediction followed by a delayed detection counts as true
+// positive if the delay equals the observation period.
+TEST(ClippingPredictorEvaluatorTest, PredictedOnceAndDetectedAtDeadline) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that a prediction followed by multiple adjacent detections within
+// the deadline counts as a single true positive and that, after the deadline,
+// a detection counts as a false negative.
+TEST(ClippingPredictorEvaluatorTest, PredictedOnceAndDetectedMultipleTimes) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ // Multiple detections.
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_positives(), 1);
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_positives(), 1);
+ // A detection outside of the observation period counts as false negative.
+ evaluator.Observe(kDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.false_negatives(), 1);
+ EXPECT_EQ(SumTrueFalsePositivesNegatives(evaluator), 2);
+
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+}
+
+// Checks that a false positive is added when clipping is predicted too early,
+// that is, when the detection arrives after the earliest prediction expires.
+TEST(ClippingPredictorEvaluatorTest,
+ PredictedMultipleTimesAndDetectedOnceAfterDeadline) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted); // ---+
+ evaluator.Observe(kNotDetected, kPredicted); // |
+ evaluator.Observe(kNotDetected, kPredicted); // |
+ evaluator.Observe(kNotDetected, kPredicted); // <--+ Not matched.
+ // The time to match a detection after the first prediction expired.
+ EXPECT_EQ(evaluator.false_positives(), 1);
+ evaluator.Observe(kDetected, kNotPredicted);
+  // The detection above does not match the first prediction because it
+  // happened after that prediction's deadline.
+ EXPECT_EQ(evaluator.false_positives(), 1);
+
+ EXPECT_EQ(evaluator.true_positives(), 3);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that multiple consecutive predictions match the first detection
+// observed before the expected detection deadline expires.
+TEST(ClippingPredictorEvaluatorTest, PredictedMultipleTimesAndDetectedOnce) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted); // --+
+ evaluator.Observe(kNotDetected, kPredicted); // | --+
+ evaluator.Observe(kNotDetected, kPredicted); // | | --+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+ <-+ <-+
+ EXPECT_EQ(evaluator.true_positives(), 3);
+ // The following observations do not generate any true negatives as they
+ // belong to the observation period of the last prediction - for which a
+ // detection has already been matched.
+ const int true_negatives = evaluator.true_negatives();
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_negatives(), true_negatives);
+
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that multiple consecutive predictions match the multiple detections
+// observed before the expected detection deadline expires.
+TEST(ClippingPredictorEvaluatorTest,
+ PredictedMultipleTimesAndDetectedMultipleTimes) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted); // --+
+ evaluator.Observe(kNotDetected, kPredicted); // | --+
+ evaluator.Observe(kNotDetected, kPredicted); // | | --+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+ <-+ <-+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+ <-+
+ EXPECT_EQ(evaluator.true_positives(), 3);
+ // The following observation does not generate a true negative as it belongs
+ // to the observation period of the last prediction - for which two detections
+ // have already been matched.
+ const int true_negatives = evaluator.true_negatives();
+ evaluator.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(evaluator.true_negatives(), true_negatives);
+
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that multiple consecutive predictions match all the detections
+// observed before the expected detection deadline expires.
+TEST(ClippingPredictorEvaluatorTest, PredictedMultipleTimesAndAllDetected) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted); // --+
+ evaluator.Observe(kNotDetected, kPredicted); // | --+
+ evaluator.Observe(kNotDetected, kPredicted); // | | --+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+ <-+ <-+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+ <-+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+
+ EXPECT_EQ(evaluator.true_positives(), 3);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+// Checks that multiple non-consecutive predictions match all the detections
+// observed before the expected detection deadline expires.
+TEST(ClippingPredictorEvaluatorTest,
+ PredictedMultipleTimesWithGapAndAllDetected) {
+ ClippingPredictorEvaluator evaluator(/*history_size=*/3);
+ evaluator.Observe(kNotDetected, kPredicted); // --+
+ evaluator.Observe(kNotDetected, kNotPredicted); // |
+ evaluator.Observe(kNotDetected, kPredicted); // | --+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+ <-+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+
+ evaluator.Observe(kDetected, kNotPredicted); // <-+
+ EXPECT_EQ(evaluator.true_positives(), 2);
+ EXPECT_EQ(evaluator.true_negatives(), 0);
+ EXPECT_EQ(evaluator.false_positives(), 0);
+ EXPECT_EQ(evaluator.false_negatives(), 0);
+}
+
+class ClippingPredictorEvaluatorPredictionIntervalParameterization
+ : public ::testing::TestWithParam<std::tuple<int, int>> {
+ protected:
+ int num_extra_observe_calls() const { return std::get<0>(GetParam()); }
+ int history_size() const { return std::get<1>(GetParam()); }
+};
+
+// Checks that the minimum prediction interval is returned if clipping is
+// correctly predicted only when it is detected - i.e., with no anticipation.
+TEST_P(ClippingPredictorEvaluatorPredictionIntervalParameterization,
+ MinimumPredictionInterval) {
+ ClippingPredictorEvaluator evaluator(history_size());
+ for (int i = 0; i < num_extra_observe_calls(); ++i) {
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kNotPredicted), absl::nullopt);
+ }
+ absl::optional<int> prediction_interval =
+ evaluator.Observe(kDetected, kPredicted);
+ EXPECT_THAT(prediction_interval, Optional(Eq(0)));
+}
+
+// Checks that a prediction interval between the minimum and the maximum is
+// returned if clipping is correctly predicted before it is detected but not as
+// early as possible.
+TEST_P(ClippingPredictorEvaluatorPredictionIntervalParameterization,
+ IntermediatePredictionInterval) {
+ ClippingPredictorEvaluator evaluator(history_size());
+ for (int i = 0; i < num_extra_observe_calls(); ++i) {
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kNotPredicted), absl::nullopt);
+ }
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kPredicted), absl::nullopt);
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kPredicted), absl::nullopt);
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kPredicted), absl::nullopt);
+ absl::optional<int> prediction_interval =
+ evaluator.Observe(kDetected, kPredicted);
+ EXPECT_THAT(prediction_interval, Optional(Eq(3)));
+}
+
+// Checks that the maximum prediction interval is returned if clipping is
+// correctly predicted as early as possible.
+TEST_P(ClippingPredictorEvaluatorPredictionIntervalParameterization,
+ MaximumPredictionInterval) {
+ ClippingPredictorEvaluator evaluator(history_size());
+ for (int i = 0; i < num_extra_observe_calls(); ++i) {
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kNotPredicted), absl::nullopt);
+ }
+ for (int i = 0; i < history_size(); ++i) {
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kPredicted), absl::nullopt);
+ }
+ absl::optional<int> prediction_interval =
+ evaluator.Observe(kDetected, kPredicted);
+ EXPECT_THAT(prediction_interval, Optional(Eq(history_size())));
+}
+
+// Checks that `Observe()` returns the prediction interval as soon as a true
+// positive is found and never again while ongoing detections are matched to a
+// previously observed prediction.
+TEST_P(ClippingPredictorEvaluatorPredictionIntervalParameterization,
+ PredictionIntervalReturnedOnce) {
+ ASSERT_LT(num_extra_observe_calls(), history_size());
+ ClippingPredictorEvaluator evaluator(history_size());
+ // Observe predictions before detection.
+ for (int i = 0; i < num_extra_observe_calls(); ++i) {
+ EXPECT_EQ(evaluator.Observe(kNotDetected, kPredicted), absl::nullopt);
+ }
+ // Observe a detection.
+ absl::optional<int> prediction_interval =
+ evaluator.Observe(kDetected, kPredicted);
+ EXPECT_TRUE(prediction_interval.has_value());
+  // `Observe()` does not return a prediction interval again for subsequent
+  // detections that are matched to previously observed predictions.
+ for (int i = 0; i < history_size(); ++i) {
+ EXPECT_EQ(evaluator.Observe(kDetected, kNotPredicted), absl::nullopt);
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ ClippingPredictorEvaluatorTest,
+ ClippingPredictorEvaluatorPredictionIntervalParameterization,
+ ::testing::Combine(::testing::Values(0, 3, 5), ::testing::Values(7, 11)));
+
+// Checks that, when a detection is expected, the expectation is removed if and
+// only if `Reset()` is called after a prediction is observed.
+TEST(ClippingPredictorEvaluatorTest, NoFalsePositivesAfterReset) {
+ constexpr int kHistorySize = 2;
+
+ ClippingPredictorEvaluator with_reset(kHistorySize);
+ with_reset.Observe(kNotDetected, kPredicted);
+ with_reset.Reset();
+ with_reset.Observe(kNotDetected, kNotPredicted);
+ with_reset.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(with_reset.true_positives(), 0);
+ EXPECT_EQ(with_reset.true_negatives(), 2);
+ EXPECT_EQ(with_reset.false_positives(), 0);
+ EXPECT_EQ(with_reset.false_negatives(), 0);
+
+ ClippingPredictorEvaluator no_reset(kHistorySize);
+ no_reset.Observe(kNotDetected, kPredicted);
+ no_reset.Observe(kNotDetected, kNotPredicted);
+ no_reset.Observe(kNotDetected, kNotPredicted);
+ EXPECT_EQ(no_reset.true_positives(), 0);
+ EXPECT_EQ(no_reset.true_negatives(), 0);
+ EXPECT_EQ(no_reset.false_positives(), 1);
+ EXPECT_EQ(no_reset.false_negatives(), 0);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/modules/audio_processing/agc/clipping_predictor_level_buffer.cc b/modules/audio_processing/agc/clipping_predictor_level_buffer.cc
new file mode 100644
index 0000000000..bc33cda040
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor_level_buffer.cc
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_processing/agc/clipping_predictor_level_buffer.h"
+
+#include <algorithm>
+#include <cmath>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+bool ClippingPredictorLevelBuffer::Level::operator==(const Level& level) const {
+ constexpr float kEpsilon = 1e-6f;
+ return std::fabs(average - level.average) < kEpsilon &&
+ std::fabs(max - level.max) < kEpsilon;
+}
+
+ClippingPredictorLevelBuffer::ClippingPredictorLevelBuffer(int capacity)
+ : tail_(-1), size_(0), data_(std::max(1, capacity)) {
+ if (capacity > kMaxCapacity) {
+ RTC_LOG(LS_WARNING) << "[agc]: ClippingPredictorLevelBuffer exceeds the "
+ << "maximum allowed capacity. Capacity: " << capacity;
+ }
+ RTC_DCHECK(!data_.empty());
+}
+
+void ClippingPredictorLevelBuffer::Reset() {
+ tail_ = -1;
+ size_ = 0;
+}
+
+void ClippingPredictorLevelBuffer::Push(Level level) {
+ ++tail_;
+ if (tail_ == Capacity()) {
+ tail_ = 0;
+ }
+ if (size_ < Capacity()) {
+ size_++;
+ }
+ data_[tail_] = level;
+}
+
+// TODO(bugs.webrtc.org/12774): Optimize partial computation for long buffers.
+absl::optional<ClippingPredictorLevelBuffer::Level>
+ClippingPredictorLevelBuffer::ComputePartialMetrics(int delay,
+ int num_items) const {
+ RTC_DCHECK_GE(delay, 0);
+ RTC_DCHECK_LT(delay, Capacity());
+ RTC_DCHECK_GT(num_items, 0);
+ RTC_DCHECK_LE(num_items, Capacity());
+ RTC_DCHECK_LE(delay + num_items, Capacity());
+ if (delay + num_items > Size()) {
+ return absl::nullopt;
+ }
+ float sum = 0.0f;
+ float max = 0.0f;
+ for (int i = 0; i < num_items && i < Size(); ++i) {
+ int idx = tail_ - delay - i;
+ if (idx < 0) {
+ idx += Capacity();
+ }
+ sum += data_[idx].average;
+ max = std::fmax(data_[idx].max, max);
+ }
+ return absl::optional<Level>({sum / static_cast<float>(num_items), max});
+}
+
+} // namespace webrtc
diff --git a/modules/audio_processing/agc/clipping_predictor_level_buffer.h b/modules/audio_processing/agc/clipping_predictor_level_buffer.h
new file mode 100644
index 0000000000..f3e8368194
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor_level_buffer.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_
+#define MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// A circular buffer to store frame-wise `Level` items for clipping prediction.
+// The current implementation is not optimized for large buffer lengths.
+class ClippingPredictorLevelBuffer {
+ public:
+ struct Level {
+ float average;
+ float max;
+ bool operator==(const Level& level) const;
+ };
+
+ // Recommended maximum capacity. It is possible to create a buffer with a
+ // larger capacity, but the implementation is not optimized for large values.
+ static constexpr int kMaxCapacity = 100;
+
+ // Ctor. Sets the buffer capacity to max(1, `capacity`) and logs a warning
+ // message if the capacity is greater than `kMaxCapacity`.
+ explicit ClippingPredictorLevelBuffer(int capacity);
+ ~ClippingPredictorLevelBuffer() {}
+ ClippingPredictorLevelBuffer(const ClippingPredictorLevelBuffer&) = delete;
+ ClippingPredictorLevelBuffer& operator=(const ClippingPredictorLevelBuffer&) =
+ delete;
+
+ void Reset();
+
+ // Returns the current number of items stored in the buffer.
+ int Size() const { return size_; }
+
+ // Returns the capacity of the buffer.
+ int Capacity() const { return data_.size(); }
+
+ // Adds a `level` item into the circular buffer `data_`. Stores at most
+ // `Capacity()` items. If more items are pushed, the new item replaces the
+ // least recently pushed item.
+ void Push(Level level);
+
+ // If at least `num_items` + `delay` items have been pushed, returns the
+  // average and maximum value over the `num_items` items at delays from
+  // `delay` to `delay` + `num_items` - 1, where a delay equal to zero
+  // corresponds to the most recently pushed item. The value of `delay` is
+  // limited to [0, N] and `num_items` to [1, M] where N + M is the capacity
+  // of the buffer.
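+  //
+  // Illustrative example (not part of the API contract): with a capacity of 3
+  // and after Push({1, 2}), Push({3, 6}) and Push({5, 10}),
+  // `ComputePartialMetrics(/*delay=*/0, /*num_items=*/2)` covers the two most
+  // recently pushed items and returns {(3 + 5) / 2, max(6, 10)} = {4, 10}.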
+ absl::optional<Level> ComputePartialMetrics(int delay, int num_items) const;
+
+ private:
+ int tail_;
+ int size_;
+ std::vector<Level> data_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_
diff --git a/modules/audio_processing/agc/clipping_predictor_level_buffer_unittest.cc b/modules/audio_processing/agc/clipping_predictor_level_buffer_unittest.cc
new file mode 100644
index 0000000000..7e594a1eca
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor_level_buffer_unittest.cc
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_processing/agc/clipping_predictor_level_buffer.h"
+
+#include <algorithm>
+
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Eq;
+using ::testing::Optional;
+
+class ClippingPredictorLevelBufferParametrization
+ : public ::testing::TestWithParam<int> {
+ protected:
+ int capacity() const { return GetParam(); }
+};
+
+TEST_P(ClippingPredictorLevelBufferParametrization, CheckEmptyBufferSize) {
+ ClippingPredictorLevelBuffer buffer(capacity());
+ EXPECT_EQ(buffer.Capacity(), std::max(capacity(), 1));
+ EXPECT_EQ(buffer.Size(), 0);
+}
+
+TEST_P(ClippingPredictorLevelBufferParametrization, CheckHalfEmptyBufferSize) {
+ ClippingPredictorLevelBuffer buffer(capacity());
+ for (int i = 0; i < buffer.Capacity() / 2; ++i) {
+ buffer.Push({2, 4});
+ }
+ EXPECT_EQ(buffer.Capacity(), std::max(capacity(), 1));
+ EXPECT_EQ(buffer.Size(), std::max(capacity(), 1) / 2);
+}
+
+TEST_P(ClippingPredictorLevelBufferParametrization, CheckFullBufferSize) {
+ ClippingPredictorLevelBuffer buffer(capacity());
+ for (int i = 0; i < buffer.Capacity(); ++i) {
+ buffer.Push({2, 4});
+ }
+ EXPECT_EQ(buffer.Capacity(), std::max(capacity(), 1));
+ EXPECT_EQ(buffer.Size(), std::max(capacity(), 1));
+}
+
+TEST_P(ClippingPredictorLevelBufferParametrization, CheckLargeBufferSize) {
+ ClippingPredictorLevelBuffer buffer(capacity());
+ for (int i = 0; i < 2 * buffer.Capacity(); ++i) {
+ buffer.Push({2, 4});
+ }
+ EXPECT_EQ(buffer.Capacity(), std::max(capacity(), 1));
+ EXPECT_EQ(buffer.Size(), std::max(capacity(), 1));
+}
+
+TEST_P(ClippingPredictorLevelBufferParametrization, CheckSizeAfterReset) {
+ ClippingPredictorLevelBuffer buffer(capacity());
+ buffer.Push({1, 1});
+ buffer.Push({1, 1});
+ buffer.Reset();
+ EXPECT_EQ(buffer.Capacity(), std::max(capacity(), 1));
+ EXPECT_EQ(buffer.Size(), 0);
+ buffer.Push({1, 1});
+ EXPECT_EQ(buffer.Capacity(), std::max(capacity(), 1));
+ EXPECT_EQ(buffer.Size(), 1);
+}
+
+INSTANTIATE_TEST_SUITE_P(ClippingPredictorLevelBufferTest,
+ ClippingPredictorLevelBufferParametrization,
+ ::testing::Values(-1, 0, 1, 123));
+
+TEST(ClippingPredictorLevelBufferTest, CheckMetricsAfterFullBuffer) {
+ ClippingPredictorLevelBuffer buffer(/*capacity=*/2);
+ buffer.Push({1, 2});
+ buffer.Push({3, 6});
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/1),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{3, 6})));
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/1, /*num_items=*/1),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{1, 2})));
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/2),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{2, 6})));
+}
+
+TEST(ClippingPredictorLevelBufferTest, CheckMetricsAfterPushBeyondCapacity) {
+ ClippingPredictorLevelBuffer buffer(/*capacity=*/2);
+ buffer.Push({1, 1});
+ buffer.Push({3, 6});
+ buffer.Push({5, 10});
+ buffer.Push({7, 14});
+ buffer.Push({6, 12});
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/1),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{6, 12})));
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/1, /*num_items=*/1),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{7, 14})));
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/2),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{6.5f, 14})));
+}
+
+TEST(ClippingPredictorLevelBufferTest, CheckMetricsAfterTooFewItems) {
+ ClippingPredictorLevelBuffer buffer(/*capacity=*/4);
+ buffer.Push({1, 2});
+ buffer.Push({3, 6});
+ EXPECT_EQ(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/3),
+ absl::nullopt);
+ EXPECT_EQ(buffer.ComputePartialMetrics(/*delay=*/2, /*num_items=*/1),
+ absl::nullopt);
+}
+
+TEST(ClippingPredictorLevelBufferTest, CheckMetricsAfterReset) {
+ ClippingPredictorLevelBuffer buffer(/*capacity=*/2);
+ buffer.Push({1, 2});
+ buffer.Reset();
+ buffer.Push({5, 10});
+ buffer.Push({7, 14});
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/1),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{7, 14})));
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/2),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{6, 14})));
+ EXPECT_THAT(buffer.ComputePartialMetrics(/*delay=*/1, /*num_items=*/1),
+ Optional(Eq(ClippingPredictorLevelBuffer::Level{5, 10})));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/modules/audio_processing/agc/clipping_predictor_unittest.cc b/modules/audio_processing/agc/clipping_predictor_unittest.cc
new file mode 100644
index 0000000000..e848e1a724
--- /dev/null
+++ b/modules/audio_processing/agc/clipping_predictor_unittest.cc
@@ -0,0 +1,491 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_processing/agc/clipping_predictor.h"
+
+#include <cstdint>
+#include <limits>
+#include <tuple>
+
+#include "rtc_base/checks.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Eq;
+using ::testing::Optional;
+using ClippingPredictorConfig = AudioProcessing::Config::GainController1::
+ AnalogGainController::ClippingPredictor;
+using ClippingPredictorMode = AudioProcessing::Config::GainController1::
+ AnalogGainController::ClippingPredictor::Mode;
+
+constexpr int kSampleRateHz = 32000;
+constexpr int kNumChannels = 1;
+constexpr int kSamplesPerChannel = kSampleRateHz / 100;
+constexpr int kMaxMicLevel = 255;
+constexpr int kMinMicLevel = 12;
+constexpr int kDefaultClippedLevelStep = 15;
+constexpr float kMaxSampleS16 =
+ static_cast<float>(std::numeric_limits<int16_t>::max());
+
+// Threshold in dB corresponding to a signal with an amplitude equal to 99% of
+// the dynamic range - i.e., computed as `20*log10(0.99)`.
+constexpr float kClippingThresholdDb = -0.08729610804900176f;
+
+void CallAnalyze(int num_calls,
+ const AudioFrameView<const float>& frame,
+ ClippingPredictor& predictor) {
+ for (int i = 0; i < num_calls; ++i) {
+ predictor.Analyze(frame);
+ }
+}
+
+// Creates and analyzes an audio frame with a non-zero (approx. 4.15dB) crest
+// factor.
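+// Rough check of the 4.15 dB figure, derived from the sample pattern below (a
+// repeated ramp of 0.1, 0.2, ..., 1.0 times the peak): the peak-to-RMS ratio
+// is 1 / sqrt((0.01 + 0.04 + ... + 1.0) / 10) = 1 / sqrt(0.385) and
+// 20 * log10(1 / sqrt(0.385)) is approximately 4.15 dB.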
+void AnalyzeNonZeroCrestFactorAudio(int num_calls,
+ int num_channels,
+ float peak_ratio,
+ ClippingPredictor& predictor) {
+ RTC_DCHECK_GT(num_calls, 0);
+ RTC_DCHECK_GT(num_channels, 0);
+ RTC_DCHECK_LE(peak_ratio, 1.0f);
+ std::vector<float*> audio(num_channels);
+ std::vector<float> audio_data(num_channels * kSamplesPerChannel, 0.0f);
+ for (int channel = 0; channel < num_channels; ++channel) {
+ audio[channel] = &audio_data[channel * kSamplesPerChannel];
+ for (int sample = 0; sample < kSamplesPerChannel; sample += 10) {
+ audio[channel][sample] = 0.1f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 1] = 0.2f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 2] = 0.3f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 3] = 0.4f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 4] = 0.5f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 5] = 0.6f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 6] = 0.7f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 7] = 0.8f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 8] = 0.9f * peak_ratio * kMaxSampleS16;
+ audio[channel][sample + 9] = 1.0f * peak_ratio * kMaxSampleS16;
+ }
+ }
+ AudioFrameView<const float> frame(audio.data(), num_channels,
+ kSamplesPerChannel);
+ CallAnalyze(num_calls, frame, predictor);
+}
+
+void CheckChannelEstimatesWithValue(int num_channels,
+ int level,
+ int default_step,
+ int min_mic_level,
+ int max_mic_level,
+ const ClippingPredictor& predictor,
+ int expected) {
+ for (int i = 0; i < num_channels; ++i) {
+ SCOPED_TRACE(i);
+ EXPECT_THAT(predictor.EstimateClippedLevelStep(
+ i, level, default_step, min_mic_level, max_mic_level),
+ Optional(Eq(expected)));
+ }
+}
+
+void CheckChannelEstimatesWithoutValue(int num_channels,
+ int level,
+ int default_step,
+ int min_mic_level,
+ int max_mic_level,
+ const ClippingPredictor& predictor) {
+ for (int i = 0; i < num_channels; ++i) {
+ SCOPED_TRACE(i);
+ EXPECT_EQ(predictor.EstimateClippedLevelStep(i, level, default_step,
+ min_mic_level, max_mic_level),
+ absl::nullopt);
+ }
+}
+
+// Creates and analyzes an audio frame with a zero crest factor.
+void AnalyzeZeroCrestFactorAudio(int num_calls,
+ int num_channels,
+ float peak_ratio,
+ ClippingPredictor& predictor) {
+ RTC_DCHECK_GT(num_calls, 0);
+ RTC_DCHECK_GT(num_channels, 0);
+ RTC_DCHECK_LE(peak_ratio, 1.f);
+ std::vector<float*> audio(num_channels);
+ std::vector<float> audio_data(num_channels * kSamplesPerChannel, 0.f);
+ for (int channel = 0; channel < num_channels; ++channel) {
+ audio[channel] = &audio_data[channel * kSamplesPerChannel];
+ for (int sample = 0; sample < kSamplesPerChannel; ++sample) {
+ audio[channel][sample] = peak_ratio * kMaxSampleS16;
+ }
+ }
+ auto frame = AudioFrameView<const float>(audio.data(), num_channels,
+ kSamplesPerChannel);
+ CallAnalyze(num_calls, frame, predictor);
+}
+
+TEST(ClippingPeakPredictorTest, NoPredictorCreated) {
+ auto predictor =
+ CreateClippingPredictor(kNumChannels, /*config=*/{/*enabled=*/false});
+ EXPECT_FALSE(predictor);
+}
+
+TEST(ClippingPeakPredictorTest, ClippingEventPredictionCreated) {
+  // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ auto predictor = CreateClippingPredictor(
+ kNumChannels,
+ /*config=*/{/*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kClippingEventPrediction});
+ EXPECT_TRUE(predictor);
+}
+
+TEST(ClippingPeakPredictorTest, AdaptiveStepClippingPeakPredictionCreated) {
+  // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ auto predictor = CreateClippingPredictor(
+ kNumChannels, /*config=*/{
+ /*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction});
+ EXPECT_TRUE(predictor);
+}
+
+TEST(ClippingPeakPredictorTest, FixedStepClippingPeakPredictionCreated) {
+  // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ auto predictor = CreateClippingPredictor(
+ kNumChannels, /*config=*/{
+ /*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kFixedStepClippingPeakPrediction});
+ EXPECT_TRUE(predictor);
+}
+
+class ClippingPredictorParameterization
+ : public ::testing::TestWithParam<std::tuple<int, int, int, int>> {
+ protected:
+ int num_channels() const { return std::get<0>(GetParam()); }
+ ClippingPredictorConfig GetConfig(ClippingPredictorMode mode) const {
+    // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ return {/*enabled=*/true,
+ /*mode=*/mode,
+ /*window_length=*/std::get<1>(GetParam()),
+ /*reference_window_length=*/std::get<2>(GetParam()),
+ /*reference_window_delay=*/std::get<3>(GetParam()),
+ /*clipping_threshold=*/-1.0f,
+ /*crest_factor_margin=*/0.5f};
+ }
+};
+
+TEST_P(ClippingPredictorParameterization,
+ CheckClippingEventPredictorEstimateAfterCrestFactorDrop) {
+ const ClippingPredictorConfig config =
+ GetConfig(ClippingPredictorMode::kClippingEventPrediction);
+ if (config.reference_window_length + config.reference_window_delay <=
+ config.window_length) {
+ return;
+ }
+ auto predictor = CreateClippingPredictor(num_channels(), config);
+ AnalyzeNonZeroCrestFactorAudio(
+ /*num_calls=*/config.reference_window_length +
+ config.reference_window_delay - config.window_length,
+ num_channels(), /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(num_channels(), /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeZeroCrestFactorAudio(config.window_length, num_channels(),
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithValue(
+ num_channels(), /*level=*/255, kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor, kDefaultClippedLevelStep);
+}
+
+TEST_P(ClippingPredictorParameterization,
+ CheckClippingEventPredictorNoEstimateAfterConstantCrestFactor) {
+ const ClippingPredictorConfig config =
+ GetConfig(ClippingPredictorMode::kClippingEventPrediction);
+ if (config.reference_window_length + config.reference_window_delay <=
+ config.window_length) {
+ return;
+ }
+ auto predictor = CreateClippingPredictor(num_channels(), config);
+ AnalyzeNonZeroCrestFactorAudio(
+ /*num_calls=*/config.reference_window_length +
+ config.reference_window_delay - config.window_length,
+ num_channels(), /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(num_channels(), /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/config.window_length,
+ num_channels(),
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(num_channels(), /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+}
+
+TEST_P(ClippingPredictorParameterization,
+ CheckClippingPeakPredictorEstimateAfterHighCrestFactor) {
+ const ClippingPredictorConfig config =
+ GetConfig(ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction);
+ if (config.reference_window_length + config.reference_window_delay <=
+ config.window_length) {
+ return;
+ }
+ auto predictor = CreateClippingPredictor(num_channels(), config);
+ AnalyzeNonZeroCrestFactorAudio(
+ /*num_calls=*/config.reference_window_length +
+ config.reference_window_delay - config.window_length,
+ num_channels(), /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(num_channels(), /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/config.window_length,
+ num_channels(),
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithValue(
+ num_channels(), /*level=*/255, kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor, kDefaultClippedLevelStep);
+}
+
+TEST_P(ClippingPredictorParameterization,
+ CheckClippingPeakPredictorNoEstimateAfterLowCrestFactor) {
+ const ClippingPredictorConfig config =
+ GetConfig(ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction);
+ if (config.reference_window_length + config.reference_window_delay <=
+ config.window_length) {
+ return;
+ }
+ auto predictor = CreateClippingPredictor(num_channels(), config);
+ AnalyzeZeroCrestFactorAudio(
+ /*num_calls=*/config.reference_window_length +
+ config.reference_window_delay - config.window_length,
+ num_channels(), /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(num_channels(), /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/config.window_length,
+ num_channels(),
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(num_channels(), /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+}
+
+INSTANTIATE_TEST_SUITE_P(GainController1ClippingPredictor,
+ ClippingPredictorParameterization,
+ ::testing::Combine(::testing::Values(1, 5),
+ ::testing::Values(1, 5, 10),
+ ::testing::Values(1, 5),
+ ::testing::Values(0, 1, 5)));
+
+class ClippingEventPredictorParameterization
+ : public ::testing::TestWithParam<std::tuple<float, float>> {
+ protected:
+ ClippingPredictorConfig GetConfig() const {
+    // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ return {/*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kClippingEventPrediction,
+ /*window_length=*/5,
+ /*reference_window_length=*/5,
+ /*reference_window_delay=*/5,
+ /*clipping_threshold=*/std::get<0>(GetParam()),
+ /*crest_factor_margin=*/std::get<1>(GetParam())};
+ }
+};
+
+TEST_P(ClippingEventPredictorParameterization,
+ CheckEstimateAfterCrestFactorDrop) {
+ const ClippingPredictorConfig config = GetConfig();
+ auto predictor = CreateClippingPredictor(kNumChannels, config);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/config.reference_window_length,
+ kNumChannels, /*peak_ratio=*/0.99f,
+ *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeZeroCrestFactorAudio(config.window_length, kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ // TODO(bugs.webrtc.org/12774): Add clarifying comment.
+ // TODO(bugs.webrtc.org/12774): Remove 4.15f threshold and split tests.
+ if (config.clipping_threshold < kClippingThresholdDb &&
+ config.crest_factor_margin < 4.15f) {
+ CheckChannelEstimatesWithValue(
+ kNumChannels, /*level=*/255, kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor, kDefaultClippedLevelStep);
+ } else {
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(GainController1ClippingPredictor,
+ ClippingEventPredictorParameterization,
+ ::testing::Combine(::testing::Values(-1.0f, 0.0f),
+ ::testing::Values(3.0f, 4.16f)));
+
+class ClippingPredictorModeParameterization
+ : public ::testing::TestWithParam<ClippingPredictorMode> {
+ protected:
+ ClippingPredictorConfig GetConfig(float clipping_threshold_dbfs) const {
+    // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ return {/*enabled=*/true,
+ /*mode=*/GetParam(),
+ /*window_length=*/5,
+ /*reference_window_length=*/5,
+ /*reference_window_delay=*/5,
+ /*clipping_threshold=*/clipping_threshold_dbfs,
+ /*crest_factor_margin=*/3.0f};
+ }
+};
+
+TEST_P(ClippingPredictorModeParameterization,
+ CheckEstimateAfterHighCrestFactorWithNoClippingMargin) {
+ const ClippingPredictorConfig config = GetConfig(
+ /*clipping_threshold_dbfs=*/0.0f);
+ auto predictor = CreateClippingPredictor(kNumChannels, config);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/config.reference_window_length,
+ kNumChannels, /*peak_ratio=*/0.99f,
+ *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeZeroCrestFactorAudio(config.window_length, kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ // Since the clipping threshold is set to 0 dBFS, `EstimateClippedLevelStep()`
+ // is expected to return an unavailable value.
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+}
+
+TEST_P(ClippingPredictorModeParameterization,
+ CheckEstimateAfterHighCrestFactorWithClippingMargin) {
+ const ClippingPredictorConfig config =
+ GetConfig(/*clipping_threshold_dbfs=*/-1.0f);
+ auto predictor = CreateClippingPredictor(kNumChannels, config);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/config.reference_window_length,
+ kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeZeroCrestFactorAudio(config.window_length, kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ // TODO(bugs.webrtc.org/12774): Add clarifying comment.
+ const float expected_step =
+ config.mode == ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction
+ ? 17
+ : kDefaultClippedLevelStep;
+ CheckChannelEstimatesWithValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor, expected_step);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ GainController1ClippingPredictor,
+ ClippingPredictorModeParameterization,
+ ::testing::Values(
+ ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction,
+ ClippingPredictorMode::kFixedStepClippingPeakPrediction));
+
+TEST(ClippingEventPredictorTest, CheckEstimateAfterReset) {
+  // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ constexpr ClippingPredictorConfig kConfig{
+ /*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kClippingEventPrediction,
+ /*window_length=*/5,
+ /*reference_window_length=*/5,
+ /*reference_window_delay=*/5,
+ /*clipping_threshold=*/-1.0f,
+ /*crest_factor_margin=*/3.0f};
+ auto predictor = CreateClippingPredictor(kNumChannels, kConfig);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/kConfig.reference_window_length,
+ kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ predictor->Reset();
+ AnalyzeZeroCrestFactorAudio(kConfig.window_length, kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+}
+
+TEST(ClippingPeakPredictorTest, CheckNoEstimateAfterReset) {
+  // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ constexpr ClippingPredictorConfig kConfig{
+ /*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction,
+ /*window_length=*/5,
+ /*reference_window_length=*/5,
+ /*reference_window_delay=*/5,
+ /*clipping_threshold=*/-1.0f};
+ auto predictor = CreateClippingPredictor(kNumChannels, kConfig);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/kConfig.reference_window_length,
+ kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ predictor->Reset();
+ AnalyzeZeroCrestFactorAudio(kConfig.window_length, kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+}
+
+TEST(ClippingPeakPredictorTest, CheckAdaptiveStepEstimate) {
+  // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ constexpr ClippingPredictorConfig kConfig{
+ /*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kAdaptiveStepClippingPeakPrediction,
+ /*window_length=*/5,
+ /*reference_window_length=*/5,
+ /*reference_window_delay=*/5,
+ /*clipping_threshold=*/-1.0f};
+ auto predictor = CreateClippingPredictor(kNumChannels, kConfig);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/kConfig.reference_window_length,
+ kNumChannels, /*peak_ratio=*/0.99f,
+ *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeZeroCrestFactorAudio(kConfig.window_length, kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor, /*expected=*/17);
+}
+
+TEST(ClippingPeakPredictorTest, CheckFixedStepEstimate) {
+  // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed.
+ constexpr ClippingPredictorConfig kConfig{
+ /*enabled=*/true,
+ /*mode=*/ClippingPredictorMode::kFixedStepClippingPeakPrediction,
+ /*window_length=*/5,
+ /*reference_window_length=*/5,
+ /*reference_window_delay=*/5,
+ /*clipping_threshold=*/-1.0f};
+ auto predictor = CreateClippingPredictor(kNumChannels, kConfig);
+ AnalyzeNonZeroCrestFactorAudio(/*num_calls=*/kConfig.reference_window_length,
+ kNumChannels, /*peak_ratio=*/0.99f,
+ *predictor);
+ CheckChannelEstimatesWithoutValue(kNumChannels, /*level=*/255,
+ kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor);
+ AnalyzeZeroCrestFactorAudio(kConfig.window_length, kNumChannels,
+ /*peak_ratio=*/0.99f, *predictor);
+ CheckChannelEstimatesWithValue(
+ kNumChannels, /*level=*/255, kDefaultClippedLevelStep, kMinMicLevel,
+ kMaxMicLevel, *predictor, kDefaultClippedLevelStep);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/modules/audio_processing/agc2/adaptive_agc.cc b/modules/audio_processing/agc2/adaptive_agc.cc
index 8bf192e77f..3fc9008db1 100644
--- a/modules/audio_processing/agc2/adaptive_agc.cc
+++ b/modules/audio_processing/agc2/adaptive_agc.cc
@@ -25,10 +25,6 @@ using AdaptiveDigitalConfig =
using NoiseEstimatorType =
AudioProcessing::Config::GainController2::NoiseEstimator;
-constexpr int kGainApplierAdjacentSpeechFramesThreshold = 1;
-constexpr float kMaxGainChangePerSecondDb = 3.0f;
-constexpr float kMaxOutputNoiseLevelDbfs = -50.0f;
-
// Detects the available CPU features and applies any kill-switches.
AvailableCpuFeatures GetAllowedCpuFeatures(
const AdaptiveDigitalConfig& config) {
@@ -56,29 +52,8 @@ std::unique_ptr<NoiseLevelEstimator> CreateNoiseLevelEstimator(
}
}
-constexpr NoiseEstimatorType kDefaultNoiseLevelEstimatorType =
- NoiseEstimatorType::kNoiseFloor;
-
} // namespace
-AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper)
- : speech_level_estimator_(apm_data_dumper),
- gain_controller_(apm_data_dumper,
- kGainApplierAdjacentSpeechFramesThreshold,
- kMaxGainChangePerSecondDb,
- kMaxOutputNoiseLevelDbfs),
- apm_data_dumper_(apm_data_dumper),
- noise_level_estimator_(
- CreateNoiseLevelEstimator(kDefaultNoiseLevelEstimatorType,
- apm_data_dumper)),
- saturation_protector_(
- CreateSaturationProtector(kSaturationProtectorInitialHeadroomDb,
- kSaturationProtectorExtraHeadroomDb,
- kGainApplierAdjacentSpeechFramesThreshold,
- apm_data_dumper)) {
- RTC_DCHECK(apm_data_dumper);
-}
-
AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper,
const AdaptiveDigitalConfig& config)
: speech_level_estimator_(apm_data_dumper,
@@ -87,7 +62,8 @@ AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper,
gain_controller_(apm_data_dumper,
config.adjacent_speech_frames_threshold,
config.max_gain_change_db_per_second,
- config.max_output_noise_level_dbfs),
+ config.max_output_noise_level_dbfs,
+ config.dry_run),
apm_data_dumper_(apm_data_dumper),
noise_level_estimator_(
CreateNoiseLevelEstimator(config.noise_estimator, apm_data_dumper)),
@@ -106,6 +82,10 @@ AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper,
AdaptiveAgc::~AdaptiveAgc() = default;
+void AdaptiveAgc::Initialize(int sample_rate_hz, int num_channels) {
+ gain_controller_.Initialize(sample_rate_hz, num_channels);
+}
+
void AdaptiveAgc::Process(AudioFrameView<float> frame, float limiter_envelope) {
AdaptiveDigitalGainApplier::FrameInfo info;
diff --git a/modules/audio_processing/agc2/adaptive_agc.h b/modules/audio_processing/agc2/adaptive_agc.h
index fe814446ff..43c7787e36 100644
--- a/modules/audio_processing/agc2/adaptive_agc.h
+++ b/modules/audio_processing/agc2/adaptive_agc.h
@@ -25,19 +25,21 @@ namespace webrtc {
class ApmDataDumper;
// Adaptive digital gain controller.
-// TODO(crbug.com/webrtc/7494): Unify with `AdaptiveDigitalGainApplier`.
+// TODO(crbug.com/webrtc/7494): Rename to `AdaptiveDigitalGainController`.
class AdaptiveAgc {
public:
- explicit AdaptiveAgc(ApmDataDumper* apm_data_dumper);
- // TODO(crbug.com/webrtc/7494): Remove ctor above.
AdaptiveAgc(
ApmDataDumper* apm_data_dumper,
const AudioProcessing::Config::GainController2::AdaptiveDigital& config);
~AdaptiveAgc();
+ void Initialize(int sample_rate_hz, int num_channels);
+
+ // TODO(crbug.com/webrtc/7494): Add `SetLimiterEnvelope()`.
+
// Analyzes `frame` and applies a digital adaptive gain to it. Takes into
// account the envelope measured by the limiter.
- // TODO(crbug.com/webrtc/7494): Make the class depend on the limiter.
+ // TODO(crbug.com/webrtc/7494): Remove `limiter_envelope`.
void Process(AudioFrameView<float> frame, float limiter_envelope);
// Handles a gain change applied to the input signal (e.g., analog gain).
diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc b/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc
index 8a8a7fdc9b..e59b110efe 100644
--- a/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc
+++ b/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc
@@ -92,13 +92,28 @@ float ComputeGainChangeThisFrameDb(float target_gain_db,
max_gain_increase_db);
}
+// Copies the (multichannel) audio samples from `src` into `dst`.
+void CopyAudio(AudioFrameView<const float> src,
+ std::vector<std::vector<float>>& dst) {
+ RTC_DCHECK_GT(src.num_channels(), 0);
+ RTC_DCHECK_GT(src.samples_per_channel(), 0);
+ RTC_DCHECK_EQ(dst.size(), src.num_channels());
+ for (size_t c = 0; c < src.num_channels(); ++c) {
+ rtc::ArrayView<const float> channel_view = src.channel(c);
+ RTC_DCHECK_EQ(channel_view.size(), src.samples_per_channel());
+ RTC_DCHECK_EQ(dst[c].size(), src.samples_per_channel());
+ std::copy(channel_view.begin(), channel_view.end(), dst[c].begin());
+ }
+}
+
} // namespace
AdaptiveDigitalGainApplier::AdaptiveDigitalGainApplier(
ApmDataDumper* apm_data_dumper,
int adjacent_speech_frames_threshold,
float max_gain_change_db_per_second,
- float max_output_noise_level_dbfs)
+ float max_output_noise_level_dbfs,
+ bool dry_run)
: apm_data_dumper_(apm_data_dumper),
gain_applier_(
/*hard_clip_samples=*/false,
@@ -107,13 +122,39 @@ AdaptiveDigitalGainApplier::AdaptiveDigitalGainApplier(
max_gain_change_db_per_10ms_(max_gain_change_db_per_second *
kFrameDurationMs / 1000.f),
max_output_noise_level_dbfs_(max_output_noise_level_dbfs),
+ dry_run_(dry_run),
calls_since_last_gain_log_(0),
frames_to_gain_increase_allowed_(adjacent_speech_frames_threshold_),
last_gain_db_(kInitialAdaptiveDigitalGainDb) {
- RTC_DCHECK_GT(max_gain_change_db_per_second, 0.f);
+ RTC_DCHECK_GT(max_gain_change_db_per_second, 0.0f);
RTC_DCHECK_GE(frames_to_gain_increase_allowed_, 1);
- RTC_DCHECK_GE(max_output_noise_level_dbfs_, -90.f);
- RTC_DCHECK_LE(max_output_noise_level_dbfs_, 0.f);
+ RTC_DCHECK_GE(max_output_noise_level_dbfs_, -90.0f);
+ RTC_DCHECK_LE(max_output_noise_level_dbfs_, 0.0f);
+ Initialize(/*sample_rate_hz=*/48000, /*num_channels=*/1);
+}
+
+void AdaptiveDigitalGainApplier::Initialize(int sample_rate_hz,
+ int num_channels) {
+ if (!dry_run_) {
+ return;
+ }
+ RTC_DCHECK_GT(sample_rate_hz, 0);
+ RTC_DCHECK_GT(num_channels, 0);
+ int frame_size = rtc::CheckedDivExact(sample_rate_hz, 100);
+ bool sample_rate_changed =
+ dry_run_frame_.empty() || // Handle initialization.
+ dry_run_frame_[0].size() != static_cast<size_t>(frame_size);
+ bool num_channels_changed =
+ dry_run_channels_.size() != static_cast<size_t>(num_channels);
+ if (sample_rate_changed || num_channels_changed) {
+ // Resize the multichannel audio vector and update the channel pointers.
+ dry_run_frame_.resize(num_channels);
+ dry_run_channels_.resize(num_channels);
+ for (int c = 0; c < num_channels; ++c) {
+ dry_run_frame_[c].resize(frame_size);
+ dry_run_channels_[c] = dry_run_frame_[c].data();
+ }
+ }
}
void AdaptiveDigitalGainApplier::Process(const FrameInfo& info,
@@ -174,7 +215,19 @@ void AdaptiveDigitalGainApplier::Process(const FrameInfo& info,
gain_applier_.SetGainFactor(
DbToRatio(last_gain_db_ + gain_change_this_frame_db));
}
- gain_applier_.ApplyGain(frame);
+
+ // Modify `frame` only if not running in "dry run" mode.
+ if (!dry_run_) {
+ gain_applier_.ApplyGain(frame);
+ } else {
+    // Copy `frame` so that `ApplyGain()` only modifies the copy.
+ CopyAudio(frame, dry_run_frame_);
+ RTC_DCHECK(!dry_run_channels_.empty());
+ AudioFrameView<float> frame_copy(&dry_run_channels_[0],
+ frame.num_channels(),
+ frame.samples_per_channel());
+ gain_applier_.ApplyGain(frame_copy);
+ }
// Remember that the gain has changed for the next iteration.
last_gain_db_ = last_gain_db_ + gain_change_this_frame_db;
diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_applier.h b/modules/audio_processing/agc2/adaptive_digital_gain_applier.h
index 74220fa861..8b58ea00b2 100644
--- a/modules/audio_processing/agc2/adaptive_digital_gain_applier.h
+++ b/modules/audio_processing/agc2/adaptive_digital_gain_applier.h
@@ -11,6 +11,8 @@
#ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_
#define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_
+#include <vector>
+
#include "modules/audio_processing/agc2/gain_applier.h"
#include "modules/audio_processing/include/audio_frame_view.h"
@@ -37,15 +39,18 @@ class AdaptiveDigitalGainApplier {
// frames must be observed in order to consider the sequence as speech.
// `max_gain_change_db_per_second` limits the adaptation speed (uniformly
// operated across frames). `max_output_noise_level_dbfs` limits the output
- // noise level.
+ // noise level. If `dry_run` is true, `Process()` will not modify the audio.
AdaptiveDigitalGainApplier(ApmDataDumper* apm_data_dumper,
int adjacent_speech_frames_threshold,
float max_gain_change_db_per_second,
- float max_output_noise_level_dbfs);
+ float max_output_noise_level_dbfs,
+ bool dry_run);
AdaptiveDigitalGainApplier(const AdaptiveDigitalGainApplier&) = delete;
AdaptiveDigitalGainApplier& operator=(const AdaptiveDigitalGainApplier&) =
delete;
+ void Initialize(int sample_rate_hz, int num_channels);
+
// Analyzes `info`, updates the digital gain and applies it to a 10 ms
// `frame`. Supports any sample rate supported by APM.
void Process(const FrameInfo& info, AudioFrameView<float> frame);
@@ -57,10 +62,14 @@ class AdaptiveDigitalGainApplier {
const int adjacent_speech_frames_threshold_;
const float max_gain_change_db_per_10ms_;
const float max_output_noise_level_dbfs_;
+ const bool dry_run_;
int calls_since_last_gain_log_;
int frames_to_gain_increase_allowed_;
float last_gain_db_;
+
+ std::vector<std::vector<float>> dry_run_frame_;
+ std::vector<float*> dry_run_channels_;
};
} // namespace webrtc
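
For orientation, the header above adds a `dry_run` constructor flag and an `Initialize(sample_rate_hz, num_channels)` call; per its comment, `Process()` leaves the audio untouched when `dry_run` is true. A minimal sketch of the intended call sequence follows (not part of the patch); it uses only names visible in this diff, `DryRunSketch` is a hypothetical helper, and `FrameInfo` fields other than `speech_level_dbfs` are not shown here, so the struct is value-initialized for illustration.

#include <vector>

#include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h"
#include "modules/audio_processing/include/audio_frame_view.h"
#include "modules/audio_processing/logging/apm_data_dumper.h"

void DryRunSketch() {
  webrtc::ApmDataDumper data_dumper(0);
  webrtc::AdaptiveDigitalGainApplier gain_applier(
      &data_dumper, /*adjacent_speech_frames_threshold=*/1,
      /*max_gain_change_db_per_second=*/3.0f,
      /*max_output_noise_level_dbfs=*/-50.0f, /*dry_run=*/true);
  gain_applier.Initialize(/*sample_rate_hz=*/48000, /*num_channels=*/1);

  // One 10 ms mono frame at 48 kHz.
  std::vector<float> samples(480, 0.0f);
  float* channels[] = {samples.data()};
  webrtc::AudioFrameView<float> frame(channels, /*num_channels=*/1,
                                      /*samples_per_channel=*/480);

  webrtc::AdaptiveDigitalGainApplier::FrameInfo info{};
  info.speech_level_dbfs = -60.0f;
  gain_applier.Process(info, frame);  // `samples` stays untouched in dry run.
}
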
diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc b/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc
index ee9cb02ed6..f4a23a92b9 100644
--- a/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc
+++ b/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc
@@ -48,7 +48,8 @@ struct GainApplierHelper {
&apm_data_dumper,
adjacent_speech_frames_threshold,
kMaxGainChangePerSecondDb,
- kMaxOutputNoiseLevelDbfs)) {}
+ kMaxOutputNoiseLevelDbfs,
+ /*dry_run=*/false)) {}
ApmDataDumper apm_data_dumper;
std::unique_ptr<AdaptiveDigitalGainApplier> gain_applier;
};
@@ -67,6 +68,7 @@ constexpr AdaptiveDigitalGainApplier::FrameInfo kFrameInfo{
TEST(GainController2AdaptiveGainApplier, GainApplierShouldNotCrash) {
GainApplierHelper helper;
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kStereo);
// Make one call with reasonable audio level values and settings.
VectorFloatFrame fake_audio(kStereo, kFrameLen10ms48kHz, 10000.0f);
AdaptiveDigitalGainApplier::FrameInfo info = kFrameInfo;
@@ -80,6 +82,7 @@ TEST(GainController2AdaptiveGainApplier, MaxGainApplied) {
static_cast<int>(kMaxGainDb / kMaxGainChangePerFrameDb) + 10;
GainApplierHelper helper;
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/8000, kMono);
AdaptiveDigitalGainApplier::FrameInfo info = kFrameInfo;
info.speech_level_dbfs = -60.0f;
float applied_gain;
@@ -94,6 +97,7 @@ TEST(GainController2AdaptiveGainApplier, MaxGainApplied) {
TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) {
GainApplierHelper helper;
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/8000, kMono);
constexpr float initial_level_dbfs = -25.0f;
// A few extra frames for safety.
@@ -131,6 +135,7 @@ TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) {
TEST(GainController2AdaptiveGainApplier, GainIsRampedInAFrame) {
GainApplierHelper helper;
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kMono);
constexpr float initial_level_dbfs = -25.0f;
@@ -155,6 +160,7 @@ TEST(GainController2AdaptiveGainApplier, GainIsRampedInAFrame) {
TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) {
GainApplierHelper helper;
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kMono);
constexpr float initial_level_dbfs = -25.0f;
constexpr int num_initial_frames =
@@ -184,6 +190,7 @@ TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) {
TEST(GainController2GainApplier, CanHandlePositiveSpeechLevels) {
GainApplierHelper helper;
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kStereo);
// Make one call with positive audio level values and settings.
VectorFloatFrame fake_audio(kStereo, kFrameLen10ms48kHz, 10000.0f);
@@ -194,6 +201,7 @@ TEST(GainController2GainApplier, CanHandlePositiveSpeechLevels) {
TEST(GainController2GainApplier, AudioLevelLimitsGain) {
GainApplierHelper helper;
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kMono);
constexpr float initial_level_dbfs = -25.0f;
constexpr int num_initial_frames =
@@ -231,6 +239,7 @@ TEST_P(AdaptiveDigitalGainApplierTest,
DoNotIncreaseGainWithTooFewSpeechFrames) {
const int adjacent_speech_frames_threshold = AdjacentSpeechFramesThreshold();
GainApplierHelper helper(adjacent_speech_frames_threshold);
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kMono);
float prev_gain = 0.0f;
for (int i = 0; i < adjacent_speech_frames_threshold; ++i) {
@@ -248,6 +257,7 @@ TEST_P(AdaptiveDigitalGainApplierTest,
TEST_P(AdaptiveDigitalGainApplierTest, IncreaseGainWithEnoughSpeechFrames) {
const int adjacent_speech_frames_threshold = AdjacentSpeechFramesThreshold();
GainApplierHelper helper(adjacent_speech_frames_threshold);
+ helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kMono);
float prev_gain = 0.0f;
for (int i = 0; i < adjacent_speech_frames_threshold; ++i) {
@@ -269,5 +279,68 @@ INSTANTIATE_TEST_SUITE_P(GainController2,
AdaptiveDigitalGainApplierTest,
::testing::Values(1, 7, 31));
+// Checks that the input is never modified when running in dry run mode.
+TEST(GainController2GainApplier, DryRunDoesNotChangeInput) {
+ ApmDataDumper apm_data_dumper(0);
+ AdaptiveDigitalGainApplier gain_applier(
+ &apm_data_dumper, /*adjacent_speech_frames_threshold=*/1,
+ kMaxGainChangePerSecondDb, kMaxOutputNoiseLevelDbfs, /*dry_run=*/true);
+  // Simulate an input signal with a low speech level.
+ AdaptiveDigitalGainApplier::FrameInfo info = kFrameInfo;
+ info.speech_level_dbfs = -60.0f;
+ // Allow enough time to reach the maximum gain.
+ constexpr int kNumFramesToAdapt =
+ static_cast<int>(kMaxGainDb / kMaxGainChangePerFrameDb) + 10;
+ constexpr float kPcmSamples = 123.456f;
+ // Run the gain applier and check that the PCM samples are not modified.
+ gain_applier.Initialize(/*sample_rate_hz=*/8000, kMono);
+ for (int i = 0; i < kNumFramesToAdapt; ++i) {
+ SCOPED_TRACE(i);
+ VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, kPcmSamples);
+ gain_applier.Process(info, fake_audio.float_frame_view());
+ EXPECT_FLOAT_EQ(fake_audio.float_frame_view().channel(0)[0], kPcmSamples);
+ }
+}
+
+// Checks that no sample is modified before and after the sample rate changes.
+TEST(GainController2GainApplier, DryRunHandlesSampleRateChange) {
+ ApmDataDumper apm_data_dumper(0);
+ AdaptiveDigitalGainApplier gain_applier(
+ &apm_data_dumper, /*adjacent_speech_frames_threshold=*/1,
+ kMaxGainChangePerSecondDb, kMaxOutputNoiseLevelDbfs, /*dry_run=*/true);
+ AdaptiveDigitalGainApplier::FrameInfo info = kFrameInfo;
+ info.speech_level_dbfs = -60.0f;
+ constexpr float kPcmSamples = 123.456f;
+ VectorFloatFrame fake_audio_8k(kMono, kFrameLen10ms8kHz, kPcmSamples);
+ gain_applier.Initialize(/*sample_rate_hz=*/8000, kMono);
+ gain_applier.Process(info, fake_audio_8k.float_frame_view());
+ EXPECT_FLOAT_EQ(fake_audio_8k.float_frame_view().channel(0)[0], kPcmSamples);
+ gain_applier.Initialize(/*sample_rate_hz=*/48000, kMono);
+ VectorFloatFrame fake_audio_48k(kMono, kFrameLen10ms48kHz, kPcmSamples);
+ gain_applier.Process(info, fake_audio_48k.float_frame_view());
+ EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(0)[0], kPcmSamples);
+}
+
+// Checks that no sample is modified before and after the number of channels
+// changes.
+TEST(GainController2GainApplier, DryRunHandlesNumChannelsChange) {
+ ApmDataDumper apm_data_dumper(0);
+ AdaptiveDigitalGainApplier gain_applier(
+ &apm_data_dumper, /*adjacent_speech_frames_threshold=*/1,
+ kMaxGainChangePerSecondDb, kMaxOutputNoiseLevelDbfs, /*dry_run=*/true);
+ AdaptiveDigitalGainApplier::FrameInfo info = kFrameInfo;
+ info.speech_level_dbfs = -60.0f;
+ constexpr float kPcmSamples = 123.456f;
+ VectorFloatFrame fake_audio_8k(kMono, kFrameLen10ms8kHz, kPcmSamples);
+ gain_applier.Initialize(/*sample_rate_hz=*/8000, kMono);
+ gain_applier.Process(info, fake_audio_8k.float_frame_view());
+ EXPECT_FLOAT_EQ(fake_audio_8k.float_frame_view().channel(0)[0], kPcmSamples);
+ VectorFloatFrame fake_audio_48k(kStereo, kFrameLen10ms8kHz, kPcmSamples);
+ gain_applier.Initialize(/*sample_rate_hz=*/8000, kStereo);
+ gain_applier.Process(info, fake_audio_48k.float_frame_view());
+ EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(0)[0], kPcmSamples);
+ EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(1)[0], kPcmSamples);
+}
+
} // namespace
} // namespace webrtc
diff --git a/modules/audio_processing/agc2/agc2_common.h b/modules/audio_processing/agc2/agc2_common.h
index 0f806d3938..adb1614926 100644
--- a/modules/audio_processing/agc2/agc2_common.h
+++ b/modules/audio_processing/agc2/agc2_common.h
@@ -44,8 +44,6 @@ constexpr float kLevelEstimatorLeakFactor =
1.0f - 1.0f / kLevelEstimatorTimeToConfidenceMs;
// Robust VAD probability and speech decisions.
-constexpr int kDefaultVadRnnResetPeriodMs = 1500;
-static_assert(kDefaultVadRnnResetPeriodMs % kFrameDurationMs == 0, "");
constexpr int kDefaultLevelEstimatorAdjacentSpeechFramesThreshold = 12;
// Saturation Protector settings.
diff --git a/modules/audio_processing/agc2/vad_with_level.cc b/modules/audio_processing/agc2/vad_with_level.cc
index 034f2b6ac0..9747ca2370 100644
--- a/modules/audio_processing/agc2/vad_with_level.cc
+++ b/modules/audio_processing/agc2/vad_with_level.cc
@@ -67,10 +67,6 @@ class Vad : public VoiceActivityDetector {
} // namespace
-VadLevelAnalyzer::VadLevelAnalyzer()
- : VadLevelAnalyzer(kDefaultVadRnnResetPeriodMs, GetAvailableCpuFeatures()) {
-}
-
VadLevelAnalyzer::VadLevelAnalyzer(int vad_reset_period_ms,
const AvailableCpuFeatures& cpu_features)
: VadLevelAnalyzer(vad_reset_period_ms,
diff --git a/modules/audio_processing/agc2/vad_with_level.h b/modules/audio_processing/agc2/vad_with_level.h
index 7cd93d6f2b..8d2ae45762 100644
--- a/modules/audio_processing/agc2/vad_with_level.h
+++ b/modules/audio_processing/agc2/vad_with_level.h
@@ -37,8 +37,6 @@ class VadLevelAnalyzer {
virtual float ComputeProbability(AudioFrameView<const float> frame) = 0;
};
- // Ctor. Uses the default VAD with the default settings.
- VadLevelAnalyzer();
// Ctor. `vad_reset_period_ms` indicates the period in milliseconds to call
// `VadLevelAnalyzer::Reset()`; it must be equal to or greater than the
// duration of two frames. Uses `cpu_features` to instantiate the default VAD.
diff --git a/modules/audio_processing/agc2/vad_with_level_unittest.cc b/modules/audio_processing/agc2/vad_with_level_unittest.cc
index 99b0136376..ec8e476965 100644
--- a/modules/audio_processing/agc2/vad_with_level_unittest.cc
+++ b/modules/audio_processing/agc2/vad_with_level_unittest.cc
@@ -71,16 +71,16 @@ struct FrameWithView {
const AudioFrameView<const float> view;
};
-TEST(GainController2VadLevelAnalyzer, PeakLevelGreaterThanRmsLevel) {
+TEST(GainController2VadLevelAnalyzer, RmsLessThanPeakLevel) {
+ auto analyzer = CreateVadLevelAnalyzerWithMockVad(
+ /*vad_reset_period_ms=*/1500,
+ /*speech_probabilities=*/{1.0f},
+ /*expected_vad_reset_calls=*/0);
// Handcrafted frame so that the average is lower than the peak value.
FrameWithView frame(1000.0f); // Constant frame.
frame.samples[10] = 2000.0f; // Except for one peak value.
-
- // Compute audio frame levels (the VAD result is ignored).
- VadLevelAnalyzer analyzer;
- auto levels_and_vad_prob = analyzer.AnalyzeFrame(frame.view);
-
- // Compare peak and RMS levels.
+ // Compute audio frame levels.
+ auto levels_and_vad_prob = analyzer->AnalyzeFrame(frame.view);
EXPECT_LT(levels_and_vad_prob.rms_dbfs, levels_and_vad_prob.peak_dbfs);
}
diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc
index 12646fd243..4a1985545f 100644
--- a/modules/audio_processing/audio_processing_impl.cc
+++ b/modules/audio_processing/audio_processing_impl.cc
@@ -271,7 +271,8 @@ AudioProcessingImpl::AudioProcessingImpl(
!field_trial::IsEnabled(
"WebRTC-ApmExperimentalMultiChannelCaptureKillSwitch"),
EnforceSplitBandHpf(),
- MinimizeProcessingForUnusedOutput()),
+ MinimizeProcessingForUnusedOutput(),
+ field_trial::IsEnabled("WebRTC-TransientSuppressorForcedOff")),
capture_(),
capture_nonlocked_() {
RTC_LOG(LS_INFO) << "Injected APM submodules:"
@@ -290,8 +291,7 @@ AudioProcessingImpl::AudioProcessingImpl(
// If no echo detector is injected, use the ResidualEchoDetector.
if (!submodules_.echo_detector) {
- submodules_.echo_detector =
- new rtc::RefCountedObject<ResidualEchoDetector>();
+ submodules_.echo_detector = rtc::make_ref_counted<ResidualEchoDetector>();
}
#if !(defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS))
@@ -1733,7 +1733,8 @@ bool AudioProcessingImpl::UpdateActiveSubmoduleStates() {
}
void AudioProcessingImpl::InitializeTransientSuppressor() {
- if (config_.transient_suppression.enabled) {
+ if (config_.transient_suppression.enabled &&
+ !constants_.transient_suppressor_forced_off) {
// Attempt to create a transient suppressor, if one is not already created.
if (!submodules_.transient_suppressor) {
submodules_.transient_suppressor =
@@ -1917,7 +1918,11 @@ void AudioProcessingImpl::InitializeGainController1() {
config_.gain_controller1.analog_gain_controller.clipped_level_min,
!config_.gain_controller1.analog_gain_controller
.enable_digital_adaptive,
- capture_nonlocked_.split_rate));
+ capture_nonlocked_.split_rate,
+ config_.gain_controller1.analog_gain_controller.clipped_level_step,
+ config_.gain_controller1.analog_gain_controller.clipped_ratio_threshold,
+ config_.gain_controller1.analog_gain_controller.clipped_wait_frames,
+ config_.gain_controller1.analog_gain_controller.clipping_predictor));
if (re_creation) {
submodules_.agc_manager->set_stream_analog_level(stream_analog_level);
}
@@ -1937,7 +1942,8 @@ void AudioProcessingImpl::InitializeGainController2() {
submodules_.gain_controller2.reset(new GainController2());
}
- submodules_.gain_controller2->Initialize(proc_fullband_sample_rate_hz());
+ submodules_.gain_controller2->Initialize(proc_fullband_sample_rate_hz(),
+ num_input_channels());
submodules_.gain_controller2->ApplyConfig(config_.gain_controller2);
} else {
submodules_.gain_controller2.reset();
diff --git a/modules/audio_processing/audio_processing_impl.h b/modules/audio_processing/audio_processing_impl.h
index e08abd5797..c88cfcde92 100644
--- a/modules/audio_processing/audio_processing_impl.h
+++ b/modules/audio_processing/audio_processing_impl.h
@@ -422,16 +422,19 @@ class AudioProcessingImpl : public AudioProcessing {
ApmConstants(bool multi_channel_render_support,
bool multi_channel_capture_support,
bool enforce_split_band_hpf,
- bool minimize_processing_for_unused_output)
+ bool minimize_processing_for_unused_output,
+ bool transient_suppressor_forced_off)
: multi_channel_render_support(multi_channel_render_support),
multi_channel_capture_support(multi_channel_capture_support),
enforce_split_band_hpf(enforce_split_band_hpf),
minimize_processing_for_unused_output(
- minimize_processing_for_unused_output) {}
+ minimize_processing_for_unused_output),
+ transient_suppressor_forced_off(transient_suppressor_forced_off) {}
bool multi_channel_render_support;
bool multi_channel_capture_support;
bool enforce_split_band_hpf;
bool minimize_processing_for_unused_output;
+ bool transient_suppressor_forced_off;
} constants_;
struct ApmCaptureState {
diff --git a/modules/audio_processing/audio_processing_impl_locking_unittest.cc b/modules/audio_processing/audio_processing_impl_locking_unittest.cc
index ec165aa146..66c1251d4c 100644
--- a/modules/audio_processing/audio_processing_impl_locking_unittest.cc
+++ b/modules/audio_processing/audio_processing_impl_locking_unittest.cc
@@ -387,33 +387,6 @@ class AudioProcessingImplLockTest
void SetUp() override;
void TearDown() override;
- // Thread callback for the render thread
- static void RenderProcessorThreadFunc(void* context) {
- AudioProcessingImplLockTest* impl =
- reinterpret_cast<AudioProcessingImplLockTest*>(context);
- while (!impl->MaybeEndTest()) {
- impl->render_thread_state_.Process();
- }
- }
-
- // Thread callback for the capture thread
- static void CaptureProcessorThreadFunc(void* context) {
- AudioProcessingImplLockTest* impl =
- reinterpret_cast<AudioProcessingImplLockTest*>(context);
- while (!impl->MaybeEndTest()) {
- impl->capture_thread_state_.Process();
- }
- }
-
- // Thread callback for the stats thread
- static void StatsProcessorThreadFunc(void* context) {
- AudioProcessingImplLockTest* impl =
- reinterpret_cast<AudioProcessingImplLockTest*>(context);
- while (!impl->MaybeEndTest()) {
- impl->stats_thread_state_.Process();
- }
- }
-
// Tests whether all the required render and capture side calls have been
// done.
bool TestDone() {
@@ -423,9 +396,28 @@ class AudioProcessingImplLockTest
// Start the threads used in the test.
void StartThreads() {
- render_thread_.Start();
- capture_thread_.Start();
- stats_thread_.Start();
+ const auto attributes =
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime);
+ render_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!MaybeEndTest())
+ render_thread_state_.Process();
+ },
+ "render", attributes);
+ capture_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!MaybeEndTest()) {
+ capture_thread_state_.Process();
+ }
+ },
+ "capture", attributes);
+
+ stats_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (!MaybeEndTest())
+ stats_thread_state_.Process();
+ },
+ "stats", attributes);
}
// Event handlers for the test.
@@ -434,9 +426,6 @@ class AudioProcessingImplLockTest
rtc::Event capture_call_event_;
// Thread related variables.
- rtc::PlatformThread render_thread_;
- rtc::PlatformThread capture_thread_;
- rtc::PlatformThread stats_thread_;
mutable RandomGenerator rand_gen_;
std::unique_ptr<AudioProcessing> apm_;
@@ -445,6 +434,9 @@ class AudioProcessingImplLockTest
RenderProcessor render_thread_state_;
CaptureProcessor capture_thread_state_;
StatsProcessor stats_thread_state_;
+ rtc::PlatformThread render_thread_;
+ rtc::PlatformThread capture_thread_;
+ rtc::PlatformThread stats_thread_;
};
// Sleeps a random time between 0 and max_sleep milliseconds.
@@ -485,19 +477,7 @@ void PopulateAudioFrame(float amplitude,
}
AudioProcessingImplLockTest::AudioProcessingImplLockTest()
- : render_thread_(RenderProcessorThreadFunc,
- this,
- "render",
- rtc::kRealtimePriority),
- capture_thread_(CaptureProcessorThreadFunc,
- this,
- "capture",
- rtc::kRealtimePriority),
- stats_thread_(StatsProcessorThreadFunc,
- this,
- "stats",
- rtc::kNormalPriority),
- apm_(AudioProcessingBuilderForTesting().Create()),
+ : apm_(AudioProcessingBuilderForTesting().Create()),
render_thread_state_(kMaxFrameSize,
&rand_gen_,
&render_call_event_,
@@ -549,9 +529,6 @@ void AudioProcessingImplLockTest::SetUp() {
void AudioProcessingImplLockTest::TearDown() {
render_call_event_.Set();
capture_call_event_.Set();
- render_thread_.Stop();
- capture_thread_.Stop();
- stats_thread_.Stop();
}
StatsProcessor::StatsProcessor(RandomGenerator* rand_gen,
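
The hunks above replace the static thread functions and their `void* context` casts with `rtc::PlatformThread::SpawnJoinable()` lambdas configured through `rtc::ThreadAttributes`; the explicit `Stop()` calls in `TearDown()` go away, and the thread members are now declared after the processor state, so they are presumably torn down (and joined) first. A compact sketch of that pattern, assuming the usual `rtc_base/platform_thread.h` header; `RunWorkerUntilDone` is a hypothetical helper:

#include <atomic>

#include "rtc_base/platform_thread.h"

void RunWorkerUntilDone(std::atomic<bool>& done) {
  const auto attributes =
      rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime);
  rtc::PlatformThread worker = rtc::PlatformThread::SpawnJoinable(
      [&done] {
        while (!done.load()) {
          // One unit of work per iteration, like the Process() loops above.
        }
      },
      "worker", attributes);
  done.store(true);
  worker.Finalize();  // Joins the thread, as CallSimulator::StopThreads() does
                      // later in this patch.
}
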
diff --git a/modules/audio_processing/audio_processing_impl_unittest.cc b/modules/audio_processing/audio_processing_impl_unittest.cc
index ef1830357a..ca8b8b4c25 100644
--- a/modules/audio_processing/audio_processing_impl_unittest.cc
+++ b/modules/audio_processing/audio_processing_impl_unittest.cc
@@ -544,8 +544,7 @@ TEST(AudioProcessingImplTest, EchoControllerObservesPlayoutVolumeChange) {
TEST(AudioProcessingImplTest, RenderPreProcessorBeforeEchoDetector) {
// Make sure that signal changes caused by a render pre-processing sub-module
// take place before any echo detector analysis.
- rtc::scoped_refptr<TestEchoDetector> test_echo_detector(
- new rtc::RefCountedObject<TestEchoDetector>());
+ auto test_echo_detector = rtc::make_ref_counted<TestEchoDetector>();
std::unique_ptr<CustomProcessing> test_render_pre_processor(
new TestRenderPreProcessor());
// Create APM injecting the test echo detector and render pre-processor.
@@ -605,8 +604,7 @@ TEST(AudioProcessingImplTest, RenderPreProcessorBeforeEchoDetector) {
// config should be bit-exact with running APM with said submodules disabled.
// This mainly tests that SetCreateOptionalSubmodulesForTesting has an effect.
TEST(ApmWithSubmodulesExcludedTest, BitexactWithDisabledModules) {
- rtc::scoped_refptr<AudioProcessingImpl> apm =
- new rtc::RefCountedObject<AudioProcessingImpl>(webrtc::Config());
+ auto apm = rtc::make_ref_counted<AudioProcessingImpl>(webrtc::Config());
ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError);
ApmSubmoduleCreationOverrides overrides;
@@ -654,8 +652,7 @@ TEST(ApmWithSubmodulesExcludedTest, BitexactWithDisabledModules) {
// Disable transient suppressor creation and run APM in ways that should trigger
// calls to the transient suppressor API.
TEST(ApmWithSubmodulesExcludedTest, ReinitializeTransientSuppressor) {
- rtc::scoped_refptr<AudioProcessingImpl> apm =
- new rtc::RefCountedObject<AudioProcessingImpl>(webrtc::Config());
+ auto apm = rtc::make_ref_counted<AudioProcessingImpl>(webrtc::Config());
ASSERT_EQ(apm->Initialize(), kNoErr);
ApmSubmoduleCreationOverrides overrides;
@@ -716,8 +713,7 @@ TEST(ApmWithSubmodulesExcludedTest, ReinitializeTransientSuppressor) {
// Disable transient suppressor creation and run APM in ways that should trigger
// calls to the transient suppressor API.
TEST(ApmWithSubmodulesExcludedTest, ToggleTransientSuppressor) {
- rtc::scoped_refptr<AudioProcessingImpl> apm =
- new rtc::RefCountedObject<AudioProcessingImpl>(webrtc::Config());
+ auto apm = rtc::make_ref_counted<AudioProcessingImpl>(webrtc::Config());
ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError);
ApmSubmoduleCreationOverrides overrides;
diff --git a/modules/audio_processing/audio_processing_performance_unittest.cc b/modules/audio_processing/audio_processing_performance_unittest.cc
index 86ff0e8bfe..9585850296 100644
--- a/modules/audio_processing/audio_processing_performance_unittest.cc
+++ b/modules/audio_processing/audio_processing_performance_unittest.cc
@@ -391,15 +391,7 @@ class TimedThreadApiProcessor {
class CallSimulator : public ::testing::TestWithParam<SimulationConfig> {
public:
CallSimulator()
- : render_thread_(new rtc::PlatformThread(RenderProcessorThreadFunc,
- this,
- "render",
- rtc::kRealtimePriority)),
- capture_thread_(new rtc::PlatformThread(CaptureProcessorThreadFunc,
- this,
- "capture",
- rtc::kRealtimePriority)),
- rand_gen_(42U),
+ : rand_gen_(42U),
simulation_config_(static_cast<SimulationConfig>(GetParam())) {}
// Run the call simulation with a timeout.
@@ -434,13 +426,10 @@ class CallSimulator : public ::testing::TestWithParam<SimulationConfig> {
static const int kMinNumFramesToProcess = 150;
static const int32_t kTestTimeout = 3 * 10 * kMinNumFramesToProcess;
- // ::testing::TestWithParam<> implementation.
- void TearDown() override { StopThreads(); }
-
// Stop all running threads.
void StopThreads() {
- render_thread_->Stop();
- capture_thread_->Stop();
+ render_thread_.Finalize();
+ capture_thread_.Finalize();
}
// Simulator and APM setup.
@@ -531,32 +520,28 @@ class CallSimulator : public ::testing::TestWithParam<SimulationConfig> {
kMinNumFramesToProcess, kCaptureInputFloatLevel, num_capture_channels));
}
- // Thread callback for the render thread.
- static void RenderProcessorThreadFunc(void* context) {
- CallSimulator* call_simulator = reinterpret_cast<CallSimulator*>(context);
- while (call_simulator->render_thread_state_->Process()) {
- }
- }
-
- // Thread callback for the capture thread.
- static void CaptureProcessorThreadFunc(void* context) {
- CallSimulator* call_simulator = reinterpret_cast<CallSimulator*>(context);
- while (call_simulator->capture_thread_state_->Process()) {
- }
- }
-
// Start the threads used in the test.
void StartThreads() {
- ASSERT_NO_FATAL_FAILURE(render_thread_->Start());
- ASSERT_NO_FATAL_FAILURE(capture_thread_->Start());
+ const auto attributes =
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime);
+ render_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (render_thread_state_->Process()) {
+ }
+ },
+ "render", attributes);
+ capture_thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (capture_thread_state_->Process()) {
+ }
+ },
+ "capture", attributes);
}
// Event handler for the test.
rtc::Event test_complete_;
// Thread related variables.
- std::unique_ptr<rtc::PlatformThread> render_thread_;
- std::unique_ptr<rtc::PlatformThread> capture_thread_;
Random rand_gen_;
std::unique_ptr<AudioProcessing> apm_;
@@ -565,6 +550,8 @@ class CallSimulator : public ::testing::TestWithParam<SimulationConfig> {
LockedFlag capture_call_checker_;
std::unique_ptr<TimedThreadApiProcessor> render_thread_state_;
std::unique_ptr<TimedThreadApiProcessor> capture_thread_state_;
+ rtc::PlatformThread render_thread_;
+ rtc::PlatformThread capture_thread_;
};
// Implements the callback functionality for the threads.
diff --git a/modules/audio_processing/audio_processing_unittest.cc b/modules/audio_processing/audio_processing_unittest.cc
index 3d562dffcd..4d30a348f6 100644
--- a/modules/audio_processing/audio_processing_unittest.cc
+++ b/modules/audio_processing/audio_processing_unittest.cc
@@ -3039,50 +3039,50 @@ TEST(AudioProcessing, GainController1ConfigNotEqual) {
Toggle(a.enabled);
EXPECT_NE(a, b);
- a.enabled = b.enabled;
+ a = b;
a.mode = AudioProcessing::Config::GainController1::Mode::kAdaptiveDigital;
EXPECT_NE(a, b);
- a.mode = b.mode;
+ a = b;
a.target_level_dbfs++;
EXPECT_NE(a, b);
- a.target_level_dbfs = b.target_level_dbfs;
+ a = b;
a.compression_gain_db++;
EXPECT_NE(a, b);
- a.compression_gain_db = b.compression_gain_db;
+ a = b;
Toggle(a.enable_limiter);
EXPECT_NE(a, b);
- a.enable_limiter = b.enable_limiter;
+ a = b;
a.analog_level_minimum++;
EXPECT_NE(a, b);
- a.analog_level_minimum = b.analog_level_minimum;
+ a = b;
a.analog_level_maximum--;
EXPECT_NE(a, b);
- a.analog_level_maximum = b.analog_level_maximum;
+ a = b;
auto& a_analog = a.analog_gain_controller;
const auto& b_analog = b.analog_gain_controller;
Toggle(a_analog.enabled);
EXPECT_NE(a, b);
- a_analog.enabled = b_analog.enabled;
+ a_analog = b_analog;
a_analog.startup_min_volume++;
EXPECT_NE(a, b);
- a_analog.startup_min_volume = b_analog.startup_min_volume;
+ a_analog = b_analog;
a_analog.clipped_level_min++;
EXPECT_NE(a, b);
- a_analog.clipped_level_min = b_analog.clipped_level_min;
+ a_analog = b_analog;
Toggle(a_analog.enable_digital_adaptive);
EXPECT_NE(a, b);
- a_analog.enable_digital_adaptive = b_analog.enable_digital_adaptive;
+ a_analog = b_analog;
}
TEST(AudioProcessing, GainController2ConfigEqual) {
@@ -3094,7 +3094,7 @@ TEST(AudioProcessing, GainController2ConfigEqual) {
b.enabled = a.enabled;
EXPECT_EQ(a, b);
- a.fixed_digital.gain_db += 1.f;
+ a.fixed_digital.gain_db += 1.0f;
b.fixed_digital.gain_db = a.fixed_digital.gain_db;
EXPECT_EQ(a, b);
@@ -3105,46 +3105,44 @@ TEST(AudioProcessing, GainController2ConfigEqual) {
b_adaptive.enabled = a_adaptive.enabled;
EXPECT_EQ(a, b);
- a_adaptive.vad_probability_attack += 1.f;
- b_adaptive.vad_probability_attack = a_adaptive.vad_probability_attack;
+ Toggle(a_adaptive.dry_run);
+ b_adaptive.dry_run = a_adaptive.dry_run;
EXPECT_EQ(a, b);
- a_adaptive.level_estimator =
- AudioProcessing::Config::GainController2::LevelEstimator::kPeak;
- b_adaptive.level_estimator = a_adaptive.level_estimator;
+ a_adaptive.noise_estimator = AudioProcessing::Config::GainController2::
+ NoiseEstimator::kStationaryNoise;
+ b_adaptive.noise_estimator = a_adaptive.noise_estimator;
EXPECT_EQ(a, b);
- a_adaptive.level_estimator_adjacent_speech_frames_threshold++;
- b_adaptive.level_estimator_adjacent_speech_frames_threshold =
- a_adaptive.level_estimator_adjacent_speech_frames_threshold;
+ a_adaptive.vad_reset_period_ms++;
+ b_adaptive.vad_reset_period_ms = a_adaptive.vad_reset_period_ms;
EXPECT_EQ(a, b);
- Toggle(a_adaptive.use_saturation_protector);
- b_adaptive.use_saturation_protector = a_adaptive.use_saturation_protector;
+ a_adaptive.adjacent_speech_frames_threshold++;
+ b_adaptive.adjacent_speech_frames_threshold =
+ a_adaptive.adjacent_speech_frames_threshold;
EXPECT_EQ(a, b);
- a_adaptive.initial_saturation_margin_db += 1.f;
- b_adaptive.initial_saturation_margin_db =
- a_adaptive.initial_saturation_margin_db;
+ a_adaptive.max_gain_change_db_per_second += 1.0f;
+ b_adaptive.max_gain_change_db_per_second =
+ a_adaptive.max_gain_change_db_per_second;
EXPECT_EQ(a, b);
- a_adaptive.extra_saturation_margin_db += 1.f;
- b_adaptive.extra_saturation_margin_db = a_adaptive.extra_saturation_margin_db;
+ a_adaptive.max_output_noise_level_dbfs += 1.0f;
+ b_adaptive.max_output_noise_level_dbfs =
+ a_adaptive.max_output_noise_level_dbfs;
EXPECT_EQ(a, b);
- a_adaptive.gain_applier_adjacent_speech_frames_threshold++;
- b_adaptive.gain_applier_adjacent_speech_frames_threshold =
- a_adaptive.gain_applier_adjacent_speech_frames_threshold;
+ Toggle(a_adaptive.sse2_allowed);
+ b_adaptive.sse2_allowed = a_adaptive.sse2_allowed;
EXPECT_EQ(a, b);
- a_adaptive.max_gain_change_db_per_second += 1.f;
- b_adaptive.max_gain_change_db_per_second =
- a_adaptive.max_gain_change_db_per_second;
+ Toggle(a_adaptive.avx2_allowed);
+ b_adaptive.avx2_allowed = a_adaptive.avx2_allowed;
EXPECT_EQ(a, b);
- a_adaptive.max_output_noise_level_dbfs -= 1.f;
- b_adaptive.max_output_noise_level_dbfs =
- a_adaptive.max_output_noise_level_dbfs;
+ Toggle(a_adaptive.neon_allowed);
+ b_adaptive.neon_allowed = a_adaptive.neon_allowed;
EXPECT_EQ(a, b);
}
@@ -3156,60 +3154,55 @@ TEST(AudioProcessing, GainController2ConfigNotEqual) {
Toggle(a.enabled);
EXPECT_NE(a, b);
- a.enabled = b.enabled;
+ a = b;
- a.fixed_digital.gain_db += 1.f;
+ a.fixed_digital.gain_db += 1.0f;
EXPECT_NE(a, b);
- a.fixed_digital.gain_db = b.fixed_digital.gain_db;
+ a.fixed_digital = b.fixed_digital;
auto& a_adaptive = a.adaptive_digital;
const auto& b_adaptive = b.adaptive_digital;
Toggle(a_adaptive.enabled);
EXPECT_NE(a, b);
- a_adaptive.enabled = b_adaptive.enabled;
+ a_adaptive = b_adaptive;
- a_adaptive.vad_probability_attack += 1.f;
+ Toggle(a_adaptive.dry_run);
EXPECT_NE(a, b);
- a_adaptive.vad_probability_attack = b_adaptive.vad_probability_attack;
+ a_adaptive = b_adaptive;
- a_adaptive.level_estimator =
- AudioProcessing::Config::GainController2::LevelEstimator::kPeak;
+ a_adaptive.noise_estimator = AudioProcessing::Config::GainController2::
+ NoiseEstimator::kStationaryNoise;
EXPECT_NE(a, b);
- a_adaptive.level_estimator = b_adaptive.level_estimator;
+ a_adaptive = b_adaptive;
- a_adaptive.level_estimator_adjacent_speech_frames_threshold++;
+ a_adaptive.vad_reset_period_ms++;
EXPECT_NE(a, b);
- a_adaptive.level_estimator_adjacent_speech_frames_threshold =
- b_adaptive.level_estimator_adjacent_speech_frames_threshold;
+ a_adaptive = b_adaptive;
- Toggle(a_adaptive.use_saturation_protector);
+ a_adaptive.adjacent_speech_frames_threshold++;
EXPECT_NE(a, b);
- a_adaptive.use_saturation_protector = b_adaptive.use_saturation_protector;
+ a_adaptive = b_adaptive;
- a_adaptive.initial_saturation_margin_db += 1.f;
+ a_adaptive.max_gain_change_db_per_second += 1.0f;
EXPECT_NE(a, b);
- a_adaptive.initial_saturation_margin_db =
- b_adaptive.initial_saturation_margin_db;
+ a_adaptive = b_adaptive;
- a_adaptive.extra_saturation_margin_db += 1.f;
+ a_adaptive.max_output_noise_level_dbfs += 1.0f;
EXPECT_NE(a, b);
- a_adaptive.extra_saturation_margin_db = b_adaptive.extra_saturation_margin_db;
+ a_adaptive = b_adaptive;
- a_adaptive.gain_applier_adjacent_speech_frames_threshold++;
+ Toggle(a_adaptive.sse2_allowed);
EXPECT_NE(a, b);
- a_adaptive.gain_applier_adjacent_speech_frames_threshold =
- b_adaptive.gain_applier_adjacent_speech_frames_threshold;
+ a_adaptive = b_adaptive;
- a_adaptive.max_gain_change_db_per_second += 1.f;
+ Toggle(a_adaptive.avx2_allowed);
EXPECT_NE(a, b);
- a_adaptive.max_gain_change_db_per_second =
- b_adaptive.max_gain_change_db_per_second;
+ a_adaptive = b_adaptive;
- a_adaptive.max_output_noise_level_dbfs -= 1.f;
+ Toggle(a_adaptive.neon_allowed);
EXPECT_NE(a, b);
- a_adaptive.max_output_noise_level_dbfs =
- b_adaptive.max_output_noise_level_dbfs;
+ a_adaptive = b_adaptive;
}
} // namespace webrtc
diff --git a/modules/audio_processing/gain_controller2.cc b/modules/audio_processing/gain_controller2.cc
index 9e3e8e7cae..74b63c9432 100644
--- a/modules/audio_processing/gain_controller2.cc
+++ b/modules/audio_processing/gain_controller2.cc
@@ -26,22 +26,26 @@ int GainController2::instance_count_ = 0;
GainController2::GainController2()
: data_dumper_(rtc::AtomicOps::Increment(&instance_count_)),
gain_applier_(/*hard_clip_samples=*/false,
- /*initial_gain_factor=*/0.f),
+ /*initial_gain_factor=*/0.0f),
limiter_(static_cast<size_t>(48000), &data_dumper_, "Agc2"),
calls_since_last_limiter_log_(0) {
if (config_.adaptive_digital.enabled) {
- adaptive_agc_ = std::make_unique<AdaptiveAgc>(&data_dumper_);
+ adaptive_agc_ =
+ std::make_unique<AdaptiveAgc>(&data_dumper_, config_.adaptive_digital);
}
}
GainController2::~GainController2() = default;
-void GainController2::Initialize(int sample_rate_hz) {
+void GainController2::Initialize(int sample_rate_hz, int num_channels) {
RTC_DCHECK(sample_rate_hz == AudioProcessing::kSampleRate8kHz ||
sample_rate_hz == AudioProcessing::kSampleRate16kHz ||
sample_rate_hz == AudioProcessing::kSampleRate32kHz ||
sample_rate_hz == AudioProcessing::kSampleRate48kHz);
limiter_.SetSampleRate(sample_rate_hz);
+ if (adaptive_agc_) {
+ adaptive_agc_->Initialize(sample_rate_hz, num_channels);
+ }
data_dumper_.InitiateNewSetOfRecordings();
data_dumper_.DumpRaw("sample_rate_hz", sample_rate_hz);
calls_since_last_limiter_log_ = 0;
diff --git a/modules/audio_processing/gain_controller2.h b/modules/audio_processing/gain_controller2.h
index b62890d721..ce758c7834 100644
--- a/modules/audio_processing/gain_controller2.h
+++ b/modules/audio_processing/gain_controller2.h
@@ -34,7 +34,7 @@ class GainController2 {
GainController2& operator=(const GainController2&) = delete;
~GainController2();
- void Initialize(int sample_rate_hz);
+ void Initialize(int sample_rate_hz, int num_channels);
void Process(AudioBuffer* audio);
void NotifyAnalogLevel(int level);
diff --git a/modules/audio_processing/gain_controller2_unittest.cc b/modules/audio_processing/gain_controller2_unittest.cc
index 815d58efe7..85c08bb750 100644
--- a/modules/audio_processing/gain_controller2_unittest.cc
+++ b/modules/audio_processing/gain_controller2_unittest.cc
@@ -65,7 +65,7 @@ std::unique_ptr<GainController2> CreateAgc2FixedDigitalMode(
size_t sample_rate_hz) {
auto agc2 = std::make_unique<GainController2>();
agc2->ApplyConfig(CreateAgc2FixedDigitalModeConfig(fixed_gain_db));
- agc2->Initialize(sample_rate_hz);
+ agc2->Initialize(sample_rate_hz, /*num_channels=*/1);
return agc2;
}
@@ -337,9 +337,10 @@ TEST(GainController2, CheckGainAdaptiveDigital) {
constexpr float kExpectedGainDb = 4.3f;
constexpr float kToleranceDb = 0.5f;
GainController2 gain_controller2;
- gain_controller2.Initialize(AudioProcessing::kSampleRate48kHz);
+ gain_controller2.Initialize(AudioProcessing::kSampleRate48kHz,
+ /*num_channels=*/1);
AudioProcessing::Config::GainController2 config;
- config.fixed_digital.gain_db = 0.f;
+ config.fixed_digital.gain_db = 0.0f;
config.adaptive_digital.enabled = true;
gain_controller2.ApplyConfig(config);
EXPECT_NEAR(
diff --git a/modules/audio_processing/include/audio_processing.cc b/modules/audio_processing/include/audio_processing.cc
index fa45230c6b..44a90d6e76 100644
--- a/modules/audio_processing/include/audio_processing.cc
+++ b/modules/audio_processing/include/audio_processing.cc
@@ -77,33 +77,42 @@ bool Agc1Config::operator==(const Agc1Config& rhs) const {
analog_lhs.startup_min_volume == analog_rhs.startup_min_volume &&
analog_lhs.clipped_level_min == analog_rhs.clipped_level_min &&
analog_lhs.enable_digital_adaptive ==
- analog_rhs.enable_digital_adaptive;
+ analog_rhs.enable_digital_adaptive &&
+ analog_lhs.clipped_level_step == analog_rhs.clipped_level_step &&
+ analog_lhs.clipped_ratio_threshold ==
+ analog_rhs.clipped_ratio_threshold &&
+ analog_lhs.clipped_wait_frames == analog_rhs.clipped_wait_frames &&
+ analog_lhs.clipping_predictor.mode ==
+ analog_rhs.clipping_predictor.mode &&
+ analog_lhs.clipping_predictor.window_length ==
+ analog_rhs.clipping_predictor.window_length &&
+ analog_lhs.clipping_predictor.reference_window_length ==
+ analog_rhs.clipping_predictor.reference_window_length &&
+ analog_lhs.clipping_predictor.reference_window_delay ==
+ analog_rhs.clipping_predictor.reference_window_delay &&
+ analog_lhs.clipping_predictor.clipping_threshold ==
+ analog_rhs.clipping_predictor.clipping_threshold &&
+ analog_lhs.clipping_predictor.crest_factor_margin ==
+ analog_rhs.clipping_predictor.crest_factor_margin;
}
-bool Agc2Config::operator==(const Agc2Config& rhs) const {
- const auto& adaptive_lhs = adaptive_digital;
- const auto& adaptive_rhs = rhs.adaptive_digital;
+bool Agc2Config::AdaptiveDigital::operator==(
+ const Agc2Config::AdaptiveDigital& rhs) const {
+ return enabled == rhs.enabled && dry_run == rhs.dry_run &&
+ noise_estimator == rhs.noise_estimator &&
+ vad_reset_period_ms == rhs.vad_reset_period_ms &&
+ adjacent_speech_frames_threshold ==
+ rhs.adjacent_speech_frames_threshold &&
+ max_gain_change_db_per_second == rhs.max_gain_change_db_per_second &&
+ max_output_noise_level_dbfs == rhs.max_output_noise_level_dbfs &&
+ sse2_allowed == rhs.sse2_allowed && avx2_allowed == rhs.avx2_allowed &&
+ neon_allowed == rhs.neon_allowed;
+}
+bool Agc2Config::operator==(const Agc2Config& rhs) const {
return enabled == rhs.enabled &&
fixed_digital.gain_db == rhs.fixed_digital.gain_db &&
- adaptive_lhs.enabled == adaptive_rhs.enabled &&
- adaptive_lhs.vad_probability_attack ==
- adaptive_rhs.vad_probability_attack &&
- adaptive_lhs.level_estimator == adaptive_rhs.level_estimator &&
- adaptive_lhs.level_estimator_adjacent_speech_frames_threshold ==
- adaptive_rhs.level_estimator_adjacent_speech_frames_threshold &&
- adaptive_lhs.use_saturation_protector ==
- adaptive_rhs.use_saturation_protector &&
- adaptive_lhs.initial_saturation_margin_db ==
- adaptive_rhs.initial_saturation_margin_db &&
- adaptive_lhs.extra_saturation_margin_db ==
- adaptive_rhs.extra_saturation_margin_db &&
- adaptive_lhs.gain_applier_adjacent_speech_frames_threshold ==
- adaptive_rhs.gain_applier_adjacent_speech_frames_threshold &&
- adaptive_lhs.max_gain_change_db_per_second ==
- adaptive_rhs.max_gain_change_db_per_second &&
- adaptive_lhs.max_output_noise_level_dbfs ==
- adaptive_rhs.max_output_noise_level_dbfs;
+ adaptive_digital == rhs.adaptive_digital;
}
bool AudioProcessing::Config::CaptureLevelAdjustment::operator==(
@@ -156,11 +165,46 @@ std::string AudioProcessing::Config::ToString() const {
<< ", enable_limiter: " << gain_controller1.enable_limiter
<< ", analog_level_minimum: " << gain_controller1.analog_level_minimum
<< ", analog_level_maximum: " << gain_controller1.analog_level_maximum
- << " }, gain_controller2: { enabled: " << gain_controller2.enabled
+ << ", analog_gain_controller { enabled: "
+ << gain_controller1.analog_gain_controller.enabled
+ << ", startup_min_volume: "
+ << gain_controller1.analog_gain_controller.startup_min_volume
+ << ", clipped_level_min: "
+ << gain_controller1.analog_gain_controller.clipped_level_min
+ << ", enable_digital_adaptive: "
+ << gain_controller1.analog_gain_controller.enable_digital_adaptive
+ << ", clipped_level_step: "
+ << gain_controller1.analog_gain_controller.clipped_level_step
+ << ", clipped_ratio_threshold: "
+ << gain_controller1.analog_gain_controller.clipped_ratio_threshold
+ << ", clipped_wait_frames: "
+ << gain_controller1.analog_gain_controller.clipped_wait_frames
+ << ", clipping_predictor: { enabled: "
+ << gain_controller1.analog_gain_controller.clipping_predictor.enabled
+ << ", mode: "
+ << gain_controller1.analog_gain_controller.clipping_predictor.mode
+ << ", window_length: "
+ << gain_controller1.analog_gain_controller.clipping_predictor
+ .window_length
+ << ", reference_window_length: "
+ << gain_controller1.analog_gain_controller.clipping_predictor
+ .reference_window_length
+ << ", reference_window_delay: "
+ << gain_controller1.analog_gain_controller.clipping_predictor
+ .reference_window_delay
+ << ", clipping_threshold: "
+ << gain_controller1.analog_gain_controller.clipping_predictor
+ .clipping_threshold
+ << ", crest_factor_margin: "
+ << gain_controller1.analog_gain_controller.clipping_predictor
+ .crest_factor_margin
+ << " }}}, gain_controller2: { enabled: " << gain_controller2.enabled
<< ", fixed_digital: { gain_db: "
<< gain_controller2.fixed_digital.gain_db
<< " }, adaptive_digital: { enabled: "
- << gain_controller2.adaptive_digital.enabled << ", noise_estimator: "
+ << gain_controller2.adaptive_digital.enabled
+ << ", dry_run: " << gain_controller2.adaptive_digital.dry_run
+ << ", noise_estimator: "
<< GainController2NoiseEstimatorToString(
gain_controller2.adaptive_digital.noise_estimator)
<< ", vad_reset_period_ms: "
diff --git a/modules/audio_processing/include/audio_processing.h b/modules/audio_processing/include/audio_processing.h
index 01bb7c33c7..64b1b5d107 100644
--- a/modules/audio_processing/include/audio_processing.h
+++ b/modules/audio_processing/include/audio_processing.h
@@ -59,9 +59,9 @@ class CustomProcessing;
//
// Must be provided through AudioProcessingBuilder().Create(config).
#if defined(WEBRTC_CHROMIUM_BUILD)
-static const int kAgcStartupMinVolume = 85;
+static constexpr int kAgcStartupMinVolume = 85;
#else
-static const int kAgcStartupMinVolume = 0;
+static constexpr int kAgcStartupMinVolume = 0;
#endif // defined(WEBRTC_CHROMIUM_BUILD)
static constexpr int kClippedLevelMin = 70;
@@ -275,7 +275,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface {
// in the analog mode, prescribing an analog gain to be applied at the audio
// HAL.
// Recommended to be enabled on the client-side.
- struct GainController1 {
+ struct RTC_EXPORT GainController1 {
bool operator==(const GainController1& rhs) const;
bool operator!=(const GainController1& rhs) const {
return !(*this == rhs);
@@ -334,6 +334,43 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface {
// clipping.
int clipped_level_min = kClippedLevelMin;
bool enable_digital_adaptive = true;
+ // Amount the microphone level is lowered with every clipping event.
+ // Limited to (0, 255].
+ int clipped_level_step = 15;
+ // Proportion of clipped samples required to declare a clipping event.
+ // Limited to (0.f, 1.f).
+ float clipped_ratio_threshold = 0.1f;
+ // Time in frames to wait after a clipping event before checking again.
+ // Limited to values higher than 0.
+ int clipped_wait_frames = 300;
+
+ // Enables clipping prediction functionality.
+ struct ClippingPredictor {
+ bool enabled = false;
+ enum Mode {
+ // Clipping event prediction mode with fixed step estimation.
+ kClippingEventPrediction,
+ // Clipped peak estimation mode with adaptive step estimation.
+ kAdaptiveStepClippingPeakPrediction,
+ // Clipped peak estimation mode with fixed step estimation.
+ kFixedStepClippingPeakPrediction,
+ };
+ Mode mode = kClippingEventPrediction;
+ // Number of frames in the sliding analysis window.
+ int window_length = 5;
+ // Number of frames in the sliding reference window.
+ int reference_window_length = 5;
+ // Reference window delay (unit: number of frames).
+ int reference_window_delay = 5;
+ // Clipping prediction threshold (dBFS).
+ float clipping_threshold = -1.0f;
+ // Crest factor drop threshold (dB).
+ float crest_factor_margin = 3.0f;
+ // If true, the recommended clipped level step is used to modify the
+ // analog gain. Otherwise, the predictor runs without affecting the
+ // analog gain.
+ bool use_predicted_step = true;
+ } clipping_predictor;
} analog_gain_controller;
} gain_controller1;
@@ -343,7 +380,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface {
// setting |fixed_gain_db|, the limiter can be turned into a compressor that
// first applies a fixed gain. The adaptive digital AGC can be turned off by
// setting |adaptive_digital_mode=false|.
- struct GainController2 {
+ struct RTC_EXPORT GainController2 {
bool operator==(const GainController2& rhs) const;
bool operator!=(const GainController2& rhs) const {
return !(*this == rhs);
@@ -356,8 +393,15 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface {
struct FixedDigital {
float gain_db = 0.0f;
} fixed_digital;
- struct AdaptiveDigital {
+ struct RTC_EXPORT AdaptiveDigital {
+ bool operator==(const AdaptiveDigital& rhs) const;
+ bool operator!=(const AdaptiveDigital& rhs) const {
+ return !(*this == rhs);
+ }
+
bool enabled = false;
+        // If true, the adaptive digital controller runs without modifying
+        // the signal.
+ bool dry_run = false;
NoiseEstimator noise_estimator = kNoiseFloor;
int vad_reset_period_ms = 1500;
int adjacent_speech_frames_threshold = 12;
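
The comments above document the new AGC1 clipping-predictor knobs and the AGC2 `dry_run` flag as plain config fields. As a rough illustration (not part of the patch), the sketch below populates them; the field paths come from this diff, while the function name and the chosen values are hypothetical (the values simply restate the documented defaults). Per the header comment earlier in this file, the resulting config is meant to be passed to AudioProcessingBuilder().Create(config).

#include "modules/audio_processing/include/audio_processing.h"

webrtc::AudioProcessing::Config BuildAgcConfigSketch() {
  webrtc::AudioProcessing::Config config;

  // AGC1 analog path: tune clipping handling and enable clipping prediction.
  auto& analog = config.gain_controller1.analog_gain_controller;
  analog.clipped_level_step = 15;         // Limited to (0, 255].
  analog.clipped_ratio_threshold = 0.1f;  // Limited to (0, 1).
  analog.clipped_wait_frames = 300;       // Must be > 0.
  analog.clipping_predictor.enabled = true;
  analog.clipping_predictor.use_predicted_step = true;
  // `mode` keeps its default, kClippingEventPrediction.

  // AGC2: run the adaptive digital controller in dry-run mode, i.e. adapt the
  // gain internally without modifying the processed audio.
  config.gain_controller2.enabled = true;
  config.gain_controller2.adaptive_digital.enabled = true;
  config.gain_controller2.adaptive_digital.dry_run = true;
  return config;
}
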
diff --git a/modules/audio_processing/logging/apm_data_dumper.h b/modules/audio_processing/logging/apm_data_dumper.h
index 6d32b32ab5..9c2ac3be5d 100644
--- a/modules/audio_processing/logging/apm_data_dumper.h
+++ b/modules/audio_processing/logging/apm_data_dumper.h
@@ -65,6 +65,15 @@ class ApmDataDumper {
#endif
}
+ // Returns whether dumping functionality is enabled/available.
+ static bool IsAvailable() {
+#if WEBRTC_APM_DEBUG_DUMP == 1
+ return true;
+#else
+ return false;
+#endif
+ }
+
// Default dump set.
static constexpr size_t kDefaultDumpSet = 0;
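
The new static `IsAvailable()` helper simply reports whether the data dumper was compiled in (WEBRTC_APM_DEBUG_DUMP == 1). A hypothetical caller-side check, not part of the patch:

#include "modules/audio_processing/logging/apm_data_dumper.h"

bool DumpingCompiledIn() {
  // False when the dumper methods compile to no-ops; callers can skip any
  // dump-related setup in that case.
  return webrtc::ApmDataDumper::IsAvailable();
}
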
diff --git a/modules/audio_processing/residual_echo_detector_unittest.cc b/modules/audio_processing/residual_echo_detector_unittest.cc
index 6697cf009d..a5f1409516 100644
--- a/modules/audio_processing/residual_echo_detector_unittest.cc
+++ b/modules/audio_processing/residual_echo_detector_unittest.cc
@@ -18,8 +18,7 @@
namespace webrtc {
TEST(ResidualEchoDetectorTests, Echo) {
- rtc::scoped_refptr<ResidualEchoDetector> echo_detector =
- new rtc::RefCountedObject<ResidualEchoDetector>();
+ auto echo_detector = rtc::make_ref_counted<ResidualEchoDetector>();
echo_detector->SetReliabilityForTest(1.0f);
std::vector<float> ones(160, 1.f);
std::vector<float> zeros(160, 0.f);
@@ -46,8 +45,7 @@ TEST(ResidualEchoDetectorTests, Echo) {
}
TEST(ResidualEchoDetectorTests, NoEcho) {
- rtc::scoped_refptr<ResidualEchoDetector> echo_detector =
- new rtc::RefCountedObject<ResidualEchoDetector>();
+ auto echo_detector = rtc::make_ref_counted<ResidualEchoDetector>();
echo_detector->SetReliabilityForTest(1.0f);
std::vector<float> ones(160, 1.f);
std::vector<float> zeros(160, 0.f);
@@ -69,8 +67,7 @@ TEST(ResidualEchoDetectorTests, NoEcho) {
}
TEST(ResidualEchoDetectorTests, EchoWithRenderClockDrift) {
- rtc::scoped_refptr<ResidualEchoDetector> echo_detector =
- new rtc::RefCountedObject<ResidualEchoDetector>();
+ auto echo_detector = rtc::make_ref_counted<ResidualEchoDetector>();
echo_detector->SetReliabilityForTest(1.0f);
std::vector<float> ones(160, 1.f);
std::vector<float> zeros(160, 0.f);
@@ -107,8 +104,7 @@ TEST(ResidualEchoDetectorTests, EchoWithRenderClockDrift) {
}
TEST(ResidualEchoDetectorTests, EchoWithCaptureClockDrift) {
- rtc::scoped_refptr<ResidualEchoDetector> echo_detector =
- new rtc::RefCountedObject<ResidualEchoDetector>();
+ auto echo_detector = rtc::make_ref_counted<ResidualEchoDetector>();
echo_detector->SetReliabilityForTest(1.0f);
std::vector<float> ones(160, 1.f);
std::vector<float> zeros(160, 0.f);
diff --git a/modules/congestion_controller/BUILD.gn b/modules/congestion_controller/BUILD.gn
index 3e1e8c0e85..c0b064d9ed 100644
--- a/modules/congestion_controller/BUILD.gn
+++ b/modules/congestion_controller/BUILD.gn
@@ -22,12 +22,17 @@ rtc_library("congestion_controller") {
sources = [
"include/receive_side_congestion_controller.h",
"receive_side_congestion_controller.cc",
+ "remb_throttler.cc",
+ "remb_throttler.h",
]
deps = [
"..:module_api",
"../../api/transport:field_trial_based_config",
"../../api/transport:network_control",
+ "../../api/units:data_rate",
+ "../../api/units:time_delta",
+ "../../api/units:timestamp",
"../../rtc_base/synchronization:mutex",
"../pacing",
"../remote_bitrate_estimator",
@@ -43,11 +48,17 @@ if (rtc_include_tests && !build_with_chromium) {
rtc_library("congestion_controller_unittests") {
testonly = true
- sources = [ "receive_side_congestion_controller_unittest.cc" ]
+ sources = [
+ "receive_side_congestion_controller_unittest.cc",
+ "remb_throttler_unittest.cc",
+ ]
deps = [
":congestion_controller",
"../../api/test/network_emulation",
"../../api/test/network_emulation:create_cross_traffic",
+ "../../api/units:data_rate",
+ "../../api/units:time_delta",
+ "../../api/units:timestamp",
"../../system_wrappers",
"../../test:test_support",
"../../test/scenario",
diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
index 0a0b1801f2..2344f45a65 100644
--- a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
+++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
@@ -465,7 +465,7 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback(
expected_packets_since_last_loss_update_ +=
report.PacketsWithFeedback().size();
for (const auto& packet_feedback : report.PacketsWithFeedback()) {
- if (packet_feedback.receive_time.IsInfinite())
+ if (!packet_feedback.IsReceived())
lost_packets_since_last_loss_update_ += 1;
}
if (report.feedback_time > next_loss_update_) {
diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc
index 8eb4a00431..7e8d7b9ac6 100644
--- a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc
+++ b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc
@@ -124,6 +124,35 @@ void UpdatesTargetRateBasedOnLinkCapacity(std::string test_name = "") {
truth->PrintRow();
EXPECT_NEAR(client->target_rate().kbps(), 90, 25);
}
+
+DataRate RunRembDipScenario(std::string test_name) {
+ Scenario s(test_name);
+ NetworkSimulationConfig net_conf;
+ net_conf.bandwidth = DataRate::KilobitsPerSec(2000);
+ net_conf.delay = TimeDelta::Millis(50);
+ auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
+ c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000);
+ });
+ auto send_net = {s.CreateSimulationNode(net_conf)};
+ auto ret_net = {s.CreateSimulationNode(net_conf)};
+ auto* route = s.CreateRoutes(
+ client, send_net, s.CreateClient("return", CallClientConfig()), ret_net);
+ s.CreateVideoStream(route->forward(), VideoStreamConfig());
+
+ s.RunFor(TimeDelta::Seconds(10));
+ EXPECT_GT(client->send_bandwidth().kbps(), 1500);
+
+ DataRate RembLimit = DataRate::KilobitsPerSec(250);
+ client->SetRemoteBitrate(RembLimit);
+ s.RunFor(TimeDelta::Seconds(1));
+ EXPECT_EQ(client->send_bandwidth(), RembLimit);
+
+ DataRate RembLimitLifted = DataRate::KilobitsPerSec(10000);
+ client->SetRemoteBitrate(RembLimitLifted);
+ s.RunFor(TimeDelta::Seconds(10));
+
+ return client->send_bandwidth();
+}
} // namespace
class GoogCcNetworkControllerTest : public ::testing::Test {
@@ -850,33 +879,17 @@ TEST_F(GoogCcNetworkControllerTest, IsFairToTCP) {
EXPECT_LT(client->send_bandwidth().kbps(), 750);
}
-TEST(GoogCcScenario, RampupOnRembCapLifted) {
+TEST(GoogCcScenario, FastRampupOnRembCapLiftedWithFieldTrial) {
ScopedFieldTrials trial("WebRTC-Bwe-ReceiverLimitCapsOnly/Enabled/");
- Scenario s("googcc_unit/rampup_ramb_cap_lifted");
- NetworkSimulationConfig net_conf;
- net_conf.bandwidth = DataRate::KilobitsPerSec(2000);
- net_conf.delay = TimeDelta::Millis(50);
- auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
- c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000);
- });
- auto send_net = {s.CreateSimulationNode(net_conf)};
- auto ret_net = {s.CreateSimulationNode(net_conf)};
- auto* route = s.CreateRoutes(
- client, send_net, s.CreateClient("return", CallClientConfig()), ret_net);
- s.CreateVideoStream(route->forward(), VideoStreamConfig());
-
- s.RunFor(TimeDelta::Seconds(10));
- EXPECT_GT(client->send_bandwidth().kbps(), 1500);
-
- DataRate RembLimit = DataRate::KilobitsPerSec(250);
- client->SetRemoteBitrate(RembLimit);
- s.RunFor(TimeDelta::Seconds(1));
- EXPECT_EQ(client->send_bandwidth(), RembLimit);
+ DataRate final_estimate =
+ RunRembDipScenario("googcc_unit/fast_rampup_on_remb_cap_lifted");
+ EXPECT_GT(final_estimate.kbps(), 1500);
+}
- DataRate RembLimitLifted = DataRate::KilobitsPerSec(10000);
- client->SetRemoteBitrate(RembLimitLifted);
- s.RunFor(TimeDelta::Seconds(10));
- EXPECT_GT(client->send_bandwidth().kbps(), 1500);
+TEST(GoogCcScenario, SlowRampupOnRembCapLifted) {
+ DataRate final_estimate =
+ RunRembDipScenario("googcc_unit/default_slow_rampup_on_remb_cap_lifted");
+ EXPECT_LT(final_estimate.kbps(), 1000);
}
} // namespace test
diff --git a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc
index 2211d26f0a..505e9306f6 100644
--- a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc
+++ b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc
@@ -139,7 +139,7 @@ void LossBasedBandwidthEstimation::UpdateLossStatistics(
}
int loss_count = 0;
for (const auto& pkt : packet_results) {
- loss_count += pkt.receive_time.IsInfinite() ? 1 : 0;
+ loss_count += !pkt.IsReceived() ? 1 : 0;
}
last_loss_ratio_ = static_cast<double>(loss_count) / packet_results.size();
const TimeDelta time_passed = last_loss_packet_report_.IsFinite()
diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
index a2865d9f5a..c5f51df99b 100644
--- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
+++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
@@ -587,8 +587,7 @@ DataRate SendSideBandwidthEstimation::GetUpperLimit() const {
DataRate upper_limit = delay_based_limit_;
if (!receiver_limit_caps_only_)
upper_limit = std::min(upper_limit, receiver_limit_);
- upper_limit = std::min(upper_limit, max_bitrate_configured_);
- return upper_limit;
+ return std::min(upper_limit, max_bitrate_configured_);
}
void SendSideBandwidthEstimation::MaybeLogLowBitrateWarning(DataRate bitrate,
diff --git a/modules/congestion_controller/include/receive_side_congestion_controller.h b/modules/congestion_controller/include/receive_side_congestion_controller.h
index 034f2e9517..84661c05b7 100644
--- a/modules/congestion_controller/include/receive_side_congestion_controller.h
+++ b/modules/congestion_controller/include/receive_side_congestion_controller.h
@@ -16,7 +16,10 @@
#include "api/transport/field_trial_based_config.h"
#include "api/transport/network_control.h"
+#include "api/units/data_rate.h"
+#include "modules/congestion_controller/remb_throttler.h"
#include "modules/include/module.h"
+#include "modules/pacing/packet_router.h"
#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h"
#include "rtc_base/synchronization/mutex.h"
@@ -32,10 +35,10 @@ class RemoteBitrateObserver;
class ReceiveSideCongestionController : public CallStatsObserver,
public Module {
public:
- ReceiveSideCongestionController(Clock* clock, PacketRouter* packet_router);
ReceiveSideCongestionController(
Clock* clock,
- PacketRouter* packet_router,
+ RemoteEstimatorProxy::TransportFeedbackSender feedback_sender,
+ RembThrottler::RembSender remb_sender,
NetworkStateEstimator* network_state_estimator);
~ReceiveSideCongestionController() override {}
@@ -56,6 +59,10 @@ class ReceiveSideCongestionController : public CallStatsObserver,
// This is send bitrate, used to control the rate of feedback messages.
void OnBitrateChanged(int bitrate_bps);
+  // Ensures that the remote party is notified, via RTCP REMB, of a receive
+  // bitrate no larger than |bitrate|.
+ void SetMaxDesiredReceiveBitrate(DataRate bitrate);
+
// Implements Module.
int64_t TimeUntilNextProcess() override;
void Process() override;
@@ -103,6 +110,7 @@ class ReceiveSideCongestionController : public CallStatsObserver,
};
const FieldTrialBasedConfig field_trial_config_;
+ RembThrottler remb_throttler_;
WrappingBitrateEstimator remote_bitrate_estimator_;
RemoteEstimatorProxy remote_estimator_proxy_;
};
diff --git a/modules/congestion_controller/pcc/monitor_interval.cc b/modules/congestion_controller/pcc/monitor_interval.cc
index c8efd5b59a..6bc9f4a7ef 100644
--- a/modules/congestion_controller/pcc/monitor_interval.cc
+++ b/modules/congestion_controller/pcc/monitor_interval.cc
@@ -47,7 +47,7 @@ void PccMonitorInterval::OnPacketsFeedback(
feedback_collection_done_ = true;
return;
}
- if (packet_result.receive_time.IsInfinite()) {
+ if (!packet_result.IsReceived()) {
lost_packets_sent_time_.push_back(packet_result.sent_packet.send_time);
} else {
received_packets_.push_back(
diff --git a/modules/congestion_controller/pcc/rtt_tracker.cc b/modules/congestion_controller/pcc/rtt_tracker.cc
index 0814912b49..af9dc8f11b 100644
--- a/modules/congestion_controller/pcc/rtt_tracker.cc
+++ b/modules/congestion_controller/pcc/rtt_tracker.cc
@@ -23,7 +23,7 @@ void RttTracker::OnPacketsFeedback(
Timestamp feedback_received_time) {
TimeDelta packet_rtt = TimeDelta::MinusInfinity();
for (const PacketResult& packet_result : packet_feedbacks) {
- if (packet_result.receive_time.IsInfinite())
+ if (!packet_result.IsReceived())
continue;
packet_rtt = std::max<TimeDelta>(
packet_rtt,
diff --git a/modules/congestion_controller/receive_side_congestion_controller.cc b/modules/congestion_controller/receive_side_congestion_controller.cc
index 638cb2d295..61a126fbe3 100644
--- a/modules/congestion_controller/receive_side_congestion_controller.cc
+++ b/modules/congestion_controller/receive_side_congestion_controller.cc
@@ -10,6 +10,7 @@
#include "modules/congestion_controller/include/receive_side_congestion_controller.h"
+#include "api/units/data_rate.h"
#include "modules/pacing/packet_router.h"
#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
@@ -120,16 +121,13 @@ void ReceiveSideCongestionController::WrappingBitrateEstimator::
ReceiveSideCongestionController::ReceiveSideCongestionController(
Clock* clock,
- PacketRouter* packet_router)
- : ReceiveSideCongestionController(clock, packet_router, nullptr) {}
-
-ReceiveSideCongestionController::ReceiveSideCongestionController(
- Clock* clock,
- PacketRouter* packet_router,
+ RemoteEstimatorProxy::TransportFeedbackSender feedback_sender,
+ RembThrottler::RembSender remb_sender,
NetworkStateEstimator* network_state_estimator)
- : remote_bitrate_estimator_(packet_router, clock),
+ : remb_throttler_(std::move(remb_sender), clock),
+ remote_bitrate_estimator_(&remb_throttler_, clock),
remote_estimator_proxy_(clock,
- packet_router,
+ std::move(feedback_sender),
&field_trial_config_,
network_state_estimator) {}
@@ -186,4 +184,9 @@ void ReceiveSideCongestionController::Process() {
remote_bitrate_estimator_.Process();
}
+void ReceiveSideCongestionController::SetMaxDesiredReceiveBitrate(
+ DataRate bitrate) {
+ remb_throttler_.SetMaxDesiredReceiveBitrate(bitrate);
+}
+
} // namespace webrtc
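
The constructor above now takes two callbacks instead of a PacketRouter*. A minimal wiring sketch, assuming a PacketRouter that still exposes SendCombinedRtcpPacket() and SendRemb() as in the existing pacing module (the helper below is hypothetical and not part of this patch):

#include <memory>
#include <utility>
#include <vector>

#include "modules/congestion_controller/include/receive_side_congestion_controller.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "system_wrappers/include/clock.h"

// Hypothetical helper: adapts an existing PacketRouter to the new
// callback-based constructor.
std::unique_ptr<webrtc::ReceiveSideCongestionController>
CreateReceiveSideController(webrtc::Clock* clock,
                            webrtc::PacketRouter* packet_router) {
  return std::make_unique<webrtc::ReceiveSideCongestionController>(
      clock,
      // Transport-wide feedback is sent out as compound RTCP.
      [packet_router](
          std::vector<std::unique_ptr<webrtc::rtcp::RtcpPacket>> packets) {
        packet_router->SendCombinedRtcpPacket(std::move(packets));
      },
      // REMB messages produced by RembThrottler go through the same router.
      [packet_router](int64_t bitrate_bps, std::vector<uint32_t> ssrcs) {
        packet_router->SendRemb(bitrate_bps, std::move(ssrcs));
      },
      /*network_state_estimator=*/nullptr);
}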
diff --git a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
index 5622c84689..5e03179f42 100644
--- a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
+++ b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
@@ -20,10 +20,8 @@
using ::testing::_;
using ::testing::AtLeast;
-using ::testing::NiceMock;
-using ::testing::Return;
-using ::testing::SaveArg;
-using ::testing::StrictMock;
+using ::testing::ElementsAre;
+using ::testing::MockFunction;
namespace webrtc {
@@ -37,34 +35,28 @@ uint32_t AbsSendTime(int64_t t, int64_t denom) {
return (((t << 18) + (denom >> 1)) / denom) & 0x00fffffful;
}
-class MockPacketRouter : public PacketRouter {
- public:
- MOCK_METHOD(void,
- OnReceiveBitrateChanged,
- (const std::vector<uint32_t>& ssrcs, uint32_t bitrate),
- (override));
-};
-
const uint32_t kInitialBitrateBps = 60000;
} // namespace
namespace test {
-TEST(ReceiveSideCongestionControllerTest, OnReceivedPacketWithAbsSendTime) {
- StrictMock<MockPacketRouter> packet_router;
+TEST(ReceiveSideCongestionControllerTest, SendsRembWithAbsSendTime) {
+ MockFunction<void(std::vector<std::unique_ptr<rtcp::RtcpPacket>>)>
+ feedback_sender;
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
SimulatedClock clock_(123456);
- ReceiveSideCongestionController controller(&clock_, &packet_router);
+ ReceiveSideCongestionController controller(
+ &clock_, feedback_sender.AsStdFunction(), remb_sender.AsStdFunction(),
+ nullptr);
size_t payload_size = 1000;
RTPHeader header;
header.ssrc = 0x11eb21c;
header.extension.hasAbsoluteSendTime = true;
- std::vector<unsigned int> ssrcs;
- EXPECT_CALL(packet_router, OnReceiveBitrateChanged(_, _))
- .WillRepeatedly(SaveArg<0>(&ssrcs));
+ EXPECT_CALL(remb_sender, Call(_, ElementsAre(header.ssrc))).Times(AtLeast(1));
for (int i = 0; i < 10; ++i) {
clock_.AdvanceTimeMilliseconds((1000 * payload_size) / kInitialBitrateBps);
@@ -72,9 +64,20 @@ TEST(ReceiveSideCongestionControllerTest, OnReceivedPacketWithAbsSendTime) {
header.extension.absoluteSendTime = AbsSendTime(now_ms, 1000);
controller.OnReceivedPacket(now_ms, payload_size, header);
}
+}
+
+TEST(ReceiveSideCongestionControllerTest,
+ SendsRembAfterSetMaxDesiredReceiveBitrate) {
+ MockFunction<void(std::vector<std::unique_ptr<rtcp::RtcpPacket>>)>
+ feedback_sender;
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
+ SimulatedClock clock_(123456);
- ASSERT_EQ(1u, ssrcs.size());
- EXPECT_EQ(header.ssrc, ssrcs[0]);
+ ReceiveSideCongestionController controller(
+ &clock_, feedback_sender.AsStdFunction(), remb_sender.AsStdFunction(),
+ nullptr);
+ EXPECT_CALL(remb_sender, Call(123, _));
+ controller.SetMaxDesiredReceiveBitrate(DataRate::BitsPerSec(123));
}
TEST(ReceiveSideCongestionControllerTest, ConvergesToCapacity) {
diff --git a/modules/congestion_controller/remb_throttler.cc b/modules/congestion_controller/remb_throttler.cc
new file mode 100644
index 0000000000..fcc30af9a8
--- /dev/null
+++ b/modules/congestion_controller/remb_throttler.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/congestion_controller/remb_throttler.h"
+
+#include <algorithm>
+#include <utility>
+
+namespace webrtc {
+
+namespace {
+constexpr TimeDelta kRembSendInterval = TimeDelta::Millis(200);
+} // namespace
+
+RembThrottler::RembThrottler(RembSender remb_sender, Clock* clock)
+ : remb_sender_(std::move(remb_sender)),
+ clock_(clock),
+ last_remb_time_(Timestamp::MinusInfinity()),
+ last_send_remb_bitrate_(DataRate::PlusInfinity()),
+ max_remb_bitrate_(DataRate::PlusInfinity()) {}
+
+void RembThrottler::OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
+ uint32_t bitrate_bps) {
+ DataRate receive_bitrate = DataRate::BitsPerSec(bitrate_bps);
+ Timestamp now = clock_->CurrentTime();
+ {
+ MutexLock lock(&mutex_);
+    // Percentage threshold for whether to send a new REMB right away.
+ const int64_t kSendThresholdPercent = 103;
+ if (receive_bitrate * kSendThresholdPercent / 100 >
+ last_send_remb_bitrate_ &&
+ now < last_remb_time_ + kRembSendInterval) {
+ return;
+ }
+ last_remb_time_ = now;
+ last_send_remb_bitrate_ = receive_bitrate;
+ receive_bitrate = std::min(last_send_remb_bitrate_, max_remb_bitrate_);
+ }
+ remb_sender_(receive_bitrate.bps(), ssrcs);
+}
+
+void RembThrottler::SetMaxDesiredReceiveBitrate(DataRate bitrate) {
+ Timestamp now = clock_->CurrentTime();
+ {
+ MutexLock lock(&mutex_);
+ max_remb_bitrate_ = bitrate;
+ if (now - last_remb_time_ < kRembSendInterval &&
+ !last_send_remb_bitrate_.IsZero() &&
+ last_send_remb_bitrate_ <= max_remb_bitrate_) {
+ return;
+ }
+ }
+ remb_sender_(bitrate.bps(), /*ssrcs=*/{});
+}
+
+} // namespace webrtc
diff --git a/modules/congestion_controller/remb_throttler.h b/modules/congestion_controller/remb_throttler.h
new file mode 100644
index 0000000000..67c0280749
--- /dev/null
+++ b/modules/congestion_controller/remb_throttler.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_
+#define MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_
+
+#include <functional>
+#include <vector>
+
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+// RembThrottler is a helper class used for throttling RTCP REMB messages.
+// Throttles small changes to the receive-side bandwidth estimate within 200 ms.
+class RembThrottler : public RemoteBitrateObserver {
+ public:
+ using RembSender =
+ std::function<void(int64_t bitrate_bps, std::vector<uint32_t> ssrcs)>;
+ RembThrottler(RembSender remb_sender, Clock* clock);
+
+  // Ensures that the remote party is notified, via RTCP REMB, of a receive
+  // bitrate no larger than |bitrate|.
+ void SetMaxDesiredReceiveBitrate(DataRate bitrate);
+
+  // Implements RemoteBitrateObserver.
+ // Called every time there is a new bitrate estimate for a receive channel
+ // group. This call will trigger a new RTCP REMB packet if the bitrate
+ // estimate has decreased or if no RTCP REMB packet has been sent for
+ // a certain time interval.
+ void OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
+ uint32_t bitrate_bps) override;
+
+ private:
+ const RembSender remb_sender_;
+ Clock* const clock_;
+ mutable Mutex mutex_;
+ Timestamp last_remb_time_ RTC_GUARDED_BY(mutex_);
+ DataRate last_send_remb_bitrate_ RTC_GUARDED_BY(mutex_);
+ DataRate max_remb_bitrate_ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc
+#endif // MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_
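
A minimal standalone sketch of the throttling behavior, assuming only the constructor, OnReceiveBitrateChanged() and SetMaxDesiredReceiveBitrate() declared in this header; the function name and the RTC_LOG sink standing in for a real REMB sender are illustrative only:

#include <cstdint>
#include <vector>

#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/congestion_controller/remb_throttler.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/clock.h"

void RembThrottlerSketch() {
  webrtc::SimulatedClock clock(webrtc::Timestamp::Zero());
  webrtc::RembThrottler throttler(
      [](int64_t bitrate_bps, std::vector<uint32_t> ssrcs) {
        RTC_LOG(LS_INFO) << "REMB " << bitrate_bps << " bps, " << ssrcs.size()
                         << " ssrc(s)";
      },
      &clock);

  // The first estimate always produces a REMB.
  throttler.OnReceiveBitrateChanged({0x11eb21c}, /*bitrate_bps=*/500000);

  // A small change within 200 ms is suppressed by the 103% threshold.
  clock.AdvanceTime(webrtc::TimeDelta::Millis(50));
  throttler.OnReceiveBitrateChanged({0x11eb21c}, /*bitrate_bps=*/510000);

  // A large decrease is forwarded immediately.
  throttler.OnReceiveBitrateChanged({0x11eb21c}, /*bitrate_bps=*/200000);

  // Capping the receive bitrate below the last sent REMB also triggers a
  // REMB, this time with an empty ssrc list.
  throttler.SetMaxDesiredReceiveBitrate(webrtc::DataRate::BitsPerSec(150000));
}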
diff --git a/modules/congestion_controller/remb_throttler_unittest.cc b/modules/congestion_controller/remb_throttler_unittest.cc
new file mode 100644
index 0000000000..3f8df8a7bb
--- /dev/null
+++ b/modules/congestion_controller/remb_throttler_unittest.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/congestion_controller/remb_throttler.h"
+
+#include <vector>
+
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "system_wrappers/include/clock.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+using ::testing::_;
+using ::testing::MockFunction;
+
+TEST(RembThrottlerTest, CallRembSenderOnFirstReceiveBitrateChange) {
+ SimulatedClock clock(Timestamp::Zero());
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
+ RembThrottler remb_throttler(remb_sender.AsStdFunction(), &clock);
+
+ EXPECT_CALL(remb_sender, Call(12345, std::vector<uint32_t>({1, 2, 3})));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/12345);
+}
+
+TEST(RembThrottlerTest, ThrottlesSmallReceiveBitrateDecrease) {
+ SimulatedClock clock(Timestamp::Zero());
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
+ RembThrottler remb_throttler(remb_sender.AsStdFunction(), &clock);
+
+ EXPECT_CALL(remb_sender, Call);
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/12346);
+ clock.AdvanceTime(TimeDelta::Millis(100));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/12345);
+
+ EXPECT_CALL(remb_sender, Call(12345, _));
+ clock.AdvanceTime(TimeDelta::Millis(101));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/12345);
+}
+
+TEST(RembThrottlerTest, DoNotThrottleLargeReceiveBitrateDecrease) {
+ SimulatedClock clock(Timestamp::Zero());
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
+ RembThrottler remb_throttler(remb_sender.AsStdFunction(), &clock);
+
+ EXPECT_CALL(remb_sender, Call(2345, _));
+ EXPECT_CALL(remb_sender, Call(1234, _));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/2345);
+ clock.AdvanceTime(TimeDelta::Millis(1));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/1234);
+}
+
+TEST(RembThrottlerTest, ThrottlesReceiveBitrateIncrease) {
+ SimulatedClock clock(Timestamp::Zero());
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
+ RembThrottler remb_throttler(remb_sender.AsStdFunction(), &clock);
+
+ EXPECT_CALL(remb_sender, Call);
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/1234);
+ clock.AdvanceTime(TimeDelta::Millis(100));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/2345);
+
+  // Updates more than 200 ms after the previous callback are not throttled.
+ EXPECT_CALL(remb_sender, Call(2345, _));
+ clock.AdvanceTime(TimeDelta::Millis(101));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/2345);
+}
+
+TEST(RembThrottlerTest, CallRembSenderOnSetMaxDesiredReceiveBitrate) {
+ SimulatedClock clock(Timestamp::Zero());
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
+ RembThrottler remb_throttler(remb_sender.AsStdFunction(), &clock);
+ EXPECT_CALL(remb_sender, Call(1234, _));
+ remb_throttler.SetMaxDesiredReceiveBitrate(DataRate::BitsPerSec(1234));
+}
+
+TEST(RembThrottlerTest, CallRembSenderWithMinOfMaxDesiredAndOnReceivedBitrate) {
+ SimulatedClock clock(Timestamp::Zero());
+ MockFunction<void(uint64_t, std::vector<uint32_t>)> remb_sender;
+ RembThrottler remb_throttler(remb_sender.AsStdFunction(), &clock);
+
+ EXPECT_CALL(remb_sender, Call(1234, _));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/1234);
+ clock.AdvanceTime(TimeDelta::Millis(1));
+ remb_throttler.SetMaxDesiredReceiveBitrate(DataRate::BitsPerSec(4567));
+
+ clock.AdvanceTime(TimeDelta::Millis(200));
+ EXPECT_CALL(remb_sender, Call(4567, _));
+ remb_throttler.OnReceiveBitrateChanged({1, 2, 3}, /*bitrate_bps=*/5678);
+}
+
+} // namespace webrtc
diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
index 3849cb3707..933abd9bf0 100644
--- a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
+++ b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
@@ -27,9 +27,9 @@ using ::testing::_;
using ::testing::Invoke;
namespace webrtc {
-namespace webrtc_cc {
namespace {
+constexpr uint32_t kSsrc = 8492;
const PacedPacketInfo kPacingInfo0(0, 5, 2000);
const PacedPacketInfo kPacingInfo1(1, 8, 4000);
const PacedPacketInfo kPacingInfo2(2, 14, 7000);
@@ -49,8 +49,8 @@ void ComparePacketFeedbackVectors(const std::vector<PacketResult>& truth,
// equal. However, the difference must be the same for all x.
TimeDelta arrival_time_delta = truth[0].receive_time - input[0].receive_time;
for (size_t i = 0; i < len; ++i) {
- RTC_CHECK(truth[i].receive_time.IsFinite());
- if (input[i].receive_time.IsFinite()) {
+ RTC_CHECK(truth[i].IsReceived());
+ if (input[i].IsReceived()) {
EXPECT_EQ(truth[i].receive_time - input[i].receive_time,
arrival_time_delta);
}
@@ -77,10 +77,6 @@ PacketResult CreatePacket(int64_t receive_time_ms,
return res;
}
-} // namespace
-
-namespace test {
-
class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
public:
MOCK_METHOD(void,
@@ -89,6 +85,8 @@ class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
(override));
};
+} // namespace
+
class TransportFeedbackAdapterTest : public ::testing::Test {
public:
TransportFeedbackAdapterTest() : clock_(0) {}
@@ -108,7 +106,7 @@ class TransportFeedbackAdapterTest : public ::testing::Test {
void OnSentPacket(const PacketResult& packet_feedback) {
RtpPacketSendInfo packet_info;
- packet_info.ssrc = kSsrc;
+ packet_info.media_ssrc = kSsrc;
packet_info.transport_sequence_number =
packet_feedback.sent_packet.sequence_number;
packet_info.rtp_sequence_number = 0;
@@ -122,8 +120,6 @@ class TransportFeedbackAdapterTest : public ::testing::Test {
packet_feedback.sent_packet.send_time.ms(), rtc::PacketInfo()));
}
- static constexpr uint32_t kSsrc = 8492;
-
SimulatedClock clock_;
std::unique_ptr<TransportFeedbackAdapter> adapter_;
};
@@ -393,7 +389,7 @@ TEST_F(TransportFeedbackAdapterTest, IgnoreDuplicatePacketSentCalls) {
// Add a packet and then mark it as sent.
RtpPacketSendInfo packet_info;
- packet_info.ssrc = kSsrc;
+ packet_info.media_ssrc = kSsrc;
packet_info.transport_sequence_number = packet.sent_packet.sequence_number;
packet_info.length = packet.sent_packet.size.bytes();
packet_info.pacing_info = packet.sent_packet.pacing_info;
@@ -412,6 +408,4 @@ TEST_F(TransportFeedbackAdapterTest, IgnoreDuplicatePacketSentCalls) {
EXPECT_FALSE(duplicate_packet.has_value());
}
-} // namespace test
-} // namespace webrtc_cc
} // namespace webrtc
diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc
index c958a1c3cb..6ab3ad80fa 100644
--- a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc
+++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc
@@ -38,15 +38,16 @@ void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver(
void TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) {
MutexLock lock(&lock_);
- if (packet_info.ssrc != 0) {
- StreamFeedbackObserver::StreamPacketInfo info;
- info.ssrc = packet_info.ssrc;
- info.rtp_sequence_number = packet_info.rtp_sequence_number;
- info.received = false;
- history_.insert(
- {seq_num_unwrapper_.Unwrap(packet_info.transport_sequence_number),
- info});
- }
+
+ StreamFeedbackObserver::StreamPacketInfo info;
+ info.ssrc = packet_info.media_ssrc;
+ info.rtp_sequence_number = packet_info.rtp_sequence_number;
+ info.received = false;
+ info.is_retransmission =
+ packet_info.packet_type == RtpPacketMediaType::kRetransmission;
+ history_.insert(
+ {seq_num_unwrapper_.Unwrap(packet_info.transport_sequence_number), info});
+
while (history_.size() > kMaxPacketsInHistory) {
history_.erase(history_.begin());
}
diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
index 6514a4eda7..482f58d1bb 100644
--- a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
+++ b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
@@ -16,7 +16,11 @@
namespace webrtc {
namespace {
-using ::testing::_;
+using ::testing::AllOf;
+using ::testing::ElementsAre;
+using ::testing::Field;
+using PacketInfo = StreamFeedbackObserver::StreamPacketInfo;
+
static constexpr uint32_t kSsrc = 8492;
class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
@@ -28,41 +32,65 @@ class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
};
RtpPacketSendInfo CreatePacket(uint32_t ssrc,
- int16_t rtp_sequence_number,
- int64_t transport_sequence_number) {
+ uint16_t rtp_sequence_number,
+ int64_t transport_sequence_number,
+ bool is_retransmission) {
RtpPacketSendInfo res;
- res.ssrc = ssrc;
+ res.media_ssrc = ssrc;
res.transport_sequence_number = transport_sequence_number;
res.rtp_sequence_number = rtp_sequence_number;
+ res.packet_type = is_retransmission ? RtpPacketMediaType::kRetransmission
+ : RtpPacketMediaType::kVideo;
return res;
}
} // namespace
+
TEST(TransportFeedbackDemuxerTest, ObserverSanity) {
TransportFeedbackDemuxer demuxer;
MockStreamFeedbackObserver mock;
demuxer.RegisterStreamFeedbackObserver({kSsrc}, &mock);
- demuxer.AddPacket(CreatePacket(kSsrc, 55, 1));
- demuxer.AddPacket(CreatePacket(kSsrc, 56, 2));
- demuxer.AddPacket(CreatePacket(kSsrc, 57, 3));
+ const uint16_t kRtpStartSeq = 55;
+ const int64_t kTransportStartSeq = 1;
+ demuxer.AddPacket(CreatePacket(kSsrc, kRtpStartSeq, kTransportStartSeq,
+ /*is_retransmission=*/false));
+ demuxer.AddPacket(CreatePacket(kSsrc, kRtpStartSeq + 1,
+ kTransportStartSeq + 1,
+ /*is_retransmission=*/false));
+ demuxer.AddPacket(CreatePacket(
+ kSsrc, kRtpStartSeq + 2, kTransportStartSeq + 2, /*is_retransmission=*/true));
rtcp::TransportFeedback feedback;
- feedback.SetBase(1, 1000);
- ASSERT_TRUE(feedback.AddReceivedPacket(1, 1000));
- ASSERT_TRUE(feedback.AddReceivedPacket(2, 2000));
- ASSERT_TRUE(feedback.AddReceivedPacket(3, 3000));
+ feedback.SetBase(kTransportStartSeq, 1000);
+ ASSERT_TRUE(feedback.AddReceivedPacket(kTransportStartSeq, 1000));
+ // Drop middle packet.
+ ASSERT_TRUE(feedback.AddReceivedPacket(kTransportStartSeq + 2, 3000));
- EXPECT_CALL(mock, OnPacketFeedbackVector(_)).Times(1);
+ EXPECT_CALL(
+ mock, OnPacketFeedbackVector(ElementsAre(
+ AllOf(Field(&PacketInfo::received, true),
+ Field(&PacketInfo::ssrc, kSsrc),
+ Field(&PacketInfo::rtp_sequence_number, kRtpStartSeq),
+ Field(&PacketInfo::is_retransmission, false)),
+ AllOf(Field(&PacketInfo::received, false),
+ Field(&PacketInfo::ssrc, kSsrc),
+ Field(&PacketInfo::rtp_sequence_number, kRtpStartSeq + 1),
+ Field(&PacketInfo::is_retransmission, false)),
+ AllOf(Field(&PacketInfo::received, true),
+ Field(&PacketInfo::ssrc, kSsrc),
+ Field(&PacketInfo::rtp_sequence_number, kRtpStartSeq + 2),
+ Field(&PacketInfo::is_retransmission, true)))));
demuxer.OnTransportFeedback(feedback);
demuxer.DeRegisterStreamFeedbackObserver(&mock);
- demuxer.AddPacket(CreatePacket(kSsrc, 58, 4));
+ demuxer.AddPacket(
+ CreatePacket(kSsrc, kRtpStartSeq + 3, kTransportStartSeq + 3, false));
rtcp::TransportFeedback second_feedback;
- second_feedback.SetBase(4, 4000);
- ASSERT_TRUE(second_feedback.AddReceivedPacket(4, 4000));
+ second_feedback.SetBase(kTransportStartSeq + 3, 4000);
+ ASSERT_TRUE(second_feedback.AddReceivedPacket(kTransportStartSeq + 3, 4000));
- EXPECT_CALL(mock, OnPacketFeedbackVector(_)).Times(0);
+ EXPECT_CALL(mock, OnPacketFeedbackVector).Times(0);
demuxer.OnTransportFeedback(second_feedback);
}
} // namespace webrtc
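
For reference, a short sketch of the renamed RtpPacketSendInfo fields in use: media_ssrc replaces ssrc, and packet_type now drives the is_retransmission bit recorded by the demuxer. The helper and the rtp_rtcp_defines.h include path are assumptions for illustration, not part of the patch:

#include <cstdint>

#include "modules/congestion_controller/rtp/transport_feedback_demuxer.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"

// Registers a retransmitted packet with the demuxer using the new fields.
void AddRetransmittedPacket(webrtc::TransportFeedbackDemuxer& demuxer,
                            uint32_t media_ssrc,
                            uint16_t rtp_sequence_number,
                            int64_t transport_sequence_number) {
  webrtc::RtpPacketSendInfo info;
  info.media_ssrc = media_ssrc;
  info.rtp_sequence_number = rtp_sequence_number;
  info.transport_sequence_number = transport_sequence_number;
  info.packet_type = webrtc::RtpPacketMediaType::kRetransmission;
  demuxer.AddPacket(info);
}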
diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn
index 4281becd68..25b92bed45 100644
--- a/modules/desktop_capture/BUILD.gn
+++ b/modules/desktop_capture/BUILD.gn
@@ -145,9 +145,15 @@ if (rtc_include_tests) {
sources += [ "screen_capturer_mac_unittest.cc" ]
}
if (rtc_enable_win_wgc) {
- sources += [ "win/wgc_capturer_win_unittest.cc" ]
+ sources += [
+ "win/wgc_capture_source_unittest.cc",
+ "win/wgc_capturer_win_unittest.cc",
+ ]
}
- deps += [ ":desktop_capture_mock" ]
+ deps += [
+ ":desktop_capture_mock",
+ "../../system_wrappers:metrics",
+ ]
public_configs = [ ":x11_config" ]
}
}
diff --git a/modules/desktop_capture/cropping_window_capturer_win.cc b/modules/desktop_capture/cropping_window_capturer_win.cc
index de36adb01e..c52ca136d0 100644
--- a/modules/desktop_capture/cropping_window_capturer_win.cc
+++ b/modules/desktop_capture/cropping_window_capturer_win.cc
@@ -130,6 +130,8 @@ class CroppingWindowCapturerWin : public CroppingWindowCapturer {
public:
explicit CroppingWindowCapturerWin(const DesktopCaptureOptions& options)
: CroppingWindowCapturer(options),
+ enumerate_current_process_windows_(
+ options.enumerate_current_process_windows()),
full_screen_window_detector_(options.full_screen_window_detector()) {}
void CaptureFrame() override;
@@ -148,6 +150,8 @@ class CroppingWindowCapturerWin : public CroppingWindowCapturer {
WindowCaptureHelperWin window_capture_helper_;
+ bool enumerate_current_process_windows_;
+
rtc::scoped_refptr<FullScreenWindowDetector> full_screen_window_detector_;
};
@@ -164,7 +168,12 @@ void CroppingWindowCapturerWin::CaptureFrame() {
// it uses responsiveness check which could lead to performance
// issues.
SourceList result;
- if (!webrtc::GetWindowList(GetWindowListFlags::kNone, &result))
+ int window_list_flags =
+ enumerate_current_process_windows_
+ ? GetWindowListFlags::kNone
+ : GetWindowListFlags::kIgnoreCurrentProcessWindows;
+
+ if (!webrtc::GetWindowList(window_list_flags, &result))
return false;
// Filter out windows not visible on current desktop
diff --git a/modules/desktop_capture/desktop_capture_options.h b/modules/desktop_capture/desktop_capture_options.h
index 521c80b5c5..a693803aa0 100644
--- a/modules/desktop_capture/desktop_capture_options.h
+++ b/modules/desktop_capture/desktop_capture_options.h
@@ -98,6 +98,24 @@ class RTC_EXPORT DesktopCaptureOptions {
}
#if defined(WEBRTC_WIN)
+ // Enumerating windows owned by the current process on Windows has some
+ // complications due to |GetWindowText*()| APIs potentially causing a
+ // deadlock (see the comments in the |GetWindowListHandler()| function in
+ // window_capture_utils.cc for more details on the deadlock).
+ // To avoid this issue, consumers can either ensure that the thread that runs
+ // their message loop never waits on |GetSourceList()|, or they can set this
+ // flag to false which will prevent windows running in the current process
+ // from being enumerated and included in the results. Consumers can still
+ // provide the WindowId for their own windows to |SelectSource()| and capture
+ // them.
+ bool enumerate_current_process_windows() const {
+ return enumerate_current_process_windows_;
+ }
+ void set_enumerate_current_process_windows(
+ bool enumerate_current_process_windows) {
+ enumerate_current_process_windows_ = enumerate_current_process_windows;
+ }
+
bool allow_use_magnification_api() const {
return allow_use_magnification_api_;
}
@@ -126,7 +144,19 @@ class RTC_EXPORT DesktopCaptureOptions {
void set_allow_cropping_window_capturer(bool allow) {
allow_cropping_window_capturer_ = allow;
}
-#endif
+
+#if defined(RTC_ENABLE_WIN_WGC)
+ // This flag enables the WGC capturer for both window and screen capture.
+ // This capturer should offer similar or better performance than the cropping
+ // capturer without the disadvantages listed above. However, the WGC capturer
+ // is only available on Windows 10 version 1809 (Redstone 5) and up. This flag
+ // will have no effect on older versions.
+ // If set, and running a supported version of Win10, this flag will take
+ // precedence over the cropping, directx, and magnification flags.
+ bool allow_wgc_capturer() const { return allow_wgc_capturer_; }
+ void set_allow_wgc_capturer(bool allow) { allow_wgc_capturer_ = allow; }
+#endif // defined(RTC_ENABLE_WIN_WGC)
+#endif // defined(WEBRTC_WIN)
#if defined(WEBRTC_USE_PIPEWIRE)
bool allow_pipewire() const { return allow_pipewire_; }
@@ -146,9 +176,13 @@ class RTC_EXPORT DesktopCaptureOptions {
rtc::scoped_refptr<FullScreenWindowDetector> full_screen_window_detector_;
#if defined(WEBRTC_WIN)
+ bool enumerate_current_process_windows_ = true;
bool allow_use_magnification_api_ = false;
bool allow_directx_capturer_ = false;
bool allow_cropping_window_capturer_ = false;
+#if defined(RTC_ENABLE_WIN_WGC)
+ bool allow_wgc_capturer_ = false;
+#endif
#endif
#if defined(WEBRTC_USE_X11)
bool use_update_notifications_ = false;
diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc
index 8d8bdd5835..735aa4d530 100644
--- a/modules/desktop_capture/desktop_capturer.cc
+++ b/modules/desktop_capture/desktop_capturer.cc
@@ -54,10 +54,8 @@ bool DesktopCapturer::IsOccluded(const DesktopVector& pos) {
std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateWindowCapturer(
const DesktopCaptureOptions& options) {
#if defined(RTC_ENABLE_WIN_WGC)
- // TODO(bugs.webrtc.org/11760): Add a WebRTC field trial (or similar
- // mechanism) check here that leads to use of the WGC capturer once it is
- // fully implemented.
- if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN10_RS5) {
+ if (options.allow_wgc_capturer() &&
+ rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN10_RS5) {
return WgcCapturerWin::CreateRawWindowCapturer(options);
}
#endif // defined(RTC_ENABLE_WIN_WGC)
@@ -80,10 +78,8 @@ std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateWindowCapturer(
std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateScreenCapturer(
const DesktopCaptureOptions& options) {
#if defined(RTC_ENABLE_WIN_WGC)
- // TODO(bugs.webrtc.org/11760): Add a WebRTC field trial (or similar
- // mechanism) check here that leads to use of the WGC capturer once it is
- // fully implemented.
- if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN10_RS5) {
+ if (options.allow_wgc_capturer() &&
+ rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN10_RS5) {
return WgcCapturerWin::CreateRawScreenCapturer(options);
}
#endif // defined(RTC_ENABLE_WIN_WGC)
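
A minimal sketch of opting in to the WGC path from the embedder side, assuming DesktopCaptureOptions::CreateDefault() from the existing options API; the helper name is illustrative. On builds without RTC_ENABLE_WIN_WGC, or on Windows releases before 10 RS5, the call falls back to the other capturers as shown above:

#include <memory>

#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"

std::unique_ptr<webrtc::DesktopCapturer> CreateWindowCapturerForApp() {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
#if defined(WEBRTC_WIN)
  // Skip windows owned by this process to avoid the GetWindowText* deadlock.
  options.set_enumerate_current_process_windows(false);
#if defined(RTC_ENABLE_WIN_WGC)
  // Opt in to the WGC capturer; ignored on Windows releases before 10 RS5.
  options.set_allow_wgc_capturer(true);
#endif
#endif
  return webrtc::DesktopCapturer::CreateWindowCapturer(options);
}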
diff --git a/modules/desktop_capture/full_screen_window_detector.h b/modules/desktop_capture/full_screen_window_detector.h
index 46fb607b7d..ca30d95de4 100644
--- a/modules/desktop_capture/full_screen_window_detector.h
+++ b/modules/desktop_capture/full_screen_window_detector.h
@@ -32,7 +32,8 @@ namespace webrtc {
// window using criteria provided by application specific
// FullScreenApplicationHandler.
-class FullScreenWindowDetector : public rtc::RefCountedBase {
+class FullScreenWindowDetector
+ : public rtc::RefCountedNonVirtual<FullScreenWindowDetector> {
public:
using ApplicationHandlerFactory =
std::function<std::unique_ptr<FullScreenApplicationHandler>(
diff --git a/modules/desktop_capture/linux/base_capturer_pipewire.cc b/modules/desktop_capture/linux/base_capturer_pipewire.cc
index c302a086ea..e5d001e476 100644
--- a/modules/desktop_capture/linux/base_capturer_pipewire.cc
+++ b/modules/desktop_capture/linux/base_capturer_pipewire.cc
@@ -772,37 +772,27 @@ void BaseCapturerPipeWire::HandleBuffer(pw_buffer* buffer) {
// Use video metadata when video size from metadata is set and smaller than
// video stream size, so we need to adjust it.
- bool video_is_full_width = true;
- bool video_is_full_height = true;
+ bool video_metadata_use = false;
+
#if PW_CHECK_VERSION(0, 3, 0)
- if (video_metadata && video_metadata->region.size.width != 0 &&
- video_metadata->region.size.height != 0) {
- if (video_metadata->region.size.width <
- static_cast<uint32_t>(desktop_size_.width())) {
- video_is_full_width = false;
- } else if (video_metadata->region.size.height <
- static_cast<uint32_t>(desktop_size_.height())) {
- video_is_full_height = false;
- }
- }
+ const struct spa_rectangle* video_metadata_size =
+ video_metadata ? &video_metadata->region.size : nullptr;
#else
- if (video_metadata && video_metadata->width != 0 &&
- video_metadata->height != 0) {
- if (video_metadata->width < desktop_size_.width()) {
- video_is_full_width = false;
- } else if (video_metadata->height < desktop_size_.height()) {
- video_is_full_height = false;
- }
- }
+ const struct spa_meta_video_crop* video_metadata_size = video_metadata;
#endif
+ if (video_metadata_size && video_metadata_size->width != 0 &&
+ video_metadata_size->height != 0 &&
+ (static_cast<int>(video_metadata_size->width) < desktop_size_.width() ||
+ static_cast<int>(video_metadata_size->height) <
+ desktop_size_.height())) {
+ video_metadata_use = true;
+ }
+
DesktopSize video_size_prev = video_size_;
- if (!video_is_full_height || !video_is_full_width) {
-#if PW_CHECK_VERSION(0, 3, 0)
- video_size_ = DesktopSize(video_metadata->region.size.width,
- video_metadata->region.size.height);
-#else
- video_size_ = DesktopSize(video_metadata->width, video_metadata->height);
-#endif
+ if (video_metadata_use) {
+ video_size_ =
+ DesktopSize(video_metadata_size->width, video_metadata_size->height);
} else {
video_size_ = desktop_size_;
}
@@ -827,25 +817,25 @@ void BaseCapturerPipeWire::HandleBuffer(pw_buffer* buffer) {
// Adjust source content based on metadata video position
#if PW_CHECK_VERSION(0, 3, 0)
- if (!video_is_full_height &&
+ if (video_metadata_use &&
(video_metadata->region.position.y + video_size_.height() <=
desktop_size_.height())) {
src += src_stride * video_metadata->region.position.y;
}
const int x_offset =
- !video_is_full_width &&
+ video_metadata_use &&
(video_metadata->region.position.x + video_size_.width() <=
desktop_size_.width())
? video_metadata->region.position.x * kBytesPerPixel
: 0;
#else
- if (!video_is_full_height &&
+ if (video_metadata_use &&
(video_metadata->y + video_size_.height() <= desktop_size_.height())) {
src += src_stride * video_metadata->y;
}
const int x_offset =
- !video_is_full_width &&
+ video_metadata_use &&
(video_metadata->x + video_size_.width() <= desktop_size_.width())
? video_metadata->x * kBytesPerPixel
: 0;
@@ -1036,6 +1026,23 @@ void BaseCapturerPipeWire::SourcesRequest() {
// We don't want to allow selection of multiple sources.
g_variant_builder_add(&builder, "{sv}", "multiple",
g_variant_new_boolean(false));
+
+ Scoped<GVariant> variant(
+ g_dbus_proxy_get_cached_property(proxy_, "AvailableCursorModes"));
+ if (variant.get()) {
+ uint32_t modes = 0;
+ g_variant_get(variant.get(), "u", &modes);
+ // Request the mouse cursor to be embedded as part of the stream; otherwise
+ // it is hidden by default. Make the request only if this mode is advertised
+ // by the portal implementation.
+ if (modes &
+ static_cast<uint32_t>(BaseCapturerPipeWire::CursorMode::kEmbedded)) {
+ g_variant_builder_add(&builder, "{sv}", "cursor_mode",
+ g_variant_new_uint32(static_cast<uint32_t>(
+ BaseCapturerPipeWire::CursorMode::kEmbedded)));
+ }
+ }
+
variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
g_variant_builder_add(&builder, "{sv}", "handle_token",
g_variant_new_string(variant_string.get()));
diff --git a/modules/desktop_capture/linux/base_capturer_pipewire.h b/modules/desktop_capture/linux/base_capturer_pipewire.h
index 75d20dbf1d..52264188a7 100644
--- a/modules/desktop_capture/linux/base_capturer_pipewire.h
+++ b/modules/desktop_capture/linux/base_capturer_pipewire.h
@@ -47,6 +47,12 @@ class BaseCapturerPipeWire : public DesktopCapturer {
kAny = 0b11
};
+ enum class CursorMode : uint32_t {
+ kHidden = 0b01,
+ kEmbedded = 0b10,
+ kMetadata = 0b100
+ };
+
explicit BaseCapturerPipeWire(CaptureSourceType source_type);
~BaseCapturerPipeWire() override;
diff --git a/modules/desktop_capture/linux/shared_x_display.h b/modules/desktop_capture/linux/shared_x_display.h
index 64c498c134..dd52e456ca 100644
--- a/modules/desktop_capture/linux/shared_x_display.h
+++ b/modules/desktop_capture/linux/shared_x_display.h
@@ -28,7 +28,8 @@ typedef union _XEvent XEvent;
namespace webrtc {
// A ref-counted object to store XDisplay connection.
-class RTC_EXPORT SharedXDisplay : public rtc::RefCountedBase {
+class RTC_EXPORT SharedXDisplay
+ : public rtc::RefCountedNonVirtual<SharedXDisplay> {
public:
class XEventHandler {
public:
@@ -38,9 +39,6 @@ class RTC_EXPORT SharedXDisplay : public rtc::RefCountedBase {
virtual bool HandleXEvent(const XEvent& event) = 0;
};
- // Takes ownership of |display|.
- explicit SharedXDisplay(Display* display);
-
// Creates a new X11 Display for the |display_name|. NULL is returned if X11
// connection failed. Equivalent to CreateDefault() when |display_name| is
// empty.
@@ -65,8 +63,11 @@ class RTC_EXPORT SharedXDisplay : public rtc::RefCountedBase {
void IgnoreXServerGrabs();
+ ~SharedXDisplay();
+
protected:
- ~SharedXDisplay() override;
+ // Takes ownership of |display|.
+ explicit SharedXDisplay(Display* display);
private:
typedef std::map<int, std::vector<XEventHandler*> > EventHandlersMap;
diff --git a/modules/desktop_capture/linux/x_error_trap.cc b/modules/desktop_capture/linux/x_error_trap.cc
index 53c907fc45..903aa86ab3 100644
--- a/modules/desktop_capture/linux/x_error_trap.cc
+++ b/modules/desktop_capture/linux/x_error_trap.cc
@@ -13,16 +13,10 @@
#include <assert.h>
#include <stddef.h>
-#if defined(TOOLKIT_GTK)
-#include <gdk/gdk.h>
-#endif // !defined(TOOLKIT_GTK)
-
namespace webrtc {
namespace {
-#if !defined(TOOLKIT_GTK)
-
// TODO(sergeyu): This code is not thread safe. Fix it. Bug 2202.
static bool g_xserver_error_trap_enabled = false;
static int g_last_xserver_error_code = 0;
@@ -33,32 +27,22 @@ int XServerErrorHandler(Display* display, XErrorEvent* error_event) {
return 0;
}
-#endif // !defined(TOOLKIT_GTK)
-
} // namespace
XErrorTrap::XErrorTrap(Display* display)
: original_error_handler_(NULL), enabled_(true) {
-#if defined(TOOLKIT_GTK)
- gdk_error_trap_push();
-#else // !defined(TOOLKIT_GTK)
assert(!g_xserver_error_trap_enabled);
original_error_handler_ = XSetErrorHandler(&XServerErrorHandler);
g_xserver_error_trap_enabled = true;
g_last_xserver_error_code = 0;
-#endif // !defined(TOOLKIT_GTK)
}
int XErrorTrap::GetLastErrorAndDisable() {
enabled_ = false;
-#if defined(TOOLKIT_GTK)
- return gdk_error_trap_push();
-#else // !defined(TOOLKIT_GTK)
assert(g_xserver_error_trap_enabled);
XSetErrorHandler(original_error_handler_);
g_xserver_error_trap_enabled = false;
return g_last_xserver_error_code;
-#endif // !defined(TOOLKIT_GTK)
}
XErrorTrap::~XErrorTrap() {
diff --git a/modules/desktop_capture/mac/desktop_configuration_monitor.h b/modules/desktop_capture/mac/desktop_configuration_monitor.h
index 46a66d1d4c..aa0ebfbacc 100644
--- a/modules/desktop_capture/mac/desktop_configuration_monitor.h
+++ b/modules/desktop_capture/mac/desktop_configuration_monitor.h
@@ -25,15 +25,15 @@ namespace webrtc {
// The class provides functions to synchronize capturing and display
// reconfiguring across threads, and the up-to-date MacDesktopConfiguration.
-class DesktopConfigurationMonitor : public rtc::RefCountedBase {
+class DesktopConfigurationMonitor final
+ : public rtc::RefCountedNonVirtual<DesktopConfigurationMonitor> {
public:
DesktopConfigurationMonitor();
+ ~DesktopConfigurationMonitor();
+
// Returns the current desktop configuration.
MacDesktopConfiguration desktop_configuration();
- protected:
- ~DesktopConfigurationMonitor() override;
-
private:
static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
CGDisplayChangeSummaryFlags flags,
diff --git a/modules/desktop_capture/screen_drawer_unittest.cc b/modules/desktop_capture/screen_drawer_unittest.cc
index c38eee6991..2394260105 100644
--- a/modules/desktop_capture/screen_drawer_unittest.cc
+++ b/modules/desktop_capture/screen_drawer_unittest.cc
@@ -48,13 +48,12 @@ void TestScreenDrawerLock(
~Task() = default;
- static void RunTask(void* me) {
- Task* task = static_cast<Task*>(me);
- std::unique_ptr<ScreenDrawerLock> lock = task->ctor_();
+ void RunTask() {
+ std::unique_ptr<ScreenDrawerLock> lock = ctor_();
ASSERT_TRUE(!!lock);
- task->created_->store(true);
+ created_->store(true);
// Wait for the main thread to get the signal of created_.
- while (!task->ready_.load()) {
+ while (!ready_.load()) {
SleepMs(1);
}
// At this point, main thread should begin to create a second lock. Though
@@ -77,8 +76,8 @@ void TestScreenDrawerLock(
const rtc::FunctionView<std::unique_ptr<ScreenDrawerLock>()> ctor_;
} task(&created, ready, ctor);
- rtc::PlatformThread lock_thread(&Task::RunTask, &task, "lock_thread");
- lock_thread.Start();
+ auto lock_thread = rtc::PlatformThread::SpawnJoinable(
+ [&task] { task.RunTask(); }, "lock_thread");
// Wait for the first lock in Task::RunTask() to be created.
// TODO(zijiehe): Find a better solution to wait for the creation of the first
@@ -95,7 +94,6 @@ void TestScreenDrawerLock(
ASSERT_GT(kLockDurationMs, rtc::TimeMillis() - start_ms);
ctor();
ASSERT_LE(kLockDurationMs, rtc::TimeMillis() - start_ms);
- lock_thread.Stop();
}
} // namespace
diff --git a/modules/desktop_capture/shared_desktop_frame.h b/modules/desktop_capture/shared_desktop_frame.h
index fd862d7f21..1f451b65df 100644
--- a/modules/desktop_capture/shared_desktop_frame.h
+++ b/modules/desktop_capture/shared_desktop_frame.h
@@ -23,7 +23,7 @@ namespace webrtc {
// SharedDesktopFrame is a DesktopFrame that may have multiple instances all
// sharing the same buffer.
-class RTC_EXPORT SharedDesktopFrame : public DesktopFrame {
+class RTC_EXPORT SharedDesktopFrame final : public DesktopFrame {
public:
~SharedDesktopFrame() override;
@@ -51,7 +51,7 @@ class RTC_EXPORT SharedDesktopFrame : public DesktopFrame {
bool IsShared();
private:
- typedef rtc::RefCountedObject<std::unique_ptr<DesktopFrame>> Core;
+ typedef rtc::FinalRefCountedObject<std::unique_ptr<DesktopFrame>> Core;
SharedDesktopFrame(rtc::scoped_refptr<Core> core);
diff --git a/modules/desktop_capture/win/screen_capture_utils.cc b/modules/desktop_capture/win/screen_capture_utils.cc
index b66e4912d8..53b6dd399c 100644
--- a/modules/desktop_capture/win/screen_capture_utils.cc
+++ b/modules/desktop_capture/win/screen_capture_utils.cc
@@ -99,6 +99,18 @@ bool IsMonitorValid(const HMONITOR monitor) {
return GetMonitorInfoA(monitor, &monitor_info);
}
+DesktopRect GetMonitorRect(const HMONITOR monitor) {
+ MONITORINFO monitor_info;
+ monitor_info.cbSize = sizeof(MONITORINFO);
+ if (!GetMonitorInfoA(monitor, &monitor_info)) {
+ return DesktopRect();
+ }
+
+ return DesktopRect::MakeLTRB(
+ monitor_info.rcMonitor.left, monitor_info.rcMonitor.top,
+ monitor_info.rcMonitor.right, monitor_info.rcMonitor.bottom);
+}
+
bool IsScreenValid(const DesktopCapturer::SourceId screen,
std::wstring* device_key) {
if (screen == kFullDesktopScreenId) {
diff --git a/modules/desktop_capture/win/screen_capture_utils.h b/modules/desktop_capture/win/screen_capture_utils.h
index 86d92e1d71..dc993dad25 100644
--- a/modules/desktop_capture/win/screen_capture_utils.h
+++ b/modules/desktop_capture/win/screen_capture_utils.h
@@ -37,6 +37,10 @@ bool GetHmonitorFromDeviceIndex(const DesktopCapturer::SourceId device_index,
// WM_DISPLAYCHANGE message has been received.
bool IsMonitorValid(const HMONITOR monitor);
+// Returns the rect of the monitor identified by |monitor|, relative to the
+// primary display's top-left. On failure, returns an empty rect.
+DesktopRect GetMonitorRect(const HMONITOR monitor);
+
// Returns true if |screen| is a valid screen. The screen device key is
// returned through |device_key| if the screen is valid. The device key can be
// used in GetScreenRect to verify the screen matches the previously obtained
diff --git a/modules/desktop_capture/win/test_support/test_window.cc b/modules/desktop_capture/win/test_support/test_window.cc
index bcbadecfaf..c07ff74aa5 100644
--- a/modules/desktop_capture/win/test_support/test_window.cc
+++ b/modules/desktop_capture/win/test_support/test_window.cc
@@ -75,8 +75,16 @@ WindowInfo CreateTestWindow(const WCHAR* window_title,
}
void ResizeTestWindow(const HWND hwnd, const int width, const int height) {
+ // SWP_NOMOVE results in the x and y params being ignored.
::SetWindowPos(hwnd, HWND_TOP, /*x-coord=*/0, /*y-coord=*/0, width, height,
- SWP_SHOWWINDOW);
+ SWP_SHOWWINDOW | SWP_NOMOVE);
+ ::UpdateWindow(hwnd);
+}
+
+void MoveTestWindow(const HWND hwnd, const int x, const int y) {
+ // SWP_NOSIZE results in the width and height params being ignored.
+ ::SetWindowPos(hwnd, HWND_TOP, x, y, /*width=*/0, /*height=*/0,
+ SWP_SHOWWINDOW | SWP_NOSIZE);
::UpdateWindow(hwnd);
}
diff --git a/modules/desktop_capture/win/test_support/test_window.h b/modules/desktop_capture/win/test_support/test_window.h
index 05727684ea..8701dc990b 100644
--- a/modules/desktop_capture/win/test_support/test_window.h
+++ b/modules/desktop_capture/win/test_support/test_window.h
@@ -38,6 +38,8 @@ WindowInfo CreateTestWindow(const WCHAR* window_title,
void ResizeTestWindow(const HWND hwnd, const int width, const int height);
+void MoveTestWindow(const HWND hwnd, const int x, const int y);
+
void MinimizeTestWindow(const HWND hwnd);
void UnminimizeTestWindow(const HWND hwnd);
diff --git a/modules/desktop_capture/win/wgc_capture_source.cc b/modules/desktop_capture/win/wgc_capture_source.cc
index f894a1ec3c..9786ca67b5 100644
--- a/modules/desktop_capture/win/wgc_capture_source.cc
+++ b/modules/desktop_capture/win/wgc_capture_source.cc
@@ -11,6 +11,8 @@
#include "modules/desktop_capture/win/wgc_capture_source.h"
#include <windows.graphics.capture.interop.h>
+#include <windows.h>
+
#include <utility>
#include "modules/desktop_capture/win/screen_capture_utils.h"
@@ -26,6 +28,18 @@ WgcCaptureSource::WgcCaptureSource(DesktopCapturer::SourceId source_id)
: source_id_(source_id) {}
WgcCaptureSource::~WgcCaptureSource() = default;
+bool WgcCaptureSource::IsCapturable() {
+ // If we can create a capture item, then we can capture it. Unfortunately,
+ // we can't cache this item because it may be created in a different COM
+ // apartment than where capture will eventually start from.
+ ComPtr<WGC::IGraphicsCaptureItem> item;
+ return SUCCEEDED(CreateCaptureItem(&item));
+}
+
+bool WgcCaptureSource::FocusOnSource() {
+ return false;
+}
+
HRESULT WgcCaptureSource::GetCaptureItem(
ComPtr<WGC::IGraphicsCaptureItem>* result) {
HRESULT hr = S_OK;
@@ -36,14 +50,6 @@ HRESULT WgcCaptureSource::GetCaptureItem(
return hr;
}
-bool WgcCaptureSource::IsCapturable() {
- // If we can create a capture item, then we can capture it. Unfortunately,
- // we can't cache this item because it may be created in a different COM
- // apartment than where capture will eventually start from.
- ComPtr<WGC::IGraphicsCaptureItem> item;
- return SUCCEEDED(CreateCaptureItem(&item));
-}
-
WgcCaptureSourceFactory::~WgcCaptureSourceFactory() = default;
WgcWindowSourceFactory::WgcWindowSourceFactory() = default;
@@ -66,6 +72,14 @@ WgcWindowSource::WgcWindowSource(DesktopCapturer::SourceId source_id)
: WgcCaptureSource(source_id) {}
WgcWindowSource::~WgcWindowSource() = default;
+DesktopVector WgcWindowSource::GetTopLeft() {
+ DesktopRect window_rect;
+ if (!GetWindowRect(reinterpret_cast<HWND>(GetSourceId()), &window_rect))
+ return DesktopVector();
+
+ return window_rect.top_left();
+}
+
bool WgcWindowSource::IsCapturable() {
if (!IsWindowValidAndVisible(reinterpret_cast<HWND>(GetSourceId())))
return false;
@@ -73,6 +87,14 @@ bool WgcWindowSource::IsCapturable() {
return WgcCaptureSource::IsCapturable();
}
+bool WgcWindowSource::FocusOnSource() {
+ if (!IsWindowValidAndVisible(reinterpret_cast<HWND>(GetSourceId())))
+ return false;
+
+ return ::BringWindowToTop(reinterpret_cast<HWND>(GetSourceId())) &&
+ ::SetForegroundWindow(reinterpret_cast<HWND>(GetSourceId()));
+}
+
HRESULT WgcWindowSource::CreateCaptureItem(
ComPtr<WGC::IGraphicsCaptureItem>* result) {
if (!ResolveCoreWinRTDelayload())
@@ -99,17 +121,26 @@ HRESULT WgcWindowSource::CreateCaptureItem(
}
WgcScreenSource::WgcScreenSource(DesktopCapturer::SourceId source_id)
- : WgcCaptureSource(source_id) {}
+ : WgcCaptureSource(source_id) {
+ // Getting the HMONITOR could fail if the source_id is invalid. In that case,
+ // we leave hmonitor_ uninitialized and |IsCapturable()| will fail.
+ HMONITOR hmon;
+ if (GetHmonitorFromDeviceIndex(GetSourceId(), &hmon))
+ hmonitor_ = hmon;
+}
+
WgcScreenSource::~WgcScreenSource() = default;
-bool WgcScreenSource::IsCapturable() {
- if (!hmonitor_) {
- HMONITOR hmon;
- if (!GetHmonitorFromDeviceIndex(GetSourceId(), &hmon))
- return false;
+DesktopVector WgcScreenSource::GetTopLeft() {
+ if (!hmonitor_)
+ return DesktopVector();
- hmonitor_ = hmon;
- }
+ return GetMonitorRect(*hmonitor_).top_left();
+}
+
+bool WgcScreenSource::IsCapturable() {
+ if (!hmonitor_)
+ return false;
if (!IsMonitorValid(*hmonitor_))
return false;
diff --git a/modules/desktop_capture/win/wgc_capture_source.h b/modules/desktop_capture/win/wgc_capture_source.h
index a5599c620d..135f92bb84 100644
--- a/modules/desktop_capture/win/wgc_capture_source.h
+++ b/modules/desktop_capture/win/wgc_capture_source.h
@@ -18,6 +18,7 @@
#include "absl/types/optional.h"
#include "modules/desktop_capture/desktop_capturer.h"
+#include "modules/desktop_capture/desktop_geometry.h"
namespace webrtc {
@@ -30,7 +31,9 @@ class WgcCaptureSource {
explicit WgcCaptureSource(DesktopCapturer::SourceId source_id);
virtual ~WgcCaptureSource();
+ virtual DesktopVector GetTopLeft() = 0;
virtual bool IsCapturable();
+ virtual bool FocusOnSource();
HRESULT GetCaptureItem(
Microsoft::WRL::ComPtr<
ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>* result);
@@ -92,7 +95,9 @@ class WgcWindowSource final : public WgcCaptureSource {
~WgcWindowSource() override;
+ DesktopVector GetTopLeft() override;
bool IsCapturable() override;
+ bool FocusOnSource() override;
private:
HRESULT CreateCaptureItem(
@@ -111,6 +116,7 @@ class WgcScreenSource final : public WgcCaptureSource {
~WgcScreenSource() override;
+ DesktopVector GetTopLeft() override;
bool IsCapturable() override;
private:
diff --git a/modules/desktop_capture/win/wgc_capture_source_unittest.cc b/modules/desktop_capture/win/wgc_capture_source_unittest.cc
new file mode 100644
index 0000000000..a230e12578
--- /dev/null
+++ b/modules/desktop_capture/win/wgc_capture_source_unittest.cc
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/desktop_capture/win/wgc_capture_source.h"
+
+#include <windows.graphics.capture.h>
+#include <wrl/client.h>
+
+#include <utility>
+
+#include "modules/desktop_capture/desktop_capture_types.h"
+#include "modules/desktop_capture/desktop_geometry.h"
+#include "modules/desktop_capture/win/screen_capture_utils.h"
+#include "modules/desktop_capture/win/test_support/test_window.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/win/scoped_com_initializer.h"
+#include "rtc_base/win/windows_version.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+const WCHAR kWindowTitle[] = L"WGC Capture Source Test Window";
+
+const int kFirstXCoord = 25;
+const int kFirstYCoord = 50;
+const int kSecondXCoord = 50;
+const int kSecondYCoord = 75;
+
+enum SourceType { kWindowSource = 0, kScreenSource = 1 };
+
+} // namespace
+
+class WgcCaptureSourceTest : public ::testing::TestWithParam<SourceType> {
+ public:
+ void SetUp() override {
+ if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_WIN10_RS5) {
+ RTC_LOG(LS_INFO)
+ << "Skipping WgcCaptureSourceTests on Windows versions < RS5.";
+ GTEST_SKIP();
+ }
+
+ com_initializer_ =
+ std::make_unique<ScopedCOMInitializer>(ScopedCOMInitializer::kMTA);
+ ASSERT_TRUE(com_initializer_->Succeeded());
+ }
+
+ void TearDown() override {
+ if (window_open_) {
+ DestroyTestWindow(window_info_);
+ }
+ }
+
+ void SetUpForWindowSource() {
+ window_info_ = CreateTestWindow(kWindowTitle);
+ window_open_ = true;
+ source_id_ = reinterpret_cast<DesktopCapturer::SourceId>(window_info_.hwnd);
+ source_factory_ = std::make_unique<WgcWindowSourceFactory>();
+ }
+
+ void SetUpForScreenSource() {
+ source_id_ = kFullDesktopScreenId;
+ source_factory_ = std::make_unique<WgcScreenSourceFactory>();
+ }
+
+ protected:
+ std::unique_ptr<ScopedCOMInitializer> com_initializer_;
+ std::unique_ptr<WgcCaptureSourceFactory> source_factory_;
+ std::unique_ptr<WgcCaptureSource> source_;
+ DesktopCapturer::SourceId source_id_;
+ WindowInfo window_info_;
+ bool window_open_ = false;
+};
+
+// Window specific test
+TEST_F(WgcCaptureSourceTest, WindowPosition) {
+ SetUpForWindowSource();
+ source_ = source_factory_->CreateCaptureSource(source_id_);
+ ASSERT_TRUE(source_);
+ EXPECT_EQ(source_->GetSourceId(), source_id_);
+
+ MoveTestWindow(window_info_.hwnd, kFirstXCoord, kFirstYCoord);
+ DesktopVector source_vector = source_->GetTopLeft();
+ EXPECT_EQ(source_vector.x(), kFirstXCoord);
+ EXPECT_EQ(source_vector.y(), kFirstYCoord);
+
+ MoveTestWindow(window_info_.hwnd, kSecondXCoord, kSecondYCoord);
+ source_vector = source_->GetTopLeft();
+ EXPECT_EQ(source_vector.x(), kSecondXCoord);
+ EXPECT_EQ(source_vector.y(), kSecondYCoord);
+}
+
+// Screen specific test
+TEST_F(WgcCaptureSourceTest, ScreenPosition) {
+ SetUpForScreenSource();
+ source_ = source_factory_->CreateCaptureSource(source_id_);
+ ASSERT_TRUE(source_);
+ EXPECT_EQ(source_id_, source_->GetSourceId());
+
+ DesktopRect screen_rect = GetFullscreenRect();
+ DesktopVector source_vector = source_->GetTopLeft();
+ EXPECT_EQ(source_vector.x(), screen_rect.left());
+ EXPECT_EQ(source_vector.y(), screen_rect.top());
+}
+
+// Source agnostic test
+TEST_P(WgcCaptureSourceTest, CreateSource) {
+ if (GetParam() == SourceType::kWindowSource) {
+ SetUpForWindowSource();
+ } else {
+ SetUpForScreenSource();
+ }
+
+ source_ = source_factory_->CreateCaptureSource(source_id_);
+ ASSERT_TRUE(source_);
+ EXPECT_EQ(source_id_, source_->GetSourceId());
+ EXPECT_TRUE(source_->IsCapturable());
+
+ Microsoft::WRL::ComPtr<ABI::Windows::Graphics::Capture::IGraphicsCaptureItem>
+ item;
+ EXPECT_TRUE(SUCCEEDED(source_->GetCaptureItem(&item)));
+ EXPECT_TRUE(item);
+}
+
+INSTANTIATE_TEST_SUITE_P(SourceAgnostic,
+ WgcCaptureSourceTest,
+ ::testing::Values(SourceType::kWindowSource,
+ SourceType::kScreenSource));
+
+} // namespace webrtc
diff --git a/modules/desktop_capture/win/wgc_capturer_win.cc b/modules/desktop_capture/win/wgc_capturer_win.cc
index 0d4848e3e2..442c827a67 100644
--- a/modules/desktop_capture/win/wgc_capturer_win.cc
+++ b/modules/desktop_capture/win/wgc_capturer_win.cc
@@ -57,7 +57,8 @@ std::unique_ptr<DesktopCapturer> WgcCapturerWin::CreateRawWindowCapturer(
const DesktopCaptureOptions& options) {
return std::make_unique<WgcCapturerWin>(
std::make_unique<WgcWindowSourceFactory>(),
- std::make_unique<WindowEnumerator>());
+ std::make_unique<WindowEnumerator>(
+ options.enumerate_current_process_windows()));
}
// static
@@ -77,6 +78,13 @@ bool WgcCapturerWin::SelectSource(DesktopCapturer::SourceId id) {
return capture_source_->IsCapturable();
}
+bool WgcCapturerWin::FocusOnSelectedSource() {
+ if (!capture_source_)
+ return false;
+
+ return capture_source_->FocusOnSource();
+}
+
void WgcCapturerWin::Start(Callback* callback) {
RTC_DCHECK(!callback_);
RTC_DCHECK(callback);
@@ -192,6 +200,7 @@ void WgcCapturerWin::CaptureFrame() {
frame->set_capture_time_ms(capture_time_ms);
frame->set_capturer_id(DesktopCapturerId::kWgcCapturerWin);
frame->set_may_contain_cursor(true);
+ frame->set_top_left(capture_source_->GetTopLeft());
RecordWgcCapturerResult(WgcCapturerResult::kSuccess);
callback_->OnCaptureResult(DesktopCapturer::Result::SUCCESS,
std::move(frame));
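The hunks above add window focusing and per-frame position reporting to the WGC capturer. A minimal consumer-side sketch of how the two additions fit together follows; it only uses calls that appear in this patch (CreateRawWindowCapturer, SelectSource, FocusOnSelectedSource, Start, CaptureFrame), while frame_sink and window_source_id are hypothetical stand-ins for the caller's own state.

  // Sketch only: frame_sink is a hypothetical DesktopCapturer::Callback
  // implementation and window_source_id is a previously enumerated source.
  std::unique_ptr<DesktopCapturer> capturer =
      WgcCapturerWin::CreateRawWindowCapturer(
          DesktopCaptureOptions::CreateDefault());
  capturer->SelectSource(window_source_id);
  // Optionally bring the selected window to the foreground before capturing.
  capturer->FocusOnSelectedSource();
  capturer->Start(&frame_sink);
  capturer->CaptureFrame();
  // In frame_sink's OnCaptureResult(), frame->top_left() now carries the
  // source's offset in full-desktop coordinates.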
diff --git a/modules/desktop_capture/win/wgc_capturer_win.h b/modules/desktop_capture/win/wgc_capturer_win.h
index 1171d15fad..58f3fc318a 100644
--- a/modules/desktop_capture/win/wgc_capturer_win.h
+++ b/modules/desktop_capture/win/wgc_capturer_win.h
@@ -38,7 +38,8 @@ class SourceEnumerator {
class WindowEnumerator final : public SourceEnumerator {
public:
- WindowEnumerator() = default;
+ explicit WindowEnumerator(bool enumerate_current_process_windows)
+ : enumerate_current_process_windows_(enumerate_current_process_windows) {}
WindowEnumerator(const WindowEnumerator&) = delete;
WindowEnumerator& operator=(const WindowEnumerator&) = delete;
@@ -48,12 +49,13 @@ class WindowEnumerator final : public SourceEnumerator {
bool FindAllSources(DesktopCapturer::SourceList* sources) override {
// WGC fails to capture windows with the WS_EX_TOOLWINDOW style, so we
// provide it as a filter to ensure windows with the style are not returned.
- return window_capture_helper_.EnumerateCapturableWindows(sources,
- WS_EX_TOOLWINDOW);
+ return window_capture_helper_.EnumerateCapturableWindows(
+ sources, enumerate_current_process_windows_, WS_EX_TOOLWINDOW);
}
private:
WindowCaptureHelperWin window_capture_helper_;
+ bool enumerate_current_process_windows_;
};
class ScreenEnumerator final : public SourceEnumerator {
@@ -94,6 +96,7 @@ class WgcCapturerWin : public DesktopCapturer {
// DesktopCapturer interface.
bool GetSourceList(SourceList* sources) override;
bool SelectSource(SourceId id) override;
+ bool FocusOnSelectedSource() override;
void Start(Callback* callback) override;
void CaptureFrame() override;
diff --git a/modules/desktop_capture/win/wgc_capturer_win_unittest.cc b/modules/desktop_capture/win/wgc_capturer_win_unittest.cc
index 1056c821c3..ebfb576e63 100644
--- a/modules/desktop_capture/win/wgc_capturer_win_unittest.cc
+++ b/modules/desktop_capture/win/wgc_capturer_win_unittest.cc
@@ -77,7 +77,7 @@ class WgcCapturerWinTest : public ::testing::TestWithParam<CaptureType>,
void SetUp() override {
if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_WIN10_RS5) {
RTC_LOG(LS_INFO)
- << "Skipping WgcWindowCaptureTests on Windows versions < RS5.";
+ << "Skipping WgcCapturerWinTests on Windows versions < RS5.";
GTEST_SKIP();
}
@@ -335,11 +335,18 @@ INSTANTIATE_TEST_SUITE_P(SourceAgnostic,
CaptureType::kScreenCapture));
// Monitor specific tests.
-TEST_F(WgcCapturerWinTest, CaptureAllMonitors) {
+TEST_F(WgcCapturerWinTest, FocusOnMonitor) {
SetUpForScreenCapture();
- // 0 (or a NULL HMONITOR) leads to WGC capturing all displays.
EXPECT_TRUE(capturer_->SelectSource(0));
+ // You can't set focus on a monitor.
+ EXPECT_FALSE(capturer_->FocusOnSelectedSource());
+}
+
+TEST_F(WgcCapturerWinTest, CaptureAllMonitors) {
+ SetUpForScreenCapture();
+ EXPECT_TRUE(capturer_->SelectSource(kFullDesktopScreenId));
+
capturer_->Start(this);
DoCapture();
EXPECT_GT(frame_->size().width(), 0);
@@ -347,6 +354,22 @@ TEST_F(WgcCapturerWinTest, CaptureAllMonitors) {
}
// Window specific tests.
+TEST_F(WgcCapturerWinTest, FocusOnWindow) {
+ capturer_ = WgcCapturerWin::CreateRawWindowCapturer(
+ DesktopCaptureOptions::CreateDefault());
+ window_info_ = CreateTestWindow(kWindowTitle);
+ source_id_ = GetScreenIdFromSourceList();
+
+ EXPECT_TRUE(capturer_->SelectSource(source_id_));
+ EXPECT_TRUE(capturer_->FocusOnSelectedSource());
+
+ HWND hwnd = reinterpret_cast<HWND>(source_id_);
+ EXPECT_EQ(hwnd, ::GetActiveWindow());
+ EXPECT_EQ(hwnd, ::GetForegroundWindow());
+ EXPECT_EQ(hwnd, ::GetFocus());
+ DestroyTestWindow(window_info_);
+}
+
TEST_F(WgcCapturerWinTest, SelectMinimizedWindow) {
SetUpForWindowCapture();
MinimizeTestWindow(reinterpret_cast<HWND>(source_id_));
diff --git a/modules/desktop_capture/win/window_capture_utils.cc b/modules/desktop_capture/win/window_capture_utils.cc
index 7c5cc70087..aaaef0a80d 100644
--- a/modules/desktop_capture/win/window_capture_utils.cc
+++ b/modules/desktop_capture/win/window_capture_utils.cc
@@ -32,26 +32,21 @@ struct GetWindowListParams {
DesktopCapturer::SourceList* result)
: ignore_untitled(flags & GetWindowListFlags::kIgnoreUntitled),
ignore_unresponsive(flags & GetWindowListFlags::kIgnoreUnresponsive),
+ ignore_current_process_windows(
+ flags & GetWindowListFlags::kIgnoreCurrentProcessWindows),
ex_style_filters(ex_style_filters),
result(result) {}
const bool ignore_untitled;
const bool ignore_unresponsive;
+ const bool ignore_current_process_windows;
const LONG ex_style_filters;
DesktopCapturer::SourceList* const result;
};
-// If a window is owned by the current process and unresponsive, then making a
-// blocking call such as GetWindowText may lead to a deadlock.
-//
-// https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getwindowtexta#remarks
-bool CanSafelyMakeBlockingCalls(HWND hwnd) {
+bool IsWindowOwnedByCurrentProcess(HWND hwnd) {
DWORD process_id;
GetWindowThreadProcessId(hwnd, &process_id);
- if (process_id != GetCurrentProcessId() || IsWindowResponding(hwnd)) {
- return true;
- }
-
- return false;
+ return process_id == GetCurrentProcessId();
}
BOOL CALLBACK GetWindowListHandler(HWND hwnd, LPARAM param) {
@@ -85,11 +80,26 @@ BOOL CALLBACK GetWindowListHandler(HWND hwnd, LPARAM param) {
window.id = reinterpret_cast<WindowId>(hwnd);
// GetWindowText* are potentially blocking operations if |hwnd| is
- // owned by the current process, and can lead to a deadlock if the message
- // pump is waiting on this thread. If we've filtered out unresponsive
- // windows, this is not a concern, but otherwise we need to check if we can
- // safely make blocking calls.
- if (params->ignore_unresponsive || CanSafelyMakeBlockingCalls(hwnd)) {
+ // owned by the current process. The APIs will send messages to the window's
+ // message loop, and if the message loop is waiting on this operation we will
+ // enter a deadlock.
+ // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getwindowtexta#remarks
+ //
+ // To help consumers avoid this, there is a DesktopCaptureOption to ignore
+ // windows owned by the current process. Consumers should either ensure that
+ // the thread running their message loop never waits on this operation, or use
+ // the option to exclude these windows from the source list.
+ bool owned_by_current_process = IsWindowOwnedByCurrentProcess(hwnd);
+ if (owned_by_current_process && params->ignore_current_process_windows) {
+ return TRUE;
+ }
+
+ // Even if consumers request to enumerate windows owned by the current
+ // process, we should not call GetWindowText* on unresponsive windows owned by
+ // the current process because we will hang. Unfortunately, we could still
+ // hang if the window becomes unresponsive after this check, hence the option
+ // to avoid these completely.
+ if (!owned_by_current_process || IsWindowResponding(hwnd)) {
const size_t kTitleLength = 500;
WCHAR window_title[kTitleLength] = L"";
if (GetWindowTextLength(hwnd) != 0 &&
@@ -445,10 +455,15 @@ bool WindowCaptureHelperWin::IsWindowCloaked(HWND hwnd) {
bool WindowCaptureHelperWin::EnumerateCapturableWindows(
DesktopCapturer::SourceList* results,
+ bool enumerate_current_process_windows,
LONG ex_style_filters) {
- if (!webrtc::GetWindowList((GetWindowListFlags::kIgnoreUntitled |
- GetWindowListFlags::kIgnoreUnresponsive),
- results, ex_style_filters)) {
+ int flags = (GetWindowListFlags::kIgnoreUntitled |
+ GetWindowListFlags::kIgnoreUnresponsive);
+ if (!enumerate_current_process_windows) {
+ flags |= GetWindowListFlags::kIgnoreCurrentProcessWindows;
+ }
+
+ if (!webrtc::GetWindowList(flags, results, ex_style_filters)) {
return false;
}
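The comment block added above explains why GetWindowText* can deadlock when the target window belongs to the current process, and why the new kIgnoreCurrentProcessWindows flag exists. A short sketch of how a caller composes the flags when enumerating; GetWindowList and the flag names come from this patch, while the error handling is illustrative only.

  // Skip untitled, unresponsive, and same-process windows so that no
  // blocking GetWindowText* call can deadlock against our message loop.
  DesktopCapturer::SourceList sources;
  int flags = GetWindowListFlags::kIgnoreUntitled |
              GetWindowListFlags::kIgnoreUnresponsive |
              GetWindowListFlags::kIgnoreCurrentProcessWindows;
  if (!webrtc::GetWindowList(flags, &sources)) {
    // Native enumeration failed; report the error to the caller.
  }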
diff --git a/modules/desktop_capture/win/window_capture_utils.h b/modules/desktop_capture/win/window_capture_utils.h
index 11b2c2c1b5..a6a295d068 100644
--- a/modules/desktop_capture/win/window_capture_utils.h
+++ b/modules/desktop_capture/win/window_capture_utils.h
@@ -78,6 +78,7 @@ enum GetWindowListFlags {
kNone = 0x00,
kIgnoreUntitled = 1 << 0,
kIgnoreUnresponsive = 1 << 1,
+ kIgnoreCurrentProcessWindows = 1 << 2,
};
// Retrieves the list of top-level windows on the screen.
@@ -85,7 +86,8 @@ enum GetWindowListFlags {
// - Those that are invisible or minimized.
// - Program Manager & Start menu.
// - [with kIgnoreUntitled] windows with no title.
-// - [with kIgnoreUnresponsive] windows that unresponsive.
+// - [with kIgnoreUnresponsive] windows that are unresponsive.
+// - [with kIgnoreCurrentProcessWindows] windows owned by the current process.
// - Any windows with extended styles that match |ex_style_filters|.
// Returns false if native APIs failed.
bool GetWindowList(int flags,
@@ -115,6 +117,7 @@ class WindowCaptureHelperWin {
// extended window styles (e.g. WS_EX_TOOLWINDOW) and prevent windows that
// match from being included in |results|.
bool EnumerateCapturableWindows(DesktopCapturer::SourceList* results,
+ bool enumerate_current_process_windows,
LONG ex_style_filters = 0);
private:
diff --git a/modules/desktop_capture/win/window_capture_utils_unittest.cc b/modules/desktop_capture/win/window_capture_utils_unittest.cc
index 52f6714383..4b426fc464 100644
--- a/modules/desktop_capture/win/window_capture_utils_unittest.cc
+++ b/modules/desktop_capture/win/window_capture_utils_unittest.cc
@@ -137,4 +137,18 @@ TEST(WindowCaptureUtilsTest, IgnoreUntitledWindows) {
DestroyTestWindow(info);
}
+TEST(WindowCaptureUtilsTest, IgnoreCurrentProcessWindows) {
+ WindowInfo info = CreateTestWindow(kWindowTitle);
+ DesktopCapturer::SourceList window_list;
+ ASSERT_TRUE(GetWindowList(GetWindowListFlags::kIgnoreCurrentProcessWindows,
+ &window_list));
+ EXPECT_EQ(std::find_if(window_list.begin(), window_list.end(),
+ [&info](DesktopCapturer::Source window) {
+ return reinterpret_cast<HWND>(window.id) ==
+ info.hwnd;
+ }),
+ window_list.end());
+ DestroyTestWindow(info);
+}
+
} // namespace webrtc
diff --git a/modules/desktop_capture/win/window_capturer_win_gdi.cc b/modules/desktop_capture/win/window_capturer_win_gdi.cc
index 277c02e7c5..25677e9868 100644
--- a/modules/desktop_capture/win/window_capturer_win_gdi.cc
+++ b/modules/desktop_capture/win/window_capturer_win_gdi.cc
@@ -95,11 +95,14 @@ BOOL CALLBACK OwnedWindowCollector(HWND hwnd, LPARAM param) {
return TRUE;
}
-WindowCapturerWinGdi::WindowCapturerWinGdi() {}
+WindowCapturerWinGdi::WindowCapturerWinGdi(
+ bool enumerate_current_process_windows)
+ : enumerate_current_process_windows_(enumerate_current_process_windows) {}
WindowCapturerWinGdi::~WindowCapturerWinGdi() {}
bool WindowCapturerWinGdi::GetSourceList(SourceList* sources) {
- if (!window_capture_helper_.EnumerateCapturableWindows(sources))
+ if (!window_capture_helper_.EnumerateCapturableWindows(
+ sources, enumerate_current_process_windows_))
return false;
std::map<HWND, DesktopSize> new_map;
@@ -350,7 +353,8 @@ WindowCapturerWinGdi::CaptureResults WindowCapturerWinGdi::CaptureFrame(
if (!owned_windows_.empty()) {
if (!owned_window_capturer_) {
- owned_window_capturer_ = std::make_unique<WindowCapturerWinGdi>();
+ owned_window_capturer_ = std::make_unique<WindowCapturerWinGdi>(
+ enumerate_current_process_windows_);
}
// Owned windows are stored in top-down z-order, so this iterates in
@@ -389,7 +393,8 @@ WindowCapturerWinGdi::CaptureResults WindowCapturerWinGdi::CaptureFrame(
// static
std::unique_ptr<DesktopCapturer> WindowCapturerWinGdi::CreateRawWindowCapturer(
const DesktopCaptureOptions& options) {
- return std::unique_ptr<DesktopCapturer>(new WindowCapturerWinGdi());
+ return std::unique_ptr<DesktopCapturer>(
+ new WindowCapturerWinGdi(options.enumerate_current_process_windows()));
}
} // namespace webrtc
diff --git a/modules/desktop_capture/win/window_capturer_win_gdi.h b/modules/desktop_capture/win/window_capturer_win_gdi.h
index c954c230c9..5091458a12 100644
--- a/modules/desktop_capture/win/window_capturer_win_gdi.h
+++ b/modules/desktop_capture/win/window_capturer_win_gdi.h
@@ -24,7 +24,7 @@ namespace webrtc {
class WindowCapturerWinGdi : public DesktopCapturer {
public:
- WindowCapturerWinGdi();
+ explicit WindowCapturerWinGdi(bool enumerate_current_process_windows);
// Disallow copy and assign
WindowCapturerWinGdi(const WindowCapturerWinGdi&) = delete;
@@ -61,6 +61,8 @@ class WindowCapturerWinGdi : public DesktopCapturer {
WindowCaptureHelperWin window_capture_helper_;
+ bool enumerate_current_process_windows_;
+
// This map is used to avoid flickering for the case when SelectWindow() calls
// are interleaved with Capture() calls.
std::map<HWND, DesktopSize> window_size_map_;
diff --git a/modules/desktop_capture/window_capturer_unittest.cc b/modules/desktop_capture/window_capturer_unittest.cc
index 8a611e760a..3989c28f16 100644
--- a/modules/desktop_capture/window_capturer_unittest.cc
+++ b/modules/desktop_capture/window_capturer_unittest.cc
@@ -54,8 +54,9 @@ TEST_F(WindowCapturerTest, Enumerate) {
}
}
-// Flaky on Linux. See: crbug.com/webrtc/7830
-#if defined(WEBRTC_LINUX)
+// Flaky on Linux. See: crbug.com/webrtc/7830.
+// Failing on macOS 11: See bugs.webrtc.org/12801
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
#define MAYBE_Capture DISABLED_Capture
#else
#define MAYBE_Capture Capture
diff --git a/modules/include/module_common_types_public.h b/modules/include/module_common_types_public.h
index 345e45ce12..de74bf6946 100644
--- a/modules/include/module_common_types_public.h
+++ b/modules/include/module_common_types_public.h
@@ -10,7 +10,7 @@
#ifndef MODULES_INCLUDE_MODULE_COMMON_TYPES_PUBLIC_H_
#define MODULES_INCLUDE_MODULE_COMMON_TYPES_PUBLIC_H_
-
+#include <cstdint>
#include <limits>
#include "absl/types/optional.h"
diff --git a/modules/pacing/packet_router.cc b/modules/pacing/packet_router.cc
index 5317f510c9..3b1278e504 100644
--- a/modules/pacing/packet_router.cc
+++ b/modules/pacing/packet_router.cc
@@ -27,20 +27,11 @@
#include "rtc_base/trace_event.h"
namespace webrtc {
-namespace {
-
-constexpr int kRembSendIntervalMs = 200;
-
-} // namespace
PacketRouter::PacketRouter() : PacketRouter(0) {}
PacketRouter::PacketRouter(uint16_t start_transport_seq)
: last_send_module_(nullptr),
- last_remb_time_ms_(rtc::TimeMillis()),
- last_send_bitrate_bps_(0),
- bitrate_bps_(0),
- max_bitrate_bps_(std::numeric_limits<decltype(max_bitrate_bps_)>::max()),
active_remb_module_(nullptr),
transport_seq_(start_transport_seq) {}
@@ -235,77 +226,19 @@ uint16_t PacketRouter::CurrentTransportSequenceNumber() const {
return transport_seq_ & 0xFFFF;
}
-void PacketRouter::OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
- uint32_t bitrate_bps) {
- // % threshold for if we should send a new REMB asap.
- const int64_t kSendThresholdPercent = 97;
- // TODO(danilchap): Remove receive_bitrate_bps variable and the cast
- // when OnReceiveBitrateChanged takes bitrate as int64_t.
- int64_t receive_bitrate_bps = static_cast<int64_t>(bitrate_bps);
-
- int64_t now_ms = rtc::TimeMillis();
- {
- MutexLock lock(&remb_mutex_);
-
- // If we already have an estimate, check if the new total estimate is below
- // kSendThresholdPercent of the previous estimate.
- if (last_send_bitrate_bps_ > 0) {
- int64_t new_remb_bitrate_bps =
- last_send_bitrate_bps_ - bitrate_bps_ + receive_bitrate_bps;
-
- if (new_remb_bitrate_bps <
- kSendThresholdPercent * last_send_bitrate_bps_ / 100) {
- // The new bitrate estimate is less than kSendThresholdPercent % of the
- // last report. Send a REMB asap.
- last_remb_time_ms_ = now_ms - kRembSendIntervalMs;
- }
- }
- bitrate_bps_ = receive_bitrate_bps;
-
- if (now_ms - last_remb_time_ms_ < kRembSendIntervalMs) {
- return;
- }
- // NOTE: Updated if we intend to send the data; we might not have
- // a module to actually send it.
- last_remb_time_ms_ = now_ms;
- last_send_bitrate_bps_ = receive_bitrate_bps;
- // Cap the value to send in remb with configured value.
- receive_bitrate_bps = std::min(receive_bitrate_bps, max_bitrate_bps_);
- }
- SendRemb(receive_bitrate_bps, ssrcs);
-}
-
-void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) {
- RTC_DCHECK_GE(bitrate_bps, 0);
- {
- MutexLock lock(&remb_mutex_);
- max_bitrate_bps_ = bitrate_bps;
- if (rtc::TimeMillis() - last_remb_time_ms_ < kRembSendIntervalMs &&
- last_send_bitrate_bps_ > 0 &&
- last_send_bitrate_bps_ <= max_bitrate_bps_) {
- // Recent measured bitrate is already below the cap.
- return;
- }
- }
- SendRemb(bitrate_bps, /*ssrcs=*/{});
-}
-
-bool PacketRouter::SendRemb(int64_t bitrate_bps,
- const std::vector<uint32_t>& ssrcs) {
+void PacketRouter::SendRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) {
MutexLock lock(&modules_mutex_);
if (!active_remb_module_) {
- return false;
+ return;
}
// The Add* and Remove* methods above ensure that REMB is disabled on all
// other modules, because otherwise, they will send REMB with stale info.
- active_remb_module_->SetRemb(bitrate_bps, ssrcs);
-
- return true;
+ active_remb_module_->SetRemb(bitrate_bps, std::move(ssrcs));
}
-bool PacketRouter::SendCombinedRtcpPacket(
+void PacketRouter::SendCombinedRtcpPacket(
std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets) {
MutexLock lock(&modules_mutex_);
@@ -315,15 +248,14 @@ bool PacketRouter::SendCombinedRtcpPacket(
continue;
}
rtp_module->SendCombinedRtcpPacket(std::move(packets));
- return true;
+ return;
}
if (rtcp_feedback_senders_.empty()) {
- return false;
+ return;
}
auto* rtcp_sender = rtcp_feedback_senders_[0];
rtcp_sender->SendCombinedRtcpPacket(std::move(packets));
- return true;
}
void PacketRouter::AddRembModuleCandidate(
diff --git a/modules/pacing/packet_router.h b/modules/pacing/packet_router.h
index 2fa104b4cd..7a6e24d7ea 100644
--- a/modules/pacing/packet_router.h
+++ b/modules/pacing/packet_router.h
@@ -39,9 +39,7 @@ class RtpRtcpInterface;
// module if possible (sender report), otherwise on receive module
// (receiver report). For the latter case, we also keep track of the
// receive modules.
-class PacketRouter : public RemoteBitrateObserver,
- public TransportFeedbackSenderInterface,
- public PacingController::PacketSender {
+class PacketRouter : public PacingController::PacketSender {
public:
PacketRouter();
explicit PacketRouter(uint16_t start_transport_seq);
@@ -62,24 +60,12 @@ class PacketRouter : public RemoteBitrateObserver,
uint16_t CurrentTransportSequenceNumber() const;
- // Called every time there is a new bitrate estimate for a receive channel
- // group. This call will trigger a new RTCP REMB packet if the bitrate
- // estimate has decreased or if no RTCP REMB packet has been sent for
- // a certain time interval.
- // Implements RtpReceiveBitrateUpdate.
- void OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
- uint32_t bitrate_bps) override;
-
- // Ensures remote party notified of the receive bitrate limit no larger than
- // |bitrate_bps|.
- void SetMaxDesiredReceiveBitrate(int64_t bitrate_bps);
-
// Send REMB feedback.
- bool SendRemb(int64_t bitrate_bps, const std::vector<uint32_t>& ssrcs);
+ void SendRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs);
// Sends |packets| in one or more IP packets.
- bool SendCombinedRtcpPacket(
- std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets) override;
+ void SendCombinedRtcpPacket(
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets);
private:
void AddRembModuleCandidate(RtcpFeedbackSenderInterface* candidate_module,
@@ -107,16 +93,6 @@ class PacketRouter : public RemoteBitrateObserver,
std::vector<RtcpFeedbackSenderInterface*> rtcp_feedback_senders_
RTC_GUARDED_BY(modules_mutex_);
- // TODO(eladalon): remb_mutex_ only ever held from one function, and it's not
- // clear if that function can actually be called from more than one thread.
- Mutex remb_mutex_;
- // The last time a REMB was sent.
- int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_mutex_);
- int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
- // The last bitrate update.
- int64_t bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
- int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
-
// Candidates for the REMB module can be RTP sender/receiver modules, with
// the sender modules taking precedence.
std::vector<RtcpFeedbackSenderInterface*> sender_remb_candidates_
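With the REMB throttling state deleted, PacketRouter no longer decides when a REMB goes out; callers invoke SendRemb() directly and the router only chooses which registered module carries it. A minimal sketch of the new call pattern, mirroring the updated unit tests below; any rate limiting is assumed to now live in the caller, and this patch does not show where it moved.

  PacketRouter packet_router;
  // rtp_module is any RtpRtcpInterface implementation (a mock in the tests).
  // Register it as a REMB candidate; the router picks the active sender.
  packet_router.AddSendRtpModule(&rtp_module, /*remb_candidate=*/true);

  // The caller decides when to send. SendRemb() simply forwards to the active
  // REMB module and is a no-op when none is registered.
  packet_router.SendRemb(/*bitrate_bps=*/250000, /*ssrcs=*/{1234, 5678});

  packet_router.RemoveSendRtpModule(&rtp_module);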
diff --git a/modules/pacing/packet_router_unittest.cc b/modules/pacing/packet_router_unittest.cc
index 10cf98b3dd..77fe5f9f8d 100644
--- a/modules/pacing/packet_router_unittest.cc
+++ b/modules/pacing/packet_router_unittest.cc
@@ -74,25 +74,19 @@ TEST_F(PacketRouterTest, Sanity_NoModuleRegistered_GeneratePadding) {
EXPECT_TRUE(packet_router_.GeneratePadding(bytes).empty());
}
-TEST_F(PacketRouterTest, Sanity_NoModuleRegistered_OnReceiveBitrateChanged) {
- const std::vector<uint32_t> ssrcs = {1, 2, 3};
- constexpr uint32_t bitrate_bps = 10000;
-
- packet_router_.OnReceiveBitrateChanged(ssrcs, bitrate_bps);
-}
TEST_F(PacketRouterTest, Sanity_NoModuleRegistered_SendRemb) {
const std::vector<uint32_t> ssrcs = {1, 2, 3};
constexpr uint32_t bitrate_bps = 10000;
-
- EXPECT_FALSE(packet_router_.SendRemb(bitrate_bps, ssrcs));
+ // Expect not to crash
+ packet_router_.SendRemb(bitrate_bps, ssrcs);
}
TEST_F(PacketRouterTest, Sanity_NoModuleRegistered_SendTransportFeedback) {
std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback;
feedback.push_back(std::make_unique<rtcp::TransportFeedback>());
-
- EXPECT_FALSE(packet_router_.SendCombinedRtcpPacket(std::move(feedback)));
+ // Expect not to crash
+ packet_router_.SendCombinedRtcpPacket(std::move(feedback));
}
TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) {
@@ -327,10 +321,10 @@ TEST_F(PacketRouterTest, SendTransportFeedback) {
std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback;
feedback.push_back(std::make_unique<rtcp::TransportFeedback>());
- EXPECT_CALL(rtp_1, SendCombinedRtcpPacket).Times(1);
+ EXPECT_CALL(rtp_1, SendCombinedRtcpPacket);
packet_router_.SendCombinedRtcpPacket(std::move(feedback));
packet_router_.RemoveSendRtpModule(&rtp_1);
- EXPECT_CALL(rtp_2, SendCombinedRtcpPacket).Times(1);
+ EXPECT_CALL(rtp_2, SendCombinedRtcpPacket);
std::vector<std::unique_ptr<rtcp::RtcpPacket>> new_feedback;
new_feedback.push_back(std::make_unique<rtcp::TransportFeedback>());
packet_router_.SendCombinedRtcpPacket(std::move(new_feedback));
@@ -442,86 +436,7 @@ TEST_F(PacketRouterDeathTest, RemovalOfNeverAddedReceiveModuleDisallowed) {
}
#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(PacketRouterRembTest, LowerEstimateToSendRemb) {
- rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcpInterface> rtp;
- PacketRouter packet_router;
-
- packet_router.AddSendRtpModule(&rtp, true);
-
- uint32_t bitrate_estimate = 456;
- const std::vector<uint32_t> ssrcs = {1234};
-
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- clock.AdvanceTime(TimeDelta::Millis(1000));
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Lower the estimate with more than 3% to trigger a call to SetRemb right
- // away.
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- packet_router.RemoveSendRtpModule(&rtp);
-}
-
-TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) {
- rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcpInterface> rtp;
- PacketRouter packet_router;
- packet_router.AddSendRtpModule(&rtp, true);
-
- uint32_t bitrate_estimate[] = {456, 789};
- std::vector<uint32_t> ssrcs = {1234, 5678};
-
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate[0], ssrcs)).Times(1);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]);
-
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1] + 100);
-
- // Lower the estimate to trigger a callback.
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate[1], ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1]);
-
- packet_router.RemoveSendRtpModule(&rtp);
-}
-
-TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) {
- rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcpInterface> rtp;
- PacketRouter packet_router;
- packet_router.AddSendRtpModule(&rtp, true);
-
- uint32_t bitrate_estimate = 456;
- std::vector<uint32_t> ssrcs = {1234, 5678};
-
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Increased estimate shouldn't trigger a callback right away.
- EXPECT_CALL(rtp, SetRemb(_, _)).Times(0);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate + 1);
-
- // Decreasing the estimate less than 3% shouldn't trigger a new callback.
- EXPECT_CALL(rtp, SetRemb(_, _)).Times(0);
- int lower_estimate = bitrate_estimate * 98 / 100;
- packet_router.OnReceiveBitrateChanged(ssrcs, lower_estimate);
-
- packet_router.RemoveSendRtpModule(&rtp);
-}
-
-TEST(PacketRouterRembTest, ChangeSendRtpModule) {
+TEST(PacketRouterRembTest, ChangeSendRtpModuleChangeRembSender) {
rtc::ScopedFakeClock clock;
NiceMock<MockRtpRtcpInterface> rtp_send;
NiceMock<MockRtpRtcpInterface> rtp_recv;
@@ -532,191 +447,18 @@ TEST(PacketRouterRembTest, ChangeSendRtpModule) {
uint32_t bitrate_estimate = 456;
std::vector<uint32_t> ssrcs = {1234, 5678};
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- clock.AdvanceTime(TimeDelta::Millis(1000));
- EXPECT_CALL(rtp_send, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Decrease estimate to trigger a REMB.
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp_send, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ EXPECT_CALL(rtp_send, SetRemb(bitrate_estimate, ssrcs));
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Remove the sending module -> should get remb on the second module.
packet_router.RemoveSendRtpModule(&rtp_send);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp_recv, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ EXPECT_CALL(rtp_recv, SetRemb(bitrate_estimate, ssrcs));
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
packet_router.RemoveReceiveRtpModule(&rtp_recv);
}
-TEST(PacketRouterRembTest, OnlyOneRembForRepeatedOnReceiveBitrateChanged) {
- rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcpInterface> rtp;
- PacketRouter packet_router;
- packet_router.AddSendRtpModule(&rtp, true);
-
- uint32_t bitrate_estimate = 456;
- const std::vector<uint32_t> ssrcs = {1234};
-
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- clock.AdvanceTime(TimeDelta::Millis(1000));
- EXPECT_CALL(rtp, SetRemb(_, _)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Lower the estimate, should trigger a call to SetRemb right away.
- bitrate_estimate = bitrate_estimate - 100;
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Call OnReceiveBitrateChanged again, this should not trigger a new callback.
- EXPECT_CALL(rtp, SetRemb(_, _)).Times(0);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
- packet_router.RemoveSendRtpModule(&rtp);
-}
-
-TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateLimitsSetRemb) {
- rtc::ScopedFakeClock clock;
- PacketRouter packet_router;
- clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcpInterface> remb_sender;
- constexpr bool remb_candidate = true;
- packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
-
- const int64_t cap_bitrate = 100000;
- EXPECT_CALL(remb_sender, SetRemb(Le(cap_bitrate), _)).Times(AtLeast(1));
- EXPECT_CALL(remb_sender, SetRemb(Gt(cap_bitrate), _)).Times(0);
-
- const std::vector<uint32_t> ssrcs = {1234};
- packet_router.SetMaxDesiredReceiveBitrate(cap_bitrate);
- packet_router.OnReceiveBitrateChanged(ssrcs, cap_bitrate + 5000);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, cap_bitrate - 5000);
-
- // Test tear-down.
- packet_router.RemoveSendRtpModule(&remb_sender);
-}
-
-TEST(PacketRouterRembTest,
- SetMaxDesiredReceiveBitrateTriggersRembWhenMoreRestrictive) {
- rtc::ScopedFakeClock clock;
- PacketRouter packet_router;
- clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcpInterface> remb_sender;
- constexpr bool remb_candidate = true;
- packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
-
- const int64_t measured_bitrate_bps = 150000;
- const int64_t cap_bitrate_bps = measured_bitrate_bps - 5000;
- const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(remb_sender, SetRemb(measured_bitrate_bps, _));
- packet_router.OnReceiveBitrateChanged(ssrcs, measured_bitrate_bps);
-
- EXPECT_CALL(remb_sender, SetRemb(cap_bitrate_bps, _));
- packet_router.SetMaxDesiredReceiveBitrate(cap_bitrate_bps);
-
- // Test tear-down.
- packet_router.RemoveSendRtpModule(&remb_sender);
-}
-
-TEST(PacketRouterRembTest,
- SetMaxDesiredReceiveBitrateDoesNotTriggerRembWhenAsRestrictive) {
- rtc::ScopedFakeClock clock;
- PacketRouter packet_router;
- clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcpInterface> remb_sender;
- constexpr bool remb_candidate = true;
- packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
-
- const uint32_t measured_bitrate_bps = 150000;
- const uint32_t cap_bitrate_bps = measured_bitrate_bps;
- const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(remb_sender, SetRemb(measured_bitrate_bps, _));
- packet_router.OnReceiveBitrateChanged(ssrcs, measured_bitrate_bps);
-
- EXPECT_CALL(remb_sender, SetRemb(_, _)).Times(0);
- packet_router.SetMaxDesiredReceiveBitrate(cap_bitrate_bps);
-
- // Test tear-down.
- packet_router.RemoveSendRtpModule(&remb_sender);
-}
-
-TEST(PacketRouterRembTest,
- SetMaxDesiredReceiveBitrateDoesNotTriggerRembWhenLessRestrictive) {
- rtc::ScopedFakeClock clock;
- PacketRouter packet_router;
- clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcpInterface> remb_sender;
- constexpr bool remb_candidate = true;
- packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
-
- const uint32_t measured_bitrate_bps = 150000;
- const uint32_t cap_bitrate_bps = measured_bitrate_bps + 500;
- const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(remb_sender, SetRemb(measured_bitrate_bps, _));
- packet_router.OnReceiveBitrateChanged(ssrcs, measured_bitrate_bps);
-
- EXPECT_CALL(remb_sender, SetRemb(_, _)).Times(0);
- packet_router.SetMaxDesiredReceiveBitrate(cap_bitrate_bps);
-
- // Test tear-down.
- packet_router.RemoveSendRtpModule(&remb_sender);
-}
-
-TEST(PacketRouterRembTest,
- SetMaxDesiredReceiveBitrateTriggersRembWhenNoRecentMeasure) {
- rtc::ScopedFakeClock clock;
- PacketRouter packet_router;
- clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcpInterface> remb_sender;
- constexpr bool remb_candidate = true;
- packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
-
- const uint32_t measured_bitrate_bps = 150000;
- const uint32_t cap_bitrate_bps = measured_bitrate_bps + 5000;
- const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(remb_sender, SetRemb(measured_bitrate_bps, _));
- packet_router.OnReceiveBitrateChanged(ssrcs, measured_bitrate_bps);
- clock.AdvanceTime(TimeDelta::Millis(1000));
-
- EXPECT_CALL(remb_sender, SetRemb(cap_bitrate_bps, _));
- packet_router.SetMaxDesiredReceiveBitrate(cap_bitrate_bps);
-
- // Test tear-down.
- packet_router.RemoveSendRtpModule(&remb_sender);
-}
-
-TEST(PacketRouterRembTest,
- SetMaxDesiredReceiveBitrateTriggersRembWhenNoMeasures) {
- rtc::ScopedFakeClock clock;
- PacketRouter packet_router;
- clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcpInterface> remb_sender;
- constexpr bool remb_candidate = true;
- packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
-
- // Set cap.
- EXPECT_CALL(remb_sender, SetRemb(100000, _)).Times(1);
- packet_router.SetMaxDesiredReceiveBitrate(100000);
- // Increase cap.
- EXPECT_CALL(remb_sender, SetRemb(200000, _)).Times(1);
- packet_router.SetMaxDesiredReceiveBitrate(200000);
- // Decrease cap.
- EXPECT_CALL(remb_sender, SetRemb(150000, _)).Times(1);
- packet_router.SetMaxDesiredReceiveBitrate(150000);
-
- // Test tear-down.
- packet_router.RemoveSendRtpModule(&remb_sender);
-}
-
// Only register receiving modules and make sure we fallback to trigger a REMB
// packet on this one.
TEST(PacketRouterRembTest, NoSendingRtpModule) {
@@ -729,18 +471,14 @@ TEST(PacketRouterRembTest, NoSendingRtpModule) {
uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
-
- // Call OnReceiveBitrateChanged twice to get a first estimate.
- clock.AdvanceTime(TimeDelta::Millis(1000));
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs));
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Lower the estimate to trigger a new REMB packet.
- EXPECT_CALL(rtp, SetRemb(bitrate_estimate - 100, ssrcs)).Times(1);
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate - 100);
+ EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs));
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
- EXPECT_CALL(rtp, UnsetRemb()).Times(1);
+ EXPECT_CALL(rtp, UnsetRemb());
packet_router.RemoveReceiveRtpModule(&rtp);
}
@@ -756,8 +494,7 @@ TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) {
constexpr uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
EXPECT_CALL(module, SetRemb(_, _)).Times(0);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Test tear-down
packet_router.RemoveSendRtpModule(&module);
@@ -774,9 +511,8 @@ TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) {
constexpr uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(module, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ EXPECT_CALL(module, SetRemb(bitrate_estimate, ssrcs));
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Test tear-down
packet_router.RemoveSendRtpModule(&module);
@@ -794,8 +530,7 @@ TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) {
constexpr uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
EXPECT_CALL(module, SetRemb(_, _)).Times(0);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Test tear-down
packet_router.RemoveReceiveRtpModule(&module);
@@ -812,9 +547,8 @@ TEST(PacketRouterRembTest, CandidateReceiveRtpModuleUsedForRemb) {
constexpr uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(module, SetRemb(bitrate_estimate, ssrcs)).Times(1);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ EXPECT_CALL(module, SetRemb(bitrate_estimate, ssrcs));
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Test tear-down
packet_router.RemoveReceiveRtpModule(&module);
@@ -837,11 +571,10 @@ TEST(PacketRouterRembTest,
constexpr uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(send_module, SetRemb(bitrate_estimate, ssrcs)).Times(1);
+ EXPECT_CALL(send_module, SetRemb(bitrate_estimate, ssrcs));
EXPECT_CALL(receive_module, SetRemb(_, _)).Times(0);
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Test tear-down
packet_router.RemoveReceiveRtpModule(&receive_module);
@@ -865,11 +598,11 @@ TEST(PacketRouterRembTest,
constexpr uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
- EXPECT_CALL(send_module, SetRemb(bitrate_estimate, ssrcs)).Times(1);
+ EXPECT_CALL(send_module, SetRemb(bitrate_estimate, ssrcs));
EXPECT_CALL(receive_module, SetRemb(_, _)).Times(0);
clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Test tear-down
packet_router.RemoveReceiveRtpModule(&receive_module);
@@ -893,10 +626,8 @@ TEST(PacketRouterRembTest, ReceiveModuleTakesOverWhenLastSendModuleRemoved) {
constexpr uint32_t bitrate_estimate = 456;
const std::vector<uint32_t> ssrcs = {1234};
EXPECT_CALL(send_module, SetRemb(_, _)).Times(0);
- EXPECT_CALL(receive_module, SetRemb(bitrate_estimate, ssrcs)).Times(1);
-
- clock.AdvanceTime(TimeDelta::Millis(1000));
- packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate);
+ EXPECT_CALL(receive_module, SetRemb(bitrate_estimate, ssrcs));
+ packet_router.SendRemb(bitrate_estimate, ssrcs);
// Test tear-down
packet_router.RemoveReceiveRtpModule(&receive_module);
diff --git a/modules/pacing/round_robin_packet_queue.h b/modules/pacing/round_robin_packet_queue.h
index 9446a8e174..cad555a1af 100644
--- a/modules/pacing/round_robin_packet_queue.h
+++ b/modules/pacing/round_robin_packet_queue.h
@@ -19,6 +19,7 @@
#include <memory>
#include <queue>
#include <set>
+#include <unordered_map>
#include "absl/types/optional.h"
#include "api/transport/webrtc_key_value_config.h"
@@ -163,7 +164,7 @@ class RoundRobinPacketQueue {
std::multimap<StreamPrioKey, uint32_t> stream_priorities_;
// A map of SSRCs to Streams.
- std::map<uint32_t, Stream> streams_;
+ std::unordered_map<uint32_t, Stream> streams_;
// The enqueue time of every packet currently in the queue. Used to figure out
// the age of the oldest packet in the queue.
diff --git a/modules/remote_bitrate_estimator/BUILD.gn b/modules/remote_bitrate_estimator/BUILD.gn
index 81aa1efdda..923f00a74c 100644
--- a/modules/remote_bitrate_estimator/BUILD.gn
+++ b/modules/remote_bitrate_estimator/BUILD.gn
@@ -21,6 +21,8 @@ rtc_library("remote_bitrate_estimator") {
"overuse_detector.h",
"overuse_estimator.cc",
"overuse_estimator.h",
+ "packet_arrival_map.cc",
+ "packet_arrival_map.h",
"remote_bitrate_estimator_abs_send_time.cc",
"remote_bitrate_estimator_abs_send_time.h",
"remote_bitrate_estimator_single_stream.cc",
@@ -45,6 +47,8 @@ rtc_library("remote_bitrate_estimator") {
"../../api/transport:network_control",
"../../api/transport:webrtc_key_value_config",
"../../api/units:data_rate",
+ "../../api/units:data_size",
+ "../../api/units:time_delta",
"../../api/units:timestamp",
"../../modules:module_api",
"../../modules:module_api_public",
@@ -74,10 +78,9 @@ if (!build_with_chromium) {
"tools/bwe_rtp.h",
]
deps = [
- ":remote_bitrate_estimator",
"../../rtc_base:rtc_base_approved",
"../../test:rtp_test_utils",
- "../rtp_rtcp",
+ "../rtp_rtcp:rtp_rtcp_format",
]
absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
@@ -90,10 +93,10 @@ if (!build_with_chromium) {
sources = [ "tools/rtp_to_text.cc" ]
deps = [
":bwe_rtp",
- "../../modules/rtp_rtcp",
"../../rtc_base:macromagic",
"../../rtc_base:stringutils",
"../../test:rtp_test_utils",
+ "../rtp_rtcp:rtp_rtcp_format",
]
}
}
@@ -106,6 +109,7 @@ if (rtc_include_tests) {
"aimd_rate_control_unittest.cc",
"inter_arrival_unittest.cc",
"overuse_detector_unittest.cc",
+ "packet_arrival_map_test.cc",
"remote_bitrate_estimator_abs_send_time_unittest.cc",
"remote_bitrate_estimator_single_stream_unittest.cc",
"remote_bitrate_estimator_unittest_helper.cc",
diff --git a/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
index c60c030e8d..ac937bbfe0 100644
--- a/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
+++ b/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
@@ -38,14 +38,6 @@ class RemoteBitrateObserver {
virtual ~RemoteBitrateObserver() {}
};
-class TransportFeedbackSenderInterface {
- public:
- virtual ~TransportFeedbackSenderInterface() = default;
-
- virtual bool SendCombinedRtcpPacket(
- std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets) = 0;
-};
-
// TODO(holmer): Remove when all implementations have been updated.
struct ReceiveBandwidthEstimatorStats {};
diff --git a/modules/remote_bitrate_estimator/packet_arrival_map.cc b/modules/remote_bitrate_estimator/packet_arrival_map.cc
new file mode 100644
index 0000000000..72696f6c80
--- /dev/null
+++ b/modules/remote_bitrate_estimator/packet_arrival_map.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/remote_bitrate_estimator/packet_arrival_map.h"
+
+#include <algorithm>
+
+#include "rtc_base/numerics/safe_minmax.h"
+
+namespace webrtc {
+
+constexpr size_t PacketArrivalTimeMap::kMaxNumberOfPackets;
+
+void PacketArrivalTimeMap::AddPacket(int64_t sequence_number,
+ int64_t arrival_time_ms) {
+ if (!has_seen_packet_) {
+ // First packet.
+ has_seen_packet_ = true;
+ begin_sequence_number_ = sequence_number;
+ arrival_times.push_back(arrival_time_ms);
+ return;
+ }
+
+ int64_t pos = sequence_number - begin_sequence_number_;
+ if (pos >= 0 && pos < static_cast<int64_t>(arrival_times.size())) {
+ // The packet is within the buffer - no need to expand it.
+ arrival_times[pos] = arrival_time_ms;
+ return;
+ }
+
+ if (pos < 0) {
+ // The packet goes before the current buffer. Expand to add packet, but only
+ // if it fits within kMaxNumberOfPackets.
+ size_t missing_packets = -pos;
+ if (missing_packets + arrival_times.size() > kMaxNumberOfPackets) {
+ // Don't expand the buffer further, as that would remove newly received
+ // packets.
+ return;
+ }
+
+ arrival_times.insert(arrival_times.begin(), missing_packets, 0);
+ arrival_times[0] = arrival_time_ms;
+ begin_sequence_number_ = sequence_number;
+ return;
+ }
+
+ // The packet goes after the buffer.
+
+ if (static_cast<size_t>(pos) >= kMaxNumberOfPackets) {
+ // The buffer grows too large - old packets have to be removed.
+ size_t packets_to_remove = pos - kMaxNumberOfPackets + 1;
+ if (packets_to_remove >= arrival_times.size()) {
+ arrival_times.clear();
+ begin_sequence_number_ = sequence_number;
+ pos = 0;
+ } else {
+      // Also trim the buffer to remove leading non-received packets, so that
+      // the buffer always starts (and ends) with a received packet.
+ while (packets_to_remove < arrival_times.size() &&
+ arrival_times[packets_to_remove] == 0) {
+ ++packets_to_remove;
+ }
+
+ arrival_times.erase(arrival_times.begin(),
+ arrival_times.begin() + packets_to_remove);
+ begin_sequence_number_ += packets_to_remove;
+ pos -= packets_to_remove;
+ RTC_DCHECK_GE(pos, 0);
+ }
+ }
+
+ // Packets can be received out-of-order. If this isn't the next expected
+ // packet, add enough placeholders to fill the gap.
+ size_t missing_gap_packets = pos - arrival_times.size();
+ if (missing_gap_packets > 0) {
+ arrival_times.insert(arrival_times.end(), missing_gap_packets, 0);
+ }
+ RTC_DCHECK_EQ(arrival_times.size(), pos);
+ arrival_times.push_back(arrival_time_ms);
+ RTC_DCHECK_LE(arrival_times.size(), kMaxNumberOfPackets);
+}
+
+void PacketArrivalTimeMap::RemoveOldPackets(int64_t sequence_number,
+ int64_t arrival_time_limit) {
+ while (!arrival_times.empty() && begin_sequence_number_ < sequence_number &&
+ arrival_times.front() <= arrival_time_limit) {
+ arrival_times.pop_front();
+ ++begin_sequence_number_;
+ }
+}
+
+bool PacketArrivalTimeMap::has_received(int64_t sequence_number) const {
+ int64_t pos = sequence_number - begin_sequence_number_;
+ if (pos >= 0 && pos < static_cast<int64_t>(arrival_times.size()) &&
+ arrival_times[pos] != 0) {
+ return true;
+ }
+ return false;
+}
+
+void PacketArrivalTimeMap::EraseTo(int64_t sequence_number) {
+ if (sequence_number > begin_sequence_number_) {
+ size_t count =
+ std::min(static_cast<size_t>(sequence_number - begin_sequence_number_),
+ arrival_times.size());
+
+ arrival_times.erase(arrival_times.begin(), arrival_times.begin() + count);
+ begin_sequence_number_ += count;
+ }
+}
+
+int64_t PacketArrivalTimeMap::clamp(int64_t sequence_number) const {
+ return rtc::SafeClamp(sequence_number, begin_sequence_number(),
+ end_sequence_number());
+}
+
+} // namespace webrtc
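The branches in AddPacket() above are easiest to follow with a concrete trace. The numbers below are illustrative only and mirror the unit tests added later in this patch (kMaxNumberOfPackets is 1 << 15 = 32768).

  PacketArrivalTimeMap map;
  map.AddPacket(42, 10);          // First packet: begin = 42, size = 1.
  map.AddPacket(45, 11);          // After the buffer: 43 and 44 become zero
                                  // placeholders, size = 4.
  map.AddPacket(43, 12);          // Within the buffer: overwrites a placeholder.
  map.AddPacket(40, 13);          // Before the buffer: two slots are prepended,
                                  // begin = 40.
  map.AddPacket(40 + 40000, 14);  // Jump larger than the whole window: all old
                                  // entries are dropped, begin resets to 40040.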
diff --git a/modules/remote_bitrate_estimator/packet_arrival_map.h b/modules/remote_bitrate_estimator/packet_arrival_map.h
new file mode 100644
index 0000000000..10659e0f65
--- /dev/null
+++ b/modules/remote_bitrate_estimator/packet_arrival_map.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_
+#define MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_
+
+#include <cstddef>
+#include <cstdint>
+#include <deque>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// PacketArrivalTimeMap is an optimized map of packet sequence number to arrival
+// time, limited in size to never exceed `kMaxNumberOfPackets`. It grows as
+// needed, removes old packets, and expands to allow earlier packets to be
+// added out of order.
+//
+// Packets that have not been received have an arrival time of zero. The queue
+// never spans more packets than necessary, and the last packet is always one
+// that has been received. The first packet in the queue may be unreceived when
+// packets arrive out of order.
+class PacketArrivalTimeMap {
+ public:
+ // Impossible to request feedback older than what can be represented by 15
+ // bits.
+ static constexpr size_t kMaxNumberOfPackets = (1 << 15);
+
+ // Indicates if the packet with `sequence_number` has already been received.
+ bool has_received(int64_t sequence_number) const;
+
+ // Returns the sequence number of the first entry in the map, i.e. the
+ // sequence number that a `begin()` iterator would represent.
+ int64_t begin_sequence_number() const { return begin_sequence_number_; }
+
+ // Returns the sequence number of the element just after the map, i.e. the
+ // sequence number that an `end()` iterator would represent.
+ int64_t end_sequence_number() const {
+ return begin_sequence_number_ + arrival_times.size();
+ }
+
+ // Returns an element by `sequence_number`, which must be valid, i.e.
+ // between [begin_sequence_number, end_sequence_number).
+ int64_t get(int64_t sequence_number) {
+ int64_t pos = sequence_number - begin_sequence_number_;
+ RTC_DCHECK(pos >= 0 && pos < static_cast<int64_t>(arrival_times.size()));
+ return arrival_times[pos];
+ }
+
+ // Clamps `sequence_number` between [begin_sequence_number,
+ // end_sequence_number].
+ int64_t clamp(int64_t sequence_number) const;
+
+ // Erases all elements from the beginning of the map until `sequence_number`.
+ void EraseTo(int64_t sequence_number);
+
+ // Records the fact that a packet with `sequence_number` arrived at
+ // `arrival_time_ms`.
+ void AddPacket(int64_t sequence_number, int64_t arrival_time_ms);
+
+ // Removes packets from the beginning of the map as long as they are received
+ // before `sequence_number` and with an age older than `arrival_time_limit`
+ void RemoveOldPackets(int64_t sequence_number, int64_t arrival_time_limit);
+
+ private:
+ // Deque representing unwrapped sequence number -> time, where the index +
+ // `begin_sequence_number_` represents the packet's sequence number.
+ std::deque<int64_t> arrival_times;
+
+ // The unwrapped sequence number for the first element in
+ // `arrival_times`.
+ int64_t begin_sequence_number_ = 0;
+
+ // Indicates if this map has had any packet added to it. The first packet
+ // decides the initial sequence number.
+ bool has_seen_packet_ = false;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_
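The class comment above describes a deque-backed map indexed by unwrapped sequence number. A short usage sketch of the public interface follows; the feedback-building loop is illustrative only, since the class's real consumer is not part of this patch.

  PacketArrivalTimeMap arrival_map;

  // Record arrivals as packets come in (sequence numbers are unwrapped).
  arrival_map.AddPacket(/*sequence_number=*/100, /*arrival_time_ms=*/5000);
  arrival_map.AddPacket(/*sequence_number=*/102, /*arrival_time_ms=*/5004);

  // Walk a clamped range; gaps read as "not received".
  const int64_t begin = arrival_map.clamp(100);
  const int64_t end = arrival_map.clamp(103);
  for (int64_t seq = begin; seq < end; ++seq) {
    if (arrival_map.has_received(seq)) {
      int64_t arrival_ms = arrival_map.get(seq);
      // ... add (seq, arrival_ms) to the outgoing feedback ...
    }
  }

  // Trim the front of the map: entries before sequence number 102 that
  // arrived no later than time 5000 are dropped.
  arrival_map.RemoveOldPackets(/*sequence_number=*/102,
                               /*arrival_time_limit=*/5000);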
diff --git a/modules/remote_bitrate_estimator/packet_arrival_map_test.cc b/modules/remote_bitrate_estimator/packet_arrival_map_test.cc
new file mode 100644
index 0000000000..afc7038832
--- /dev/null
+++ b/modules/remote_bitrate_estimator/packet_arrival_map_test.cc
@@ -0,0 +1,252 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/remote_bitrate_estimator/packet_arrival_map.h"
+
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+TEST(PacketArrivalMapTest, IsConsistentWhenEmpty) {
+ PacketArrivalTimeMap map;
+
+ EXPECT_EQ(map.begin_sequence_number(), map.end_sequence_number());
+ EXPECT_FALSE(map.has_received(0));
+ EXPECT_EQ(map.clamp(-5), 0);
+ EXPECT_EQ(map.clamp(5), 0);
+}
+
+TEST(PacketArrivalMapTest, InsertsFirstItemIntoMap) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ EXPECT_EQ(map.begin_sequence_number(), 42);
+ EXPECT_EQ(map.end_sequence_number(), 43);
+
+ EXPECT_FALSE(map.has_received(41));
+ EXPECT_TRUE(map.has_received(42));
+ EXPECT_FALSE(map.has_received(44));
+
+ EXPECT_EQ(map.clamp(-100), 42);
+ EXPECT_EQ(map.clamp(42), 42);
+ EXPECT_EQ(map.clamp(100), 43);
+}
+
+TEST(PacketArrivalMapTest, InsertsWithGaps) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ map.AddPacket(45, 11);
+ EXPECT_EQ(map.begin_sequence_number(), 42);
+ EXPECT_EQ(map.end_sequence_number(), 46);
+
+ EXPECT_FALSE(map.has_received(41));
+ EXPECT_TRUE(map.has_received(42));
+ EXPECT_FALSE(map.has_received(43));
+ EXPECT_FALSE(map.has_received(44));
+ EXPECT_TRUE(map.has_received(45));
+ EXPECT_FALSE(map.has_received(46));
+
+ EXPECT_EQ(map.get(42), 10);
+ EXPECT_EQ(map.get(43), 0);
+ EXPECT_EQ(map.get(44), 0);
+ EXPECT_EQ(map.get(45), 11);
+
+ EXPECT_EQ(map.clamp(-100), 42);
+ EXPECT_EQ(map.clamp(44), 44);
+ EXPECT_EQ(map.clamp(100), 46);
+}
+
+TEST(PacketArrivalMapTest, InsertsWithinBuffer) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ map.AddPacket(45, 11);
+
+ map.AddPacket(43, 12);
+ map.AddPacket(44, 13);
+
+ EXPECT_EQ(map.begin_sequence_number(), 42);
+ EXPECT_EQ(map.end_sequence_number(), 46);
+
+ EXPECT_FALSE(map.has_received(41));
+ EXPECT_TRUE(map.has_received(42));
+ EXPECT_TRUE(map.has_received(43));
+ EXPECT_TRUE(map.has_received(44));
+ EXPECT_TRUE(map.has_received(45));
+ EXPECT_FALSE(map.has_received(46));
+
+ EXPECT_EQ(map.get(42), 10);
+ EXPECT_EQ(map.get(43), 12);
+ EXPECT_EQ(map.get(44), 13);
+ EXPECT_EQ(map.get(45), 11);
+}
+
+TEST(PacketArrivalMapTest, GrowsBufferAndRemoveOld) {
+ PacketArrivalTimeMap map;
+
+ constexpr int64_t kLargeSeq = 42 + PacketArrivalTimeMap::kMaxNumberOfPackets;
+ map.AddPacket(42, 10);
+ map.AddPacket(43, 11);
+ map.AddPacket(44, 12);
+ map.AddPacket(45, 13);
+ map.AddPacket(kLargeSeq, 12);
+
+ EXPECT_EQ(map.begin_sequence_number(), 43);
+ EXPECT_EQ(map.end_sequence_number(), kLargeSeq + 1);
+ EXPECT_EQ(static_cast<size_t>(map.end_sequence_number() -
+ map.begin_sequence_number()),
+ PacketArrivalTimeMap::kMaxNumberOfPackets);
+
+ EXPECT_FALSE(map.has_received(41));
+ EXPECT_FALSE(map.has_received(42));
+ EXPECT_TRUE(map.has_received(43));
+ EXPECT_TRUE(map.has_received(44));
+ EXPECT_TRUE(map.has_received(45));
+ EXPECT_FALSE(map.has_received(46));
+ EXPECT_TRUE(map.has_received(kLargeSeq));
+ EXPECT_FALSE(map.has_received(kLargeSeq + 1));
+}
+
+TEST(PacketArrivalMapTest, GrowsBufferAndRemoveOldTrimsBeginning) {
+ PacketArrivalTimeMap map;
+
+ constexpr int64_t kLargeSeq = 42 + PacketArrivalTimeMap::kMaxNumberOfPackets;
+ map.AddPacket(42, 10);
+ // Missing: 43, 44
+ map.AddPacket(45, 13);
+ map.AddPacket(kLargeSeq, 12);
+
+ EXPECT_EQ(map.begin_sequence_number(), 45);
+ EXPECT_EQ(map.end_sequence_number(), kLargeSeq + 1);
+
+ EXPECT_FALSE(map.has_received(44));
+ EXPECT_TRUE(map.has_received(45));
+ EXPECT_FALSE(map.has_received(46));
+ EXPECT_TRUE(map.has_received(kLargeSeq));
+ EXPECT_FALSE(map.has_received(kLargeSeq + 1));
+}
+
+TEST(PacketArrivalMapTest, SequenceNumberJumpsDeletesAll) {
+ PacketArrivalTimeMap map;
+
+ constexpr int64_t kLargeSeq =
+ 42 + 2 * PacketArrivalTimeMap::kMaxNumberOfPackets;
+ map.AddPacket(42, 10);
+ map.AddPacket(kLargeSeq, 12);
+
+ EXPECT_EQ(map.begin_sequence_number(), kLargeSeq);
+ EXPECT_EQ(map.end_sequence_number(), kLargeSeq + 1);
+
+ EXPECT_FALSE(map.has_received(42));
+ EXPECT_TRUE(map.has_received(kLargeSeq));
+ EXPECT_FALSE(map.has_received(kLargeSeq + 1));
+}
+
+TEST(PacketArrivalMapTest, ExpandsBeforeBeginning) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ map.AddPacket(-1000, 13);
+
+ EXPECT_EQ(map.begin_sequence_number(), -1000);
+ EXPECT_EQ(map.end_sequence_number(), 43);
+
+ EXPECT_FALSE(map.has_received(-1001));
+ EXPECT_TRUE(map.has_received(-1000));
+ EXPECT_FALSE(map.has_received(-999));
+ EXPECT_TRUE(map.has_received(42));
+ EXPECT_FALSE(map.has_received(43));
+}
+
+TEST(PacketArrivalMapTest, ExpandingBeforeBeginningKeepsReceived) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ constexpr int64_t kSmallSeq =
+ static_cast<int64_t>(42) - 2 * PacketArrivalTimeMap::kMaxNumberOfPackets;
+ map.AddPacket(kSmallSeq, 13);
+
+ EXPECT_EQ(map.begin_sequence_number(), 42);
+ EXPECT_EQ(map.end_sequence_number(), 43);
+}
+
+TEST(PacketArrivalMapTest, ErasesToRemoveElements) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ map.AddPacket(43, 11);
+ map.AddPacket(44, 12);
+ map.AddPacket(45, 13);
+
+ map.EraseTo(44);
+
+ EXPECT_EQ(map.begin_sequence_number(), 44);
+ EXPECT_EQ(map.end_sequence_number(), 46);
+
+ EXPECT_FALSE(map.has_received(43));
+ EXPECT_TRUE(map.has_received(44));
+ EXPECT_TRUE(map.has_received(45));
+ EXPECT_FALSE(map.has_received(46));
+}
+
+TEST(PacketArrivalMapTest, ErasesInEmptyMap) {
+ PacketArrivalTimeMap map;
+
+ EXPECT_EQ(map.begin_sequence_number(), map.end_sequence_number());
+
+ map.EraseTo(map.end_sequence_number());
+ EXPECT_EQ(map.begin_sequence_number(), map.end_sequence_number());
+}
+
+TEST(PacketArrivalMapTest, IsTolerantToWrongArgumentsForErase) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ map.AddPacket(43, 11);
+
+ map.EraseTo(1);
+
+ EXPECT_EQ(map.begin_sequence_number(), 42);
+ EXPECT_EQ(map.end_sequence_number(), 44);
+
+ map.EraseTo(100);
+
+ EXPECT_EQ(map.begin_sequence_number(), 44);
+ EXPECT_EQ(map.end_sequence_number(), 44);
+}
+
+TEST(PacketArrivalMapTest, EraseAllRemembersBeginningSeqNbr) {
+ PacketArrivalTimeMap map;
+
+ map.AddPacket(42, 10);
+ map.AddPacket(43, 11);
+ map.AddPacket(44, 12);
+ map.AddPacket(45, 13);
+
+ map.EraseTo(46);
+
+ map.AddPacket(50, 10);
+
+ EXPECT_EQ(map.begin_sequence_number(), 46);
+ EXPECT_EQ(map.end_sequence_number(), 51);
+
+ EXPECT_FALSE(map.has_received(45));
+ EXPECT_FALSE(map.has_received(46));
+ EXPECT_FALSE(map.has_received(47));
+ EXPECT_FALSE(map.has_received(48));
+ EXPECT_FALSE(map.has_received(49));
+ EXPECT_TRUE(map.has_received(50));
+ EXPECT_FALSE(map.has_received(51));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
index 4196f6dc57..ae960ab960 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
@@ -13,18 +13,36 @@
#include <math.h>
#include <algorithm>
+#include <memory>
+#include <utility>
#include "api/transport/field_trial_based_config.h"
+#include "api/units/data_rate.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "rtc_base/checks.h"
-#include "rtc_base/constructor_magic.h"
#include "rtc_base/logging.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/metrics.h"
namespace webrtc {
namespace {
+
+constexpr TimeDelta kMinClusterDelta = TimeDelta::Millis(1);
+constexpr TimeDelta kInitialProbingInterval = TimeDelta::Seconds(2);
+constexpr int kTimestampGroupLengthMs = 5;
+constexpr int kAbsSendTimeInterArrivalUpshift = 8;
+constexpr int kInterArrivalShift =
+ RTPHeaderExtension::kAbsSendTimeFraction + kAbsSendTimeInterArrivalUpshift;
+constexpr int kMinClusterSize = 4;
+constexpr int kMaxProbePackets = 15;
+constexpr int kExpectedNumberOfProbes = 3;
+constexpr double kTimestampToMs =
+ 1000.0 / static_cast<double>(1 << kInterArrivalShift);
+
absl::optional<DataRate> OptionalRateFromOptionalBps(
absl::optional<int> bitrate_bps) {
if (bitrate_bps) {
@@ -33,62 +51,48 @@ absl::optional<DataRate> OptionalRateFromOptionalBps(
return absl::nullopt;
}
}
-} // namespace
-
-enum {
- kTimestampGroupLengthMs = 5,
- kAbsSendTimeInterArrivalUpshift = 8,
- kInterArrivalShift = RTPHeaderExtension::kAbsSendTimeFraction +
- kAbsSendTimeInterArrivalUpshift,
- kInitialProbingIntervalMs = 2000,
- kMinClusterSize = 4,
- kMaxProbePackets = 15,
- kExpectedNumberOfProbes = 3
-};
-
-static const double kTimestampToMs =
- 1000.0 / static_cast<double>(1 << kInterArrivalShift);
template <typename K, typename V>
std::vector<K> Keys(const std::map<K, V>& map) {
std::vector<K> keys;
keys.reserve(map.size());
- for (typename std::map<K, V>::const_iterator it = map.begin();
- it != map.end(); ++it) {
- keys.push_back(it->first);
+ for (const auto& kv_pair : map) {
+ keys.push_back(kv_pair.first);
}
return keys;
}
-uint32_t ConvertMsTo24Bits(int64_t time_ms) {
- uint32_t time_24_bits =
- static_cast<uint32_t>(((static_cast<uint64_t>(time_ms)
- << RTPHeaderExtension::kAbsSendTimeFraction) +
- 500) /
- 1000) &
- 0x00FFFFFF;
- return time_24_bits;
-}
+} // namespace
RemoteBitrateEstimatorAbsSendTime::~RemoteBitrateEstimatorAbsSendTime() =
default;
bool RemoteBitrateEstimatorAbsSendTime::IsWithinClusterBounds(
- int send_delta_ms,
+ TimeDelta send_delta,
const Cluster& cluster_aggregate) {
if (cluster_aggregate.count == 0)
return true;
- float cluster_mean = cluster_aggregate.send_mean_ms /
- static_cast<float>(cluster_aggregate.count);
- return fabs(static_cast<float>(send_delta_ms) - cluster_mean) < 2.5f;
+ TimeDelta cluster_mean =
+ cluster_aggregate.send_mean / cluster_aggregate.count;
+ return (send_delta - cluster_mean).Abs() < TimeDelta::Micros(2'500);
}
-void RemoteBitrateEstimatorAbsSendTime::AddCluster(std::list<Cluster>* clusters,
- Cluster* cluster) {
- cluster->send_mean_ms /= static_cast<float>(cluster->count);
- cluster->recv_mean_ms /= static_cast<float>(cluster->count);
- cluster->mean_size /= cluster->count;
- clusters->push_back(*cluster);
+void RemoteBitrateEstimatorAbsSendTime::MaybeAddCluster(
+ const Cluster& cluster_aggregate,
+ std::list<Cluster>& clusters) {
+ if (cluster_aggregate.count < kMinClusterSize ||
+ cluster_aggregate.send_mean <= TimeDelta::Zero() ||
+ cluster_aggregate.recv_mean <= TimeDelta::Zero()) {
+ return;
+ }
+
+ Cluster cluster;
+ cluster.send_mean = cluster_aggregate.send_mean / cluster_aggregate.count;
+ cluster.recv_mean = cluster_aggregate.recv_mean / cluster_aggregate.count;
+ cluster.mean_size = cluster_aggregate.mean_size / cluster_aggregate.count;
+ cluster.count = cluster_aggregate.count;
+ cluster.num_above_min_delta = cluster_aggregate.num_above_min_delta;
+ clusters.push_back(cluster);
}
RemoteBitrateEstimatorAbsSendTime::RemoteBitrateEstimatorAbsSendTime(
@@ -96,91 +100,77 @@ RemoteBitrateEstimatorAbsSendTime::RemoteBitrateEstimatorAbsSendTime(
Clock* clock)
: clock_(clock),
observer_(observer),
- inter_arrival_(),
- estimator_(),
detector_(&field_trials_),
- incoming_bitrate_(kBitrateWindowMs, 8000),
- incoming_bitrate_initialized_(false),
- total_probes_received_(0),
- first_packet_time_ms_(-1),
- last_update_ms_(-1),
- uma_recorded_(false),
remote_rate_(&field_trials_) {
RTC_DCHECK(clock_);
RTC_DCHECK(observer_);
RTC_LOG(LS_INFO) << "RemoteBitrateEstimatorAbsSendTime: Instantiating.";
}
-void RemoteBitrateEstimatorAbsSendTime::ComputeClusters(
- std::list<Cluster>* clusters) const {
- Cluster current;
- int64_t prev_send_time = -1;
- int64_t prev_recv_time = -1;
- for (std::list<Probe>::const_iterator it = probes_.begin();
- it != probes_.end(); ++it) {
- if (prev_send_time >= 0) {
- int send_delta_ms = it->send_time_ms - prev_send_time;
- int recv_delta_ms = it->recv_time_ms - prev_recv_time;
- if (send_delta_ms >= 1 && recv_delta_ms >= 1) {
- ++current.num_above_min_delta;
+std::list<RemoteBitrateEstimatorAbsSendTime::Cluster>
+RemoteBitrateEstimatorAbsSendTime::ComputeClusters() const {
+ std::list<Cluster> clusters;
+ Cluster cluster_aggregate;
+ Timestamp prev_send_time = Timestamp::MinusInfinity();
+ Timestamp prev_recv_time = Timestamp::MinusInfinity();
+ for (const Probe& probe : probes_) {
+ if (prev_send_time.IsFinite()) {
+ TimeDelta send_delta = probe.send_time - prev_send_time;
+ TimeDelta recv_delta = probe.recv_time - prev_recv_time;
+ if (send_delta >= kMinClusterDelta && recv_delta >= kMinClusterDelta) {
+ ++cluster_aggregate.num_above_min_delta;
}
- if (!IsWithinClusterBounds(send_delta_ms, current)) {
- if (current.count >= kMinClusterSize && current.send_mean_ms > 0.0f &&
- current.recv_mean_ms > 0.0f) {
- AddCluster(clusters, &current);
- }
- current = Cluster();
+ if (!IsWithinClusterBounds(send_delta, cluster_aggregate)) {
+ MaybeAddCluster(cluster_aggregate, clusters);
+ cluster_aggregate = Cluster();
}
- current.send_mean_ms += send_delta_ms;
- current.recv_mean_ms += recv_delta_ms;
- current.mean_size += it->payload_size;
- ++current.count;
+ cluster_aggregate.send_mean += send_delta;
+ cluster_aggregate.recv_mean += recv_delta;
+ cluster_aggregate.mean_size += probe.payload_size;
+ ++cluster_aggregate.count;
}
- prev_send_time = it->send_time_ms;
- prev_recv_time = it->recv_time_ms;
- }
- if (current.count >= kMinClusterSize && current.send_mean_ms > 0.0f &&
- current.recv_mean_ms > 0.0f) {
- AddCluster(clusters, &current);
+ prev_send_time = probe.send_time;
+ prev_recv_time = probe.recv_time;
}
+ MaybeAddCluster(cluster_aggregate, clusters);
+ return clusters;
}
-std::list<Cluster>::const_iterator
+const RemoteBitrateEstimatorAbsSendTime::Cluster*
RemoteBitrateEstimatorAbsSendTime::FindBestProbe(
const std::list<Cluster>& clusters) const {
- int highest_probe_bitrate_bps = 0;
- std::list<Cluster>::const_iterator best_it = clusters.end();
- for (std::list<Cluster>::const_iterator it = clusters.begin();
- it != clusters.end(); ++it) {
- if (it->send_mean_ms == 0 || it->recv_mean_ms == 0)
+ DataRate highest_probe_bitrate = DataRate::Zero();
+ const Cluster* best = nullptr;
+ for (const auto& cluster : clusters) {
+ if (cluster.send_mean == TimeDelta::Zero() ||
+ cluster.recv_mean == TimeDelta::Zero()) {
continue;
- if (it->num_above_min_delta > it->count / 2 &&
- (it->recv_mean_ms - it->send_mean_ms <= 2.0f &&
- it->send_mean_ms - it->recv_mean_ms <= 5.0f)) {
- int probe_bitrate_bps =
- std::min(it->GetSendBitrateBps(), it->GetRecvBitrateBps());
- if (probe_bitrate_bps > highest_probe_bitrate_bps) {
- highest_probe_bitrate_bps = probe_bitrate_bps;
- best_it = it;
+ }
+ if (cluster.num_above_min_delta > cluster.count / 2 &&
+ (cluster.recv_mean - cluster.send_mean <= TimeDelta::Millis(2) &&
+ cluster.send_mean - cluster.recv_mean <= TimeDelta::Millis(5))) {
+ DataRate probe_bitrate =
+ std::min(cluster.SendBitrate(), cluster.RecvBitrate());
+ if (probe_bitrate > highest_probe_bitrate) {
+ highest_probe_bitrate = probe_bitrate;
+ best = &cluster;
}
} else {
- int send_bitrate_bps = it->mean_size * 8 * 1000 / it->send_mean_ms;
- int recv_bitrate_bps = it->mean_size * 8 * 1000 / it->recv_mean_ms;
- RTC_LOG(LS_INFO) << "Probe failed, sent at " << send_bitrate_bps
- << " bps, received at " << recv_bitrate_bps
- << " bps. Mean send delta: " << it->send_mean_ms
- << " ms, mean recv delta: " << it->recv_mean_ms
- << " ms, num probes: " << it->count;
+ RTC_LOG(LS_INFO) << "Probe failed, sent at "
+ << cluster.SendBitrate().bps() << " bps, received at "
+ << cluster.RecvBitrate().bps()
+ << " bps. Mean send delta: " << cluster.send_mean.ms()
+ << " ms, mean recv delta: " << cluster.recv_mean.ms()
+ << " ms, num probes: " << cluster.count;
break;
}
}
- return best_it;
+ return best;
}
RemoteBitrateEstimatorAbsSendTime::ProbeResult
-RemoteBitrateEstimatorAbsSendTime::ProcessClusters(int64_t now_ms) {
- std::list<Cluster> clusters;
- ComputeClusters(&clusters);
+RemoteBitrateEstimatorAbsSendTime::ProcessClusters(Timestamp now) {
+ std::list<Cluster> clusters = ComputeClusters();
if (clusters.empty()) {
// If we reach the max number of probe packets and still have no clusters,
// we will remove the oldest one.
@@ -189,21 +179,18 @@ RemoteBitrateEstimatorAbsSendTime::ProcessClusters(int64_t now_ms) {
return ProbeResult::kNoUpdate;
}
- std::list<Cluster>::const_iterator best_it = FindBestProbe(clusters);
- if (best_it != clusters.end()) {
- int probe_bitrate_bps =
- std::min(best_it->GetSendBitrateBps(), best_it->GetRecvBitrateBps());
+ if (const Cluster* best = FindBestProbe(clusters)) {
+ DataRate probe_bitrate = std::min(best->SendBitrate(), best->RecvBitrate());
// Make sure that a probe sent on a lower bitrate than our estimate can't
// reduce the estimate.
- if (IsBitrateImproving(probe_bitrate_bps)) {
+ if (IsBitrateImproving(probe_bitrate)) {
RTC_LOG(LS_INFO) << "Probe successful, sent at "
- << best_it->GetSendBitrateBps() << " bps, received at "
- << best_it->GetRecvBitrateBps()
- << " bps. Mean send delta: " << best_it->send_mean_ms
- << " ms, mean recv delta: " << best_it->recv_mean_ms
- << " ms, num probes: " << best_it->count;
- remote_rate_.SetEstimate(DataRate::BitsPerSec(probe_bitrate_bps),
- Timestamp::Millis(now_ms));
+ << best->SendBitrate().bps() << " bps, received at "
+ << best->RecvBitrate().bps()
+ << " bps. Mean send delta: " << best->send_mean.ms()
+ << " ms, mean recv delta: " << best->recv_mean.ms()
+ << " ms, num probes: " << best->count;
+ remote_rate_.SetEstimate(probe_bitrate, now);
return ProbeResult::kBitrateUpdated;
}
}
@@ -216,11 +203,11 @@ RemoteBitrateEstimatorAbsSendTime::ProcessClusters(int64_t now_ms) {
}
bool RemoteBitrateEstimatorAbsSendTime::IsBitrateImproving(
- int new_bitrate_bps) const {
- bool initial_probe = !remote_rate_.ValidEstimate() && new_bitrate_bps > 0;
- bool bitrate_above_estimate =
- remote_rate_.ValidEstimate() &&
- new_bitrate_bps > remote_rate_.LatestEstimate().bps<int>();
+ DataRate probe_bitrate) const {
+ bool initial_probe =
+ !remote_rate_.ValidEstimate() && probe_bitrate > DataRate::Zero();
+ bool bitrate_above_estimate = remote_rate_.ValidEstimate() &&
+ probe_bitrate > remote_rate_.LatestEstimate();
return initial_probe || bitrate_above_estimate;
}
@@ -235,14 +222,15 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(
"is missing absolute send time extension!";
return;
}
- IncomingPacketInfo(arrival_time_ms, header.extension.absoluteSendTime,
- payload_size, header.ssrc);
+ IncomingPacketInfo(Timestamp::Millis(arrival_time_ms),
+ header.extension.absoluteSendTime,
+ DataSize::Bytes(payload_size), header.ssrc);
}
void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
- int64_t arrival_time_ms,
+ Timestamp arrival_time,
uint32_t send_time_24bits,
- size_t payload_size,
+ DataSize payload_size,
uint32_t ssrc) {
RTC_CHECK(send_time_24bits < (1ul << 24));
if (!uma_recorded_) {
@@ -253,15 +241,16 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
// Shift up send time to use the full 32 bits that inter_arrival works with,
// so wrapping works properly.
uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift;
- int64_t send_time_ms = static_cast<int64_t>(timestamp) * kTimestampToMs;
+ Timestamp send_time =
+ Timestamp::Millis(static_cast<int64_t>(timestamp) * kTimestampToMs);
- int64_t now_ms = clock_->TimeInMilliseconds();
+ Timestamp now = clock_->CurrentTime();
// TODO(holmer): SSRCs are only needed for REMB, should be broken out from
// here.
// Check if incoming bitrate estimate is valid, and if it needs to be reset.
absl::optional<uint32_t> incoming_bitrate =
- incoming_bitrate_.Rate(arrival_time_ms);
+ incoming_bitrate_.Rate(arrival_time.ms());
if (incoming_bitrate) {
incoming_bitrate_initialized_ = true;
} else if (incoming_bitrate_initialized_) {
@@ -271,74 +260,82 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
incoming_bitrate_.Reset();
incoming_bitrate_initialized_ = false;
}
- incoming_bitrate_.Update(payload_size, arrival_time_ms);
+ incoming_bitrate_.Update(payload_size.bytes(), arrival_time.ms());
- if (first_packet_time_ms_ == -1)
- first_packet_time_ms_ = now_ms;
+ if (first_packet_time_.IsInfinite()) {
+ first_packet_time_ = now;
+ }
uint32_t ts_delta = 0;
int64_t t_delta = 0;
int size_delta = 0;
bool update_estimate = false;
- uint32_t target_bitrate_bps = 0;
+ DataRate target_bitrate = DataRate::Zero();
std::vector<uint32_t> ssrcs;
{
MutexLock lock(&mutex_);
- TimeoutStreams(now_ms);
- RTC_DCHECK(inter_arrival_.get());
- RTC_DCHECK(estimator_.get());
- ssrcs_[ssrc] = now_ms;
+ TimeoutStreams(now);
+ RTC_DCHECK(inter_arrival_);
+ RTC_DCHECK(estimator_);
+ // TODO(danilchap): Replace 5 lines below with insert_or_assign when that
+ // c++17 function is available.
+ auto inserted = ssrcs_.insert(std::make_pair(ssrc, now));
+ if (!inserted.second) {
+ // Already inserted, update.
+ inserted.first->second = now;
+ }
// For now only try to detect probes while we don't have a valid estimate.
// We currently assume that only packets larger than 200 bytes are paced by
// the sender.
- const size_t kMinProbePacketSize = 200;
+ static constexpr DataSize kMinProbePacketSize = DataSize::Bytes(200);
if (payload_size > kMinProbePacketSize &&
(!remote_rate_.ValidEstimate() ||
- now_ms - first_packet_time_ms_ < kInitialProbingIntervalMs)) {
+ now - first_packet_time_ < kInitialProbingInterval)) {
// TODO(holmer): Use a map instead to get correct order?
if (total_probes_received_ < kMaxProbePackets) {
- int send_delta_ms = -1;
- int recv_delta_ms = -1;
+ TimeDelta send_delta = TimeDelta::Millis(-1);
+ TimeDelta recv_delta = TimeDelta::Millis(-1);
if (!probes_.empty()) {
- send_delta_ms = send_time_ms - probes_.back().send_time_ms;
- recv_delta_ms = arrival_time_ms - probes_.back().recv_time_ms;
+ send_delta = send_time - probes_.back().send_time;
+ recv_delta = arrival_time - probes_.back().recv_time;
}
- RTC_LOG(LS_INFO) << "Probe packet received: send time=" << send_time_ms
- << " ms, recv time=" << arrival_time_ms
- << " ms, send delta=" << send_delta_ms
- << " ms, recv delta=" << recv_delta_ms << " ms.";
+ RTC_LOG(LS_INFO) << "Probe packet received: send time="
+ << send_time.ms()
+ << " ms, recv time=" << arrival_time.ms()
+ << " ms, send delta=" << send_delta.ms()
+ << " ms, recv delta=" << recv_delta.ms() << " ms.";
}
- probes_.push_back(Probe(send_time_ms, arrival_time_ms, payload_size));
+ probes_.emplace_back(send_time, arrival_time, payload_size);
++total_probes_received_;
// Make sure that a probe which updated the bitrate immediately has an
// effect by calling the OnReceiveBitrateChanged callback.
- if (ProcessClusters(now_ms) == ProbeResult::kBitrateUpdated)
+ if (ProcessClusters(now) == ProbeResult::kBitrateUpdated)
update_estimate = true;
}
- if (inter_arrival_->ComputeDeltas(timestamp, arrival_time_ms, now_ms,
- payload_size, &ts_delta, &t_delta,
+ if (inter_arrival_->ComputeDeltas(timestamp, arrival_time.ms(), now.ms(),
+ payload_size.bytes(), &ts_delta, &t_delta,
&size_delta)) {
double ts_delta_ms = (1000.0 * ts_delta) / (1 << kInterArrivalShift);
estimator_->Update(t_delta, ts_delta_ms, size_delta, detector_.State(),
- arrival_time_ms);
+ arrival_time.ms());
detector_.Detect(estimator_->offset(), ts_delta_ms,
- estimator_->num_of_deltas(), arrival_time_ms);
+ estimator_->num_of_deltas(), arrival_time.ms());
}
if (!update_estimate) {
// Check if it's time for a periodic update or if we should update because
// of an over-use.
- if (last_update_ms_ == -1 ||
- now_ms - last_update_ms_ > remote_rate_.GetFeedbackInterval().ms()) {
+ if (last_update_.IsInfinite() ||
+ now.ms() - last_update_.ms() >
+ remote_rate_.GetFeedbackInterval().ms()) {
update_estimate = true;
} else if (detector_.State() == BandwidthUsage::kBwOverusing) {
absl::optional<uint32_t> incoming_rate =
- incoming_bitrate_.Rate(arrival_time_ms);
+ incoming_bitrate_.Rate(arrival_time.ms());
if (incoming_rate && remote_rate_.TimeToReduceFurther(
- Timestamp::Millis(now_ms),
- DataRate::BitsPerSec(*incoming_rate))) {
+ now, DataRate::BitsPerSec(*incoming_rate))) {
update_estimate = true;
}
}
@@ -349,18 +346,16 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
// We also have to update the estimate immediately if we are overusing
// and the target bitrate is too high compared to what we are receiving.
const RateControlInput input(
- detector_.State(),
- OptionalRateFromOptionalBps(incoming_bitrate_.Rate(arrival_time_ms)));
- target_bitrate_bps =
- remote_rate_.Update(&input, Timestamp::Millis(now_ms))
- .bps<uint32_t>();
+ detector_.State(), OptionalRateFromOptionalBps(
+ incoming_bitrate_.Rate(arrival_time.ms())));
+ target_bitrate = remote_rate_.Update(&input, now);
update_estimate = remote_rate_.ValidEstimate();
ssrcs = Keys(ssrcs_);
}
}
if (update_estimate) {
- last_update_ms_ = now_ms;
- observer_->OnReceiveBitrateChanged(ssrcs, target_bitrate_bps);
+ last_update_ = now;
+ observer_->OnReceiveBitrateChanged(ssrcs, target_bitrate.bps<uint32_t>());
}
}
@@ -371,9 +366,9 @@ int64_t RemoteBitrateEstimatorAbsSendTime::TimeUntilNextProcess() {
return kDisabledModuleTime;
}
-void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) {
- for (Ssrcs::iterator it = ssrcs_.begin(); it != ssrcs_.end();) {
- if ((now_ms - it->second) > kStreamTimeOutMs) {
+void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(Timestamp now) {
+ for (auto it = ssrcs_.begin(); it != ssrcs_.end();) {
+ if (now - it->second > TimeDelta::Millis(kStreamTimeOutMs)) {
ssrcs_.erase(it++);
} else {
++it;
@@ -381,17 +376,17 @@ void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) {
}
if (ssrcs_.empty()) {
// We can't update the estimate if we don't have any active streams.
- inter_arrival_.reset(
- new InterArrival((kTimestampGroupLengthMs << kInterArrivalShift) / 1000,
- kTimestampToMs, true));
- estimator_.reset(new OveruseEstimator(OverUseDetectorOptions()));
+ inter_arrival_ = std::make_unique<InterArrival>(
+ (kTimestampGroupLengthMs << kInterArrivalShift) / 1000, kTimestampToMs,
+ true);
+ estimator_ = std::make_unique<OveruseEstimator>(OverUseDetectorOptions());
// We deliberately don't reset the first_packet_time_ here for now since
// we only probe for bandwidth in the beginning of a call right now.
}
}
void RemoteBitrateEstimatorAbsSendTime::OnRttUpdate(int64_t avg_rtt_ms,
- int64_t max_rtt_ms) {
+ int64_t /*max_rtt_ms*/) {
MutexLock lock(&mutex_);
remote_rate_.SetRtt(TimeDelta::Millis(avg_rtt_ms));
}
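Not part of the patch: the constants introduced at the top of this file encode a fixed-point conversion. The 24-bit absolute send time is shifted up by kAbsSendTimeInterArrivalUpshift so InterArrival works with a full 32-bit timestamp, and kTimestampToMs scales that timestamp back to milliseconds. A small standalone sketch of the arithmetic, assuming the extension uses 18 fractional bits (the assumed value of RTPHeaderExtension::kAbsSendTimeFraction):

#include <cstdint>
#include <cstdio>

int main() {
  constexpr int kAbsSendTimeFraction = 18;  // assumed extension layout
  constexpr int kAbsSendTimeInterArrivalUpshift = 8;
  constexpr int kInterArrivalShift =
      kAbsSendTimeFraction + kAbsSendTimeInterArrivalUpshift;  // 26
  constexpr double kTimestampToMs =
      1000.0 / static_cast<double>(1 << kInterArrivalShift);

  // A 24-bit absolute send time of exactly one second (2^18 units).
  uint32_t send_time_24bits = 1u << kAbsSendTimeFraction;
  uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift;
  double send_time_ms = static_cast<double>(timestamp) * kTimestampToMs;
  std::printf("send_time = %.3f ms\n", send_time_ms);  // prints 1000.000 ms
  return 0;
}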
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
index f42a28f8c8..4117382577 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
@@ -21,6 +21,10 @@
#include "api/rtp_headers.h"
#include "api/transport/field_trial_based_config.h"
+#include "api/units/data_rate.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
#include "modules/remote_bitrate_estimator/aimd_rate_control.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/remote_bitrate_estimator/inter_arrival.h"
@@ -35,42 +39,6 @@
namespace webrtc {
-struct Probe {
- Probe(int64_t send_time_ms, int64_t recv_time_ms, size_t payload_size)
- : send_time_ms(send_time_ms),
- recv_time_ms(recv_time_ms),
- payload_size(payload_size) {}
- int64_t send_time_ms;
- int64_t recv_time_ms;
- size_t payload_size;
-};
-
-struct Cluster {
- Cluster()
- : send_mean_ms(0.0f),
- recv_mean_ms(0.0f),
- mean_size(0),
- count(0),
- num_above_min_delta(0) {}
-
- int GetSendBitrateBps() const {
- RTC_CHECK_GT(send_mean_ms, 0.0f);
- return mean_size * 8 * 1000 / send_mean_ms;
- }
-
- int GetRecvBitrateBps() const {
- RTC_CHECK_GT(recv_mean_ms, 0.0f);
- return mean_size * 8 * 1000 / recv_mean_ms;
- }
-
- float send_mean_ms;
- float recv_mean_ms;
- // TODO(holmer): Add some variance metric as well?
- size_t mean_size;
- int count;
- int num_above_min_delta;
-};
-
class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
public:
RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer,
@@ -100,32 +68,54 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
void SetMinBitrate(int min_bitrate_bps) override;
private:
- typedef std::map<uint32_t, int64_t> Ssrcs;
+ struct Probe {
+ Probe(Timestamp send_time, Timestamp recv_time, DataSize payload_size)
+ : send_time(send_time),
+ recv_time(recv_time),
+ payload_size(payload_size) {}
+
+ Timestamp send_time;
+ Timestamp recv_time;
+ DataSize payload_size;
+ };
+
+ struct Cluster {
+ DataRate SendBitrate() const { return mean_size / send_mean; }
+ DataRate RecvBitrate() const { return mean_size / recv_mean; }
+
+ TimeDelta send_mean = TimeDelta::Zero();
+ TimeDelta recv_mean = TimeDelta::Zero();
+ // TODO(holmer): Add some variance metric as well?
+ DataSize mean_size = DataSize::Zero();
+ int count = 0;
+ int num_above_min_delta = 0;
+ };
+
enum class ProbeResult { kBitrateUpdated, kNoUpdate };
- static bool IsWithinClusterBounds(int send_delta_ms,
+ static bool IsWithinClusterBounds(TimeDelta send_delta,
const Cluster& cluster_aggregate);
- static void AddCluster(std::list<Cluster>* clusters, Cluster* cluster);
+ static void MaybeAddCluster(const Cluster& cluster_aggregate,
+ std::list<Cluster>& clusters);
- void IncomingPacketInfo(int64_t arrival_time_ms,
+ void IncomingPacketInfo(Timestamp arrival_time,
uint32_t send_time_24bits,
- size_t payload_size,
+ DataSize payload_size,
uint32_t ssrc);
- void ComputeClusters(std::list<Cluster>* clusters) const;
+ std::list<Cluster> ComputeClusters() const;
- std::list<Cluster>::const_iterator FindBestProbe(
- const std::list<Cluster>& clusters) const;
+ const Cluster* FindBestProbe(const std::list<Cluster>& clusters) const;
// Returns true if a probe which changed the estimate was detected.
- ProbeResult ProcessClusters(int64_t now_ms)
+ ProbeResult ProcessClusters(Timestamp now)
RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_);
- bool IsBitrateImproving(int probe_bitrate_bps) const
+ bool IsBitrateImproving(DataRate probe_bitrate) const
RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_);
- void TimeoutStreams(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_);
+ void TimeoutStreams(Timestamp now) RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_);
rtc::RaceChecker network_race_;
Clock* const clock_;
@@ -134,18 +124,16 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
std::unique_ptr<InterArrival> inter_arrival_;
std::unique_ptr<OveruseEstimator> estimator_;
OveruseDetector detector_;
- RateStatistics incoming_bitrate_;
- bool incoming_bitrate_initialized_;
- std::vector<int> recent_propagation_delta_ms_;
- std::vector<int64_t> recent_update_time_ms_;
+ RateStatistics incoming_bitrate_{kBitrateWindowMs, 8000};
+ bool incoming_bitrate_initialized_ = false;
std::list<Probe> probes_;
- size_t total_probes_received_;
- int64_t first_packet_time_ms_;
- int64_t last_update_ms_;
- bool uma_recorded_;
+ size_t total_probes_received_ = 0;
+ Timestamp first_packet_time_ = Timestamp::MinusInfinity();
+ Timestamp last_update_ = Timestamp::MinusInfinity();
+ bool uma_recorded_ = false;
mutable Mutex mutex_;
- Ssrcs ssrcs_ RTC_GUARDED_BY(&mutex_);
+ std::map<uint32_t, Timestamp> ssrcs_ RTC_GUARDED_BY(&mutex_);
AimdRateControl remote_rate_ RTC_GUARDED_BY(&mutex_);
};
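Not part of the patch: Cluster now stores DataSize and TimeDelta members, so SendBitrate() and RecvBitrate() lean on the api/units types, where dividing a DataSize by a TimeDelta yields a DataRate. A short illustrative sketch with made-up numbers:

#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"

webrtc::DataRate ExampleClusterBitrate() {
  // A mean probe-packet size of 1200 bytes and a mean send delta of 1 ms
  // correspond to 1200 bytes/ms, i.e. 9.6 Mbps.
  webrtc::DataSize mean_size = webrtc::DataSize::Bytes(1200);
  webrtc::TimeDelta send_mean = webrtc::TimeDelta::Millis(1);
  return mean_size / send_mean;
}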
diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
index a9cc170a35..7764e60ef2 100644
--- a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
+++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
@@ -23,9 +23,6 @@
namespace webrtc {
-// Impossible to request feedback older than what can be represented by 15 bits.
-const int RemoteEstimatorProxy::kMaxNumberOfPackets = (1 << 15);
-
// The maximum allowed value for a timestamp in milliseconds. This is lower
// than the numerical limit since we often convert to microseconds.
static constexpr int64_t kMaxTimeMs =
@@ -33,11 +30,11 @@ static constexpr int64_t kMaxTimeMs =
RemoteEstimatorProxy::RemoteEstimatorProxy(
Clock* clock,
- TransportFeedbackSenderInterface* feedback_sender,
+ TransportFeedbackSender feedback_sender,
const WebRtcKeyValueConfig* key_value_config,
NetworkStateEstimator* network_state_estimator)
: clock_(clock),
- feedback_sender_(feedback_sender),
+ feedback_sender_(std::move(feedback_sender)),
send_config_(key_value_config),
last_process_time_ms_(-1),
network_state_estimator_(network_state_estimator),
@@ -54,6 +51,18 @@ RemoteEstimatorProxy::RemoteEstimatorProxy(
RemoteEstimatorProxy::~RemoteEstimatorProxy() {}
+void RemoteEstimatorProxy::MaybeCullOldPackets(int64_t sequence_number,
+ int64_t arrival_time_ms) {
+ if (periodic_window_start_seq_.has_value()) {
+ if (*periodic_window_start_seq_ >=
+ packet_arrival_times_.end_sequence_number()) {
+ // Start new feedback packet, cull old packets.
+ packet_arrival_times_.RemoveOldPackets(
+ sequence_number, arrival_time_ms - send_config_.back_window->ms());
+ }
+ }
+}
+
void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
const RTPHeader& header) {
@@ -69,39 +78,26 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms,
seq = unwrapper_.Unwrap(header.extension.transportSequenceNumber);
if (send_periodic_feedback_) {
- if (periodic_window_start_seq_ &&
- packet_arrival_times_.lower_bound(*periodic_window_start_seq_) ==
- packet_arrival_times_.end()) {
- // Start new feedback packet, cull old packets.
- for (auto it = packet_arrival_times_.begin();
- it != packet_arrival_times_.end() && it->first < seq &&
- arrival_time_ms - it->second >= send_config_.back_window->ms();) {
- it = packet_arrival_times_.erase(it);
- }
- }
+ MaybeCullOldPackets(seq, arrival_time_ms);
+
if (!periodic_window_start_seq_ || seq < *periodic_window_start_seq_) {
periodic_window_start_seq_ = seq;
}
}
// We are only interested in the first time a packet is received.
- if (packet_arrival_times_.find(seq) != packet_arrival_times_.end())
+ if (packet_arrival_times_.has_received(seq)) {
return;
+ }
- packet_arrival_times_[seq] = arrival_time_ms;
+ packet_arrival_times_.AddPacket(seq, arrival_time_ms);
// Limit the range of sequence numbers to send feedback for.
- auto first_arrival_time_to_keep = packet_arrival_times_.lower_bound(
- packet_arrival_times_.rbegin()->first - kMaxNumberOfPackets);
- if (first_arrival_time_to_keep != packet_arrival_times_.begin()) {
- packet_arrival_times_.erase(packet_arrival_times_.begin(),
- first_arrival_time_to_keep);
- if (send_periodic_feedback_) {
- // |packet_arrival_times_| cannot be empty since we just added one
- // element and the last element is not deleted.
- RTC_DCHECK(!packet_arrival_times_.empty());
- periodic_window_start_seq_ = packet_arrival_times_.begin()->first;
- }
+ if (!periodic_window_start_seq_.has_value() ||
+ periodic_window_start_seq_.value() <
+ packet_arrival_times_.begin_sequence_number()) {
+ periodic_window_start_seq_ =
+ packet_arrival_times_.begin_sequence_number();
}
if (header.extension.feedback_request) {
@@ -113,8 +109,8 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms,
if (network_state_estimator_ && header.extension.hasAbsoluteSendTime) {
PacketResult packet_result;
packet_result.receive_time = Timestamp::Millis(arrival_time_ms);
- // Ignore reordering of packets and assume they have approximately the same
- // send time.
+ // Ignore reordering of packets and assume they have approximately the
+ // same send time.
abs_send_timestamp_ += std::max(
header.extension.GetAbsoluteSendTimeDelta(previous_abs_send_time_),
TimeDelta::Millis(0));
@@ -183,9 +179,9 @@ void RemoteEstimatorProxy::SetSendPeriodicFeedback(
}
void RemoteEstimatorProxy::SendPeriodicFeedbacks() {
- // |periodic_window_start_seq_| is the first sequence number to include in the
- // current feedback packet. Some older may still be in the map, in case a
- // reordering happens and we need to retransmit them.
+ // |periodic_window_start_seq_| is the first sequence number to include in
+ // the current feedback packet. Some older packets may still be in the map,
+ // in case a reordering happens and we need to retransmit them.
if (!periodic_window_start_seq_)
return;
@@ -199,15 +195,17 @@ void RemoteEstimatorProxy::SendPeriodicFeedbacks() {
}
}
- for (auto begin_iterator =
- packet_arrival_times_.lower_bound(*periodic_window_start_seq_);
- begin_iterator != packet_arrival_times_.cend();
- begin_iterator =
- packet_arrival_times_.lower_bound(*periodic_window_start_seq_)) {
- auto feedback_packet = std::make_unique<rtcp::TransportFeedback>();
- periodic_window_start_seq_ = BuildFeedbackPacket(
- feedback_packet_count_++, media_ssrc_, *periodic_window_start_seq_,
- begin_iterator, packet_arrival_times_.cend(), feedback_packet.get());
+ int64_t packet_arrival_times_end_seq =
+ packet_arrival_times_.end_sequence_number();
+ while (periodic_window_start_seq_ < packet_arrival_times_end_seq) {
+ auto feedback_packet = MaybeBuildFeedbackPacket(
+ /*include_timestamps=*/true, periodic_window_start_seq_.value(),
+ packet_arrival_times_end_seq,
+ /*is_periodic_update=*/true);
+
+ if (feedback_packet == nullptr) {
+ break;
+ }
RTC_DCHECK(feedback_sender_ != nullptr);
@@ -217,10 +215,10 @@ void RemoteEstimatorProxy::SendPeriodicFeedbacks() {
}
packets.push_back(std::move(feedback_packet));
- feedback_sender_->SendCombinedRtcpPacket(std::move(packets));
- // Note: Don't erase items from packet_arrival_times_ after sending, in case
- // they need to be re-sent after a reordering. Removal will be handled
- // by OnPacketArrival once packets are too old.
+ feedback_sender_(std::move(packets));
+ // Note: Don't erase items from packet_arrival_times_ after sending, in
+ // case they need to be re-sent after a reordering. Removal is handled by
+ // MaybeCullOldPackets once packets are too old.
}
}
@@ -231,61 +229,79 @@ void RemoteEstimatorProxy::SendFeedbackOnRequest(
return;
}
- auto feedback_packet = std::make_unique<rtcp::TransportFeedback>(
- feedback_request.include_timestamps);
-
int64_t first_sequence_number =
sequence_number - feedback_request.sequence_count + 1;
- auto begin_iterator =
- packet_arrival_times_.lower_bound(first_sequence_number);
- auto end_iterator = packet_arrival_times_.upper_bound(sequence_number);
- BuildFeedbackPacket(feedback_packet_count_++, media_ssrc_,
- first_sequence_number, begin_iterator, end_iterator,
- feedback_packet.get());
+ auto feedback_packet = MaybeBuildFeedbackPacket(
+ feedback_request.include_timestamps, first_sequence_number,
+ sequence_number + 1, /*is_periodic_update=*/false);
+
+ // Called right after a packet was added, so `feedback_packet` is non-null.
+ RTC_DCHECK(feedback_packet != nullptr);
// Clear up to the first packet that is included in this feedback packet.
- packet_arrival_times_.erase(packet_arrival_times_.begin(), begin_iterator);
+ packet_arrival_times_.EraseTo(first_sequence_number);
RTC_DCHECK(feedback_sender_ != nullptr);
std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets;
packets.push_back(std::move(feedback_packet));
- feedback_sender_->SendCombinedRtcpPacket(std::move(packets));
+ feedback_sender_(std::move(packets));
}
-int64_t RemoteEstimatorProxy::BuildFeedbackPacket(
- uint8_t feedback_packet_count,
- uint32_t media_ssrc,
- int64_t base_sequence_number,
- std::map<int64_t, int64_t>::const_iterator begin_iterator,
- std::map<int64_t, int64_t>::const_iterator end_iterator,
- rtcp::TransportFeedback* feedback_packet) {
- RTC_DCHECK(begin_iterator != end_iterator);
-
- // TODO(sprang): Measure receive times in microseconds and remove the
- // conversions below.
- feedback_packet->SetMediaSsrc(media_ssrc);
- // Base sequence number is the expected first sequence number. This is known,
- // but we might not have actually received it, so the base time shall be the
- // time of the first received packet in the feedback.
- feedback_packet->SetBase(static_cast<uint16_t>(base_sequence_number & 0xFFFF),
- begin_iterator->second * 1000);
- feedback_packet->SetFeedbackSequenceNumber(feedback_packet_count);
- int64_t next_sequence_number = base_sequence_number;
- for (auto it = begin_iterator; it != end_iterator; ++it) {
- if (!feedback_packet->AddReceivedPacket(
- static_cast<uint16_t>(it->first & 0xFFFF), it->second * 1000)) {
- // If we can't even add the first seq to the feedback packet, we won't be
- // able to build it at all.
- RTC_CHECK(begin_iterator != it);
+std::unique_ptr<rtcp::TransportFeedback>
+RemoteEstimatorProxy::MaybeBuildFeedbackPacket(
+ bool include_timestamps,
+ int64_t begin_sequence_number_inclusive,
+ int64_t end_sequence_number_exclusive,
+ bool is_periodic_update) {
+ RTC_DCHECK_LT(begin_sequence_number_inclusive, end_sequence_number_exclusive);
+
+ int64_t start_seq =
+ packet_arrival_times_.clamp(begin_sequence_number_inclusive);
+
+ int64_t end_seq = packet_arrival_times_.clamp(end_sequence_number_exclusive);
+
+ // Create the packet on demand, as it's not certain that any packets in
+ // the range have been received.
+ std::unique_ptr<rtcp::TransportFeedback> feedback_packet = nullptr;
+
+ int64_t next_sequence_number = begin_sequence_number_inclusive;
+ for (int64_t seq = start_seq; seq < end_seq; ++seq) {
+ int64_t arrival_time_ms = packet_arrival_times_.get(seq);
+ if (arrival_time_ms == 0) {
+ // Packet not received.
+ continue;
+ }
+
+ if (feedback_packet == nullptr) {
+ feedback_packet =
+ std::make_unique<rtcp::TransportFeedback>(include_timestamps);
+ // TODO(sprang): Measure receive times in microseconds and remove the
+ // conversions below.
+ feedback_packet->SetMediaSsrc(media_ssrc_);
+ // Base sequence number is the expected first sequence number. This is
+ // known, but we might not have actually received it, so the base time
+ // shall be the time of the first received packet in the feedback.
+ feedback_packet->SetBase(
+ static_cast<uint16_t>(begin_sequence_number_inclusive & 0xFFFF),
+ arrival_time_ms * 1000);
+ feedback_packet->SetFeedbackSequenceNumber(feedback_packet_count_++);
+ }
+
+ if (!feedback_packet->AddReceivedPacket(static_cast<uint16_t>(seq & 0xFFFF),
+ arrival_time_ms * 1000)) {
// Could not add timestamp, feedback packet might be full. Return and
// try again with a fresh packet.
break;
}
- next_sequence_number = it->first + 1;
+
+ next_sequence_number = seq + 1;
+ }
+ if (is_periodic_update) {
+ periodic_window_start_seq_ = next_sequence_number;
}
- return next_sequence_number;
+ return feedback_packet;
}
} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/modules/remote_bitrate_estimator/remote_estimator_proxy.h
index a4adefc5ee..4f89409995 100644
--- a/modules/remote_bitrate_estimator/remote_estimator_proxy.h
+++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.h
@@ -11,12 +11,15 @@
#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_ESTIMATOR_PROXY_H_
#define MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_ESTIMATOR_PROXY_H_
-#include <map>
+#include <deque>
+#include <functional>
+#include <memory>
#include <vector>
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/remote_bitrate_estimator/packet_arrival_map.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/numerics/sequence_number_util.h"
#include "rtc_base/synchronization/mutex.h"
@@ -24,7 +27,6 @@
namespace webrtc {
class Clock;
-class PacketRouter;
namespace rtcp {
class TransportFeedback;
}
@@ -32,11 +34,14 @@ class TransportFeedback;
// Class used when send-side BWE is enabled: This proxy is instantiated on the
// receive side. It buffers a number of receive timestamps and then sends
transport feedback messages back to the send side.
-
class RemoteEstimatorProxy : public RemoteBitrateEstimator {
public:
+ // Used for sending transport feedback messages when send side
+ // BWE is used.
+ using TransportFeedbackSender = std::function<void(
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets)>;
RemoteEstimatorProxy(Clock* clock,
- TransportFeedbackSenderInterface* feedback_sender,
+ TransportFeedbackSender feedback_sender,
const WebRtcKeyValueConfig* key_value_config,
NetworkStateEstimator* network_state_estimator);
~RemoteEstimatorProxy() override;
@@ -71,24 +76,33 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator {
}
};
- static const int kMaxNumberOfPackets;
-
+ void MaybeCullOldPackets(int64_t sequence_number, int64_t arrival_time_ms)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
void SendPeriodicFeedbacks() RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
void SendFeedbackOnRequest(int64_t sequence_number,
const FeedbackRequest& feedback_request)
RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
- static int64_t BuildFeedbackPacket(
- uint8_t feedback_packet_count,
- uint32_t media_ssrc,
- int64_t base_sequence_number,
- std::map<int64_t, int64_t>::const_iterator
- begin_iterator, // |begin_iterator| is inclusive.
- std::map<int64_t, int64_t>::const_iterator
- end_iterator, // |end_iterator| is exclusive.
- rtcp::TransportFeedback* feedback_packet);
+
+ // Returns a TransportFeedback packet with information about as many of the
+ // packets received in [`begin_sequence_number_inclusive`,
+ // `end_sequence_number_exclusive`) as can fit in it. If `is_periodic_update`
+ // is true, this call represents a periodic feedback message and
+ // `periodic_window_start_seq_` is updated to the first packet that was not
+ // included in the feedback packet, so that the next update can continue from
+ // that sequence number.
+ //
+ // If no received packets were added, nullptr is returned.
+ //
+ // `include_timestamps` decides whether the returned TransportFeedback should
+ // include timestamps.
+ std::unique_ptr<rtcp::TransportFeedback> MaybeBuildFeedbackPacket(
+ bool include_timestamps,
+ int64_t begin_sequence_number_inclusive,
+ int64_t end_sequence_number_exclusive,
+ bool is_periodic_update) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
Clock* const clock_;
- TransportFeedbackSenderInterface* const feedback_sender_;
+ const TransportFeedbackSender feedback_sender_;
const TransportWideFeedbackConfig send_config_;
int64_t last_process_time_ms_;
@@ -99,9 +113,14 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator {
uint32_t media_ssrc_ RTC_GUARDED_BY(&lock_);
uint8_t feedback_packet_count_ RTC_GUARDED_BY(&lock_);
SeqNumUnwrapper<uint16_t> unwrapper_ RTC_GUARDED_BY(&lock_);
+
+ // The sequence number at which the next periodic feedback window starts.
+ // Will be absl::nullopt before the first packet has been seen.
absl::optional<int64_t> periodic_window_start_seq_ RTC_GUARDED_BY(&lock_);
- // Map unwrapped seq -> time.
- std::map<int64_t, int64_t> packet_arrival_times_ RTC_GUARDED_BY(&lock_);
+
+ // Packet arrival times, by sequence number.
+ PacketArrivalTimeMap packet_arrival_times_ RTC_GUARDED_BY(&lock_);
+
int64_t send_interval_ms_ RTC_GUARDED_BY(&lock_);
bool send_periodic_feedback_ RTC_GUARDED_BY(&lock_);
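Not part of the patch: since the constructor now takes a TransportFeedbackSender std::function instead of a TransportFeedbackSenderInterface pointer, a call site can pass any callable that forwards the RTCP packets. A hypothetical wiring sketch; the surrounding clock, field-trial config, the empty lambda body and passing nullptr for the NetworkStateEstimator are assumptions for illustration only:

#include <memory>
#include <vector>

#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"

void WireUpProxy(webrtc::Clock* clock,
                 const webrtc::WebRtcKeyValueConfig* field_trials) {
  webrtc::RemoteEstimatorProxy proxy(
      clock,
      [](std::vector<std::unique_ptr<webrtc::rtcp::RtcpPacket>> packets) {
        // Hand the feedback packets to the RTCP transport owned by the
        // caller, e.g. a PacketRouter. Left empty in this sketch.
      },
      field_trials,
      /*network_state_estimator=*/nullptr);
  // Received packets are then reported via proxy.IncomingPacket(...), and
  // periodic feedback is flushed by calling proxy.Process().
}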
diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
index da995922d9..296724fa71 100644
--- a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
+++ b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
@@ -16,8 +16,8 @@
#include "api/transport/field_trial_based_config.h"
#include "api/transport/network_types.h"
#include "api/transport/test/mock_network_control.h"
-#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "system_wrappers/include/clock.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -25,6 +25,7 @@
using ::testing::_;
using ::testing::ElementsAre;
using ::testing::Invoke;
+using ::testing::MockFunction;
using ::testing::Return;
using ::testing::SizeIs;
@@ -63,20 +64,12 @@ std::vector<int64_t> TimestampsMs(
return timestamps;
}
-class MockTransportFeedbackSender : public TransportFeedbackSenderInterface {
- public:
- MOCK_METHOD(bool,
- SendCombinedRtcpPacket,
- (std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets),
- (override));
-};
-
class RemoteEstimatorProxyTest : public ::testing::Test {
public:
RemoteEstimatorProxyTest()
: clock_(0),
proxy_(&clock_,
- &router_,
+ feedback_sender_.AsStdFunction(),
&field_trial_config_,
&network_state_estimator_) {}
@@ -113,7 +106,8 @@ class RemoteEstimatorProxyTest : public ::testing::Test {
FieldTrialBasedConfig field_trial_config_;
SimulatedClock clock_;
- ::testing::StrictMock<MockTransportFeedbackSender> router_;
+ MockFunction<void(std::vector<std::unique_ptr<rtcp::RtcpPacket>>)>
+ feedback_sender_;
::testing::NiceMock<MockNetworkStateEstimator> network_state_estimator_;
RemoteEstimatorProxy proxy_;
};
@@ -121,7 +115,7 @@ class RemoteEstimatorProxyTest : public ::testing::Test {
TEST_F(RemoteEstimatorProxyTest, SendsSinglePacketFeedback) {
IncomingPacket(kBaseSeq, kBaseTimeMs);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -134,7 +128,6 @@ TEST_F(RemoteEstimatorProxyTest, SendsSinglePacketFeedback) {
ElementsAre(kBaseSeq));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs));
- return true;
}));
Process();
@@ -144,7 +137,7 @@ TEST_F(RemoteEstimatorProxyTest, DuplicatedPackets) {
IncomingPacket(kBaseSeq, kBaseTimeMs);
IncomingPacket(kBaseSeq, kBaseTimeMs + 1000);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -167,13 +160,13 @@ TEST_F(RemoteEstimatorProxyTest, FeedbackWithMissingStart) {
// First feedback.
IncomingPacket(kBaseSeq, kBaseTimeMs);
IncomingPacket(kBaseSeq + 1, kBaseTimeMs + 1000);
- EXPECT_CALL(router_, SendCombinedRtcpPacket).WillOnce(Return(true));
+ EXPECT_CALL(feedback_sender_, Call);
Process();
// Second feedback starts with a missing packet (DROP kBaseSeq + 2).
IncomingPacket(kBaseSeq + 3, kBaseTimeMs + 3000);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -186,7 +179,6 @@ TEST_F(RemoteEstimatorProxyTest, FeedbackWithMissingStart) {
ElementsAre(kBaseSeq + 3));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs + 3000));
- return true;
}));
Process();
@@ -197,7 +189,7 @@ TEST_F(RemoteEstimatorProxyTest, SendsFeedbackWithVaryingDeltas) {
IncomingPacket(kBaseSeq + 1, kBaseTimeMs + kMaxSmallDeltaMs);
IncomingPacket(kBaseSeq + 2, kBaseTimeMs + (2 * kMaxSmallDeltaMs) + 1);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -211,7 +203,6 @@ TEST_F(RemoteEstimatorProxyTest, SendsFeedbackWithVaryingDeltas) {
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs, kBaseTimeMs + kMaxSmallDeltaMs,
kBaseTimeMs + (2 * kMaxSmallDeltaMs) + 1));
- return true;
}));
Process();
@@ -224,7 +215,7 @@ TEST_F(RemoteEstimatorProxyTest, SendsFragmentedFeedback) {
IncomingPacket(kBaseSeq, kBaseTimeMs);
IncomingPacket(kBaseSeq + 1, kBaseTimeMs + kTooLargeDelta);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -237,7 +228,6 @@ TEST_F(RemoteEstimatorProxyTest, SendsFragmentedFeedback) {
ElementsAre(kBaseSeq));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs));
- return true;
}))
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
@@ -251,7 +241,6 @@ TEST_F(RemoteEstimatorProxyTest, SendsFragmentedFeedback) {
ElementsAre(kBaseSeq + 1));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs + kTooLargeDelta));
- return true;
}));
Process();
@@ -263,7 +252,7 @@ TEST_F(RemoteEstimatorProxyTest, HandlesReorderingAndWrap) {
IncomingPacket(kBaseSeq, kBaseTimeMs);
IncomingPacket(kLargeSeq, kBaseTimeMs + kDeltaMs);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[&](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -274,7 +263,6 @@ TEST_F(RemoteEstimatorProxyTest, HandlesReorderingAndWrap) {
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs + kDeltaMs, kBaseTimeMs));
- return true;
}));
Process();
@@ -293,7 +281,7 @@ TEST_F(RemoteEstimatorProxyTest, HandlesMalformedSequenceNumbers) {
}
// Only expect feedback for the last two packets.
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[&](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -306,7 +294,6 @@ TEST_F(RemoteEstimatorProxyTest, HandlesMalformedSequenceNumbers) {
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs + 28 * kDeltaMs,
kBaseTimeMs + 29 * kDeltaMs));
- return true;
}));
Process();
@@ -324,7 +311,7 @@ TEST_F(RemoteEstimatorProxyTest, HandlesBackwardsWrappingSequenceNumbers) {
}
// Only expect feedback for the first two packets.
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[&](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -336,7 +323,6 @@ TEST_F(RemoteEstimatorProxyTest, HandlesBackwardsWrappingSequenceNumbers) {
ElementsAre(kBaseSeq + 40000, kBaseSeq));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs + kDeltaMs, kBaseTimeMs));
- return true;
}));
Process();
@@ -346,7 +332,7 @@ TEST_F(RemoteEstimatorProxyTest, ResendsTimestampsOnReordering) {
IncomingPacket(kBaseSeq, kBaseTimeMs);
IncomingPacket(kBaseSeq + 2, kBaseTimeMs + 2);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -359,14 +345,13 @@ TEST_F(RemoteEstimatorProxyTest, ResendsTimestampsOnReordering) {
ElementsAre(kBaseSeq, kBaseSeq + 2));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs, kBaseTimeMs + 2));
- return true;
}));
Process();
IncomingPacket(kBaseSeq + 1, kBaseTimeMs + 1);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -379,7 +364,6 @@ TEST_F(RemoteEstimatorProxyTest, ResendsTimestampsOnReordering) {
ElementsAre(kBaseSeq + 1, kBaseSeq + 2));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs + 1, kBaseTimeMs + 2));
- return true;
}));
Process();
@@ -390,7 +374,7 @@ TEST_F(RemoteEstimatorProxyTest, RemovesTimestampsOutOfScope) {
IncomingPacket(kBaseSeq + 2, kBaseTimeMs);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -400,14 +384,13 @@ TEST_F(RemoteEstimatorProxyTest, RemovesTimestampsOutOfScope) {
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs));
- return true;
}));
Process();
IncomingPacket(kBaseSeq + 3, kTimeoutTimeMs); // kBaseSeq + 2 times out here.
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[&](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -417,7 +400,6 @@ TEST_F(RemoteEstimatorProxyTest, RemovesTimestampsOutOfScope) {
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kTimeoutTimeMs));
- return true;
}));
Process();
@@ -427,7 +409,7 @@ TEST_F(RemoteEstimatorProxyTest, RemovesTimestampsOutOfScope) {
IncomingPacket(kBaseSeq, kBaseTimeMs - 1);
IncomingPacket(kBaseSeq + 1, kTimeoutTimeMs - 1);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[&](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -440,7 +422,6 @@ TEST_F(RemoteEstimatorProxyTest, RemovesTimestampsOutOfScope) {
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs - 1, kTimeoutTimeMs - 1,
kTimeoutTimeMs));
- return true;
}));
Process();
@@ -496,7 +477,7 @@ TEST_F(RemoteEstimatorProxyOnRequestTest, TimeUntilNextProcessIsHigh) {
TEST_F(RemoteEstimatorProxyOnRequestTest, ProcessDoesNotSendFeedback) {
proxy_.SetSendPeriodicFeedback(false);
IncomingPacket(kBaseSeq, kBaseTimeMs);
- EXPECT_CALL(router_, SendCombinedRtcpPacket).Times(0);
+ EXPECT_CALL(feedback_sender_, Call).Times(0);
Process();
}
@@ -506,7 +487,7 @@ TEST_F(RemoteEstimatorProxyOnRequestTest, RequestSinglePacketFeedback) {
IncomingPacket(kBaseSeq + 1, kBaseTimeMs + kMaxSmallDeltaMs);
IncomingPacket(kBaseSeq + 2, kBaseTimeMs + 2 * kMaxSmallDeltaMs);
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -519,7 +500,6 @@ TEST_F(RemoteEstimatorProxyOnRequestTest, RequestSinglePacketFeedback) {
ElementsAre(kBaseSeq + 3));
EXPECT_THAT(TimestampsMs(*feedback_packet),
ElementsAre(kBaseTimeMs + 3 * kMaxSmallDeltaMs));
- return true;
}));
constexpr FeedbackRequest kSinglePacketFeedbackRequest = {
@@ -535,7 +515,7 @@ TEST_F(RemoteEstimatorProxyOnRequestTest, RequestLastFivePacketFeedback) {
IncomingPacket(kBaseSeq + i, kBaseTimeMs + i * kMaxSmallDeltaMs);
}
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -553,7 +533,6 @@ TEST_F(RemoteEstimatorProxyOnRequestTest, RequestLastFivePacketFeedback) {
kBaseTimeMs + 8 * kMaxSmallDeltaMs,
kBaseTimeMs + 9 * kMaxSmallDeltaMs,
kBaseTimeMs + 10 * kMaxSmallDeltaMs));
- return true;
}));
constexpr FeedbackRequest kFivePacketsFeedbackRequest = {
@@ -571,7 +550,7 @@ TEST_F(RemoteEstimatorProxyOnRequestTest,
IncomingPacket(kBaseSeq + i, kBaseTimeMs + i * kMaxSmallDeltaMs);
}
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
+ EXPECT_CALL(feedback_sender_, Call)
.WillOnce(Invoke(
[](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
rtcp::TransportFeedback* feedback_packet =
@@ -586,7 +565,6 @@ TEST_F(RemoteEstimatorProxyOnRequestTest,
ElementsAre(kBaseTimeMs + 6 * kMaxSmallDeltaMs,
kBaseTimeMs + 8 * kMaxSmallDeltaMs,
kBaseTimeMs + 10 * kMaxSmallDeltaMs));
- return true;
}));
constexpr FeedbackRequest kFivePacketsFeedbackRequest = {
@@ -658,13 +636,7 @@ TEST_F(RemoteEstimatorProxyTest, SendTransportFeedbackAndNetworkStateUpdate) {
AbsoluteSendTime::MsTo24Bits(kBaseTimeMs - 1)));
EXPECT_CALL(network_state_estimator_, GetCurrentEstimate())
.WillOnce(Return(NetworkStateEstimate()));
- EXPECT_CALL(router_, SendCombinedRtcpPacket)
- .WillOnce(
- [](std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets) {
- EXPECT_THAT(feedback_packets, SizeIs(2));
- return true;
- });
-
+ EXPECT_CALL(feedback_sender_, Call(SizeIs(2)));
Process();
}
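
Note on the test changes above: the fixture's RTCP-router mock (router_, SendCombinedRtcpPacket) is replaced by a callback-style feedback_sender_, so expectations now target Call and the lambdas no longer return a bool. A minimal, self-contained sketch of that gMock MockFunction pattern follows; FeedbackPacket and SendOneFeedbackPacket are hypothetical stand-ins, not WebRTC types.

#include <functional>
#include <memory>
#include <utility>
#include <vector>

#include <gmock/gmock.h>
#include <gtest/gtest.h>

struct FeedbackPacket {};  // Stand-in for rtcp::RtcpPacket.

using FeedbackSender =
    std::function<void(std::vector<std::unique_ptr<FeedbackPacket>>)>;

// Code under test invokes the callback when feedback is ready; nothing is
// returned to the caller, unlike the old SendCombinedRtcpPacket() bool.
void SendOneFeedbackPacket(const FeedbackSender& send_feedback) {
  std::vector<std::unique_ptr<FeedbackPacket>> packets;
  packets.push_back(std::make_unique<FeedbackPacket>());
  send_feedback(std::move(packets));
}

TEST(FeedbackSenderSketch, CallbackReceivesPackets) {
  ::testing::MockFunction<void(std::vector<std::unique_ptr<FeedbackPacket>>)>
      feedback_sender;
  EXPECT_CALL(feedback_sender, Call(::testing::SizeIs(1)));
  SendOneFeedbackPacket(feedback_sender.AsStdFunction());
}
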
diff --git a/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
index c0b3a37ba5..403f81fd03 100644
--- a/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
+++ b/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
@@ -18,10 +18,8 @@
#include "absl/flags/flag.h"
#include "absl/flags/parse.h"
-#include "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
-#include "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "test/rtp_file_reader.h"
-#include "test/rtp_header_parser.h"
ABSL_FLAG(std::string,
extension_type,
@@ -65,14 +63,11 @@ std::set<uint32_t> SsrcFilter() {
return ssrcs;
}
-std::unique_ptr<webrtc::RtpHeaderParser> ParseArgsAndSetupEstimator(
+bool ParseArgsAndSetupRtpReader(
int argc,
char** argv,
- webrtc::Clock* clock,
- webrtc::RemoteBitrateObserver* observer,
- std::unique_ptr<webrtc::test::RtpFileReader>* rtp_reader,
- std::unique_ptr<webrtc::RemoteBitrateEstimator>* estimator,
- std::string* estimator_used) {
+ std::unique_ptr<webrtc::test::RtpFileReader>& rtp_reader,
+ webrtc::RtpHeaderExtensionMap& rtp_header_extensions) {
absl::ParseCommandLine(argc, argv);
std::string filename = InputFile();
@@ -84,16 +79,16 @@ std::unique_ptr<webrtc::RtpHeaderParser> ParseArgsAndSetupEstimator(
fprintf(stderr, "\n");
if (filename.substr(filename.find_last_of('.')) == ".pcap") {
fprintf(stderr, "Opening as pcap\n");
- rtp_reader->reset(webrtc::test::RtpFileReader::Create(
+ rtp_reader.reset(webrtc::test::RtpFileReader::Create(
webrtc::test::RtpFileReader::kPcap, filename.c_str(), SsrcFilter()));
} else {
fprintf(stderr, "Opening as rtp\n");
- rtp_reader->reset(webrtc::test::RtpFileReader::Create(
+ rtp_reader.reset(webrtc::test::RtpFileReader::Create(
webrtc::test::RtpFileReader::kRtpDump, filename.c_str()));
}
- if (!*rtp_reader) {
+ if (!rtp_reader) {
fprintf(stderr, "Cannot open input file %s\n", filename.c_str());
- return nullptr;
+ return false;
}
fprintf(stderr, "Input file: %s\n\n", filename.c_str());
@@ -105,31 +100,10 @@ std::unique_ptr<webrtc::RtpHeaderParser> ParseArgsAndSetupEstimator(
fprintf(stderr, "Extension: abs\n");
} else {
fprintf(stderr, "Unknown extension type\n");
- return nullptr;
+ return false;
}
- // Setup the RTP header parser and the bitrate estimator.
- auto parser = webrtc::RtpHeaderParser::CreateForTest();
- parser->RegisterRtpHeaderExtension(extension, ExtensionId());
- if (estimator) {
- switch (extension) {
- case webrtc::kRtpExtensionAbsoluteSendTime: {
- estimator->reset(
- new webrtc::RemoteBitrateEstimatorAbsSendTime(observer, clock));
- *estimator_used = "AbsoluteSendTimeRemoteBitrateEstimator";
- break;
- }
- case webrtc::kRtpExtensionTransmissionTimeOffset: {
- estimator->reset(
- new webrtc::RemoteBitrateEstimatorSingleStream(observer, clock));
- *estimator_used = "RemoteBitrateEstimator";
- break;
- }
- default:
- assert(false);
- return nullptr;
- }
- }
+ rtp_header_extensions.RegisterByType(ExtensionId(), extension);
- return parser;
+ return true;
}
diff --git a/modules/remote_bitrate_estimator/tools/bwe_rtp.h b/modules/remote_bitrate_estimator/tools/bwe_rtp.h
index 4285f926b5..3b161db37b 100644
--- a/modules/remote_bitrate_estimator/tools/bwe_rtp.h
+++ b/modules/remote_bitrate_estimator/tools/bwe_rtp.h
@@ -12,25 +12,14 @@
#define MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
#include <memory>
-#include <string>
-namespace webrtc {
-class Clock;
-class RemoteBitrateEstimator;
-class RemoteBitrateObserver;
-class RtpHeaderParser;
-namespace test {
-class RtpFileReader;
-}
-} // namespace webrtc
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "test/rtp_file_reader.h"
-std::unique_ptr<webrtc::RtpHeaderParser> ParseArgsAndSetupEstimator(
+bool ParseArgsAndSetupRtpReader(
int argc,
char** argv,
- webrtc::Clock* clock,
- webrtc::RemoteBitrateObserver* observer,
- std::unique_ptr<webrtc::test::RtpFileReader>* rtp_reader,
- std::unique_ptr<webrtc::RemoteBitrateEstimator>* estimator,
- std::string* estimator_used);
+ std::unique_ptr<webrtc::test::RtpFileReader>& rtp_reader,
+ webrtc::RtpHeaderExtensionMap& rtp_header_extensions);
#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
diff --git a/modules/remote_bitrate_estimator/tools/rtp_to_text.cc b/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
index 7f1e009793..98f502a42e 100644
--- a/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
+++ b/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
@@ -13,17 +13,19 @@
#include <memory>
#include "modules/remote_bitrate_estimator/tools/bwe_rtp.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "rtc_base/format_macros.h"
#include "rtc_base/strings/string_builder.h"
#include "test/rtp_file_reader.h"
-#include "test/rtp_header_parser.h"
int main(int argc, char* argv[]) {
std::unique_ptr<webrtc::test::RtpFileReader> reader;
- std::unique_ptr<webrtc::RtpHeaderParser> parser(ParseArgsAndSetupEstimator(
- argc, argv, nullptr, nullptr, &reader, nullptr, nullptr));
- if (!parser)
+ webrtc::RtpHeaderExtensionMap rtp_header_extensions;
+ if (!ParseArgsAndSetupRtpReader(argc, argv, reader, rtp_header_extensions)) {
return -1;
+ }
bool arrival_time_only = (argc >= 5 && strncmp(argv[4], "-t", 2) == 0);
@@ -35,11 +37,15 @@ int main(int argc, char* argv[]) {
int non_zero_ts_offsets = 0;
webrtc::test::RtpPacket packet;
while (reader->NextPacket(&packet)) {
- webrtc::RTPHeader header;
- parser->Parse(packet.data, packet.length, &header);
- if (header.extension.absoluteSendTime != 0)
+ webrtc::RtpPacket header(&rtp_header_extensions);
+ header.Parse(packet.data, packet.length);
+ uint32_t abs_send_time = 0;
+ if (header.GetExtension<webrtc::AbsoluteSendTime>(&abs_send_time) &&
+ abs_send_time != 0)
++non_zero_abs_send_time;
- if (header.extension.transmissionTimeOffset != 0)
+ int32_t toffset = 0;
+ if (header.GetExtension<webrtc::TransmissionOffset>(&toffset) &&
+ toffset != 0)
++non_zero_ts_offsets;
if (arrival_time_only) {
rtc::StringBuilder ss;
@@ -47,11 +53,9 @@ int main(int argc, char* argv[]) {
fprintf(stdout, "%s\n", ss.str().c_str());
} else {
fprintf(stdout, "%u %u %d %u %u %d %u %" RTC_PRIuS " %" RTC_PRIuS "\n",
- header.sequenceNumber, header.timestamp,
- header.extension.transmissionTimeOffset,
- header.extension.absoluteSendTime, packet.time_ms,
- header.markerBit, header.ssrc, packet.length,
- packet.original_length);
+ header.SequenceNumber(), header.Timestamp(), toffset,
+ abs_send_time, packet.time_ms, header.Marker(), header.Ssrc(),
+ packet.length, packet.original_length);
}
++packet_counter;
}
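
For orientation, the new parse path in rtp_to_text.cc above reduces to the sketch below (not part of the patch): register the extension id on an RtpHeaderExtensionMap, hand the map to RtpPacket, and read extensions through GetExtension<>. The id 3 is illustrative; the tool takes it from ExtensionId().

#include <cstddef>
#include <cstdint>
#include <cstdio>

#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"

void DumpAbsSendTime(const uint8_t* data, size_t length) {
  webrtc::RtpHeaderExtensionMap extensions;
  extensions.RegisterByType(/*id=*/3, webrtc::kRtpExtensionAbsoluteSendTime);

  webrtc::RtpPacket packet(&extensions);
  if (!packet.Parse(data, length))
    return;

  uint32_t abs_send_time = 0;
  if (packet.GetExtension<webrtc::AbsoluteSendTime>(&abs_send_time)) {
    printf("seq=%u ts=%u abs_send_time=%u\n",
           static_cast<unsigned>(packet.SequenceNumber()), packet.Timestamp(),
           abs_send_time);
  }
}
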
diff --git a/modules/rtp_rtcp/BUILD.gn b/modules/rtp_rtcp/BUILD.gn
index a42c1784ee..07da942f0e 100644
--- a/modules/rtp_rtcp/BUILD.gn
+++ b/modules/rtp_rtcp/BUILD.gn
@@ -111,6 +111,7 @@ rtc_library("rtp_rtcp_format") {
"../../api/transport:network_control",
"../../api/transport/rtp:dependency_descriptor",
"../../api/units:time_delta",
+ "../../api/units:timestamp",
"../../api/video:video_frame",
"../../api/video:video_layers_allocation",
"../../api/video:video_rtp_headers",
@@ -137,12 +138,16 @@ rtc_library("rtp_rtcp") {
"include/receive_statistics.h",
"include/remote_ntp_time_estimator.h",
"include/ulpfec_receiver.h",
- "source/absolute_capture_time_receiver.cc",
- "source/absolute_capture_time_receiver.h",
+ "source/absolute_capture_time_interpolator.cc",
+ "source/absolute_capture_time_interpolator.h",
+ "source/absolute_capture_time_receiver.cc", # DEPRECATED
+ "source/absolute_capture_time_receiver.h", # DEPRECATED
"source/absolute_capture_time_sender.cc",
"source/absolute_capture_time_sender.h",
"source/active_decode_targets_helper.cc",
"source/active_decode_targets_helper.h",
+ "source/capture_clock_offset_updater.cc",
+ "source/capture_clock_offset_updater.h",
"source/create_video_rtp_depacketizer.cc",
"source/create_video_rtp_depacketizer.h",
"source/dtmf_queue.cc",
@@ -366,6 +371,7 @@ rtc_library("rtcp_transceiver") {
"../../api:rtp_headers",
"../../api:transport_api",
"../../api/task_queue",
+ "../../api/units:timestamp",
"../../api/video:video_bitrate_allocation",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
@@ -472,10 +478,11 @@ if (rtc_include_tests) {
testonly = true
sources = [
- "source/absolute_capture_time_receiver_unittest.cc",
+ "source/absolute_capture_time_interpolator_unittest.cc",
"source/absolute_capture_time_sender_unittest.cc",
"source/active_decode_targets_helper_unittest.cc",
"source/byte_io_unittest.cc",
+ "source/capture_clock_offset_updater_unittest.cc",
"source/fec_private_tables_bursty_unittest.cc",
"source/flexfec_header_reader_writer_unittest.cc",
"source/flexfec_receiver_unittest.cc",
@@ -531,6 +538,7 @@ if (rtc_include_tests) {
"source/rtp_rtcp_impl2_unittest.cc",
"source/rtp_rtcp_impl_unittest.cc",
"source/rtp_sender_audio_unittest.cc",
+ "source/rtp_sender_egress_unittest.cc",
"source/rtp_sender_unittest.cc",
"source/rtp_sender_video_unittest.cc",
"source/rtp_sequence_number_map_unittest.cc",
@@ -566,6 +574,8 @@ if (rtc_include_tests) {
"../../api/rtc_event_log",
"../../api/transport:field_trial_based_config",
"../../api/transport/rtp:dependency_descriptor",
+ "../../api/units:data_size",
+ "../../api/units:time_delta",
"../../api/units:timestamp",
"../../api/video:encoded_image",
"../../api/video:video_bitrate_allocation",
diff --git a/modules/rtp_rtcp/include/rtcp_statistics.h b/modules/rtp_rtcp/include/rtcp_statistics.h
index e26c475e31..de70c14943 100644
--- a/modules/rtp_rtcp/include/rtcp_statistics.h
+++ b/modules/rtp_rtcp/include/rtcp_statistics.h
@@ -17,22 +17,6 @@
namespace webrtc {
-// Statistics for an RTCP channel
-struct RtcpStatistics {
- uint8_t fraction_lost = 0;
- int32_t packets_lost = 0; // Defined as a 24 bit signed integer in RTCP
- uint32_t extended_highest_sequence_number = 0;
- uint32_t jitter = 0;
-};
-
-class RtcpStatisticsCallback {
- public:
- virtual ~RtcpStatisticsCallback() {}
-
- virtual void StatisticsUpdated(const RtcpStatistics& statistics,
- uint32_t ssrc) = 0;
-};
-
// Statistics for RTCP packet types.
struct RtcpPacketTypeCounter {
RtcpPacketTypeCounter()
diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index 2e460e45c1..998a754cc0 100644
--- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -57,6 +57,7 @@ enum RTPExtensionType : int {
kRtpExtensionNone,
kRtpExtensionTransmissionTimeOffset,
kRtpExtensionAudioLevel,
+ kRtpExtensionCsrcAudioLevel,
kRtpExtensionInbandComfortNoise,
kRtpExtensionAbsoluteSendTime,
kRtpExtensionAbsoluteCaptureTime,
@@ -227,8 +228,11 @@ struct RtpPacketSendInfo {
RtpPacketSendInfo() = default;
uint16_t transport_sequence_number = 0;
+ // TODO(bugs.webrtc.org/12713): Remove once downstream usage is gone.
uint32_t ssrc = 0;
- uint16_t rtp_sequence_number = 0;
+ absl::optional<uint32_t> media_ssrc;
+ uint16_t rtp_sequence_number = 0; // Only valid if |media_ssrc| is set.
+ uint32_t rtp_timestamp = 0;
size_t length = 0;
absl::optional<RtpPacketMediaType> packet_type;
PacedPacketInfo pacing_info;
@@ -265,9 +269,13 @@ class RtcpFeedbackSenderInterface {
class StreamFeedbackObserver {
public:
struct StreamPacketInfo {
- uint32_t ssrc;
- uint16_t rtp_sequence_number;
bool received;
+
+ // |rtp_sequence_number| and |is_retransmission| are only valid if |ssrc|
+ // is populated.
+ absl::optional<uint32_t> ssrc;
+ uint16_t rtp_sequence_number;
+ bool is_retransmission;
};
virtual ~StreamFeedbackObserver() = default;
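
Since StreamPacketInfo::ssrc is now optional and gates the other two fields, downstream observers need to check it before touching rtp_sequence_number or is_retransmission. A hypothetical consumer sketch (the struct is restated locally so the snippet stands alone):

#include <cstdint>

#include "absl/types/optional.h"

struct StreamPacketInfo {
  bool received;
  // |rtp_sequence_number| and |is_retransmission| are only valid if |ssrc|
  // is populated.
  absl::optional<uint32_t> ssrc;
  uint16_t rtp_sequence_number;
  bool is_retransmission;
};

void OnPacketFeedback(const StreamPacketInfo& info) {
  if (!info.ssrc.has_value()) {
    return;  // Nothing to attribute to a stream; skip per-SSRC bookkeeping.
  }
  if (info.received && !info.is_retransmission) {
    // Safe here: e.g. mark (*info.ssrc, info.rtp_sequence_number) as acked.
  }
}
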
diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index d523128e38..a7707ecc19 100644
--- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -34,6 +34,7 @@ class MockRtpRtcpInterface : public RtpRtcpInterface {
(const uint8_t* incoming_packet, size_t packet_length),
(override));
MOCK_METHOD(void, SetRemoteSSRC, (uint32_t ssrc), (override));
+ MOCK_METHOD(void, SetLocalSsrc, (uint32_t ssrc), (override));
MOCK_METHOD(void, SetMaxRtpPacketSize, (size_t size), (override));
MOCK_METHOD(size_t, MaxRtpPacketSize, (), (const, override));
MOCK_METHOD(void,
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc
new file mode 100644
index 0000000000..99fc030aca
--- /dev/null
+++ b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h"
+
+#include <limits>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+constexpr Timestamp kInvalidLastReceiveTime = Timestamp::MinusInfinity();
+} // namespace
+
+constexpr TimeDelta AbsoluteCaptureTimeInterpolator::kInterpolationMaxInterval;
+
+AbsoluteCaptureTimeInterpolator::AbsoluteCaptureTimeInterpolator(Clock* clock)
+ : clock_(clock), last_receive_time_(kInvalidLastReceiveTime) {}
+
+uint32_t AbsoluteCaptureTimeInterpolator::GetSource(
+ uint32_t ssrc,
+ rtc::ArrayView<const uint32_t> csrcs) {
+ if (csrcs.empty()) {
+ return ssrc;
+ }
+
+ return csrcs[0];
+}
+
+absl::optional<AbsoluteCaptureTime>
+AbsoluteCaptureTimeInterpolator::OnReceivePacket(
+ uint32_t source,
+ uint32_t rtp_timestamp,
+ uint32_t rtp_clock_frequency,
+ const absl::optional<AbsoluteCaptureTime>& received_extension) {
+ const Timestamp receive_time = clock_->CurrentTime();
+
+ MutexLock lock(&mutex_);
+
+ AbsoluteCaptureTime extension;
+ if (received_extension == absl::nullopt) {
+ if (!ShouldInterpolateExtension(receive_time, source, rtp_timestamp,
+ rtp_clock_frequency)) {
+ last_receive_time_ = kInvalidLastReceiveTime;
+ return absl::nullopt;
+ }
+
+ extension.absolute_capture_timestamp = InterpolateAbsoluteCaptureTimestamp(
+ rtp_timestamp, rtp_clock_frequency, last_rtp_timestamp_,
+ last_absolute_capture_timestamp_);
+ extension.estimated_capture_clock_offset =
+ last_estimated_capture_clock_offset_;
+ } else {
+ last_source_ = source;
+ last_rtp_timestamp_ = rtp_timestamp;
+ last_rtp_clock_frequency_ = rtp_clock_frequency;
+ last_absolute_capture_timestamp_ =
+ received_extension->absolute_capture_timestamp;
+ last_estimated_capture_clock_offset_ =
+ received_extension->estimated_capture_clock_offset;
+
+ last_receive_time_ = receive_time;
+
+ extension = *received_extension;
+ }
+
+ return extension;
+}
+
+uint64_t AbsoluteCaptureTimeInterpolator::InterpolateAbsoluteCaptureTimestamp(
+ uint32_t rtp_timestamp,
+ uint32_t rtp_clock_frequency,
+ uint32_t last_rtp_timestamp,
+ uint64_t last_absolute_capture_timestamp) {
+ RTC_DCHECK_GT(rtp_clock_frequency, 0);
+
+ return last_absolute_capture_timestamp +
+ static_cast<int64_t>(
+ rtc::dchecked_cast<uint64_t>(rtp_timestamp - last_rtp_timestamp)
+ << 32) /
+ rtp_clock_frequency;
+}
+
+bool AbsoluteCaptureTimeInterpolator::ShouldInterpolateExtension(
+ Timestamp receive_time,
+ uint32_t source,
+ uint32_t rtp_timestamp,
+ uint32_t rtp_clock_frequency) const {
+ // Shouldn't if we don't have a previously received extension stored.
+ if (last_receive_time_ == kInvalidLastReceiveTime) {
+ return false;
+ }
+
+ // Shouldn't if the last received extension is too old.
+ if ((receive_time - last_receive_time_) > kInterpolationMaxInterval) {
+ return false;
+ }
+
+ // Shouldn't if the source has changed.
+ if (last_source_ != source) {
+ return false;
+ }
+
+ // Shouldn't if the RTP clock frequency has changed.
+ if (last_rtp_clock_frequency_ != rtp_clock_frequency) {
+ return false;
+ }
+
+ // Shouldn't if the RTP clock frequency is invalid.
+ if (rtp_clock_frequency <= 0) {
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace webrtc
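
A standalone sketch of the arithmetic in InterpolateAbsoluteCaptureTimestamp() above (not part of the patch): absolute capture timestamps are UQ32.32 fixed-point seconds, so the RTP-tick delta is scaled by 2^32 / rtp_clock_frequency. The numbers mirror the unit tests: 1280 ticks at 64 kHz is 20 ms.

#include <cstdint>
#include <cstdio>

uint64_t InterpolateSketch(uint32_t rtp_timestamp,
                           uint32_t rtp_clock_frequency,
                           uint32_t last_rtp_timestamp,
                           uint64_t last_absolute_capture_timestamp) {
  // Unsigned 32-bit subtraction handles RTP timestamp wrap-around; casting the
  // shifted delta to int64_t turns an "earlier" timestamp (a huge unsigned
  // delta) into a negative adjustment.
  const uint64_t delta_ticks = rtp_timestamp - last_rtp_timestamp;
  return last_absolute_capture_timestamp +
         static_cast<int64_t>(delta_ticks << 32) / rtp_clock_frequency;
}

int main() {
  const uint64_t base_q32x32 = uint64_t{9000} * (uint64_t{1} << 32) / 1000;
  const uint64_t interpolated = InterpolateSketch(
      /*rtp_timestamp=*/1020301280, /*rtp_clock_frequency=*/64000,
      /*last_rtp_timestamp=*/1020300000, base_q32x32);
  const double delta_ms =
      (interpolated - base_q32x32) * 1000.0 / (uint64_t{1} << 32);
  printf("interpolated delta: %.3f ms\n", delta_ms);  // ~20.000 ms
  return 0;
}
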
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h
new file mode 100644
index 0000000000..89d7f0850c
--- /dev/null
+++ b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_
+#define MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_
+
+#include "api/array_view.h"
+#include "api/rtp_headers.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+//
+// Helper class for interpolating the |AbsoluteCaptureTime| header extension.
+//
+// Supports the "timestamp interpolation" optimization:
+// A receiver SHOULD memorize the capture system (i.e. CSRC/SSRC), capture
+// timestamp, and RTP timestamp of the most recently received abs-capture-time
+// packet on each received stream. It can then use that information, in
+// combination with RTP timestamps of packets without abs-capture-time, to
+// extrapolate missing capture timestamps.
+//
+// See: https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/
+//
+class AbsoluteCaptureTimeInterpolator {
+ public:
+ static constexpr TimeDelta kInterpolationMaxInterval =
+ TimeDelta::Millis(5000);
+
+ explicit AbsoluteCaptureTimeInterpolator(Clock* clock);
+
+ // Returns the source (i.e. SSRC or CSRC) of the capture system.
+ static uint32_t GetSource(uint32_t ssrc,
+ rtc::ArrayView<const uint32_t> csrcs);
+
+ // Returns a received header extension, an interpolated header extension, or
+ // |absl::nullopt| if it's not possible to interpolate a header extension.
+ absl::optional<AbsoluteCaptureTime> OnReceivePacket(
+ uint32_t source,
+ uint32_t rtp_timestamp,
+ uint32_t rtp_clock_frequency,
+ const absl::optional<AbsoluteCaptureTime>& received_extension);
+
+ private:
+ friend class AbsoluteCaptureTimeSender;
+
+ static uint64_t InterpolateAbsoluteCaptureTimestamp(
+ uint32_t rtp_timestamp,
+ uint32_t rtp_clock_frequency,
+ uint32_t last_rtp_timestamp,
+ uint64_t last_absolute_capture_timestamp);
+
+ bool ShouldInterpolateExtension(Timestamp receive_time,
+ uint32_t source,
+ uint32_t rtp_timestamp,
+ uint32_t rtp_clock_frequency) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ Clock* const clock_;
+
+ Mutex mutex_;
+
+ Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_);
+
+ uint32_t last_source_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_);
+ uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_);
+ uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_);
+ absl::optional<int64_t> last_estimated_capture_clock_offset_
+ RTC_GUARDED_BY(mutex_);
+}; // AbsoluteCaptureTimeInterpolator
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_
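
A short usage sketch (not part of the patch), following the renamed unit tests below: the first packet carries the extension and is memorized, the second arrives 1280 RTP ticks later without it and gets an extrapolated capture timestamp (9000 ms plus 20 ms at 64 kHz).

#include "absl/types/optional.h"
#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h"
#include "system_wrappers/include/ntp_time.h"

void InterpolatorExample() {
  webrtc::SimulatedClock clock(0);
  webrtc::AbsoluteCaptureTimeInterpolator interpolator(&clock);

  constexpr uint32_t kSource = 1337;  // SSRC, or first CSRC if present.
  constexpr uint32_t kRtpClockFrequency = 64000;

  // Packet carrying the extension: echoed back and remembered.
  auto first = interpolator.OnReceivePacket(
      kSource, /*rtp_timestamp=*/1020300000, kRtpClockFrequency,
      webrtc::AbsoluteCaptureTime{webrtc::Int64MsToUQ32x32(9000),
                                  absl::nullopt});

  // Packet without the extension: the capture timestamp is extrapolated from
  // the remembered one using the RTP timestamp delta.
  auto second = interpolator.OnReceivePacket(
      kSource, /*rtp_timestamp=*/1020301280, kRtpClockFrequency, absl::nullopt);

  (void)first;
  (void)second;
}
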
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_receiver_unittest.cc b/modules/rtp_rtcp/source/absolute_capture_time_interpolator_unittest.cc
index ecf256734d..6a312f9b43 100644
--- a/modules/rtp_rtcp/source/absolute_capture_time_receiver_unittest.cc
+++ b/modules/rtp_rtcp/source/absolute_capture_time_interpolator_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h"
#include "system_wrappers/include/ntp_time.h"
#include "test/gmock.h"
@@ -16,20 +16,21 @@
namespace webrtc {
-TEST(AbsoluteCaptureTimeReceiverTest, GetSourceWithoutCsrcs) {
+TEST(AbsoluteCaptureTimeInterpolatorTest, GetSourceWithoutCsrcs) {
constexpr uint32_t kSsrc = 12;
- EXPECT_EQ(AbsoluteCaptureTimeReceiver::GetSource(kSsrc, nullptr), kSsrc);
+ EXPECT_EQ(AbsoluteCaptureTimeInterpolator::GetSource(kSsrc, nullptr), kSsrc);
}
-TEST(AbsoluteCaptureTimeReceiverTest, GetSourceWithCsrcs) {
+TEST(AbsoluteCaptureTimeInterpolatorTest, GetSourceWithCsrcs) {
constexpr uint32_t kSsrc = 12;
constexpr uint32_t kCsrcs[] = {34, 56, 78, 90};
- EXPECT_EQ(AbsoluteCaptureTimeReceiver::GetSource(kSsrc, kCsrcs), kCsrcs[0]);
+ EXPECT_EQ(AbsoluteCaptureTimeInterpolator::GetSource(kSsrc, kCsrcs),
+ kCsrcs[0]);
}
-TEST(AbsoluteCaptureTimeReceiverTest, ReceiveExtensionReturnsExtension) {
+TEST(AbsoluteCaptureTimeInterpolatorTest, ReceiveExtensionReturnsExtension) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 64000;
constexpr uint32_t kRtpTimestamp0 = 1020300000;
@@ -40,20 +41,19 @@ TEST(AbsoluteCaptureTimeReceiverTest, ReceiveExtensionReturnsExtension) {
AbsoluteCaptureTime{Int64MsToUQ32x32(9020), absl::nullopt};
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- receiver.SetRemoteToLocalClockOffset(0);
-
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp1,
- kRtpClockFrequency, kExtension1),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp1,
+ kRtpClockFrequency, kExtension1),
kExtension1);
}
-TEST(AbsoluteCaptureTimeReceiverTest, ReceiveNoExtensionReturnsNoExtension) {
+TEST(AbsoluteCaptureTimeInterpolatorTest,
+ ReceiveNoExtensionReturnsNoExtension) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 64000;
constexpr uint32_t kRtpTimestamp0 = 1020300000;
@@ -62,20 +62,18 @@ TEST(AbsoluteCaptureTimeReceiverTest, ReceiveNoExtensionReturnsNoExtension) {
static const absl::optional<AbsoluteCaptureTime> kExtension1 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
-
- receiver.SetRemoteToLocalClockOffset(0);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
absl::nullopt);
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp1,
- kRtpClockFrequency, kExtension1),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp1,
+ kRtpClockFrequency, kExtension1),
absl::nullopt);
}
-TEST(AbsoluteCaptureTimeReceiverTest, InterpolateLaterPacketArrivingLater) {
+TEST(AbsoluteCaptureTimeInterpolatorTest, InterpolateLaterPacketArrivingLater) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 64000;
constexpr uint32_t kRtpTimestamp0 = 1020300000;
@@ -87,15 +85,13 @@ TEST(AbsoluteCaptureTimeReceiverTest, InterpolateLaterPacketArrivingLater) {
static const absl::optional<AbsoluteCaptureTime> kExtension2 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
-
- receiver.SetRemoteToLocalClockOffset(0);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- absl::optional<AbsoluteCaptureTime> extension = receiver.OnReceivePacket(
+ absl::optional<AbsoluteCaptureTime> extension = interpolator.OnReceivePacket(
kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
@@ -103,8 +99,8 @@ TEST(AbsoluteCaptureTimeReceiverTest, InterpolateLaterPacketArrivingLater) {
EXPECT_EQ(extension->estimated_capture_clock_offset,
kExtension0->estimated_capture_clock_offset);
- extension = receiver.OnReceivePacket(kSource, kRtpTimestamp2,
- kRtpClockFrequency, kExtension2);
+ extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2,
+ kRtpClockFrequency, kExtension2);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 40);
@@ -112,7 +108,8 @@ TEST(AbsoluteCaptureTimeReceiverTest, InterpolateLaterPacketArrivingLater) {
kExtension0->estimated_capture_clock_offset);
}
-TEST(AbsoluteCaptureTimeReceiverTest, InterpolateEarlierPacketArrivingLater) {
+TEST(AbsoluteCaptureTimeInterpolatorTest,
+ InterpolateEarlierPacketArrivingLater) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 64000;
constexpr uint32_t kRtpTimestamp0 = 1020300000;
@@ -124,15 +121,13 @@ TEST(AbsoluteCaptureTimeReceiverTest, InterpolateEarlierPacketArrivingLater) {
static const absl::optional<AbsoluteCaptureTime> kExtension2 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- receiver.SetRemoteToLocalClockOffset(0);
-
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- absl::optional<AbsoluteCaptureTime> extension = receiver.OnReceivePacket(
+ absl::optional<AbsoluteCaptureTime> extension = interpolator.OnReceivePacket(
kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
@@ -140,8 +135,8 @@ TEST(AbsoluteCaptureTimeReceiverTest, InterpolateEarlierPacketArrivingLater) {
EXPECT_EQ(extension->estimated_capture_clock_offset,
kExtension0->estimated_capture_clock_offset);
- extension = receiver.OnReceivePacket(kSource, kRtpTimestamp2,
- kRtpClockFrequency, kExtension2);
+ extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2,
+ kRtpClockFrequency, kExtension2);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) - 40);
@@ -149,7 +144,7 @@ TEST(AbsoluteCaptureTimeReceiverTest, InterpolateEarlierPacketArrivingLater) {
kExtension0->estimated_capture_clock_offset);
}
-TEST(AbsoluteCaptureTimeReceiverTest,
+TEST(AbsoluteCaptureTimeInterpolatorTest,
InterpolateLaterPacketArrivingLaterWithRtpTimestampWrapAround) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 64000;
@@ -162,15 +157,13 @@ TEST(AbsoluteCaptureTimeReceiverTest,
static const absl::optional<AbsoluteCaptureTime> kExtension2 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
-
- receiver.SetRemoteToLocalClockOffset(0);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- absl::optional<AbsoluteCaptureTime> extension = receiver.OnReceivePacket(
+ absl::optional<AbsoluteCaptureTime> extension = interpolator.OnReceivePacket(
kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
@@ -178,8 +171,8 @@ TEST(AbsoluteCaptureTimeReceiverTest,
EXPECT_EQ(extension->estimated_capture_clock_offset,
kExtension0->estimated_capture_clock_offset);
- extension = receiver.OnReceivePacket(kSource, kRtpTimestamp2,
- kRtpClockFrequency, kExtension2);
+ extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2,
+ kRtpClockFrequency, kExtension2);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 40);
@@ -187,7 +180,7 @@ TEST(AbsoluteCaptureTimeReceiverTest,
kExtension0->estimated_capture_clock_offset);
}
-TEST(AbsoluteCaptureTimeReceiverTest,
+TEST(AbsoluteCaptureTimeInterpolatorTest,
InterpolateEarlierPacketArrivingLaterWithRtpTimestampWrapAround) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 64000;
@@ -200,15 +193,13 @@ TEST(AbsoluteCaptureTimeReceiverTest,
static const absl::optional<AbsoluteCaptureTime> kExtension2 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- receiver.SetRemoteToLocalClockOffset(0);
-
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- absl::optional<AbsoluteCaptureTime> extension = receiver.OnReceivePacket(
+ absl::optional<AbsoluteCaptureTime> extension = interpolator.OnReceivePacket(
kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
@@ -216,8 +207,8 @@ TEST(AbsoluteCaptureTimeReceiverTest,
EXPECT_EQ(extension->estimated_capture_clock_offset,
kExtension0->estimated_capture_clock_offset);
- extension = receiver.OnReceivePacket(kSource, kRtpTimestamp2,
- kRtpClockFrequency, kExtension2);
+ extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2,
+ kRtpClockFrequency, kExtension2);
EXPECT_TRUE(extension.has_value());
EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) - 40);
@@ -225,51 +216,7 @@ TEST(AbsoluteCaptureTimeReceiverTest,
kExtension0->estimated_capture_clock_offset);
}
-TEST(AbsoluteCaptureTimeReceiverTest,
- SkipEstimatedCaptureClockOffsetIfRemoteToLocalClockOffsetIsUnknown) {
- constexpr uint32_t kSource = 1337;
- constexpr uint32_t kRtpClockFrequency = 64000;
- constexpr uint32_t kRtpTimestamp0 = 1020300000;
- constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280;
- constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 2560;
- static const absl::optional<AbsoluteCaptureTime> kExtension0 =
- AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)};
- static const absl::optional<AbsoluteCaptureTime> kExtension1 = absl::nullopt;
- static const absl::optional<AbsoluteCaptureTime> kExtension2 = absl::nullopt;
- static const absl::optional<int64_t> kRemoteToLocalClockOffset2 =
- Int64MsToQ32x32(-7000007);
-
- SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
-
- receiver.SetRemoteToLocalClockOffset(0);
-
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
- kExtension0);
-
- receiver.SetRemoteToLocalClockOffset(absl::nullopt);
-
- absl::optional<AbsoluteCaptureTime> extension = receiver.OnReceivePacket(
- kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1);
- EXPECT_TRUE(extension.has_value());
- EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
- UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 20);
- EXPECT_EQ(extension->estimated_capture_clock_offset, absl::nullopt);
-
- receiver.SetRemoteToLocalClockOffset(kRemoteToLocalClockOffset2);
-
- extension = receiver.OnReceivePacket(kSource, kRtpTimestamp2,
- kRtpClockFrequency, kExtension2);
- EXPECT_TRUE(extension.has_value());
- EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp),
- UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 40);
- EXPECT_EQ(extension->estimated_capture_clock_offset,
- *kExtension0->estimated_capture_clock_offset +
- *kRemoteToLocalClockOffset2);
-}
-
-TEST(AbsoluteCaptureTimeReceiverTest, SkipInterpolateIfTooLate) {
+TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIfTooLate) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 64000;
constexpr uint32_t kRtpTimestamp0 = 1020300000;
@@ -281,30 +228,28 @@ TEST(AbsoluteCaptureTimeReceiverTest, SkipInterpolateIfTooLate) {
static const absl::optional<AbsoluteCaptureTime> kExtension2 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
-
- receiver.SetRemoteToLocalClockOffset(0);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- clock.AdvanceTime(AbsoluteCaptureTimeReceiver::kInterpolationMaxInterval);
+ clock.AdvanceTime(AbsoluteCaptureTimeInterpolator::kInterpolationMaxInterval);
- EXPECT_TRUE(receiver
+ EXPECT_TRUE(interpolator
.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency,
kExtension1)
.has_value());
clock.AdvanceTimeMilliseconds(1);
- EXPECT_FALSE(receiver
+ EXPECT_FALSE(interpolator
.OnReceivePacket(kSource, kRtpTimestamp2, kRtpClockFrequency,
kExtension2)
.has_value());
}
-TEST(AbsoluteCaptureTimeReceiverTest, SkipInterpolateIfSourceChanged) {
+TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIfSourceChanged) {
constexpr uint32_t kSource0 = 1337;
constexpr uint32_t kSource1 = 1338;
constexpr uint32_t kRtpClockFrequency = 64000;
@@ -315,21 +260,19 @@ TEST(AbsoluteCaptureTimeReceiverTest, SkipInterpolateIfSourceChanged) {
static const absl::optional<AbsoluteCaptureTime> kExtension1 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- receiver.SetRemoteToLocalClockOffset(0);
-
- EXPECT_EQ(receiver.OnReceivePacket(kSource0, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource0, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- EXPECT_FALSE(receiver
+ EXPECT_FALSE(interpolator
.OnReceivePacket(kSource1, kRtpTimestamp1,
kRtpClockFrequency, kExtension1)
.has_value());
}
-TEST(AbsoluteCaptureTimeReceiverTest,
+TEST(AbsoluteCaptureTimeInterpolatorTest,
SkipInterpolateIfRtpClockFrequencyChanged) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency0 = 64000;
@@ -341,21 +284,19 @@ TEST(AbsoluteCaptureTimeReceiverTest,
static const absl::optional<AbsoluteCaptureTime> kExtension1 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
-
- receiver.SetRemoteToLocalClockOffset(0);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency0, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency0, kExtension0),
kExtension0);
- EXPECT_FALSE(receiver
+ EXPECT_FALSE(interpolator
.OnReceivePacket(kSource, kRtpTimestamp1,
kRtpClockFrequency1, kExtension1)
.has_value());
}
-TEST(AbsoluteCaptureTimeReceiverTest,
+TEST(AbsoluteCaptureTimeInterpolatorTest,
SkipInterpolateIfRtpClockFrequencyIsInvalid) {
constexpr uint32_t kSource = 1337;
constexpr uint32_t kRtpClockFrequency = 0;
@@ -366,21 +307,19 @@ TEST(AbsoluteCaptureTimeReceiverTest,
static const absl::optional<AbsoluteCaptureTime> kExtension1 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- receiver.SetRemoteToLocalClockOffset(0);
-
- EXPECT_EQ(receiver.OnReceivePacket(kSource, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- EXPECT_FALSE(receiver
+ EXPECT_FALSE(interpolator
.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency,
kExtension1)
.has_value());
}
-TEST(AbsoluteCaptureTimeReceiverTest, SkipInterpolateIsSticky) {
+TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIsSticky) {
constexpr uint32_t kSource0 = 1337;
constexpr uint32_t kSource1 = 1338;
constexpr uint32_t kSource2 = 1337;
@@ -394,20 +333,18 @@ TEST(AbsoluteCaptureTimeReceiverTest, SkipInterpolateIsSticky) {
static const absl::optional<AbsoluteCaptureTime> kExtension2 = absl::nullopt;
SimulatedClock clock(0);
- AbsoluteCaptureTimeReceiver receiver(&clock);
-
- receiver.SetRemoteToLocalClockOffset(0);
+ AbsoluteCaptureTimeInterpolator interpolator(&clock);
- EXPECT_EQ(receiver.OnReceivePacket(kSource0, kRtpTimestamp0,
- kRtpClockFrequency, kExtension0),
+ EXPECT_EQ(interpolator.OnReceivePacket(kSource0, kRtpTimestamp0,
+ kRtpClockFrequency, kExtension0),
kExtension0);
- EXPECT_FALSE(receiver
+ EXPECT_FALSE(interpolator
.OnReceivePacket(kSource1, kRtpTimestamp1,
kRtpClockFrequency, kExtension1)
.has_value());
- EXPECT_FALSE(receiver
+ EXPECT_FALSE(interpolator
.OnReceivePacket(kSource2, kRtpTimestamp2,
kRtpClockFrequency, kExtension2)
.has_value());
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc b/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc
index 529ed7eef6..efb75506d0 100644
--- a/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc
+++ b/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc
@@ -10,38 +10,14 @@
#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
-#include <limits>
-
-#include "rtc_base/checks.h"
-
namespace webrtc {
-namespace {
-
-constexpr Timestamp kInvalidLastReceiveTime = Timestamp::MinusInfinity();
-} // namespace
-
-constexpr TimeDelta AbsoluteCaptureTimeReceiver::kInterpolationMaxInterval;
AbsoluteCaptureTimeReceiver::AbsoluteCaptureTimeReceiver(Clock* clock)
- : clock_(clock),
- remote_to_local_clock_offset_(absl::nullopt),
- last_receive_time_(kInvalidLastReceiveTime) {}
-
-uint32_t AbsoluteCaptureTimeReceiver::GetSource(
- uint32_t ssrc,
- rtc::ArrayView<const uint32_t> csrcs) {
- if (csrcs.empty()) {
- return ssrc;
- }
-
- return csrcs[0];
-}
+ : AbsoluteCaptureTimeInterpolator(clock) {}
void AbsoluteCaptureTimeReceiver::SetRemoteToLocalClockOffset(
absl::optional<int64_t> value_q32x32) {
- MutexLock lock(&mutex_);
-
- remote_to_local_clock_offset_ = value_q32x32;
+ capture_clock_offset_updater_.SetRemoteToLocalClockOffset(value_q32x32);
}
absl::optional<AbsoluteCaptureTime>
@@ -50,101 +26,16 @@ AbsoluteCaptureTimeReceiver::OnReceivePacket(
uint32_t rtp_timestamp,
uint32_t rtp_clock_frequency,
const absl::optional<AbsoluteCaptureTime>& received_extension) {
- const Timestamp receive_time = clock_->CurrentTime();
-
- MutexLock lock(&mutex_);
+ auto extension = AbsoluteCaptureTimeInterpolator::OnReceivePacket(
+ source, rtp_timestamp, rtp_clock_frequency, received_extension);
- AbsoluteCaptureTime extension;
- if (received_extension == absl::nullopt) {
- if (!ShouldInterpolateExtension(receive_time, source, rtp_timestamp,
- rtp_clock_frequency)) {
- last_receive_time_ = kInvalidLastReceiveTime;
- return absl::nullopt;
- }
-
- extension.absolute_capture_timestamp = InterpolateAbsoluteCaptureTimestamp(
- rtp_timestamp, rtp_clock_frequency, last_rtp_timestamp_,
- last_absolute_capture_timestamp_);
- extension.estimated_capture_clock_offset =
- last_estimated_capture_clock_offset_;
- } else {
- last_source_ = source;
- last_rtp_timestamp_ = rtp_timestamp;
- last_rtp_clock_frequency_ = rtp_clock_frequency;
- last_absolute_capture_timestamp_ =
- received_extension->absolute_capture_timestamp;
- last_estimated_capture_clock_offset_ =
- received_extension->estimated_capture_clock_offset;
-
- last_receive_time_ = receive_time;
-
- extension = *received_extension;
+ if (extension.has_value()) {
+ extension->estimated_capture_clock_offset =
+ capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset(
+ extension->estimated_capture_clock_offset);
}
- extension.estimated_capture_clock_offset = AdjustEstimatedCaptureClockOffset(
- extension.estimated_capture_clock_offset);
-
return extension;
}
-uint64_t AbsoluteCaptureTimeReceiver::InterpolateAbsoluteCaptureTimestamp(
- uint32_t rtp_timestamp,
- uint32_t rtp_clock_frequency,
- uint32_t last_rtp_timestamp,
- uint64_t last_absolute_capture_timestamp) {
- RTC_DCHECK_GT(rtp_clock_frequency, 0);
-
- return last_absolute_capture_timestamp +
- static_cast<int64_t>(
- rtc::dchecked_cast<uint64_t>(rtp_timestamp - last_rtp_timestamp)
- << 32) /
- rtp_clock_frequency;
-}
-
-bool AbsoluteCaptureTimeReceiver::ShouldInterpolateExtension(
- Timestamp receive_time,
- uint32_t source,
- uint32_t rtp_timestamp,
- uint32_t rtp_clock_frequency) const {
- // Shouldn't if we don't have a previously received extension stored.
- if (last_receive_time_ == kInvalidLastReceiveTime) {
- return false;
- }
-
- // Shouldn't if the last received extension is too old.
- if ((receive_time - last_receive_time_) > kInterpolationMaxInterval) {
- return false;
- }
-
- // Shouldn't if the source has changed.
- if (last_source_ != source) {
- return false;
- }
-
- // Shouldn't if the RTP clock frequency has changed.
- if (last_rtp_clock_frequency_ != rtp_clock_frequency) {
- return false;
- }
-
- // Shouldn't if the RTP clock frequency is invalid.
- if (rtp_clock_frequency <= 0) {
- return false;
- }
-
- return true;
-}
-
-absl::optional<int64_t>
-AbsoluteCaptureTimeReceiver::AdjustEstimatedCaptureClockOffset(
- absl::optional<int64_t> received_value) const {
- if (received_value == absl::nullopt ||
- remote_to_local_clock_offset_ == absl::nullopt) {
- return absl::nullopt;
- }
-
- // Do calculations as "unsigned" to make overflows deterministic.
- return static_cast<uint64_t>(*received_value) +
- static_cast<uint64_t>(*remote_to_local_clock_offset_);
-}
-
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/absolute_capture_time_receiver.h b/modules/rtp_rtcp/source/absolute_capture_time_receiver.h
index ce3442b386..ad1bd7eb5d 100644
--- a/modules/rtp_rtcp/source/absolute_capture_time_receiver.h
+++ b/modules/rtp_rtcp/source/absolute_capture_time_receiver.h
@@ -11,89 +11,28 @@
#ifndef MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_RECEIVER_H_
#define MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_RECEIVER_H_
-#include "api/array_view.h"
-#include "api/rtp_headers.h"
-#include "api/units/time_delta.h"
-#include "api/units/timestamp.h"
-#include "rtc_base/synchronization/mutex.h"
-#include "rtc_base/thread_annotations.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h"
+#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
-//
-// Helper class for receiving the |AbsoluteCaptureTime| header extension.
-//
-// Supports the "timestamp interpolation" optimization:
-// A receiver SHOULD memorize the capture system (i.e. CSRC/SSRC), capture
-// timestamp, and RTP timestamp of the most recently received abs-capture-time
-// packet on each received stream. It can then use that information, in
-// combination with RTP timestamps of packets without abs-capture-time, to
-// extrapolate missing capture timestamps.
-//
-// See: https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/
-//
-class AbsoluteCaptureTimeReceiver {
+// DEPRECATED. Use `AbsoluteCaptureTimeInterpolator` instead.
+class AbsoluteCaptureTimeReceiver : public AbsoluteCaptureTimeInterpolator {
public:
- static constexpr TimeDelta kInterpolationMaxInterval =
- TimeDelta::Millis(5000);
-
explicit AbsoluteCaptureTimeReceiver(Clock* clock);
- // Returns the source (i.e. SSRC or CSRC) of the capture system.
- static uint32_t GetSource(uint32_t ssrc,
- rtc::ArrayView<const uint32_t> csrcs);
-
- // Sets the NTP clock offset between the sender system (which may be different
- // from the capture system) and the local system. This information is normally
- // provided by passing half the value of the Round-Trip Time estimation given
- // by RTCP sender reports (see DLSR/DLRR).
- //
- // Note that the value must be in Q32.32-formatted fixed-point seconds.
- void SetRemoteToLocalClockOffset(absl::optional<int64_t> value_q32x32);
-
- // Returns a received header extension, an interpolated header extension, or
- // |absl::nullopt| if it's not possible to interpolate a header extension.
absl::optional<AbsoluteCaptureTime> OnReceivePacket(
uint32_t source,
uint32_t rtp_timestamp,
uint32_t rtp_clock_frequency,
const absl::optional<AbsoluteCaptureTime>& received_extension);
- private:
- friend class AbsoluteCaptureTimeSender;
-
- static uint64_t InterpolateAbsoluteCaptureTimestamp(
- uint32_t rtp_timestamp,
- uint32_t rtp_clock_frequency,
- uint32_t last_rtp_timestamp,
- uint64_t last_absolute_capture_timestamp);
-
- bool ShouldInterpolateExtension(Timestamp receive_time,
- uint32_t source,
- uint32_t rtp_timestamp,
- uint32_t rtp_clock_frequency) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
-
- absl::optional<int64_t> AdjustEstimatedCaptureClockOffset(
- absl::optional<int64_t> received_value) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
-
- Clock* const clock_;
-
- Mutex mutex_;
-
- absl::optional<int64_t> remote_to_local_clock_offset_ RTC_GUARDED_BY(mutex_);
-
- Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_);
+ void SetRemoteToLocalClockOffset(absl::optional<int64_t> value_q32x32);
- uint32_t last_source_ RTC_GUARDED_BY(mutex_);
- uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_);
- uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_);
- uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_);
- absl::optional<int64_t> last_estimated_capture_clock_offset_
- RTC_GUARDED_BY(mutex_);
-}; // AbsoluteCaptureTimeReceiver
+ private:
+ CaptureClockOffsetUpdater capture_clock_offset_updater_;
+};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/capture_clock_offset_updater.cc b/modules/rtp_rtcp/source/capture_clock_offset_updater.cc
new file mode 100644
index 0000000000..a5b12cb422
--- /dev/null
+++ b/modules/rtp_rtcp/source/capture_clock_offset_updater.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h"
+
+namespace webrtc {
+
+absl::optional<int64_t>
+CaptureClockOffsetUpdater::AdjustEstimatedCaptureClockOffset(
+ absl::optional<int64_t> remote_capture_clock_offset) const {
+ if (remote_capture_clock_offset == absl::nullopt ||
+ remote_to_local_clock_offset_ == absl::nullopt) {
+ return absl::nullopt;
+ }
+
+ // Do calculations as "unsigned" to make overflows deterministic.
+ return static_cast<uint64_t>(*remote_capture_clock_offset) +
+ static_cast<uint64_t>(*remote_to_local_clock_offset_);
+}
+
+void CaptureClockOffsetUpdater::SetRemoteToLocalClockOffset(
+ absl::optional<int64_t> offset_q32x32) {
+ remote_to_local_clock_offset_ = offset_q32x32;
+}
+
+} // namespace webrtc
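
A rough, self-contained sketch (not part of the patch) of the Q32.32 bookkeeping: offsets are signed fixed-point seconds with 32 fractional bits, and the adjustment is simply the sum of the remote capture offset and the remote-to-local offset, done in unsigned arithmetic so overflow wraps deterministically. MsToQ32x32 is a hypothetical stand-in for webrtc's Int64MsToQ32x32(); the values come from the unit tests.

#include <cstdint>
#include <cstdio>

#include "absl/types/optional.h"

// Hypothetical stand-in for webrtc::Int64MsToQ32x32().
int64_t MsToQ32x32(int64_t ms) {
  return ms * (int64_t{1} << 32) / 1000;
}

absl::optional<int64_t> Adjust(absl::optional<int64_t> remote_capture_offset,
                               absl::optional<int64_t> remote_to_local_offset) {
  if (!remote_capture_offset || !remote_to_local_offset)
    return absl::nullopt;
  // Unsigned addition, as in the class above, keeps overflow deterministic.
  return static_cast<int64_t>(static_cast<uint64_t>(*remote_capture_offset) +
                              static_cast<uint64_t>(*remote_to_local_offset));
}

int main() {
  // -350 ms and -7000007 ms, as in capture_clock_offset_updater_unittest.cc.
  absl::optional<int64_t> adjusted =
      Adjust(MsToQ32x32(-350), MsToQ32x32(-7000007));
  printf("adjusted offset: %.3f s\n",
         static_cast<double>(*adjusted) / (int64_t{1} << 32));
  return 0;
}
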
diff --git a/modules/rtp_rtcp/source/capture_clock_offset_updater.h b/modules/rtp_rtcp/source/capture_clock_offset_updater.h
new file mode 100644
index 0000000000..71d3eb4831
--- /dev/null
+++ b/modules/rtp_rtcp/source/capture_clock_offset_updater.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_CAPTURE_CLOCK_OFFSET_UPDATER_H_
+#define MODULES_RTP_RTCP_SOURCE_CAPTURE_CLOCK_OFFSET_UPDATER_H_
+
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+//
+// Helper class for calculating the clock offset against the capturer's clock.
+//
+// This is achieved by adjusting the estimated capture clock offset in the
+// received Absolute Capture Time RTP header extension (see
+// https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/), which
+// represents the clock offset between a remote sender and the capturer, by
+// adding the local-to-remote clock offset.
+
+class CaptureClockOffsetUpdater {
+ public:
+ // Adjusts remote_capture_clock_offset, which originates from the Absolute
+ // Capture Time RTP header extension, to get the local clock offset against
+ // the capturer's clock.
+ absl::optional<int64_t> AdjustEstimatedCaptureClockOffset(
+ absl::optional<int64_t> remote_capture_clock_offset) const;
+
+ // Sets the NTP clock offset between the sender system (which may be different
+ // from the capture system) and the local system. This information is normally
+ // provided by passing half the value of the Round-Trip Time estimation given
+ // by RTCP sender reports (see DLSR/DLRR).
+ //
+ // Note that the value must be in Q32.32-formatted fixed-point seconds.
+ void SetRemoteToLocalClockOffset(absl::optional<int64_t> offset_q32x32);
+
+ private:
+ absl::optional<int64_t> remote_to_local_clock_offset_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_CAPTURE_CLOCK_OFFSET_UPDATER_H_
diff --git a/modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc b/modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc
new file mode 100644
index 0000000000..43e1dd1379
--- /dev/null
+++ b/modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h"
+
+#include "system_wrappers/include/ntp_time.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+TEST(AbsoluteCaptureTimeReceiverTest,
+ SkipEstimatedCaptureClockOffsetIfRemoteToLocalClockOffsetIsUnknown) {
+ static const absl::optional<int64_t> kRemoteCaptureClockOffset =
+ Int64MsToQ32x32(-350);
+ CaptureClockOffsetUpdater updater;
+ updater.SetRemoteToLocalClockOffset(absl::nullopt);
+ EXPECT_EQ(
+ updater.AdjustEstimatedCaptureClockOffset(kRemoteCaptureClockOffset),
+ absl::nullopt);
+}
+
+TEST(AbsoluteCaptureTimeReceiverTest,
+ SkipEstimatedCaptureClockOffsetIfRemoteCaptureClockOffsetIsUnknown) {
+ static const absl::optional<int64_t> kCaptureClockOffsetNull = absl::nullopt;
+ CaptureClockOffsetUpdater updater;
+ updater.SetRemoteToLocalClockOffset(0);
+ EXPECT_EQ(updater.AdjustEstimatedCaptureClockOffset(kCaptureClockOffsetNull),
+ kCaptureClockOffsetNull);
+
+ static const absl::optional<int64_t> kRemoteCaptureClockOffset =
+ Int64MsToQ32x32(-350);
+ EXPECT_EQ(
+ updater.AdjustEstimatedCaptureClockOffset(kRemoteCaptureClockOffset),
+ kRemoteCaptureClockOffset);
+}
+
+TEST(AbsoluteCaptureTimeReceiverTest, EstimatedCaptureClockOffsetArithmetic) {
+ static const absl::optional<int64_t> kRemoteCaptureClockOffset =
+ Int64MsToQ32x32(-350);
+ static const absl::optional<int64_t> kRemoteToLocalClockOffset =
+ Int64MsToQ32x32(-7000007);
+ CaptureClockOffsetUpdater updater;
+ updater.SetRemoteToLocalClockOffset(kRemoteToLocalClockOffset);
+ EXPECT_THAT(
+ updater.AdjustEstimatedCaptureClockOffset(kRemoteCaptureClockOffset),
+ ::testing::Optional(::testing::Eq(*kRemoteCaptureClockOffset +
+ *kRemoteToLocalClockOffset)));
+}
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
index 3f7d22c498..c542557526 100644
--- a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
+++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
@@ -313,7 +313,9 @@ void DEPRECATED_RtpSenderEgress::AddPacketToTransportFeedback(
}
RtpPacketSendInfo packet_info;
+ // TODO(bugs.webrtc.org/12713): Remove once downstream usage is gone.
packet_info.ssrc = ssrc_;
+ packet_info.media_ssrc = ssrc_;
packet_info.transport_sequence_number = packet_id;
packet_info.rtp_sequence_number = packet.SequenceNumber();
packet_info.length = packet_size;
diff --git a/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc b/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc
index 8b4162fe2f..40426f16bf 100644
--- a/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc
+++ b/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc
@@ -25,6 +25,11 @@ namespace {
// Maximum number of media packets that can be protected in one batch.
constexpr size_t kMaxMediaPackets = 48; // Since we are reusing ULPFEC masks.
+// Maximum number of media packets tracked by FEC decoder.
+// Maintain a tracking window sufficiently larger than |kMaxMediaPackets|
+// to account for packet reordering in the pacer/network.
+constexpr size_t kMaxTrackedMediaPackets = 4 * kMaxMediaPackets;
+
// Maximum number of FEC packets stored inside ForwardErrorCorrection.
constexpr size_t kMaxFecPackets = kMaxMediaPackets;
@@ -72,7 +77,7 @@ size_t FlexfecHeaderSize(size_t packet_mask_size) {
} // namespace
FlexfecHeaderReader::FlexfecHeaderReader()
- : FecHeaderReader(kMaxMediaPackets, kMaxFecPackets) {}
+ : FecHeaderReader(kMaxTrackedMediaPackets, kMaxFecPackets) {}
FlexfecHeaderReader::~FlexfecHeaderReader() = default;
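
The 192 and 193 frame counts in the flexfec_receiver_unittest.cc changes below come straight from this new constant (4 * 48). A tiny sketch of that relationship, assuming the two files are kept in sync by hand:

#include <cstddef>

constexpr size_t kMaxMediaPackets = 48;  // Max protected per FEC batch.
constexpr size_t kMaxTrackedMediaPackets = 4 * kMaxMediaPackets;
static_assert(kMaxTrackedMediaPackets == 192,
              "DelayedFecPacketDoesHelp and TooDelayedFecPacketDoesNotHelp "
              "assume a 192-packet tracking window");
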
diff --git a/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc b/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc
index b9391eeb74..7261280aef 100644
--- a/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc
+++ b/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc
@@ -374,7 +374,8 @@ TEST_F(FlexfecReceiverTest, RecoversFrom50PercentLoss) {
TEST_F(FlexfecReceiverTest, DelayedFecPacketDoesHelp) {
// These values need to be updated if the underlying erasure code
// implementation changes.
- const size_t kNumFrames = 48;
+ // Delay FEC packet by maximum number of media packets tracked by receiver.
+ const size_t kNumFrames = 192;
const size_t kNumMediaPacketsPerFrame = 1;
const size_t kNumFecPackets = 1;
@@ -412,14 +413,16 @@ TEST_F(FlexfecReceiverTest, DelayedFecPacketDoesHelp) {
TEST_F(FlexfecReceiverTest, TooDelayedFecPacketDoesNotHelp) {
// These values need to be updated if the underlying erasure code
// implementation changes.
- const size_t kNumFrames = 49;
+ // Delay FEC packet by one more than maximum number of media packets
+ // tracked by receiver.
+ const size_t kNumFrames = 193;
const size_t kNumMediaPacketsPerFrame = 1;
const size_t kNumFecPackets = 1;
PacketList media_packets;
PacketizeFrame(kNumMediaPacketsPerFrame, 0, &media_packets);
PacketizeFrame(kNumMediaPacketsPerFrame, 1, &media_packets);
- // Protect two first frames.
+ // Protect first two frames.
std::list<Packet*> fec_packets = EncodeFec(media_packets, kNumFecPackets);
for (size_t i = 2; i < kNumFrames; ++i) {
PacketizeFrame(kNumMediaPacketsPerFrame, i, &media_packets);
@@ -646,4 +649,58 @@ TEST_F(FlexfecReceiverTest, CalculatesNumberOfPackets) {
EXPECT_EQ(1U, packet_counter.num_recovered_packets);
}
+TEST_F(FlexfecReceiverTest, DoesNotDecodeWrappedMediaSequenceUsingOldFec) {
+ const size_t kFirstFrameNumMediaPackets = 2;
+ const size_t kFirstFrameNumFecPackets = 1;
+
+ PacketList media_packets;
+ PacketizeFrame(kFirstFrameNumMediaPackets, 0, &media_packets);
+
+ // Protect first frame (sequences 0 and 1) with 1 FEC packet.
+ std::list<Packet*> fec_packets =
+ EncodeFec(media_packets, kFirstFrameNumFecPackets);
+
+ // Generate enough media packets to simulate media sequence number wraparound.
+ // Use no FEC for these frames to make sure old FEC is not purged due to age.
+ const size_t kNumFramesSequenceWrapAround =
+ std::numeric_limits<uint16_t>::max();
+ const size_t kNumMediaPacketsPerFrame = 1;
+
+ for (size_t i = 1; i <= kNumFramesSequenceWrapAround; ++i) {
+ PacketizeFrame(kNumMediaPacketsPerFrame, i, &media_packets);
+ }
+
+ // Receive first (|kFirstFrameNumMediaPackets| + 192) media packets.
+ // Simulate an old FEC packet by separating it from its encoded media
+ // packets by at least 192 packets.
+ auto media_it = media_packets.begin();
+ for (size_t i = 0; i < (kFirstFrameNumMediaPackets + 192); i++) {
+ if (i == 1) {
+ // Drop the second packet of the first frame.
+ media_it++;
+ } else {
+ receiver_.OnRtpPacket(ParsePacket(**media_it++));
+ }
+ }
+
+ // Receive the FEC packet. Although a protected packet was dropped,
+ // expect no recovery callback since the FEC packet is delayed from the
+ // first frame by more than 192 packets.
+ auto fec_it = fec_packets.begin();
+ std::unique_ptr<Packet> fec_packet_with_rtp_header =
+ packet_generator_.BuildFlexfecPacket(**fec_it);
+ receiver_.OnRtpPacket(ParsePacket(*fec_packet_with_rtp_header));
+
+ // Receive remaining media packets.
+ // NOTE: Because we sent enough packets to simulate wraparound, sequence 0
+ // is received again, but as a different packet from the original first
+ // packet of the first frame.
+ while (media_it != media_packets.end()) {
+ receiver_.OnRtpPacket(ParsePacket(**media_it++));
+ }
+
+ // Do not expect a recovery callback; the FEC packet is old
+ // and should not decode wrapped-around media sequences.
+}
+
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/forward_error_correction.cc b/modules/rtp_rtcp/source/forward_error_correction.cc
index 56eabc8a7f..da8025d3db 100644
--- a/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -31,6 +31,8 @@ namespace webrtc {
namespace {
// Transport header size in bytes. Assume UDP/IPv4 as a reasonable minimum.
constexpr size_t kTransportOverhead = 28;
+
+constexpr uint16_t kOldSequenceThreshold = 0x3fff;
} // namespace
ForwardErrorCorrection::Packet::Packet() : data(0), ref_count_(0) {}
@@ -508,9 +510,6 @@ void ForwardErrorCorrection::InsertPacket(
// This is important for keeping |received_fec_packets_| sorted, and may
// also reduce the possibility of incorrect decoding due to sequence number
// wrap-around.
- // TODO(marpan/holmer): We should be able to improve detection/discarding of
- // old FEC packets based on timestamp information or better sequence number
- // thresholding (e.g., to distinguish between wrap-around and reordering).
if (!received_fec_packets_.empty() &&
received_packet.ssrc == received_fec_packets_.front()->ssrc) {
// It only makes sense to detect wrap-around when |received_packet|
@@ -521,7 +520,7 @@ void ForwardErrorCorrection::InsertPacket(
auto it = received_fec_packets_.begin();
while (it != received_fec_packets_.end()) {
uint16_t seq_num_diff = MinDiff(received_packet.seq_num, (*it)->seq_num);
- if (seq_num_diff > 0x3fff) {
+ if (seq_num_diff > kOldSequenceThreshold) {
it = received_fec_packets_.erase(it);
} else {
// No need to keep iterating, since |received_fec_packets_| is sorted.
@@ -698,9 +697,10 @@ void ForwardErrorCorrection::AttemptRecovery(
// this may allow additional packets to be recovered.
// Restart for first FEC packet.
fec_packet_it = received_fec_packets_.begin();
- } else if (packets_missing == 0) {
- // Either all protected packets arrived or have been recovered. We can
- // discard this FEC packet.
+ } else if (packets_missing == 0 ||
+ IsOldFecPacket(**fec_packet_it, recovered_packets)) {
+ // Either all protected packets arrived or have been recovered, or the FEC
+ // packet is old. We can discard this FEC packet.
fec_packet_it = received_fec_packets_.erase(fec_packet_it);
} else {
fec_packet_it++;
@@ -731,6 +731,23 @@ void ForwardErrorCorrection::DiscardOldRecoveredPackets(
RTC_DCHECK_LE(recovered_packets->size(), max_media_packets);
}
+bool ForwardErrorCorrection::IsOldFecPacket(
+ const ReceivedFecPacket& fec_packet,
+ const RecoveredPacketList* recovered_packets) {
+ if (recovered_packets->empty()) {
+ return false;
+ }
+
+ const uint16_t back_recovered_seq_num = recovered_packets->back()->seq_num;
+ const uint16_t last_protected_seq_num =
+ fec_packet.protected_packets.back()->seq_num;
+
+ // The FEC packet is old if its last protected sequence number is much
+ // older than the newest sequence number in |recovered_packets|.
+ return (MinDiff(back_recovered_seq_num, last_protected_seq_num) >
+ kOldSequenceThreshold);
+}
+
uint16_t ForwardErrorCorrection::ParseSequenceNumber(const uint8_t* packet) {
return (packet[2] << 8) + packet[3];
}
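The old-FEC check above relies on the wrap-aware distance between two 16-bit RTP sequence numbers. A small standalone sketch of that comparison, assuming MinDiff(a, b) returns the smaller of the forward and backward distances modulo 2^16 (the real helper is defined elsewhere in WebRTC; this re-implementation is illustrative only):

  #include <algorithm>
  #include <cstdint>

  // Assumed behaviour of MinDiff for 16-bit sequence numbers.
  uint16_t MinDiff16(uint16_t a, uint16_t b) {
    uint16_t forward = static_cast<uint16_t>(b - a);
    uint16_t backward = static_cast<uint16_t>(a - b);
    return std::min(forward, backward);
  }

  constexpr uint16_t kOldSequenceThreshold = 0x3fff;  // Roughly 1/4 of the space.

  // A FEC packet whose last protected sequence number trails the newest entry
  // in |recovered_packets| by more than the threshold is considered old:
  //   MinDiff16(0, 20000) == 20000  -> greater than 0x3fff, discard as old.
  //   MinDiff16(0, 1000)  == 1000   -> within the threshold, keep.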
diff --git a/modules/rtp_rtcp/source/forward_error_correction.h b/modules/rtp_rtcp/source/forward_error_correction.h
index 0c54ad984c..b97693d01f 100644
--- a/modules/rtp_rtcp/source/forward_error_correction.h
+++ b/modules/rtp_rtcp/source/forward_error_correction.h
@@ -330,6 +330,11 @@ class ForwardErrorCorrection {
// for recovering lost packets.
void DiscardOldRecoveredPackets(RecoveredPacketList* recovered_packets);
+ // Checks whether the FEC packet is so old that it is no longer relevant
+ // for recovering lost media packets.
+ bool IsOldFecPacket(const ReceivedFecPacket& fec_packet,
+ const RecoveredPacketList* recovered_packets);
+
// These SSRCs are only used by the decoder.
const uint32_t ssrc_;
const uint32_t protected_media_ssrc_;
diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.cc b/modules/rtp_rtcp/source/receive_statistics_impl.cc
index 26c8cdd8c7..f5c3eafbf3 100644
--- a/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -17,6 +17,7 @@
#include <vector>
#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "modules/rtp_rtcp/source/time_util.h"
@@ -47,6 +48,7 @@ StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc,
RateStatistics::kBpsScale),
max_reordering_threshold_(max_reordering_threshold),
enable_retransmit_detection_(false),
+ cumulative_loss_is_capped_(false),
jitter_q4_(0),
cumulative_loss_(0),
cumulative_loss_rtcp_offset_(0),
@@ -189,22 +191,20 @@ RtpReceiveStats StreamStatisticianImpl::GetStats() const {
return stats;
}
-bool StreamStatisticianImpl::GetActiveStatisticsAndReset(
- RtcpStatistics* statistics) {
- if (clock_->TimeInMilliseconds() - last_receive_time_ms_ >=
- kStatisticsTimeoutMs) {
+void StreamStatisticianImpl::MaybeAppendReportBlockAndReset(
+ std::vector<rtcp::ReportBlock>& report_blocks) {
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (now_ms - last_receive_time_ms_ >= kStatisticsTimeoutMs) {
// Not active.
- return false;
+ return;
}
if (!ReceivedRtpPacket()) {
- return false;
+ return;
}
- *statistics = CalculateRtcpStatistics();
- return true;
-}
-RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
- RtcpStatistics stats;
+ report_blocks.emplace_back();
+ rtcp::ReportBlock& stats = report_blocks.back();
+ stats.SetMediaSsrc(ssrc_);
// Calculate fraction lost.
int64_t exp_since_last = received_seq_max_ - last_report_seq_max_;
RTC_DCHECK_GE(exp_since_last, 0);
@@ -212,37 +212,39 @@ RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
int32_t lost_since_last = cumulative_loss_ - last_report_cumulative_loss_;
if (exp_since_last > 0 && lost_since_last > 0) {
// Scale 0 to 255, where 255 is 100% loss.
- stats.fraction_lost =
- static_cast<uint8_t>(255 * lost_since_last / exp_since_last);
- } else {
- stats.fraction_lost = 0;
+ stats.SetFractionLost(255 * lost_since_last / exp_since_last);
}
- // TODO(danilchap): Ensure |stats.packets_lost| is clamped to fit in a signed
- // 24-bit value.
- stats.packets_lost = cumulative_loss_ + cumulative_loss_rtcp_offset_;
- if (stats.packets_lost < 0) {
+ int packets_lost = cumulative_loss_ + cumulative_loss_rtcp_offset_;
+ if (packets_lost < 0) {
// Clamp to zero. Works around senders that misbehave with negative
// cumulative loss.
- stats.packets_lost = 0;
+ packets_lost = 0;
cumulative_loss_rtcp_offset_ = -cumulative_loss_;
}
- stats.extended_highest_sequence_number =
- static_cast<uint32_t>(received_seq_max_);
+ if (packets_lost > 0x7fffff) {
+ // Packets lost is a 24-bit signed field and thus should be clamped, as
+ // described in https://datatracker.ietf.org/doc/html/rfc3550#appendix-A.3
+ if (!cumulative_loss_is_capped_) {
+ cumulative_loss_is_capped_ = true;
+ RTC_LOG(LS_WARNING) << "Cumulative loss reached maximum value for ssrc "
+ << ssrc_;
+ }
+ packets_lost = 0x7fffff;
+ }
+ stats.SetCumulativeLost(packets_lost);
+ stats.SetExtHighestSeqNum(received_seq_max_);
// Note: internal jitter value is in Q4 and needs to be scaled by 1/16.
- stats.jitter = jitter_q4_ >> 4;
+ stats.SetJitter(jitter_q4_ >> 4);
// Only for report blocks in RTCP SR and RR.
last_report_cumulative_loss_ = cumulative_loss_;
last_report_seq_max_ = received_seq_max_;
- BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "cumulative_loss_pkts",
- clock_->TimeInMilliseconds(),
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "cumulative_loss_pkts", now_ms,
cumulative_loss_, ssrc_);
- BWE_TEST_LOGGING_PLOT_WITH_SSRC(
- 1, "received_seq_max_pkts", clock_->TimeInMilliseconds(),
- (received_seq_max_ - received_seq_first_), ssrc_);
-
- return stats;
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "received_seq_max_pkts", now_ms,
+ (received_seq_max_ - received_seq_first_),
+ ssrc_);
}
absl::optional<int> StreamStatisticianImpl::GetFractionLostInPercent() const {
@@ -321,7 +323,7 @@ ReceiveStatisticsImpl::ReceiveStatisticsImpl(
int max_reordering_threshold)> stream_statistician_factory)
: clock_(clock),
stream_statistician_factory_(std::move(stream_statistician_factory)),
- last_returned_ssrc_(0),
+ last_returned_ssrc_idx_(0),
max_reordering_threshold_(kDefaultMaxReorderingThreshold) {}
void ReceiveStatisticsImpl::OnRtpPacket(const RtpPacketReceived& packet) {
@@ -346,6 +348,7 @@ StreamStatisticianImplInterface* ReceiveStatisticsImpl::GetOrCreateStatistician(
if (impl == nullptr) { // new element
impl =
stream_statistician_factory_(ssrc, clock_, max_reordering_threshold_);
+ all_ssrcs_.push_back(ssrc);
}
return impl.get();
}
@@ -373,37 +376,17 @@ void ReceiveStatisticsImpl::EnableRetransmitDetection(uint32_t ssrc,
std::vector<rtcp::ReportBlock> ReceiveStatisticsImpl::RtcpReportBlocks(
size_t max_blocks) {
std::vector<rtcp::ReportBlock> result;
- result.reserve(std::min(max_blocks, statisticians_.size()));
- auto add_report_block = [&result](
- uint32_t media_ssrc,
- StreamStatisticianImplInterface* statistician) {
- // Do we have receive statistics to send?
- RtcpStatistics stats;
- if (!statistician->GetActiveStatisticsAndReset(&stats))
- return;
- result.emplace_back();
- rtcp::ReportBlock& block = result.back();
- block.SetMediaSsrc(media_ssrc);
- block.SetFractionLost(stats.fraction_lost);
- if (!block.SetCumulativeLost(stats.packets_lost)) {
- RTC_LOG(LS_WARNING) << "Cumulative lost is oversized.";
- result.pop_back();
- return;
- }
- block.SetExtHighestSeqNum(stats.extended_highest_sequence_number);
- block.SetJitter(stats.jitter);
- };
-
- const auto start_it = statisticians_.upper_bound(last_returned_ssrc_);
- for (auto it = start_it;
- result.size() < max_blocks && it != statisticians_.end(); ++it)
- add_report_block(it->first, it->second.get());
- for (auto it = statisticians_.begin();
- result.size() < max_blocks && it != start_it; ++it)
- add_report_block(it->first, it->second.get());
-
- if (!result.empty())
- last_returned_ssrc_ = result.back().source_ssrc();
+ result.reserve(std::min(max_blocks, all_ssrcs_.size()));
+
+ size_t ssrc_idx = 0;
+ for (size_t i = 0; i < all_ssrcs_.size() && result.size() < max_blocks; ++i) {
+ ssrc_idx = (last_returned_ssrc_idx_ + i + 1) % all_ssrcs_.size();
+ const uint32_t media_ssrc = all_ssrcs_[ssrc_idx];
+ auto statistician_it = statisticians_.find(media_ssrc);
+ RTC_DCHECK(statistician_it != statisticians_.end());
+ statistician_it->second->MaybeAppendReportBlockAndReset(result);
+ }
+ last_returned_ssrc_idx_ = ssrc_idx;
return result;
}
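The refactored report-block path keeps the RFC 3550 arithmetic: fraction lost is scaled so that 255 means 100% loss, and cumulative loss is clamped to the 24-bit signed maximum before it is written into the block. A compact worked sketch of both computations, with invented function names and illustrative numbers:

  #include <algorithm>
  #include <cstdint>

  // Fraction lost since the previous report; 255 corresponds to 100% loss.
  uint8_t FractionLostExample(int64_t expected_since_last,
                              int64_t lost_since_last) {
    if (expected_since_last <= 0 || lost_since_last <= 0)
      return 0;
    return static_cast<uint8_t>(255 * lost_since_last / expected_since_last);
  }

  // Cumulative loss is a signed 24-bit field (RFC 3550, appendix A.3).
  int64_t ClampCumulativeLossExample(int64_t packets_lost) {
    return std::min<int64_t>(std::max<int64_t>(packets_lost, 0), 0x7fffff);
  }

  // Example: 1000 packets expected and 50 lost since the last report gives
  // FractionLostExample(1000, 50) == 12, i.e. 12/255 ~= 4.7% reported loss.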
diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.h b/modules/rtp_rtcp/source/receive_statistics_impl.h
index be56f4ba5a..44f5144df9 100644
--- a/modules/rtp_rtcp/source/receive_statistics_impl.h
+++ b/modules/rtp_rtcp/source/receive_statistics_impl.h
@@ -13,14 +13,15 @@
#include <algorithm>
#include <functional>
-#include <map>
#include <memory>
+#include <unordered_map>
#include <utility>
#include <vector>
#include "absl/types/optional.h"
#include "modules/include/module_common_types_public.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "rtc_base/rate_statistics.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
@@ -31,7 +32,8 @@ namespace webrtc {
class StreamStatisticianImplInterface : public StreamStatistician {
public:
virtual ~StreamStatisticianImplInterface() = default;
- virtual bool GetActiveStatisticsAndReset(RtcpStatistics* statistics) = 0;
+ virtual void MaybeAppendReportBlockAndReset(
+ std::vector<rtcp::ReportBlock>& report_blocks) = 0;
virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0;
virtual void EnableRetransmitDetection(bool enable) = 0;
virtual void UpdateCounters(const RtpPacketReceived& packet) = 0;
@@ -52,7 +54,8 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface {
uint32_t BitrateReceived() const override;
// Implements StreamStatisticianImplInterface
- bool GetActiveStatisticsAndReset(RtcpStatistics* statistics) override;
+ void MaybeAppendReportBlockAndReset(
+ std::vector<rtcp::ReportBlock>& report_blocks) override;
void SetMaxReorderingThreshold(int max_reordering_threshold) override;
void EnableRetransmitDetection(bool enable) override;
// Updates StreamStatistician for incoming packets.
@@ -61,7 +64,6 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface {
private:
bool IsRetransmitOfOldPacket(const RtpPacketReceived& packet,
int64_t now_ms) const;
- RtcpStatistics CalculateRtcpStatistics();
void UpdateJitter(const RtpPacketReceived& packet, int64_t receive_time_ms);
// Updates StreamStatistician for out of order packets.
// Returns true if packet considered to be out of order.
@@ -79,6 +81,7 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface {
// In number of packets or sequence numbers.
int max_reordering_threshold_;
bool enable_retransmit_detection_;
+ bool cumulative_loss_is_capped_;
// Stats on received RTP packets.
uint32_t jitter_q4_;
@@ -132,9 +135,10 @@ class StreamStatisticianLocked : public StreamStatisticianImplInterface {
MutexLock lock(&stream_lock_);
return impl_.BitrateReceived();
}
- bool GetActiveStatisticsAndReset(RtcpStatistics* statistics) override {
+ void MaybeAppendReportBlockAndReset(
+ std::vector<rtcp::ReportBlock>& report_blocks) override {
MutexLock lock(&stream_lock_);
- return impl_.GetActiveStatisticsAndReset(statistics);
+ impl_.MaybeAppendReportBlockAndReset(report_blocks);
}
void SetMaxReorderingThreshold(int max_reordering_threshold) override {
MutexLock lock(&stream_lock_);
@@ -187,9 +191,12 @@ class ReceiveStatisticsImpl : public ReceiveStatistics {
Clock* clock,
int max_reordering_threshold)>
stream_statistician_factory_;
- uint32_t last_returned_ssrc_;
+ // The index within `all_ssrcs_` that was last returned.
+ size_t last_returned_ssrc_idx_;
+ std::vector<uint32_t> all_ssrcs_;
int max_reordering_threshold_;
- std::map<uint32_t, std::unique_ptr<StreamStatisticianImplInterface>>
+ std::unordered_map<uint32_t /*ssrc*/,
+ std::unique_ptr<StreamStatisticianImplInterface>>
statisticians_;
};
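The header change above replaces the ordered std::map with an unordered_map plus a separate all_ssrcs_ vector; the vector preserves insertion order so that RtcpReportBlocks() can still walk the streams round-robin via last_returned_ssrc_idx_ even though the map no longer provides any ordering. A condensed sketch of that cursor-based iteration, with simplified types, no inactive-stream filtering and no locking:

  #include <cstddef>
  #include <cstdint>
  #include <vector>

  // Picks up to |max_blocks| SSRCs round-robin; |last_returned_idx| is the
  // cursor carried between calls, mirroring last_returned_ssrc_idx_.
  std::vector<uint32_t> NextSsrcsRoundRobin(
      const std::vector<uint32_t>& all_ssrcs,
      size_t& last_returned_idx,
      size_t max_blocks) {
    std::vector<uint32_t> out;
    size_t idx = last_returned_idx;
    for (size_t i = 0; i < all_ssrcs.size() && out.size() < max_blocks; ++i) {
      idx = (last_returned_idx + i + 1) % all_ssrcs.size();
      out.push_back(all_ssrcs[idx]);
    }
    last_returned_idx = idx;
    return out;
  }

  // With all_ssrcs = {10, 20, 30}, a cursor at 0 and max_blocks = 2, successive
  // calls return {20, 30}, then {10, 20}, then {30, 10}, and so on.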
diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc
index ae0e9e5934..79f24c4779 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -39,6 +39,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "modules/rtp_rtcp/source/tmmbr_help.h"
#include "rtc_base/checks.h"
@@ -67,22 +68,6 @@ const size_t kMaxNumberOfStoredRrtrs = 300;
constexpr TimeDelta kDefaultVideoReportInterval = TimeDelta::Seconds(1);
constexpr TimeDelta kDefaultAudioReportInterval = TimeDelta::Seconds(5);
-std::set<uint32_t> GetRegisteredSsrcs(
- const RtpRtcpInterface::Configuration& config) {
- std::set<uint32_t> ssrcs;
- ssrcs.insert(config.local_media_ssrc);
- if (config.rtx_send_ssrc) {
- ssrcs.insert(*config.rtx_send_ssrc);
- }
- if (config.fec_generator) {
- absl::optional<uint32_t> flexfec_ssrc = config.fec_generator->FecSsrc();
- if (flexfec_ssrc) {
- ssrcs.insert(*flexfec_ssrc);
- }
- }
- return ssrcs;
-}
-
// Returns true if the |timestamp| has exceeded the |interval *
// kRrTimeoutIntervals| period and was reset (set to PlusInfinity()). Returns
// false if the timer was either already reset or if it has not expired.
@@ -100,6 +85,43 @@ bool ResetTimestampIfExpired(const Timestamp now,
} // namespace
+constexpr size_t RTCPReceiver::RegisteredSsrcs::kMediaSsrcIndex;
+constexpr size_t RTCPReceiver::RegisteredSsrcs::kMaxSsrcs;
+
+RTCPReceiver::RegisteredSsrcs::RegisteredSsrcs(
+ bool disable_sequence_checker,
+ const RtpRtcpInterface::Configuration& config)
+ : packet_sequence_checker_(disable_sequence_checker) {
+ packet_sequence_checker_.Detach();
+ ssrcs_.push_back(config.local_media_ssrc);
+ if (config.rtx_send_ssrc) {
+ ssrcs_.push_back(*config.rtx_send_ssrc);
+ }
+ if (config.fec_generator) {
+ absl::optional<uint32_t> flexfec_ssrc = config.fec_generator->FecSsrc();
+ if (flexfec_ssrc) {
+ ssrcs_.push_back(*flexfec_ssrc);
+ }
+ }
+ // Ensure that the RegisteredSsrcs can inline the SSRCs.
+ RTC_DCHECK_LE(ssrcs_.size(), RTCPReceiver::RegisteredSsrcs::kMaxSsrcs);
+}
+
+bool RTCPReceiver::RegisteredSsrcs::contains(uint32_t ssrc) const {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ return absl::c_linear_search(ssrcs_, ssrc);
+}
+
+uint32_t RTCPReceiver::RegisteredSsrcs::media_ssrc() const {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ return ssrcs_[kMediaSsrcIndex];
+}
+
+void RTCPReceiver::RegisteredSsrcs::set_media_ssrc(uint32_t ssrc) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ ssrcs_[kMediaSsrcIndex] = ssrc;
+}
+
struct RTCPReceiver::PacketInformation {
uint32_t packet_type_flags = 0; // RTCPPacketTypeFlags bit field.
@@ -116,43 +138,39 @@ struct RTCPReceiver::PacketInformation {
std::unique_ptr<rtcp::LossNotification> loss_notification;
};
-// Structure for handing TMMBR and TMMBN rtcp messages (RFC5104, section 3.5.4).
-struct RTCPReceiver::TmmbrInformation {
- struct TimedTmmbrItem {
- rtcp::TmmbItem tmmbr_item;
- int64_t last_updated_ms;
- };
-
- int64_t last_time_received_ms = 0;
-
- bool ready_for_delete = false;
-
- std::vector<rtcp::TmmbItem> tmmbn;
- std::map<uint32_t, TimedTmmbrItem> tmmbr;
-};
-
-// Structure for storing received RRTR RTCP messages (RFC3611, section 4.4).
-struct RTCPReceiver::RrtrInformation {
- RrtrInformation(uint32_t ssrc,
- uint32_t received_remote_mid_ntp_time,
- uint32_t local_receive_mid_ntp_time)
- : ssrc(ssrc),
- received_remote_mid_ntp_time(received_remote_mid_ntp_time),
- local_receive_mid_ntp_time(local_receive_mid_ntp_time) {}
-
- uint32_t ssrc;
- // Received NTP timestamp in compact representation.
- uint32_t received_remote_mid_ntp_time;
- // NTP time when the report was received in compact representation.
- uint32_t local_receive_mid_ntp_time;
-};
-
-struct RTCPReceiver::LastFirStatus {
- LastFirStatus(int64_t now_ms, uint8_t sequence_number)
- : request_ms(now_ms), sequence_number(sequence_number) {}
- int64_t request_ms;
- uint8_t sequence_number;
-};
+RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config,
+ ModuleRtpRtcpImpl2* owner)
+ : clock_(config.clock),
+ receiver_only_(config.receiver_only),
+ rtp_rtcp_(owner),
+ main_ssrc_(config.local_media_ssrc),
+ registered_ssrcs_(false, config),
+ rtcp_bandwidth_observer_(config.bandwidth_callback),
+ rtcp_intra_frame_observer_(config.intra_frame_callback),
+ rtcp_loss_notification_observer_(config.rtcp_loss_notification_observer),
+ network_state_estimate_observer_(config.network_state_estimate_observer),
+ transport_feedback_observer_(config.transport_feedback_callback),
+ bitrate_allocation_observer_(config.bitrate_allocation_observer),
+ report_interval_(config.rtcp_report_interval_ms > 0
+ ? TimeDelta::Millis(config.rtcp_report_interval_ms)
+ : (config.audio ? kDefaultAudioReportInterval
+ : kDefaultVideoReportInterval)),
+ // TODO(bugs.webrtc.org/10774): Remove fallback.
+ remote_ssrc_(0),
+ remote_sender_rtp_time_(0),
+ remote_sender_packet_count_(0),
+ remote_sender_octet_count_(0),
+ remote_sender_reports_count_(0),
+ xr_rrtr_status_(config.non_sender_rtt_measurement),
+ xr_rr_rtt_ms_(0),
+ oldest_tmmbr_info_ms_(0),
+ cname_callback_(config.rtcp_cname_callback),
+ report_block_data_observer_(config.report_block_data_observer),
+ packet_type_counter_observer_(config.rtcp_packet_type_counter_observer),
+ num_skipped_packets_(0),
+ last_skipped_packets_warning_ms_(clock_->TimeInMilliseconds()) {
+ RTC_DCHECK(owner);
+}
RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config,
ModuleRtpRtcp* owner)
@@ -160,7 +178,7 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config,
receiver_only_(config.receiver_only),
rtp_rtcp_(owner),
main_ssrc_(config.local_media_ssrc),
- registered_ssrcs_(GetRegisteredSsrcs(config)),
+ registered_ssrcs_(true, config),
rtcp_bandwidth_observer_(config.bandwidth_callback),
rtcp_intra_frame_observer_(config.intra_frame_callback),
rtcp_loss_notification_observer_(config.rtcp_loss_notification_observer),
@@ -180,13 +198,25 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config,
xr_rrtr_status_(config.non_sender_rtt_measurement),
xr_rr_rtt_ms_(0),
oldest_tmmbr_info_ms_(0),
- stats_callback_(config.rtcp_statistics_callback),
cname_callback_(config.rtcp_cname_callback),
report_block_data_observer_(config.report_block_data_observer),
packet_type_counter_observer_(config.rtcp_packet_type_counter_observer),
num_skipped_packets_(0),
last_skipped_packets_warning_ms_(clock_->TimeInMilliseconds()) {
RTC_DCHECK(owner);
+ // Dear reader - if you're here because of this log statement and are
+ // wondering what this is about, chances are that you are using an instance
+ // of RTCPReceiver without using the webrtc APIs. This creates a bit of a
+ // problem for WebRTC because this class is a part of an internal
+ // implementation that is constantly changing and being improved.
+ // The intention of this log statement is to give a heads up that changes
+ // are coming and encourage you to use the public APIs or be prepared that
+ // things might break down the line as more changes land. A thing you could
+ // try out for now is to replace the `CustomSequenceChecker` in the header
+ // with a regular `SequenceChecker` and see if that triggers an
+ // error in your code. If it does, chances are you have your own threading
+ // model that is not the same as the one WebRTC uses internally.
+ RTC_LOG(LS_INFO) << "************** !!!DEPRECATION WARNING!! **************";
}
RTCPReceiver::~RTCPReceiver() {}
@@ -217,6 +247,14 @@ void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) {
remote_ssrc_ = ssrc;
}
+void RTCPReceiver::set_local_media_ssrc(uint32_t ssrc) {
+ registered_ssrcs_.set_media_ssrc(ssrc);
+}
+
+uint32_t RTCPReceiver::local_media_ssrc() const {
+ return registered_ssrcs_.media_ssrc();
+}
+
uint32_t RTCPReceiver::RemoteSSRC() const {
MutexLock lock(&rtcp_receiver_lock_);
return remote_ssrc_;
@@ -371,8 +409,7 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() {
std::vector<rtcp::ReceiveTimeInfo> last_xr_rtis;
last_xr_rtis.reserve(last_xr_rtis_size);
- const uint32_t now_ntp =
- CompactNtp(TimeMicrosToNtp(clock_->TimeInMicroseconds()));
+ const uint32_t now_ntp = CompactNtp(clock_->CurrentNtpTime());
for (size_t i = 0; i < last_xr_rtis_size; ++i) {
RrtrInformation& rrtr = received_rrtrs_.front();
@@ -520,7 +557,7 @@ void RTCPReceiver::HandleSenderReport(const CommonHeader& rtcp_block,
remote_sender_ntp_time_ = sender_report.ntp();
remote_sender_rtp_time_ = sender_report.rtp_timestamp();
- last_received_sr_ntp_ = TimeMicrosToNtp(clock_->TimeInMicroseconds());
+ last_received_sr_ntp_ = clock_->CurrentNtpTime();
remote_sender_packet_count_ = sender_report.sender_packet_count();
remote_sender_octet_count_ = sender_report.sender_octet_count();
remote_sender_reports_count_++;
@@ -567,7 +604,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block,
// which the information in this reception report block pertains.
// Filter out all report blocks that are not for us.
- if (registered_ssrcs_.count(report_block.source_ssrc()) == 0)
+ if (!registered_ssrcs_.contains(report_block.source_ssrc()))
return;
last_received_rb_ = clock_->CurrentTime();
@@ -610,7 +647,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block,
uint32_t delay_ntp = report_block.delay_since_last_sr();
// Local NTP time.
uint32_t receive_time_ntp =
- CompactNtp(TimeMicrosToNtp(last_received_rb_.us()));
+ CompactNtp(clock_->ConvertTimestampToNtpTime(last_received_rb_));
// RTT in 1/(2^16) seconds.
uint32_t rtt_ntp = receive_time_ntp - delay_ntp - send_time_ntp;
@@ -813,8 +850,7 @@ void RTCPReceiver::HandleXr(const CommonHeader& rtcp_block,
void RTCPReceiver::HandleXrReceiveReferenceTime(uint32_t sender_ssrc,
const rtcp::Rrtr& rrtr) {
uint32_t received_remote_mid_ntp_time = CompactNtp(rrtr.ntp());
- uint32_t local_receive_mid_ntp_time =
- CompactNtp(TimeMicrosToNtp(clock_->TimeInMicroseconds()));
+ uint32_t local_receive_mid_ntp_time = CompactNtp(clock_->CurrentNtpTime());
auto it = received_rrtrs_ssrc_it_.find(sender_ssrc);
if (it != received_rrtrs_ssrc_it_.end()) {
@@ -833,7 +869,7 @@ void RTCPReceiver::HandleXrReceiveReferenceTime(uint32_t sender_ssrc,
}
void RTCPReceiver::HandleXrDlrrReportBlock(const rtcp::ReceiveTimeInfo& rti) {
- if (registered_ssrcs_.count(rti.ssrc) == 0) // Not to us.
+ if (!registered_ssrcs_.contains(rti.ssrc)) // Not to us.
return;
// Caller should explicitly enable rtt calculation using extended reports.
@@ -848,7 +884,7 @@ void RTCPReceiver::HandleXrDlrrReportBlock(const rtcp::ReceiveTimeInfo& rti) {
return;
uint32_t delay_ntp = rti.delay_since_last_rr;
- uint32_t now_ntp = CompactNtp(TimeMicrosToNtp(clock_->TimeInMicroseconds()));
+ uint32_t now_ntp = CompactNtp(clock_->CurrentNtpTime());
uint32_t rtt_ntp = now_ntp - delay_ntp - send_time_ntp;
xr_rr_rtt_ms_ = CompactNtpRttToMs(rtt_ntp);
@@ -1056,14 +1092,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket(
// Might trigger a OnReceivedBandwidthEstimateUpdate.
NotifyTmmbrUpdated();
}
- uint32_t local_ssrc;
- std::set<uint32_t> registered_ssrcs;
- {
- // We don't want to hold this critsect when triggering the callbacks below.
- MutexLock lock(&rtcp_receiver_lock_);
- local_ssrc = main_ssrc_;
- registered_ssrcs = registered_ssrcs_;
- }
+
if (!receiver_only_ && (packet_information.packet_type_flags & kRtcpSrReq)) {
rtp_rtcp_->OnRequestSendReport();
}
@@ -1090,7 +1119,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket(
RTC_LOG(LS_VERBOSE)
<< "Incoming FIR from SSRC " << packet_information.remote_ssrc;
}
- rtcp_intra_frame_observer_->OnReceivedIntraFrameRequest(local_ssrc);
+ rtcp_intra_frame_observer_->OnReceivedIntraFrameRequest(main_ssrc_);
}
}
if (rtcp_loss_notification_observer_ &&
@@ -1098,7 +1127,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket(
rtcp::LossNotification* loss_notification =
packet_information.loss_notification.get();
RTC_DCHECK(loss_notification);
- if (loss_notification->media_ssrc() == local_ssrc) {
+ if (loss_notification->media_ssrc() == main_ssrc_) {
rtcp_loss_notification_observer_->OnReceivedLossNotification(
loss_notification->media_ssrc(), loss_notification->last_decoded(),
loss_notification->last_received(),
@@ -1130,8 +1159,8 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket(
(packet_information.packet_type_flags & kRtcpTransportFeedback)) {
uint32_t media_source_ssrc =
packet_information.transport_feedback->media_ssrc();
- if (media_source_ssrc == local_ssrc ||
- registered_ssrcs.find(media_source_ssrc) != registered_ssrcs.end()) {
+ if (media_source_ssrc == main_ssrc_ ||
+ registered_ssrcs_.contains(media_source_ssrc)) {
transport_feedback_observer_->OnTransportFeedback(
*packet_information.transport_feedback);
}
@@ -1150,18 +1179,6 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket(
}
if (!receiver_only_) {
- if (stats_callback_) {
- for (const auto& report_block : packet_information.report_blocks) {
- RtcpStatistics stats;
- stats.packets_lost = report_block.packets_lost;
- stats.extended_highest_sequence_number =
- report_block.extended_highest_sequence_number;
- stats.fraction_lost = report_block.fraction_lost;
- stats.jitter = report_block.jitter;
-
- stats_callback_->StatisticsUpdated(stats, report_block.source_ssrc);
- }
- }
if (report_block_data_observer_) {
for (const auto& report_block_data :
packet_information.report_block_datas) {
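RegisteredSsrcs replaces the previous std::set with an inlined vector holding at most three local SSRCs (media, RTX and FlexFEC), so membership tests become a short linear scan with no heap allocation. A minimal standalone sketch of that storage pattern; the class name and Add() helper are invented for illustration, only the absl containers and algorithm are real:

  #include <cstddef>
  #include <cstdint>

  #include "absl/algorithm/container.h"
  #include "absl/container/inlined_vector.h"

  class SmallSsrcSet {
   public:
    static constexpr size_t kMaxSsrcs = 3;

    void Add(uint32_t ssrc) { ssrcs_.push_back(ssrc); }
    bool contains(uint32_t ssrc) const {
      return absl::c_linear_search(ssrcs_, ssrc);
    }
    uint32_t media_ssrc() const { return ssrcs_[0]; }  // Index 0 is media.

   private:
    // Up to three SSRCs stored inline; no allocation for the common case.
    absl::InlinedVector<uint32_t, kMaxSsrcs> ssrcs_;
  };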
diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h
index 7b0f38bea0..57dd1c06b4 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -15,21 +15,28 @@
#include <map>
#include <set>
#include <string>
+#include <unordered_map>
#include <vector>
#include "api/array_view.h"
+#include "api/sequence_checker.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
#include "modules/rtp_rtcp/include/rtcp_statistics.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_nack_stats.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/ntp_time.h"
namespace webrtc {
+
+class ModuleRtpRtcpImpl2;
class VideoBitrateAllocationObserver;
+
namespace rtcp {
class CommonHeader;
class ReportBlock;
@@ -55,6 +62,10 @@ class RTCPReceiver final {
RTCPReceiver(const RtpRtcpInterface::Configuration& config,
ModuleRtpRtcp* owner);
+
+ RTCPReceiver(const RtpRtcpInterface::Configuration& config,
+ ModuleRtpRtcpImpl2* owner);
+
~RTCPReceiver();
void IncomingPacket(const uint8_t* packet, size_t packet_size) {
@@ -64,9 +75,14 @@ class RTCPReceiver final {
int64_t LastReceivedReportBlockMs() const;
+ void set_local_media_ssrc(uint32_t ssrc);
+ uint32_t local_media_ssrc() const;
+
void SetRemoteSSRC(uint32_t ssrc);
uint32_t RemoteSSRC() const;
+ bool receiver_only() const { return receiver_only_; }
+
// Get received NTP.
// The types for the arguments below derive from the specification:
// - `remote_sender_packet_count`: `RTCSentRtpStreamStats.packetsSent` [1]
@@ -124,10 +140,95 @@ class RTCPReceiver final {
void NotifyTmmbrUpdated();
private:
+#if RTC_DCHECK_IS_ON
+ class CustomSequenceChecker : public SequenceChecker {
+ public:
+ explicit CustomSequenceChecker(bool disable_checks)
+ : disable_checks_(disable_checks) {}
+ bool IsCurrent() const {
+ if (disable_checks_)
+ return true;
+ return SequenceChecker::IsCurrent();
+ }
+
+ private:
+ const bool disable_checks_;
+ };
+#else
+ class CustomSequenceChecker : public SequenceChecker {
+ public:
+ explicit CustomSequenceChecker(bool) {}
+ };
+#endif
+
+ // A lightweight inlined set of local SSRCs.
+ class RegisteredSsrcs {
+ public:
+ static constexpr size_t kMediaSsrcIndex = 0;
+ static constexpr size_t kMaxSsrcs = 3;
+ // Initializes the set of registered local SSRCs by extracting them from the
+ // provided `config`. The `disable_sequence_checker` flag is a workaround
+ // that allows using a sequence checker without breaking downstream
+ // code that does not yet follow the same threading rules as WebRTC.
+ RegisteredSsrcs(bool disable_sequence_checker,
+ const RtpRtcpInterface::Configuration& config);
+
+ // Indicates if `ssrc` is in the set of registered local SSRCs.
+ bool contains(uint32_t ssrc) const;
+ uint32_t media_ssrc() const;
+ void set_media_ssrc(uint32_t ssrc);
+
+ private:
+ RTC_NO_UNIQUE_ADDRESS CustomSequenceChecker packet_sequence_checker_;
+ absl::InlinedVector<uint32_t, kMaxSsrcs> ssrcs_
+ RTC_GUARDED_BY(packet_sequence_checker_);
+ };
+
struct PacketInformation;
- struct TmmbrInformation;
- struct RrtrInformation;
- struct LastFirStatus;
+
+ // Structure for handling TMMBR and TMMBN RTCP messages (RFC5104,
+ // section 3.5.4).
+ struct TmmbrInformation {
+ struct TimedTmmbrItem {
+ rtcp::TmmbItem tmmbr_item;
+ int64_t last_updated_ms;
+ };
+
+ int64_t last_time_received_ms = 0;
+
+ bool ready_for_delete = false;
+
+ std::vector<rtcp::TmmbItem> tmmbn;
+ std::map<uint32_t, TimedTmmbrItem> tmmbr;
+ };
+
+ // Structure for storing received RRTR RTCP messages (RFC3611, section 4.4).
+ struct RrtrInformation {
+ RrtrInformation(uint32_t ssrc,
+ uint32_t received_remote_mid_ntp_time,
+ uint32_t local_receive_mid_ntp_time)
+ : ssrc(ssrc),
+ received_remote_mid_ntp_time(received_remote_mid_ntp_time),
+ local_receive_mid_ntp_time(local_receive_mid_ntp_time) {}
+
+ uint32_t ssrc;
+ // Received NTP timestamp in compact representation.
+ uint32_t received_remote_mid_ntp_time;
+ // NTP time when the report was received in compact representation.
+ uint32_t local_receive_mid_ntp_time;
+ };
+
+ struct LastFirStatus {
+ LastFirStatus(int64_t now_ms, uint8_t sequence_number)
+ : request_ms(now_ms), sequence_number(sequence_number) {}
+ int64_t request_ms;
+ uint8_t sequence_number;
+ };
+
+ // TODO(boivie): `ReportBlockDataMap` and `ReportBlockMap` should be converted
+ // to std::unordered_map, but as there are too many tests that assume a
+ // specific order, it's not easily done.
+
// RTCP report blocks mapped by remote SSRC.
using ReportBlockDataMap = std::map<uint32_t, ReportBlockData>;
// RTCP report blocks map mapped by source SSRC.
@@ -229,7 +330,8 @@ class RTCPReceiver final {
const bool receiver_only_;
ModuleRtpRtcp* const rtp_rtcp_;
const uint32_t main_ssrc_;
- const std::set<uint32_t> registered_ssrcs_;
+ // The set of registered local SSRCs.
+ RegisteredSsrcs registered_ssrcs_;
RtcpBandwidthObserver* const rtcp_bandwidth_observer_;
RtcpIntraFrameObserver* const rtcp_intra_frame_observer_;
@@ -255,7 +357,7 @@ class RTCPReceiver final {
std::list<RrtrInformation> received_rrtrs_
RTC_GUARDED_BY(rtcp_receiver_lock_);
// Received RRTR information mapped by remote ssrc.
- std::map<uint32_t, std::list<RrtrInformation>::iterator>
+ std::unordered_map<uint32_t, std::list<RrtrInformation>::iterator>
received_rrtrs_ssrc_it_ RTC_GUARDED_BY(rtcp_receiver_lock_);
// Estimated rtt, zero when there is no valid estimate.
@@ -264,11 +366,11 @@ class RTCPReceiver final {
int64_t oldest_tmmbr_info_ms_ RTC_GUARDED_BY(rtcp_receiver_lock_);
// Mapped by remote ssrc.
- std::map<uint32_t, TmmbrInformation> tmmbr_infos_
+ std::unordered_map<uint32_t, TmmbrInformation> tmmbr_infos_
RTC_GUARDED_BY(rtcp_receiver_lock_);
ReportBlockMap received_report_blocks_ RTC_GUARDED_BY(rtcp_receiver_lock_);
- std::map<uint32_t, LastFirStatus> last_fir_
+ std::unordered_map<uint32_t, LastFirStatus> last_fir_
RTC_GUARDED_BY(rtcp_receiver_lock_);
// The last time we received an RTCP Report block for this module.
@@ -279,11 +381,7 @@ class RTCPReceiver final {
// delivered RTP packet to the remote side.
Timestamp last_increased_sequence_number_ = Timestamp::PlusInfinity();
- RtcpStatisticsCallback* const stats_callback_;
RtcpCnameCallback* const cname_callback_;
- // TODO(hbos): Remove RtcpStatisticsCallback in favor of
- // ReportBlockDataObserver; the ReportBlockData contains a superset of the
- // RtcpStatistics data.
ReportBlockDataObserver* const report_block_data_observer_;
RtcpPacketTypeCounterObserver* const packet_type_counter_observer_;
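CustomSequenceChecker only differs from SequenceChecker in that its checks can be disabled per instance, which is how the legacy ModuleRtpRtcp constructor keeps working while the ModuleRtpRtcpImpl2 path gets full checking. A simplified, thread-id-based stand-in for the pattern (this is not WebRTC's SequenceChecker, which tracks task-queue sequences and handles attach/detach safely):

  #include <thread>

  // Stand-in for a checker whose enforcement can be switched off per instance.
  class DisableableThreadChecker {
   public:
    explicit DisableableThreadChecker(bool disable_checks)
        : disable_checks_(disable_checks) {}

    bool IsCurrent() const {
      if (disable_checks_)
        return true;  // Legacy callers bypass the check entirely.
      if (!attached_) {
        attached_ = true;
        thread_id_ = std::this_thread::get_id();
      }
      return thread_id_ == std::this_thread::get_id();
    }

   private:
    const bool disable_checks_;
    mutable bool attached_ = false;
    mutable std::thread::id thread_id_;
  };

  // Callers check IsCurrent() at the top of methods that must stay on one
  // sequence; constructing with disable_checks = true keeps old multi-threaded
  // callers alive while they migrate to the stricter model.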
diff --git a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index 5739c51f65..48fef19ad0 100644
--- a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -51,6 +51,7 @@ using rtcp::ReceiveTimeInfo;
using ::testing::_;
using ::testing::AllOf;
using ::testing::ElementsAreArray;
+using ::testing::Eq;
using ::testing::Field;
using ::testing::InSequence;
using ::testing::IsEmpty;
@@ -86,14 +87,6 @@ class MockRtcpLossNotificationObserver : public RtcpLossNotificationObserver {
(override));
};
-class MockRtcpCallbackImpl : public RtcpStatisticsCallback {
- public:
- MOCK_METHOD(void,
- StatisticsUpdated,
- (const RtcpStatistics&, uint32_t),
- (override));
-};
-
class MockCnameCallbackImpl : public RtcpCnameCallback {
public:
MOCK_METHOD(void, OnCname, (uint32_t, absl::string_view), (override));
@@ -257,8 +250,7 @@ TEST(RtcpReceiverTest, InjectSrPacketCalculatesRTT) {
int64_t rtt_ms = 0;
EXPECT_EQ(-1, receiver.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr));
- uint32_t sent_ntp =
- CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()));
+ uint32_t sent_ntp = CompactNtp(mocks.clock.CurrentNtpTime());
mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs);
rtcp::SenderReport sr;
@@ -289,8 +281,7 @@ TEST(RtcpReceiverTest, InjectSrPacketCalculatesNegativeRTTAsOne) {
int64_t rtt_ms = 0;
EXPECT_EQ(-1, receiver.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr));
- uint32_t sent_ntp =
- CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()));
+ uint32_t sent_ntp = CompactNtp(mocks.clock.CurrentNtpTime());
mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs);
rtcp::SenderReport sr;
@@ -320,8 +311,7 @@ TEST(RtcpReceiverTest,
const uint32_t kDelayNtp = 123000;
const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp);
- uint32_t sent_ntp =
- CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()));
+ uint32_t sent_ntp = CompactNtp(mocks.clock.CurrentNtpTime());
mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs);
rtcp::SenderReport sr;
@@ -838,8 +828,7 @@ TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithSubBlock) {
receiver.IncomingPacket(xr.Build());
- uint32_t compact_ntp_now =
- CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()));
+ uint32_t compact_ntp_now = CompactNtp(mocks.clock.CurrentNtpTime());
EXPECT_TRUE(receiver.GetAndResetXrRrRtt(&rtt_ms));
uint32_t rtt_ntp = compact_ntp_now - kDelay - kLastRR;
EXPECT_NEAR(CompactNtpRttToMs(rtt_ntp), rtt_ms, 1);
@@ -863,8 +852,7 @@ TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithMultipleSubBlocks) {
receiver.IncomingPacket(xr.Build());
- uint32_t compact_ntp_now =
- CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()));
+ uint32_t compact_ntp_now = CompactNtp(mocks.clock.CurrentNtpTime());
int64_t rtt_ms = 0;
EXPECT_TRUE(receiver.GetAndResetXrRrRtt(&rtt_ms));
uint32_t rtt_ntp = compact_ntp_now - kDelay - kLastRR;
@@ -943,7 +931,7 @@ TEST(RtcpReceiverTest, RttCalculatedAfterExtendedReportsDlrr) {
const int64_t kRttMs = rand.Rand(1, 9 * 3600 * 1000);
const uint32_t kDelayNtp = rand.Rand(0, 0x7fffffff);
const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp);
- NtpTime now = TimeMicrosToNtp(mocks.clock.TimeInMicroseconds());
+ NtpTime now = mocks.clock.CurrentNtpTime();
uint32_t sent_ntp = CompactNtp(now);
mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs);
@@ -969,7 +957,7 @@ TEST(RtcpReceiverTest, XrDlrrCalculatesNegativeRttAsOne) {
const int64_t kRttMs = rand.Rand(-3600 * 1000, -1);
const uint32_t kDelayNtp = rand.Rand(0, 0x7fffffff);
const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp);
- NtpTime now = TimeMicrosToNtp(mocks.clock.TimeInMicroseconds());
+ NtpTime now = mocks.clock.CurrentNtpTime();
uint32_t sent_ntp = CompactNtp(now);
mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs);
@@ -1266,53 +1254,17 @@ TEST(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) {
mocks.clock.AdvanceTimeMilliseconds(5000);
}
// It is now starttime + 15.
- std::vector<rtcp::TmmbItem> candidate_set = receiver.TmmbrReceived();
- ASSERT_EQ(3u, candidate_set.size());
- EXPECT_EQ(30000U, candidate_set[0].bitrate_bps());
+ EXPECT_THAT(receiver.TmmbrReceived(),
+ AllOf(SizeIs(3),
+ Each(Property(&rtcp::TmmbItem::bitrate_bps, Eq(30'000U)))));
// We expect the timeout to be 25 seconds. Advance the clock by 12
// seconds, timing out the first packet.
mocks.clock.AdvanceTimeMilliseconds(12000);
- candidate_set = receiver.TmmbrReceived();
- ASSERT_EQ(2u, candidate_set.size());
- EXPECT_EQ(kSenderSsrc + 1, candidate_set[0].ssrc());
-}
-
-TEST(RtcpReceiverTest, Callbacks) {
- ReceiverMocks mocks;
- MockRtcpCallbackImpl callback;
- RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
- config.rtcp_statistics_callback = &callback;
- RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
- receiver.SetRemoteSSRC(kSenderSsrc);
-
- const uint8_t kFractionLoss = 3;
- const uint32_t kCumulativeLoss = 7;
- const uint32_t kJitter = 9;
- const uint16_t kSequenceNumber = 1234;
-
- // First packet, all numbers should just propagate.
- rtcp::ReportBlock rb1;
- rb1.SetMediaSsrc(kReceiverMainSsrc);
- rb1.SetExtHighestSeqNum(kSequenceNumber);
- rb1.SetFractionLost(kFractionLoss);
- rb1.SetCumulativeLost(kCumulativeLoss);
- rb1.SetJitter(kJitter);
-
- rtcp::ReceiverReport rr1;
- rr1.SetSenderSsrc(kSenderSsrc);
- rr1.AddReportBlock(rb1);
- EXPECT_CALL(callback,
- StatisticsUpdated(
- AllOf(Field(&RtcpStatistics::fraction_lost, kFractionLoss),
- Field(&RtcpStatistics::packets_lost, kCumulativeLoss),
- Field(&RtcpStatistics::extended_highest_sequence_number,
- kSequenceNumber),
- Field(&RtcpStatistics::jitter, kJitter)),
- kReceiverMainSsrc));
- EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks);
- EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport);
- receiver.IncomingPacket(rr1.Build());
+ EXPECT_THAT(receiver.TmmbrReceived(),
+ UnorderedElementsAre(
+ Property(&rtcp::TmmbItem::ssrc, Eq(kSenderSsrc + 1)),
+ Property(&rtcp::TmmbItem::ssrc, Eq(kSenderSsrc + 2))));
}
TEST(RtcpReceiverTest,
@@ -1377,8 +1329,7 @@ TEST(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) {
const uint32_t kDelayNtp = 123000;
const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp);
- uint32_t sent_ntp =
- CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()));
+ uint32_t sent_ntp = CompactNtp(mocks.clock.CurrentNtpTime());
mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs);
rtcp::SenderReport sr;
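The rewritten TMMBR assertions above use composite gmock matchers (AllOf, SizeIs, Each, Property, UnorderedElementsAre) instead of index-by-index checks, which reports every mismatch in a single failure message. A tiny self-contained illustration of the same matcher combination on a plain struct, unrelated to the WebRTC types:

  #include <cstdint>
  #include <vector>

  #include <gmock/gmock.h>
  #include <gtest/gtest.h>

  namespace {

  struct Item {
    uint32_t ssrc() const { return ssrc_; }
    uint32_t bitrate_bps() const { return bitrate_bps_; }
    uint32_t ssrc_ = 0;
    uint32_t bitrate_bps_ = 0;
  };

  TEST(MatcherStyleExample, CompositeMatchers) {
    std::vector<Item> items = {{1, 30000}, {2, 30000}, {3, 30000}};
    EXPECT_THAT(items,
                ::testing::AllOf(
                    ::testing::SizeIs(3),
                    ::testing::Each(::testing::Property(&Item::bitrate_bps,
                                                        ::testing::Eq(30000u)))));
  }

  }  // namespace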
diff --git a/modules/rtp_rtcp/source/rtcp_sender.cc b/modules/rtp_rtcp/source/rtcp_sender.cc
index 8b519b5c7d..c4c30a9467 100644
--- a/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -16,7 +16,11 @@
#include <memory>
#include <utility>
+#include "absl/types/optional.h"
#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtp_headers.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h"
#include "modules/rtp_rtcp/source/rtcp_packet/app.h"
#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
@@ -34,6 +38,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "modules/rtp_rtcp/source/tmmbr_help.h"
#include "rtc_base/checks.h"
@@ -49,7 +54,6 @@ const uint32_t kRtcpAnyExtendedReports = kRtcpXrReceiverReferenceTime |
kRtcpXrTargetBitrate;
constexpr int32_t kDefaultVideoReportInterval = 1000;
constexpr int32_t kDefaultAudioReportInterval = 5000;
-
} // namespace
// Helper to put several RTCP packets into lower layer datagram RTCP packet.
@@ -103,19 +107,38 @@ class RTCPSender::RtcpContext {
RtcpContext(const FeedbackState& feedback_state,
int32_t nack_size,
const uint16_t* nack_list,
- int64_t now_us)
+ Timestamp now)
: feedback_state_(feedback_state),
nack_size_(nack_size),
nack_list_(nack_list),
- now_us_(now_us) {}
+ now_(now) {}
const FeedbackState& feedback_state_;
const int32_t nack_size_;
const uint16_t* nack_list_;
- const int64_t now_us_;
+ const Timestamp now_;
};
-RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config)
+RTCPSender::Configuration RTCPSender::Configuration::FromRtpRtcpConfiguration(
+ const RtpRtcpInterface::Configuration& configuration) {
+ RTCPSender::Configuration result;
+ result.audio = configuration.audio;
+ result.local_media_ssrc = configuration.local_media_ssrc;
+ result.clock = configuration.clock;
+ result.outgoing_transport = configuration.outgoing_transport;
+ result.non_sender_rtt_measurement = configuration.non_sender_rtt_measurement;
+ result.event_log = configuration.event_log;
+ if (configuration.rtcp_report_interval_ms) {
+ result.rtcp_report_interval =
+ TimeDelta::Millis(configuration.rtcp_report_interval_ms);
+ }
+ result.receive_statistics = configuration.receive_statistics;
+ result.rtcp_packet_type_counter_observer =
+ configuration.rtcp_packet_type_counter_observer;
+ return result;
+}
+
+RTCPSender::RTCPSender(Configuration config)
: audio_(config.audio),
ssrc_(config.local_media_ssrc),
clock_(config.clock),
@@ -123,15 +146,14 @@ RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config)
method_(RtcpMode::kOff),
event_log_(config.event_log),
transport_(config.outgoing_transport),
- report_interval_ms_(config.rtcp_report_interval_ms > 0
- ? config.rtcp_report_interval_ms
- : (config.audio ? kDefaultAudioReportInterval
- : kDefaultVideoReportInterval)),
+ report_interval_(config.rtcp_report_interval.value_or(
+ TimeDelta::Millis(config.audio ? kDefaultAudioReportInterval
+ : kDefaultVideoReportInterval))),
+ schedule_next_rtcp_send_evaluation_function_(
+ std::move(config.schedule_next_rtcp_send_evaluation_function)),
sending_(false),
- next_time_to_send_rtcp_(0),
timestamp_offset_(0),
last_rtp_timestamp_(0),
- last_frame_capture_time_ms_(-1),
remote_ssrc_(0),
receive_statistics_(config.receive_statistics),
@@ -164,6 +186,9 @@ RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config)
builders_[kRtcpAnyExtendedReports] = &RTCPSender::BuildExtendedReports;
}
+RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config)
+ : RTCPSender(Configuration::FromRtpRtcpConfiguration(config)) {}
+
RTCPSender::~RTCPSender() {}
RtcpMode RTCPSender::Status() const {
@@ -174,10 +199,11 @@ RtcpMode RTCPSender::Status() const {
void RTCPSender::SetRTCPStatus(RtcpMode new_method) {
MutexLock lock(&mutex_rtcp_sender_);
- if (method_ == RtcpMode::kOff && new_method != RtcpMode::kOff) {
+ if (new_method == RtcpMode::kOff) {
+ next_time_to_send_rtcp_ = absl::nullopt;
+ } else if (method_ == RtcpMode::kOff) {
// When switching on, reschedule the next packet
- next_time_to_send_rtcp_ =
- clock_->TimeInMilliseconds() + (report_interval_ms_ / 2);
+ SetNextRtcpSendEvaluationDuration(report_interval_ / 2);
}
method_ = new_method;
}
@@ -187,8 +213,8 @@ bool RTCPSender::Sending() const {
return sending_;
}
-int32_t RTCPSender::SetSendingStatus(const FeedbackState& feedback_state,
- bool sending) {
+void RTCPSender::SetSendingStatus(const FeedbackState& feedback_state,
+ bool sending) {
bool sendRTCPBye = false;
{
MutexLock lock(&mutex_rtcp_sender_);
@@ -201,9 +227,11 @@ int32_t RTCPSender::SetSendingStatus(const FeedbackState& feedback_state,
}
sending_ = sending;
}
- if (sendRTCPBye)
- return SendRTCP(feedback_state, kRtcpBye);
- return 0;
+ if (sendRTCPBye) {
+ if (SendRTCP(feedback_state, kRtcpBye) != 0) {
+ RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE";
+ }
+ }
}
int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state,
@@ -213,11 +241,10 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state,
bool buffering_allowed) {
int32_t error_code = -1;
auto callback = [&](rtc::ArrayView<const uint8_t> packet) {
- if (transport_->SendRtcp(packet.data(), packet.size())) {
- error_code = 0;
- if (event_log_) {
- event_log_->Log(std::make_unique<RtcEventRtcpPacketOutgoing>(packet));
- }
+ transport_->SendRtcp(packet.data(), packet.size());
+ error_code = 0;
+ if (event_log_) {
+ event_log_->Log(std::make_unique<RtcEventRtcpPacketOutgoing>(packet));
}
};
absl::optional<PacketSender> sender;
@@ -259,7 +286,7 @@ void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) {
SetFlag(kRtcpRemb, /*is_volatile=*/false);
// Send a REMB immediately if we have a new REMB. The frequency of REMBs is
// throttled by the caller.
- next_time_to_send_rtcp_ = clock_->TimeInMilliseconds();
+ SetNextRtcpSendEvaluationDuration(TimeDelta::Zero());
}
void RTCPSender::UnsetRemb() {
@@ -284,28 +311,48 @@ void RTCPSender::SetTimestampOffset(uint32_t timestamp_offset) {
}
void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp,
- int64_t capture_time_ms,
- int8_t payload_type) {
+ absl::optional<Timestamp> capture_time,
+ absl::optional<int8_t> payload_type) {
MutexLock lock(&mutex_rtcp_sender_);
// For compatibility with clients who don't set payload type correctly on all
// calls.
- if (payload_type != -1) {
- last_payload_type_ = payload_type;
+ if (payload_type.has_value()) {
+ last_payload_type_ = *payload_type;
}
last_rtp_timestamp_ = rtp_timestamp;
- if (capture_time_ms <= 0) {
+ if (!capture_time.has_value()) {
// We don't currently get a capture time from VoiceEngine.
- last_frame_capture_time_ms_ = clock_->TimeInMilliseconds();
+ last_frame_capture_time_ = clock_->CurrentTime();
} else {
- last_frame_capture_time_ms_ = capture_time_ms;
+ last_frame_capture_time_ = *capture_time;
}
}
+void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp,
+ int64_t capture_time_ms,
+ int8_t payload_type) {
+ absl::optional<int8_t> payload_type_optional;
+ if (payload_type != -1)
+ payload_type_optional = payload_type;
+ SetLastRtpTime(rtp_timestamp, Timestamp::Millis(capture_time_ms),
+ payload_type_optional);
+}
+
void RTCPSender::SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz) {
MutexLock lock(&mutex_rtcp_sender_);
rtp_clock_rates_khz_[payload_type] = rtp_clock_rate_hz / 1000;
}
+uint32_t RTCPSender::SSRC() const {
+ MutexLock lock(&mutex_rtcp_sender_);
+ return ssrc_;
+}
+
+void RTCPSender::SetSsrc(uint32_t ssrc) {
+ MutexLock lock(&mutex_rtcp_sender_);
+ ssrc_ = ssrc;
+}
+
void RTCPSender::SetRemoteSSRC(uint32_t ssrc) {
MutexLock lock(&mutex_rtcp_sender_);
remote_ssrc_ = ssrc;
@@ -380,25 +427,27 @@ bool RTCPSender::TimeToSendRTCPReport(bool sendKeyframeBeforeRTP) const {
a value of the RTCP bandwidth below the intended average
*/
- int64_t now = clock_->TimeInMilliseconds();
+ Timestamp now = clock_->CurrentTime();
MutexLock lock(&mutex_rtcp_sender_);
-
+ RTC_DCHECK(
+ (method_ == RtcpMode::kOff && !next_time_to_send_rtcp_.has_value()) ||
+ (method_ != RtcpMode::kOff && next_time_to_send_rtcp_.has_value()));
if (method_ == RtcpMode::kOff)
return false;
if (!audio_ && sendKeyframeBeforeRTP) {
// for video key-frames we want to send the RTCP before the large key-frame
// if we have a 100 ms margin
- now += RTCP_SEND_BEFORE_KEY_FRAME_MS;
+ now += RTCP_SEND_BEFORE_KEY_FRAME;
}
- return now >= next_time_to_send_rtcp_;
+ return now >= *next_time_to_send_rtcp_;
}
void RTCPSender::BuildSR(const RtcpContext& ctx, PacketSender& sender) {
// Timestamp shouldn't be estimated before first media frame.
- RTC_DCHECK_GE(last_frame_capture_time_ms_, 0);
+ RTC_DCHECK(last_frame_capture_time_.has_value());
// The timestamp of this RTCP packet should be estimated as the timestamp of
// the frame being captured at this moment. We are calculating that
// timestamp as the last frame's timestamp + the time since the last frame
@@ -413,11 +462,12 @@ void RTCPSender::BuildSR(const RtcpContext& ctx, PacketSender& sender) {
// when converted to milliseconds,
uint32_t rtp_timestamp =
timestamp_offset_ + last_rtp_timestamp_ +
- ((ctx.now_us_ + 500) / 1000 - last_frame_capture_time_ms_) * rtp_rate;
+ ((ctx.now_.us() + 500) / 1000 - last_frame_capture_time_->ms()) *
+ rtp_rate;
rtcp::SenderReport report;
report.SetSenderSsrc(ssrc_);
- report.SetNtp(TimeMicrosToNtp(ctx.now_us_));
+ report.SetNtp(clock_->ConvertTimestampToNtpTime(ctx.now_));
report.SetRtpTimestamp(rtp_timestamp);
report.SetPacketCount(ctx.feedback_state_.packets_sent);
report.SetOctetCount(ctx.feedback_state_.media_bytes_sent);
@@ -583,7 +633,7 @@ void RTCPSender::BuildExtendedReports(const RtcpContext& ctx,
if (!sending_ && xr_send_receiver_reference_time_enabled_) {
rtcp::Rrtr rrtr;
- rrtr.SetNtp(TimeMicrosToNtp(ctx.now_us_));
+ rrtr.SetNtp(clock_->ConvertTimestampToNtpTime(ctx.now_));
xr.SetRrtr(rrtr);
}
@@ -652,7 +702,7 @@ absl::optional<int32_t> RTCPSender::ComputeCompoundRTCPPacket(
SetFlag(packet_type, true);
// Prevent sending streams to send SR before any media has been sent.
- const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0);
+ const bool can_calculate_rtp_timestamp = last_frame_capture_time_.has_value();
if (!can_calculate_rtp_timestamp) {
bool consumed_sr_flag = ConsumeFlag(kRtcpSr);
bool consumed_report_flag = sending_ && ConsumeFlag(kRtcpReport);
@@ -672,7 +722,7 @@ absl::optional<int32_t> RTCPSender::ComputeCompoundRTCPPacket(
// We need to send our NTP even if we haven't received any reports.
RtcpContext context(feedback_state, nack_size, nack_list,
- clock_->TimeInMicroseconds());
+ clock_->CurrentTime());
PrepareReport(feedback_state);
@@ -743,24 +793,25 @@ void RTCPSender::PrepareReport(const FeedbackState& feedback_state) {
}
// generate next time to send an RTCP report
- int min_interval_ms = report_interval_ms_;
+ TimeDelta min_interval = report_interval_;
if (!audio_ && sending_) {
// Calculate bandwidth for video; 360 / send bandwidth in kbit/s.
int send_bitrate_kbit = feedback_state.send_bitrate / 1000;
if (send_bitrate_kbit != 0) {
- min_interval_ms = 360000 / send_bitrate_kbit;
- min_interval_ms = std::min(min_interval_ms, report_interval_ms_);
+ min_interval = std::min(TimeDelta::Millis(360000 / send_bitrate_kbit),
+ report_interval_);
}
}
// The interval between RTCP packets is varied randomly over the
// range [1/2,3/2] times the calculated interval.
- int time_to_next =
- random_.Rand(min_interval_ms * 1 / 2, min_interval_ms * 3 / 2);
+ int min_interval_int = rtc::dchecked_cast<int>(min_interval.ms());
+ TimeDelta time_to_next = TimeDelta::Millis(
+ random_.Rand(min_interval_int * 1 / 2, min_interval_int * 3 / 2));
- RTC_DCHECK_GT(time_to_next, 0);
- next_time_to_send_rtcp_ = clock_->TimeInMilliseconds() + time_to_next;
+ RTC_DCHECK(!time_to_next.IsZero());
+ SetNextRtcpSendEvaluationDuration(time_to_next);
// RtcpSender expected to be used for sending either just sender reports
// or just receiver reports.
@@ -782,7 +833,7 @@ std::vector<rtcp::ReportBlock> RTCPSender::CreateReportBlocks(
if (!result.empty() && ((feedback_state.last_rr_ntp_secs != 0) ||
(feedback_state.last_rr_ntp_frac != 0))) {
// Get our NTP as late as possible to avoid a race.
- uint32_t now = CompactNtp(TimeMicrosToNtp(clock_->TimeInMicroseconds()));
+ uint32_t now = CompactNtp(clock_->CurrentNtpTime());
uint32_t receive_time = feedback_state.last_rr_ntp_secs & 0x0000FFFF;
receive_time <<= 16;
@@ -854,7 +905,7 @@ void RTCPSender::SetVideoBitrateAllocation(
RTC_LOG(LS_INFO) << "Emitting TargetBitrate XR for SSRC " << ssrc_
<< " with new layers enabled/disabled: "
<< video_bitrate_allocation_.ToString();
- next_time_to_send_rtcp_ = clock_->TimeInMilliseconds();
+ SetNextRtcpSendEvaluationDuration(TimeDelta::Zero());
} else {
video_bitrate_allocation_ = bitrate;
}
@@ -915,4 +966,10 @@ void RTCPSender::SendCombinedRtcpPacket(
sender.Send();
}
+void RTCPSender::SetNextRtcpSendEvaluationDuration(TimeDelta duration) {
+ next_time_to_send_rtcp_ = clock_->CurrentTime() + duration;
+ if (schedule_next_rtcp_send_evaluation_function_)
+ schedule_next_rtcp_send_evaluation_function_(duration);
+}
+
} // namespace webrtc
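For orientation (not part of the patch): the hunks above move RTCPSender's report scheduling from raw milliseconds onto TimeDelta. Below is a minimal sketch of the resulting interval computation, written as a free function with illustrative parameter names; the 360 kbit rule and the [1/2, 3/2] jitter are exactly the ones in PrepareReport. For example, a video sender at 720 kbit/s gets 360000 / 720 = 500 ms, capped at report_interval_, then a value drawn uniformly from [250 ms, 750 ms].

// Illustrative sketch only; mirrors the PrepareReport logic above.
TimeDelta ComputeTimeToNextReport(const RTCPSender::FeedbackState& feedback_state,
                                  TimeDelta report_interval,
                                  bool audio,
                                  bool sending,
                                  Random& random) {
  TimeDelta min_interval = report_interval;
  if (!audio && sending) {
    // Scale with send bandwidth for video: 360 / send bandwidth in kbit/s.
    int send_bitrate_kbit = feedback_state.send_bitrate / 1000;
    if (send_bitrate_kbit != 0) {
      min_interval = std::min(TimeDelta::Millis(360000 / send_bitrate_kbit),
                              report_interval);
    }
  }
  // Randomize over [1/2, 3/2] of the calculated interval.
  int min_interval_ms = rtc::dchecked_cast<int>(min_interval.ms());
  return TimeDelta::Millis(
      random.Rand(min_interval_ms * 1 / 2, min_interval_ms * 3 / 2));
}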
diff --git a/modules/rtp_rtcp/source/rtcp_sender.h b/modules/rtp_rtcp/source/rtcp_sender.h
index 463666a22a..e50ce44e13 100644
--- a/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/modules/rtp_rtcp/source/rtcp_sender.h
@@ -19,6 +19,8 @@
#include "absl/types/optional.h"
#include "api/call/transport.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
#include "api/video/video_bitrate_allocation.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
@@ -42,6 +44,38 @@ class RtcEventLog;
class RTCPSender final {
public:
+ struct Configuration {
+ // TODO(bugs.webrtc.org/11581): Remove this temporary conversion utility
+ // once rtp_rtcp_impl.cc/h are gone.
+ static Configuration FromRtpRtcpConfiguration(
+ const RtpRtcpInterface::Configuration& config);
+
+ // True for an audio version of the RTP/RTCP module object; false will create
+ // a video version.
+ bool audio = false;
+ // SSRCs for media and retransmission, respectively.
+ // FlexFec SSRC is fetched from |flexfec_sender|.
+ uint32_t local_media_ssrc = 0;
+ // The clock to use to read time. If nullptr then system clock will be used.
+ Clock* clock = nullptr;
+ // Transport object that will be called when packets are ready to be sent
+ // out on the network.
+ Transport* outgoing_transport = nullptr;
+ // Estimate RTT as non-sender as described in
+ // https://tools.ietf.org/html/rfc3611#section-4.4 and #section-4.5
+ bool non_sender_rtt_measurement = false;
+ // Optional callback which, if specified, is used by RTCPSender to schedule
+ // the next time to evaluate if RTCP should be sent by means of
+ // TimeToSendRTCPReport/SendRTCP.
+ // The RTCPSender client still needs to call TimeToSendRTCPReport/SendRTCP
+ // to actually get RTCP sent.
+ std::function<void(TimeDelta)> schedule_next_rtcp_send_evaluation_function;
+
+ RtcEventLog* event_log = nullptr;
+ absl::optional<TimeDelta> rtcp_report_interval;
+ ReceiveStatisticsProvider* receive_statistics = nullptr;
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr;
+ };
struct FeedbackState {
FeedbackState();
FeedbackState(const FeedbackState&);
@@ -63,6 +97,9 @@ class RTCPSender final {
RTCPReceiver* receiver;
};
+ explicit RTCPSender(Configuration config);
+ // TODO(bugs.webrtc.org/11581): delete this temporary compatibility helper
+ // once downstream dependencies migrate.
explicit RTCPSender(const RtpRtcpInterface::Configuration& config);
RTCPSender() = delete;
@@ -75,8 +112,8 @@ class RTCPSender final {
void SetRTCPStatus(RtcpMode method) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
bool Sending() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- int32_t SetSendingStatus(const FeedbackState& feedback_state,
- bool enabled)
+ void SetSendingStatus(const FeedbackState& feedback_state,
+ bool enabled)
RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); // combine the functions
int32_t SetNackStatus(bool enable) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
@@ -84,8 +121,14 @@ class RTCPSender final {
void SetTimestampOffset(uint32_t timestamp_offset)
RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
+ void SetLastRtpTime(uint32_t rtp_timestamp,
+ absl::optional<Timestamp> capture_time,
+ absl::optional<int8_t> payload_type)
+ RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
// TODO(bugs.webrtc.org/6458): Remove default parameter value when all the
// depending projects are updated to correctly set payload type.
+ // TODO(bugs.webrtc.org/12873): Remove once downstream consumers migrate to
+ // the new version of SetLastRtpTime declared above.
void SetLastRtpTime(uint32_t rtp_timestamp,
int64_t capture_time_ms,
int8_t payload_type = -1)
@@ -94,7 +137,8 @@ class RTCPSender final {
void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz)
RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
- uint32_t SSRC() const { return ssrc_; }
+ uint32_t SSRC() const;
+ void SetSsrc(uint32_t ssrc);
void SetRemoteSSRC(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_);
@@ -186,8 +230,16 @@ class RTCPSender final {
void BuildNACK(const RtcpContext& context, PacketSender& sender)
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
+ // |duration| being TimeDelta::Zero() means schedule immediately.
+ void SetNextRtcpSendEvaluationDuration(TimeDelta duration)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_);
+
const bool audio_;
- const uint32_t ssrc_;
+ // TODO(bugs.webrtc.org/11581): `mutex_rtcp_sender_` shouldn't be required if
+ // we consistently run network related operations on the network thread.
+ // This is currently not possible due to callbacks from the process thread in
+ // ModuleRtpRtcpImpl2.
+ uint32_t ssrc_ RTC_GUARDED_BY(mutex_rtcp_sender_);
Clock* const clock_;
Random random_ RTC_GUARDED_BY(mutex_rtcp_sender_);
RtcpMode method_ RTC_GUARDED_BY(mutex_rtcp_sender_);
@@ -195,16 +247,22 @@ class RTCPSender final {
RtcEventLog* const event_log_;
Transport* const transport_;
- const int report_interval_ms_;
+ const TimeDelta report_interval_;
+ // Set from
+ // RTCPSender::Configuration::schedule_next_rtcp_send_evaluation_function.
+ const std::function<void(TimeDelta)>
+ schedule_next_rtcp_send_evaluation_function_;
mutable Mutex mutex_rtcp_sender_;
bool sending_ RTC_GUARDED_BY(mutex_rtcp_sender_);
- int64_t next_time_to_send_rtcp_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ absl::optional<Timestamp> next_time_to_send_rtcp_
+ RTC_GUARDED_BY(mutex_rtcp_sender_);
uint32_t timestamp_offset_ RTC_GUARDED_BY(mutex_rtcp_sender_);
uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_rtcp_sender_);
- int64_t last_frame_capture_time_ms_ RTC_GUARDED_BY(mutex_rtcp_sender_);
+ absl::optional<Timestamp> last_frame_capture_time_
+ RTC_GUARDED_BY(mutex_rtcp_sender_);
// SSRC that we receive on our RTP channel
uint32_t remote_ssrc_ RTC_GUARDED_BY(mutex_rtcp_sender_);
std::string cname_ RTC_GUARDED_BY(mutex_rtcp_sender_);
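Usage sketch (an assumption, not taken from the change itself): how an owner might populate the new RTCPSender::Configuration declared above, including the optional scheduling callback. Here `worker_queue` (a TaskQueueBase*), `PollRtcp()`, `transport` and `kLocalMediaSsrc` are hypothetical names for this illustration; the remaining fields follow the struct and the unit-test defaults.

// Sketch only; placeholder names are hypothetical.
RTCPSender::Configuration config;
config.audio = false;
config.clock = Clock::GetRealTimeClock();
config.outgoing_transport = transport;
config.local_media_ssrc = kLocalMediaSsrc;
config.rtcp_report_interval = TimeDelta::Millis(1000);
config.schedule_next_rtcp_send_evaluation_function =
    [worker_queue](TimeDelta duration) {
      // Re-check TimeToSendRTCPReport()/SendRTCP() after `duration`;
      // TimeDelta::Zero() means evaluate right away.
      worker_queue->PostDelayedTask(ToQueuedTask([] { PollRtcp(); }),
                                    duration.ms());
    };
RTCPSender rtcp_sender(config);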
diff --git a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
index 2c0bb2e2c4..347be79398 100644
--- a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -14,12 +14,12 @@
#include <utility>
#include "absl/base/macros.h"
+#include "api/units/time_delta.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
-#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/rate_limiter.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -28,7 +28,9 @@
using ::testing::_;
using ::testing::ElementsAre;
+using ::testing::Eq;
using ::testing::Invoke;
+using ::testing::Property;
using ::testing::SizeIs;
namespace webrtc {
@@ -70,43 +72,50 @@ static const uint32_t kStartRtpTimestamp = 0x34567;
static const uint32_t kRtpTimestamp = 0x45678;
std::unique_ptr<RTCPSender> CreateRtcpSender(
- const RtpRtcpInterface::Configuration& config,
+ const RTCPSender::Configuration& config,
bool init_timestamps = true) {
auto rtcp_sender = std::make_unique<RTCPSender>(config);
rtcp_sender->SetRemoteSSRC(kRemoteSsrc);
if (init_timestamps) {
rtcp_sender->SetTimestampOffset(kStartRtpTimestamp);
- rtcp_sender->SetLastRtpTime(kRtpTimestamp,
- config.clock->TimeInMilliseconds(),
+ rtcp_sender->SetLastRtpTime(kRtpTimestamp, config.clock->CurrentTime(),
/*payload_type=*/0);
}
return rtcp_sender;
}
-
} // namespace
class RtcpSenderTest : public ::testing::Test {
protected:
RtcpSenderTest()
: clock_(1335900000),
- receive_statistics_(ReceiveStatistics::Create(&clock_)),
- retransmission_rate_limiter_(&clock_, 1000) {
- RtpRtcpInterface::Configuration configuration = GetDefaultConfig();
- rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl2(configuration));
+ receive_statistics_(ReceiveStatistics::Create(&clock_)) {
+ rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl2(GetDefaultRtpRtcpConfig()));
}
- RtpRtcpInterface::Configuration GetDefaultConfig() {
- RtpRtcpInterface::Configuration configuration;
+ RTCPSender::Configuration GetDefaultConfig() {
+ RTCPSender::Configuration configuration;
configuration.audio = false;
configuration.clock = &clock_;
configuration.outgoing_transport = &test_transport_;
- configuration.retransmission_rate_limiter = &retransmission_rate_limiter_;
- configuration.rtcp_report_interval_ms = 1000;
+ configuration.rtcp_report_interval = TimeDelta::Millis(1000);
configuration.receive_statistics = receive_statistics_.get();
configuration.local_media_ssrc = kSenderSsrc;
return configuration;
}
+ RtpRtcpInterface::Configuration GetDefaultRtpRtcpConfig() {
+ RTCPSender::Configuration config = GetDefaultConfig();
+ RtpRtcpInterface::Configuration result;
+ result.audio = config.audio;
+ result.clock = config.clock;
+ result.outgoing_transport = config.outgoing_transport;
+ result.rtcp_report_interval_ms = config.rtcp_report_interval->ms();
+ result.receive_statistics = config.receive_statistics;
+ result.local_media_ssrc = config.local_media_ssrc;
+ return result;
+ }
+
void InsertIncomingPacket(uint32_t remote_ssrc, uint16_t seq_num) {
RtpPacketReceived packet;
packet.SetSsrc(remote_ssrc);
@@ -126,7 +135,6 @@ class RtcpSenderTest : public ::testing::Test {
TestTransport test_transport_;
std::unique_ptr<ReceiveStatistics> receive_statistics_;
std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_impl_;
- RateLimiter retransmission_rate_limiter_;
};
TEST_F(RtcpSenderTest, SetRtcpStatus) {
@@ -139,7 +147,7 @@ TEST_F(RtcpSenderTest, SetRtcpStatus) {
TEST_F(RtcpSenderTest, SetSendingStatus) {
auto rtcp_sender = CreateRtcpSender(GetDefaultConfig());
EXPECT_FALSE(rtcp_sender->Sending());
- EXPECT_EQ(0, rtcp_sender->SetSendingStatus(feedback_state(), true));
+ rtcp_sender->SetSendingStatus(feedback_state(), true);
EXPECT_TRUE(rtcp_sender->Sending());
}
@@ -158,7 +166,7 @@ TEST_F(RtcpSenderTest, SendSr) {
rtcp_sender->SetSendingStatus(feedback_state, true);
feedback_state.packets_sent = kPacketCount;
feedback_state.media_bytes_sent = kOctetCount;
- NtpTime ntp = TimeMicrosToNtp(clock_.TimeInMicroseconds());
+ NtpTime ntp = clock_.CurrentNtpTime();
EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state, kRtcpSr));
EXPECT_EQ(1, parser()->sender_report()->num_packets());
EXPECT_EQ(kSenderSsrc, parser()->sender_report()->sender_ssrc());
@@ -205,11 +213,11 @@ TEST_F(RtcpSenderTest, SendConsecutiveSrWithExactSlope) {
}
TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) {
- RtpRtcpInterface::Configuration config;
+ RTCPSender::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
- config.rtcp_report_interval_ms = 1000;
+ config.rtcp_report_interval = TimeDelta::Millis(1000);
config.local_media_ssrc = kSenderSsrc;
auto rtcp_sender = CreateRtcpSender(config, /*init_timestamps=*/false);
rtcp_sender->SetRTCPStatus(RtcpMode::kReducedSize);
@@ -226,11 +234,11 @@ TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) {
}
TEST_F(RtcpSenderTest, DoNotSendCompundBeforeRtp) {
- RtpRtcpInterface::Configuration config;
+ RTCPSender::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
- config.rtcp_report_interval_ms = 1000;
+ config.rtcp_report_interval = TimeDelta::Millis(1000);
config.local_media_ssrc = kSenderSsrc;
auto rtcp_sender = CreateRtcpSender(config, /*init_timestamps=*/false);
rtcp_sender->SetRTCPStatus(RtcpMode::kCompound);
@@ -276,11 +284,11 @@ TEST_F(RtcpSenderTest, SendRrWithTwoReportBlocks) {
EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state(), kRtcpRr));
EXPECT_EQ(1, parser()->receiver_report()->num_packets());
EXPECT_EQ(kSenderSsrc, parser()->receiver_report()->sender_ssrc());
- EXPECT_EQ(2U, parser()->receiver_report()->report_blocks().size());
- EXPECT_EQ(kRemoteSsrc,
- parser()->receiver_report()->report_blocks()[0].source_ssrc());
- EXPECT_EQ(kRemoteSsrc + 1,
- parser()->receiver_report()->report_blocks()[1].source_ssrc());
+ EXPECT_THAT(
+ parser()->receiver_report()->report_blocks(),
+ UnorderedElementsAre(
+ Property(&rtcp::ReportBlock::source_ssrc, Eq(kRemoteSsrc)),
+ Property(&rtcp::ReportBlock::source_ssrc, Eq(kRemoteSsrc + 1))));
}
TEST_F(RtcpSenderTest, SendSdes) {
@@ -315,8 +323,8 @@ TEST_F(RtcpSenderTest, SendBye) {
TEST_F(RtcpSenderTest, StopSendingTriggersBye) {
auto rtcp_sender = CreateRtcpSender(GetDefaultConfig());
rtcp_sender->SetRTCPStatus(RtcpMode::kReducedSize);
- EXPECT_EQ(0, rtcp_sender->SetSendingStatus(feedback_state(), true));
- EXPECT_EQ(0, rtcp_sender->SetSendingStatus(feedback_state(), false));
+ rtcp_sender->SetSendingStatus(feedback_state(), true);
+ rtcp_sender->SetSendingStatus(feedback_state(), false);
EXPECT_EQ(1, parser()->bye()->num_packets());
EXPECT_EQ(kSenderSsrc, parser()->bye()->sender_ssrc());
}
@@ -509,12 +517,12 @@ TEST_F(RtcpSenderTest, SendXrWithMultipleDlrrSubBlocks) {
}
TEST_F(RtcpSenderTest, SendXrWithRrtr) {
- RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ RTCPSender::Configuration config = GetDefaultConfig();
config.non_sender_rtt_measurement = true;
auto rtcp_sender = CreateRtcpSender(config);
rtcp_sender->SetRTCPStatus(RtcpMode::kCompound);
- EXPECT_EQ(0, rtcp_sender->SetSendingStatus(feedback_state(), false));
- NtpTime ntp = TimeMicrosToNtp(clock_.TimeInMicroseconds());
+ rtcp_sender->SetSendingStatus(feedback_state(), false);
+ NtpTime ntp = clock_.CurrentNtpTime();
EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state(), kRtcpReport));
EXPECT_EQ(1, parser()->xr()->num_packets());
EXPECT_EQ(kSenderSsrc, parser()->xr()->sender_ssrc());
@@ -524,33 +532,33 @@ TEST_F(RtcpSenderTest, SendXrWithRrtr) {
}
TEST_F(RtcpSenderTest, TestNoXrRrtrSentIfSending) {
- RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ RTCPSender::Configuration config = GetDefaultConfig();
config.non_sender_rtt_measurement = true;
auto rtcp_sender = CreateRtcpSender(config);
rtcp_sender->SetRTCPStatus(RtcpMode::kCompound);
- EXPECT_EQ(0, rtcp_sender->SetSendingStatus(feedback_state(), true));
+ rtcp_sender->SetSendingStatus(feedback_state(), true);
EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state(), kRtcpReport));
EXPECT_EQ(0, parser()->xr()->num_packets());
}
TEST_F(RtcpSenderTest, TestNoXrRrtrSentIfNotEnabled) {
- RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ RTCPSender::Configuration config = GetDefaultConfig();
config.non_sender_rtt_measurement = false;
auto rtcp_sender = CreateRtcpSender(config);
rtcp_sender->SetRTCPStatus(RtcpMode::kCompound);
- EXPECT_EQ(0, rtcp_sender->SetSendingStatus(feedback_state(), false));
+ rtcp_sender->SetSendingStatus(feedback_state(), false);
EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state(), kRtcpReport));
EXPECT_EQ(0, parser()->xr()->num_packets());
}
TEST_F(RtcpSenderTest, TestRegisterRtcpPacketTypeObserver) {
RtcpPacketTypeCounterObserverImpl observer;
- RtpRtcpInterface::Configuration config;
+ RTCPSender::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
config.rtcp_packet_type_counter_observer = &observer;
- config.rtcp_report_interval_ms = 1000;
+ config.rtcp_report_interval = TimeDelta::Millis(1000);
auto rtcp_sender = CreateRtcpSender(config);
rtcp_sender->SetRTCPStatus(RtcpMode::kReducedSize);
EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state(), kRtcpPli));
@@ -642,16 +650,16 @@ TEST_F(RtcpSenderTest, ByeMustBeLast) {
}));
// Re-configure rtcp_sender with mock_transport_
- RtpRtcpInterface::Configuration config;
+ RTCPSender::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &mock_transport;
- config.rtcp_report_interval_ms = 1000;
+ config.rtcp_report_interval = TimeDelta::Millis(1000);
config.local_media_ssrc = kSenderSsrc;
auto rtcp_sender = CreateRtcpSender(config);
rtcp_sender->SetTimestampOffset(kStartRtpTimestamp);
- rtcp_sender->SetLastRtpTime(kRtpTimestamp, clock_.TimeInMilliseconds(),
+ rtcp_sender->SetLastRtpTime(kRtpTimestamp, clock_.CurrentTime(),
/*payload_type=*/0);
// Set up REMB info to be included with BYE.
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver.cc b/modules/rtp_rtcp/source/rtcp_transceiver.cc
index 1de581849b..41fa5e6206 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver.cc
+++ b/modules/rtp_rtcp/source/rtcp_transceiver.cc
@@ -14,6 +14,7 @@
#include <utility>
#include <vector>
+#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "rtc_base/checks.h"
#include "rtc_base/event.h"
@@ -23,7 +24,8 @@
namespace webrtc {
RtcpTransceiver::RtcpTransceiver(const RtcpTransceiverConfig& config)
- : task_queue_(config.task_queue),
+ : clock_(config.clock),
+ task_queue_(config.task_queue),
rtcp_transceiver_(std::make_unique<RtcpTransceiverImpl>(config)) {
RTC_DCHECK(task_queue_);
}
@@ -82,9 +84,9 @@ void RtcpTransceiver::SetReadyToSend(bool ready) {
void RtcpTransceiver::ReceivePacket(rtc::CopyOnWriteBuffer packet) {
RTC_CHECK(rtcp_transceiver_);
RtcpTransceiverImpl* ptr = rtcp_transceiver_.get();
- int64_t now_us = rtc::TimeMicros();
- task_queue_->PostTask(ToQueuedTask(
- [ptr, packet, now_us] { ptr->ReceivePacket(packet, now_us); }));
+ Timestamp now = clock_->CurrentTime();
+ task_queue_->PostTask(
+ ToQueuedTask([ptr, packet, now] { ptr->ReceivePacket(packet, now); }));
}
void RtcpTransceiver::SendCompoundPacket() {
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver.h b/modules/rtp_rtcp/source/rtcp_transceiver.h
index 2d1f37cd44..52f4610716 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver.h
+++ b/modules/rtp_rtcp/source/rtcp_transceiver.h
@@ -20,6 +20,7 @@
#include "modules/rtp_rtcp/source/rtcp_transceiver_config.h"
#include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h"
#include "rtc_base/copy_on_write_buffer.h"
+#include "system_wrappers/include/clock.h"
namespace webrtc {
//
@@ -93,6 +94,7 @@ class RtcpTransceiver : public RtcpFeedbackSenderInterface {
void SendFullIntraRequest(std::vector<uint32_t> ssrcs, bool new_request);
private:
+ Clock* const clock_;
TaskQueueBase* const task_queue_;
std::unique_ptr<RtcpTransceiverImpl> rtcp_transceiver_;
};
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc b/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc
index 0102616d59..5753ffd692 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc
+++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc
@@ -40,7 +40,7 @@ namespace webrtc {
namespace {
struct SenderReportTimes {
- int64_t local_received_time_us;
+ Timestamp local_received_time;
NtpTime remote_sent_time;
};
@@ -92,9 +92,7 @@ RtcpTransceiverImpl::RtcpTransceiverImpl(const RtcpTransceiverConfig& config)
: config_(config), ready_to_send_(config.initial_ready_to_send) {
RTC_CHECK(config_.Validate());
if (ready_to_send_ && config_.schedule_periodic_compound_packets) {
- config_.task_queue->PostTask(ToQueuedTask([this] {
- SchedulePeriodicCompoundPackets(config_.initial_report_delay_ms);
- }));
+ SchedulePeriodicCompoundPackets(config_.initial_report_delay_ms);
}
}
@@ -133,13 +131,13 @@ void RtcpTransceiverImpl::SetReadyToSend(bool ready) {
}
void RtcpTransceiverImpl::ReceivePacket(rtc::ArrayView<const uint8_t> packet,
- int64_t now_us) {
+ Timestamp now) {
while (!packet.empty()) {
rtcp::CommonHeader rtcp_block;
if (!rtcp_block.Parse(packet.data(), packet.size()))
return;
- HandleReceivedPacket(rtcp_block, now_us);
+ HandleReceivedPacket(rtcp_block, now);
// TODO(danilchap): Use packet.remove_prefix() when that function exists.
packet = packet.subview(rtcp_block.packet_size());
@@ -228,16 +226,16 @@ void RtcpTransceiverImpl::SendFullIntraRequest(
void RtcpTransceiverImpl::HandleReceivedPacket(
const rtcp::CommonHeader& rtcp_packet_header,
- int64_t now_us) {
+ Timestamp now) {
switch (rtcp_packet_header.type()) {
case rtcp::Bye::kPacketType:
HandleBye(rtcp_packet_header);
break;
case rtcp::SenderReport::kPacketType:
- HandleSenderReport(rtcp_packet_header, now_us);
+ HandleSenderReport(rtcp_packet_header, now);
break;
case rtcp::ExtendedReports::kPacketType:
- HandleExtendedReports(rtcp_packet_header, now_us);
+ HandleExtendedReports(rtcp_packet_header, now);
break;
}
}
@@ -256,17 +254,14 @@ void RtcpTransceiverImpl::HandleBye(
void RtcpTransceiverImpl::HandleSenderReport(
const rtcp::CommonHeader& rtcp_packet_header,
- int64_t now_us) {
+ Timestamp now) {
rtcp::SenderReport sender_report;
if (!sender_report.Parse(rtcp_packet_header))
return;
RemoteSenderState& remote_sender =
remote_senders_[sender_report.sender_ssrc()];
- absl::optional<SenderReportTimes>& last =
- remote_sender.last_received_sender_report;
- last.emplace();
- last->local_received_time_us = now_us;
- last->remote_sent_time = sender_report.ntp();
+ remote_sender.last_received_sender_report =
+ absl::optional<SenderReportTimes>({now, sender_report.ntp()});
for (MediaReceiverRtcpObserver* observer : remote_sender.observers)
observer->OnSenderReport(sender_report.sender_ssrc(), sender_report.ntp(),
@@ -275,26 +270,27 @@ void RtcpTransceiverImpl::HandleSenderReport(
void RtcpTransceiverImpl::HandleExtendedReports(
const rtcp::CommonHeader& rtcp_packet_header,
- int64_t now_us) {
+ Timestamp now) {
rtcp::ExtendedReports extended_reports;
if (!extended_reports.Parse(rtcp_packet_header))
return;
if (extended_reports.dlrr())
- HandleDlrr(extended_reports.dlrr(), now_us);
+ HandleDlrr(extended_reports.dlrr(), now);
if (extended_reports.target_bitrate())
HandleTargetBitrate(*extended_reports.target_bitrate(),
extended_reports.sender_ssrc());
}
-void RtcpTransceiverImpl::HandleDlrr(const rtcp::Dlrr& dlrr, int64_t now_us) {
+void RtcpTransceiverImpl::HandleDlrr(const rtcp::Dlrr& dlrr, Timestamp now) {
if (!config_.non_sender_rtt_measurement || config_.rtt_observer == nullptr)
return;
// Delay and last_rr are transferred using 32bit compact ntp resolution.
// Convert packet arrival time to same format through 64bit ntp format.
- uint32_t receive_time_ntp = CompactNtp(TimeMicrosToNtp(now_us));
+ uint32_t receive_time_ntp =
+ CompactNtp(config_.clock->ConvertTimestampToNtpTime(now));
for (const rtcp::ReceiveTimeInfo& rti : dlrr.sub_blocks()) {
if (rti.ssrc != config_.feedback_ssrc)
continue;
@@ -353,13 +349,16 @@ void RtcpTransceiverImpl::SchedulePeriodicCompoundPackets(int64_t delay_ms) {
void RtcpTransceiverImpl::CreateCompoundPacket(PacketSender* sender) {
RTC_DCHECK(sender->IsEmpty());
const uint32_t sender_ssrc = config_.feedback_ssrc;
- int64_t now_us = rtc::TimeMicros();
+ Timestamp now = config_.clock->CurrentTime();
rtcp::ReceiverReport receiver_report;
receiver_report.SetSenderSsrc(sender_ssrc);
- receiver_report.SetReportBlocks(CreateReportBlocks(now_us));
- sender->AppendPacket(receiver_report);
+ receiver_report.SetReportBlocks(CreateReportBlocks(now));
+ if (config_.rtcp_mode == RtcpMode::kCompound ||
+ !receiver_report.report_blocks().empty()) {
+ sender->AppendPacket(receiver_report);
+ }
- if (!config_.cname.empty()) {
+ if (!config_.cname.empty() && !sender->IsEmpty()) {
rtcp::Sdes sdes;
bool added = sdes.AddCName(config_.feedback_ssrc, config_.cname);
RTC_DCHECK(added) << "Failed to add cname " << config_.cname
@@ -377,7 +376,7 @@ void RtcpTransceiverImpl::CreateCompoundPacket(PacketSender* sender) {
rtcp::ExtendedReports xr;
rtcp::Rrtr rrtr;
- rrtr.SetNtp(TimeMicrosToNtp(now_us));
+ rrtr.SetNtp(config_.clock->ConvertTimestampToNtpTime(now));
xr.SetRrtr(rrtr);
xr.SetSenderSsrc(sender_ssrc);
@@ -428,7 +427,7 @@ void RtcpTransceiverImpl::SendImmediateFeedback(
}
std::vector<rtcp::ReportBlock> RtcpTransceiverImpl::CreateReportBlocks(
- int64_t now_us) {
+ Timestamp now) {
if (!config_.receive_statistics)
return {};
// TODO(danilchap): Support sending more than
@@ -448,7 +447,7 @@ std::vector<rtcp::ReportBlock> RtcpTransceiverImpl::CreateReportBlocks(
*it->second.last_received_sender_report;
last_sr = CompactNtp(last_sender_report.remote_sent_time);
last_delay = SaturatedUsToCompactNtp(
- now_us - last_sender_report.local_received_time_us);
+ now.us() - last_sender_report.local_received_time.us());
report_block.SetLastSr(last_sr);
report_block.SetDelayLastSr(last_delay);
}
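A hedged aside (assumption, not part of the patch): HandleDlrr above only shows the conversion of the arrival time to 32-bit compact NTP; the remaining RFC 3611 arithmetic it feeds looks roughly like the sketch below, where CompactNtpRttToMs() is assumed to be the existing helper in modules/rtp_rtcp/source/time_util.h.

// Sketch of the DLRR round-trip-time math, written as a free function.
int64_t DlrrRttMs(const rtcp::ReceiveTimeInfo& rti, Clock& clock, Timestamp now) {
  // Arrival time in the same 32-bit compact NTP resolution as last_rr.
  uint32_t receive_time_ntp = CompactNtp(clock.ConvertTimestampToNtpTime(now));
  // RTT = arrival - remote processing delay - our original report time.
  uint32_t rtt_ntp = receive_time_ntp - rti.delay_since_last_rr - rti.last_rr;
  return CompactNtpRttToMs(rtt_ntp);
}
// Matches the unit test further down: 110 ms of elapsed clock time minus the
// 10 ms reported delay yields OnRttUpdate(100).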
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl.h b/modules/rtp_rtcp/source/rtcp_transceiver_impl.h
index 6a6454662c..bcdee83e56 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver_impl.h
+++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl.h
@@ -18,6 +18,7 @@
#include "absl/types/optional.h"
#include "api/array_view.h"
+#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/remb.h"
@@ -48,7 +49,7 @@ class RtcpTransceiverImpl {
void SetReadyToSend(bool ready);
- void ReceivePacket(rtc::ArrayView<const uint8_t> packet, int64_t now_us);
+ void ReceivePacket(rtc::ArrayView<const uint8_t> packet, Timestamp now);
void SendCompoundPacket();
@@ -76,15 +77,15 @@ class RtcpTransceiverImpl {
struct RemoteSenderState;
void HandleReceivedPacket(const rtcp::CommonHeader& rtcp_packet_header,
- int64_t now_us);
+ Timestamp now);
// Individual rtcp packet handlers.
void HandleBye(const rtcp::CommonHeader& rtcp_packet_header);
void HandleSenderReport(const rtcp::CommonHeader& rtcp_packet_header,
- int64_t now_us);
+ Timestamp now);
void HandleExtendedReports(const rtcp::CommonHeader& rtcp_packet_header,
- int64_t now_us);
+ Timestamp now);
// Extended Reports blocks handlers.
- void HandleDlrr(const rtcp::Dlrr& dlrr, int64_t now_us);
+ void HandleDlrr(const rtcp::Dlrr& dlrr, Timestamp now);
void HandleTargetBitrate(const rtcp::TargetBitrate& target_bitrate,
uint32_t remote_ssrc);
@@ -97,7 +98,7 @@ class RtcpTransceiverImpl {
void SendPeriodicCompoundPacket();
void SendImmediateFeedback(const rtcp::RtcpPacket& rtcp_packet);
// Generate Report Blocks to be sent in Sender or Receiver Report.
- std::vector<rtcp::ReportBlock> CreateReportBlocks(int64_t now_us);
+ std::vector<rtcp::ReportBlock> CreateReportBlocks(Timestamp now);
const RtcpTransceiverConfig config_;
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
index b7694df1e8..06e1083aa8 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
@@ -16,6 +16,8 @@
#include "absl/memory/memory.h"
#include "api/rtp_headers.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
#include "api/video/video_bitrate_allocation.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h"
@@ -24,8 +26,9 @@
#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/event.h"
-#include "rtc_base/fake_clock.h"
#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/clock.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_transport.h"
@@ -35,6 +38,7 @@ namespace {
using ::testing::_;
using ::testing::ElementsAre;
+using ::testing::NiceMock;
using ::testing::Return;
using ::testing::SizeIs;
using ::testing::StrictMock;
@@ -46,8 +50,10 @@ using ::webrtc::NtpTime;
using ::webrtc::RtcpTransceiverConfig;
using ::webrtc::RtcpTransceiverImpl;
using ::webrtc::SaturatedUsToCompactNtp;
+using ::webrtc::SimulatedClock;
using ::webrtc::TaskQueueForTest;
-using ::webrtc::TimeMicrosToNtp;
+using ::webrtc::TimeDelta;
+using ::webrtc::Timestamp;
using ::webrtc::VideoBitrateAllocation;
using ::webrtc::rtcp::Bye;
using ::webrtc::rtcp::CompoundPacket;
@@ -142,9 +148,11 @@ RtcpTransceiverConfig DefaultTestConfig() {
}
TEST(RtcpTransceiverImplTest, NeedToStopPeriodicTaskToDestroyOnTaskQueue) {
+ SimulatedClock clock(0);
FakeRtcpTransport transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
config.task_queue = queue.Get();
config.schedule_periodic_compound_packets = true;
config.outgoing_transport = &transport;
@@ -161,10 +169,31 @@ TEST(RtcpTransceiverImplTest, NeedToStopPeriodicTaskToDestroyOnTaskQueue) {
ASSERT_TRUE(done.Wait(/*milliseconds=*/1000));
}
+TEST(RtcpTransceiverImplTest, CanBeDestroyedRightAfterCreation) {
+ SimulatedClock clock(0);
+ FakeRtcpTransport transport;
+ TaskQueueForTest queue("rtcp");
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ config.task_queue = queue.Get();
+ config.schedule_periodic_compound_packets = true;
+ config.outgoing_transport = &transport;
+
+ rtc::Event done;
+ queue.PostTask([&] {
+ RtcpTransceiverImpl rtcp_transceiver(config);
+ rtcp_transceiver.StopPeriodicTask();
+ done.Set();
+ });
+ ASSERT_TRUE(done.Wait(/*milliseconds=*/1000));
+}
+
TEST(RtcpTransceiverImplTest, CanDestroyAfterTaskQueue) {
+ SimulatedClock clock(0);
FakeRtcpTransport transport;
auto* queue = new TaskQueueForTest("rtcp");
RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
config.task_queue = queue->Get();
config.schedule_periodic_compound_packets = true;
config.outgoing_transport = &transport;
@@ -177,9 +206,11 @@ TEST(RtcpTransceiverImplTest, CanDestroyAfterTaskQueue) {
}
TEST(RtcpTransceiverImplTest, DelaysSendingFirstCompondPacket) {
+ SimulatedClock clock(0);
TaskQueueForTest queue("rtcp");
FakeRtcpTransport transport;
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &transport;
config.initial_report_delay_ms = 10;
config.task_queue = queue.Get();
@@ -202,9 +233,11 @@ TEST(RtcpTransceiverImplTest, DelaysSendingFirstCompondPacket) {
}
TEST(RtcpTransceiverImplTest, PeriodicallySendsPackets) {
+ SimulatedClock clock(0);
TaskQueueForTest queue("rtcp");
FakeRtcpTransport transport;
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &transport;
config.initial_report_delay_ms = 0;
config.report_period_ms = kReportPeriodMs;
@@ -236,9 +269,11 @@ TEST(RtcpTransceiverImplTest, PeriodicallySendsPackets) {
}
TEST(RtcpTransceiverImplTest, SendCompoundPacketDelaysPeriodicSendPackets) {
+ SimulatedClock clock(0);
TaskQueueForTest queue("rtcp");
FakeRtcpTransport transport;
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &transport;
config.initial_report_delay_ms = 0;
config.report_period_ms = kReportPeriodMs;
@@ -282,8 +317,10 @@ TEST(RtcpTransceiverImplTest, SendCompoundPacketDelaysPeriodicSendPackets) {
}
TEST(RtcpTransceiverImplTest, SendsNoRtcpWhenNetworkStateIsDown) {
+ SimulatedClock clock(0);
MockTransport mock_transport;
RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
config.initial_ready_to_send = false;
config.outgoing_transport = &mock_transport;
RtcpTransceiverImpl rtcp_transceiver(config);
@@ -301,8 +338,10 @@ TEST(RtcpTransceiverImplTest, SendsNoRtcpWhenNetworkStateIsDown) {
}
TEST(RtcpTransceiverImplTest, SendsRtcpWhenNetworkStateIsUp) {
+ SimulatedClock clock(0);
MockTransport mock_transport;
RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
config.initial_ready_to_send = false;
config.outgoing_transport = &mock_transport;
RtcpTransceiverImpl rtcp_transceiver(config);
@@ -322,9 +361,11 @@ TEST(RtcpTransceiverImplTest, SendsRtcpWhenNetworkStateIsUp) {
}
TEST(RtcpTransceiverImplTest, SendsPeriodicRtcpWhenNetworkStateIsUp) {
+ SimulatedClock clock(0);
TaskQueueForTest queue("rtcp");
FakeRtcpTransport transport;
RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
config.schedule_periodic_compound_packets = true;
config.initial_ready_to_send = false;
config.outgoing_transport = &transport;
@@ -348,7 +389,9 @@ TEST(RtcpTransceiverImplTest, SendsPeriodicRtcpWhenNetworkStateIsUp) {
TEST(RtcpTransceiverImplTest, SendsMinimalCompoundPacket) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.cname = "cname";
RtcpPacketParser rtcp_parser;
@@ -369,9 +412,52 @@ TEST(RtcpTransceiverImplTest, SendsMinimalCompoundPacket) {
EXPECT_EQ(rtcp_parser.sdes()->chunks()[0].cname, config.cname);
}
+TEST(RtcpTransceiverImplTest, AvoidsEmptyPacketsInReducedMode) {
+ MockTransport transport;
+ EXPECT_CALL(transport, SendRtcp).Times(0);
+ NiceMock<MockReceiveStatisticsProvider> receive_statistics;
+ SimulatedClock clock(0);
+
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ config.outgoing_transport = &transport;
+ config.rtcp_mode = webrtc::RtcpMode::kReducedSize;
+ config.schedule_periodic_compound_packets = false;
+ config.receive_statistics = &receive_statistics;
+ RtcpTransceiverImpl rtcp_transceiver(config);
+
+ rtcp_transceiver.SendCompoundPacket();
+}
+
+TEST(RtcpTransceiverImplTest, AvoidsEmptyReceiverReportsInReducedMode) {
+ RtcpPacketParser rtcp_parser;
+ RtcpParserTransport transport(&rtcp_parser);
+ NiceMock<MockReceiveStatisticsProvider> receive_statistics;
+ SimulatedClock clock(0);
+
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ config.outgoing_transport = &transport;
+ config.rtcp_mode = webrtc::RtcpMode::kReducedSize;
+ config.schedule_periodic_compound_packets = false;
+ config.receive_statistics = &receive_statistics;
+ // Set it to produce something (RRTR) in the "periodic" rtcp packets.
+ config.non_sender_rtt_measurement = true;
+ RtcpTransceiverImpl rtcp_transceiver(config);
+
+ // Rather than waiting for the right time to produce the periodic packet,
+ // trigger it manually.
+ rtcp_transceiver.SendCompoundPacket();
+
+ EXPECT_EQ(rtcp_parser.receiver_report()->num_packets(), 0);
+ EXPECT_GT(rtcp_parser.xr()->num_packets(), 0);
+}
+
TEST(RtcpTransceiverImplTest, SendsNoRembInitially) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -387,7 +473,9 @@ TEST(RtcpTransceiverImplTest, SendsNoRembInitially) {
TEST(RtcpTransceiverImplTest, SetRembIncludesRembInNextCompoundPacket) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -406,7 +494,9 @@ TEST(RtcpTransceiverImplTest, SetRembIncludesRembInNextCompoundPacket) {
TEST(RtcpTransceiverImplTest, SetRembUpdatesValuesToSend) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -431,7 +521,9 @@ TEST(RtcpTransceiverImplTest, SetRembUpdatesValuesToSend) {
TEST(RtcpTransceiverImplTest, SetRembSendsImmediatelyIfSendRembOnChange) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.send_remb_on_change = true;
config.feedback_ssrc = kSenderSsrc;
RtcpPacketParser rtcp_parser;
@@ -457,7 +549,9 @@ TEST(RtcpTransceiverImplTest, SetRembSendsImmediatelyIfSendRembOnChange) {
TEST(RtcpTransceiverImplTest,
SetRembSendsImmediatelyIfSendRembOnChangeReducedSize) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.send_remb_on_change = true;
config.rtcp_mode = webrtc::RtcpMode::kReducedSize;
config.feedback_ssrc = kSenderSsrc;
@@ -475,7 +569,9 @@ TEST(RtcpTransceiverImplTest,
TEST(RtcpTransceiverImplTest, SetRembIncludesRembInAllCompoundPackets) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -493,7 +589,9 @@ TEST(RtcpTransceiverImplTest, SetRembIncludesRembInAllCompoundPackets) {
TEST(RtcpTransceiverImplTest, SendsNoRembAfterUnset) {
const uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -522,7 +620,9 @@ TEST(RtcpTransceiverImplTest, ReceiverReportUsesReceiveStatistics) {
EXPECT_CALL(receive_statistics, RtcpReportBlocks(_))
.WillRepeatedly(Return(report_blocks));
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -543,9 +643,12 @@ TEST(RtcpTransceiverImplTest, ReceiverReportUsesReceiveStatistics) {
TEST(RtcpTransceiverImplTest, MultipleObserversOnSameSsrc) {
const uint32_t kRemoteSsrc = 12345;
+ SimulatedClock clock(0);
StrictMock<MockMediaReceiverRtcpObserver> observer1;
StrictMock<MockMediaReceiverRtcpObserver> observer2;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer1);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer2);
@@ -559,14 +662,17 @@ TEST(RtcpTransceiverImplTest, MultipleObserversOnSameSsrc) {
EXPECT_CALL(observer1, OnSenderReport(kRemoteSsrc, kRemoteNtp, kRemoteRtp));
EXPECT_CALL(observer2, OnSenderReport(kRemoteSsrc, kRemoteNtp, kRemoteRtp));
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest, DoesntCallsObserverAfterRemoved) {
const uint32_t kRemoteSsrc = 12345;
+ SimulatedClock clock(0);
StrictMock<MockMediaReceiverRtcpObserver> observer1;
StrictMock<MockMediaReceiverRtcpObserver> observer2;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer1);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer2);
@@ -578,15 +684,18 @@ TEST(RtcpTransceiverImplTest, DoesntCallsObserverAfterRemoved) {
EXPECT_CALL(observer1, OnSenderReport(_, _, _)).Times(0);
EXPECT_CALL(observer2, OnSenderReport(_, _, _));
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest, CallsObserverOnSenderReportBySenderSsrc) {
const uint32_t kRemoteSsrc1 = 12345;
const uint32_t kRemoteSsrc2 = 22345;
+ SimulatedClock clock(0);
StrictMock<MockMediaReceiverRtcpObserver> observer1;
StrictMock<MockMediaReceiverRtcpObserver> observer2;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc1, &observer1);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc2, &observer2);
@@ -600,15 +709,18 @@ TEST(RtcpTransceiverImplTest, CallsObserverOnSenderReportBySenderSsrc) {
EXPECT_CALL(observer1, OnSenderReport(kRemoteSsrc1, kRemoteNtp, kRemoteRtp));
EXPECT_CALL(observer2, OnSenderReport(_, _, _)).Times(0);
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest, CallsObserverOnByeBySenderSsrc) {
const uint32_t kRemoteSsrc1 = 12345;
const uint32_t kRemoteSsrc2 = 22345;
+ SimulatedClock clock(0);
StrictMock<MockMediaReceiverRtcpObserver> observer1;
StrictMock<MockMediaReceiverRtcpObserver> observer2;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc1, &observer1);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc2, &observer2);
@@ -618,15 +730,18 @@ TEST(RtcpTransceiverImplTest, CallsObserverOnByeBySenderSsrc) {
EXPECT_CALL(observer1, OnBye(kRemoteSsrc1));
EXPECT_CALL(observer2, OnBye(_)).Times(0);
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest, CallsObserverOnTargetBitrateBySenderSsrc) {
const uint32_t kRemoteSsrc1 = 12345;
const uint32_t kRemoteSsrc2 = 22345;
+ SimulatedClock clock(0);
StrictMock<MockMediaReceiverRtcpObserver> observer1;
StrictMock<MockMediaReceiverRtcpObserver> observer2;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc1, &observer1);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc2, &observer2);
@@ -647,13 +762,16 @@ TEST(RtcpTransceiverImplTest, CallsObserverOnTargetBitrateBySenderSsrc) {
bitrate_allocation.SetBitrate(1, 1, /*bitrate_bps=*/80000);
EXPECT_CALL(observer1, OnBitrateAllocation(kRemoteSsrc1, bitrate_allocation));
EXPECT_CALL(observer2, OnBitrateAllocation(_, _)).Times(0);
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest, SkipsIncorrectTargetBitrateEntries) {
const uint32_t kRemoteSsrc = 12345;
+ SimulatedClock clock(0);
MockMediaReceiverRtcpObserver observer;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer);
webrtc::rtcp::TargetBitrate target_bitrate;
@@ -669,13 +787,16 @@ TEST(RtcpTransceiverImplTest, SkipsIncorrectTargetBitrateEntries) {
VideoBitrateAllocation expected_allocation;
expected_allocation.SetBitrate(0, 0, /*bitrate_bps=*/10000);
EXPECT_CALL(observer, OnBitrateAllocation(kRemoteSsrc, expected_allocation));
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest, CallsObserverOnByeBehindSenderReport) {
const uint32_t kRemoteSsrc = 12345;
+ SimulatedClock clock(0);
MockMediaReceiverRtcpObserver observer;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer);
CompoundPacket compound;
@@ -689,13 +810,16 @@ TEST(RtcpTransceiverImplTest, CallsObserverOnByeBehindSenderReport) {
EXPECT_CALL(observer, OnBye(kRemoteSsrc));
EXPECT_CALL(observer, OnSenderReport(kRemoteSsrc, _, _));
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest, CallsObserverOnByeBehindUnknownRtcpPacket) {
const uint32_t kRemoteSsrc = 12345;
+ SimulatedClock clock(0);
MockMediaReceiverRtcpObserver observer;
- RtcpTransceiverImpl rtcp_transceiver(DefaultTestConfig());
+ RtcpTransceiverConfig config = DefaultTestConfig();
+ config.clock = &clock;
+ RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer);
CompoundPacket compound;
@@ -708,7 +832,7 @@ TEST(RtcpTransceiverImplTest, CallsObserverOnByeBehindUnknownRtcpPacket) {
auto raw_packet = compound.Build();
EXPECT_CALL(observer, OnBye(kRemoteSsrc));
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
}
TEST(RtcpTransceiverImplTest,
@@ -722,7 +846,9 @@ TEST(RtcpTransceiverImplTest,
EXPECT_CALL(receive_statistics, RtcpReportBlocks(_))
.WillOnce(Return(statistics_report_blocks));
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -736,7 +862,7 @@ TEST(RtcpTransceiverImplTest,
sr.SetSenderSsrc(kRemoteSsrc1);
sr.SetNtp(kRemoteNtp);
auto raw_packet = sr.Build();
- rtcp_transceiver.ReceivePacket(raw_packet, /*now_us=*/0);
+ rtcp_transceiver.ReceivePacket(raw_packet, Timestamp::Micros(0));
// Trigger sending ReceiverReport.
rtcp_transceiver.SendCompoundPacket();
@@ -759,7 +885,7 @@ TEST(RtcpTransceiverImplTest,
WhenSendsReceiverReportCalculatesDelaySinceLastSenderReport) {
const uint32_t kRemoteSsrc1 = 4321;
const uint32_t kRemoteSsrc2 = 5321;
- rtc::ScopedFakeClock clock;
+
std::vector<ReportBlock> statistics_report_blocks(2);
statistics_report_blocks[0].SetMediaSsrc(kRemoteSsrc1);
statistics_report_blocks[1].SetMediaSsrc(kRemoteSsrc2);
@@ -767,7 +893,9 @@ TEST(RtcpTransceiverImplTest,
EXPECT_CALL(receive_statistics, RtcpReportBlocks(_))
.WillOnce(Return(statistics_report_blocks));
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -775,18 +903,19 @@ TEST(RtcpTransceiverImplTest,
config.receive_statistics = &receive_statistics;
RtcpTransceiverImpl rtcp_transceiver(config);
- auto receive_sender_report = [&rtcp_transceiver](uint32_t remote_ssrc) {
+ auto receive_sender_report = [&rtcp_transceiver,
+ &clock](uint32_t remote_ssrc) {
SenderReport sr;
sr.SetSenderSsrc(remote_ssrc);
auto raw_packet = sr.Build();
- rtcp_transceiver.ReceivePacket(raw_packet, rtc::TimeMicros());
+ rtcp_transceiver.ReceivePacket(raw_packet, clock.CurrentTime());
};
receive_sender_report(kRemoteSsrc1);
- clock.AdvanceTime(webrtc::TimeDelta::Millis(100));
+ clock.AdvanceTime(TimeDelta::Millis(100));
receive_sender_report(kRemoteSsrc2);
- clock.AdvanceTime(webrtc::TimeDelta::Millis(100));
+ clock.AdvanceTime(TimeDelta::Millis(100));
// Trigger ReceiverReport back.
rtcp_transceiver.SendCompoundPacket();
@@ -808,7 +937,9 @@ TEST(RtcpTransceiverImplTest, SendsNack) {
const uint32_t kSenderSsrc = 1234;
const uint32_t kRemoteSsrc = 4321;
std::vector<uint16_t> kMissingSequenceNumbers = {34, 37, 38};
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
@@ -827,7 +958,9 @@ TEST(RtcpTransceiverImplTest, SendsNack) {
TEST(RtcpTransceiverImplTest, RequestKeyFrameWithPictureLossIndication) {
const uint32_t kSenderSsrc = 1234;
const uint32_t kRemoteSsrc = 4321;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
@@ -846,7 +979,9 @@ TEST(RtcpTransceiverImplTest, RequestKeyFrameWithPictureLossIndication) {
TEST(RtcpTransceiverImplTest, RequestKeyFrameWithFullIntraRequest) {
const uint32_t kSenderSsrc = 1234;
const uint32_t kRemoteSsrcs[] = {4321, 5321};
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
@@ -863,7 +998,9 @@ TEST(RtcpTransceiverImplTest, RequestKeyFrameWithFullIntraRequest) {
}
TEST(RtcpTransceiverImplTest, RequestKeyFrameWithFirIncreaseSeqNoPerSsrc) {
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -893,7 +1030,9 @@ TEST(RtcpTransceiverImplTest, RequestKeyFrameWithFirIncreaseSeqNoPerSsrc) {
}
TEST(RtcpTransceiverImplTest, SendFirDoesNotIncreaseSeqNoIfOldRequest) {
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -919,7 +1058,9 @@ TEST(RtcpTransceiverImplTest, SendFirDoesNotIncreaseSeqNoIfOldRequest) {
TEST(RtcpTransceiverImplTest, KeyFrameRequestCreatesCompoundPacket) {
const uint32_t kRemoteSsrcs[] = {4321};
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
// Turn periodic off to ensure sent rtcp packet is explicitly requested.
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
@@ -938,7 +1079,9 @@ TEST(RtcpTransceiverImplTest, KeyFrameRequestCreatesCompoundPacket) {
TEST(RtcpTransceiverImplTest, KeyFrameRequestCreatesReducedSizePacket) {
const uint32_t kRemoteSsrcs[] = {4321};
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
// Turn periodic off to ensure sent rtcp packet is explicitly requested.
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
@@ -957,8 +1100,9 @@ TEST(RtcpTransceiverImplTest, KeyFrameRequestCreatesReducedSizePacket) {
TEST(RtcpTransceiverImplTest, SendsXrRrtrWhenEnabled) {
const uint32_t kSenderSsrc = 4321;
- rtc::ScopedFakeClock clock;
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
@@ -968,7 +1112,7 @@ TEST(RtcpTransceiverImplTest, SendsXrRrtrWhenEnabled) {
RtcpTransceiverImpl rtcp_transceiver(config);
rtcp_transceiver.SendCompoundPacket();
- NtpTime ntp_time_now = TimeMicrosToNtp(rtc::TimeMicros());
+ NtpTime ntp_time_now = clock.CurrentNtpTime();
EXPECT_EQ(rtcp_parser.xr()->num_packets(), 1);
EXPECT_EQ(rtcp_parser.xr()->sender_ssrc(), kSenderSsrc);
@@ -977,7 +1121,9 @@ TEST(RtcpTransceiverImplTest, SendsXrRrtrWhenEnabled) {
}
TEST(RtcpTransceiverImplTest, SendsNoXrRrtrWhenDisabled) {
+ SimulatedClock clock(0);
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.schedule_periodic_compound_packets = false;
RtcpPacketParser rtcp_parser;
RtcpParserTransport transport(&rtcp_parser);
@@ -995,9 +1141,11 @@ TEST(RtcpTransceiverImplTest, SendsNoXrRrtrWhenDisabled) {
TEST(RtcpTransceiverImplTest, CalculatesRoundTripTimeOnDlrr) {
const uint32_t kSenderSsrc = 4321;
+ SimulatedClock clock(0);
MockRtcpRttStats rtt_observer;
MockTransport null_transport;
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.schedule_periodic_compound_packets = false;
config.outgoing_transport = &null_transport;
@@ -1005,25 +1153,27 @@ TEST(RtcpTransceiverImplTest, CalculatesRoundTripTimeOnDlrr) {
config.rtt_observer = &rtt_observer;
RtcpTransceiverImpl rtcp_transceiver(config);
- int64_t time_us = 12345678;
+ Timestamp time = Timestamp::Micros(12345678);
webrtc::rtcp::ReceiveTimeInfo rti;
rti.ssrc = kSenderSsrc;
- rti.last_rr = CompactNtp(TimeMicrosToNtp(time_us));
+ rti.last_rr = CompactNtp(clock.ConvertTimestampToNtpTime(time));
rti.delay_since_last_rr = SaturatedUsToCompactNtp(10 * 1000);
webrtc::rtcp::ExtendedReports xr;
xr.AddDlrrItem(rti);
auto raw_packet = xr.Build();
EXPECT_CALL(rtt_observer, OnRttUpdate(100 /* rtt_ms */));
- rtcp_transceiver.ReceivePacket(raw_packet, time_us + 110 * 1000);
+ rtcp_transceiver.ReceivePacket(raw_packet, time + TimeDelta::Millis(110));
}
TEST(RtcpTransceiverImplTest, IgnoresUnknownSsrcInDlrr) {
const uint32_t kSenderSsrc = 4321;
const uint32_t kUnknownSsrc = 4322;
+ SimulatedClock clock(0);
MockRtcpRttStats rtt_observer;
MockTransport null_transport;
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.schedule_periodic_compound_packets = false;
config.outgoing_transport = &null_transport;
@@ -1031,16 +1181,16 @@ TEST(RtcpTransceiverImplTest, IgnoresUnknownSsrcInDlrr) {
config.rtt_observer = &rtt_observer;
RtcpTransceiverImpl rtcp_transceiver(config);
- int64_t time_us = 12345678;
+ Timestamp time = Timestamp::Micros(12345678);
webrtc::rtcp::ReceiveTimeInfo rti;
rti.ssrc = kUnknownSsrc;
- rti.last_rr = CompactNtp(TimeMicrosToNtp(time_us));
+ rti.last_rr = CompactNtp(clock.ConvertTimestampToNtpTime(time));
webrtc::rtcp::ExtendedReports xr;
xr.AddDlrrItem(rti);
auto raw_packet = xr.Build();
EXPECT_CALL(rtt_observer, OnRttUpdate(_)).Times(0);
- rtcp_transceiver.ReceivePacket(raw_packet, time_us + 100000);
+ rtcp_transceiver.ReceivePacket(raw_packet, time + TimeDelta::Millis(100));
}
} // namespace
diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
index 9c181c6526..290aa48ff4 100644
--- a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
+++ b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
@@ -18,6 +18,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "rtc_base/event.h"
#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/clock.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_transport.h"
@@ -34,6 +35,7 @@ using ::testing::NiceMock;
using ::webrtc::MockTransport;
using ::webrtc::RtcpTransceiver;
using ::webrtc::RtcpTransceiverConfig;
+using ::webrtc::SimulatedClock;
using ::webrtc::TaskQueueForTest;
using ::webrtc::rtcp::RemoteEstimate;
using ::webrtc::rtcp::RtcpPacket;
@@ -57,9 +59,11 @@ void WaitPostedTasks(TaskQueueForTest* queue) {
}
TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOffTaskQueue) {
+ SimulatedClock clock(0);
MockTransport outgoing_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
EXPECT_CALL(outgoing_transport, SendRtcp(_, _))
@@ -74,9 +78,11 @@ TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOffTaskQueue) {
}
TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOnTaskQueue) {
+ SimulatedClock clock(0);
MockTransport outgoing_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
EXPECT_CALL(outgoing_transport, SendRtcp(_, _))
@@ -94,9 +100,11 @@ TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOnTaskQueue) {
}
TEST(RtcpTransceiverTest, CanBeDestroyedOnTaskQueue) {
+ SimulatedClock clock(0);
NiceMock<MockTransport> outgoing_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
auto rtcp_transceiver = std::make_unique<RtcpTransceiver>(config);
@@ -110,9 +118,11 @@ TEST(RtcpTransceiverTest, CanBeDestroyedOnTaskQueue) {
}
TEST(RtcpTransceiverTest, CanBeDestroyedWithoutBlocking) {
+ SimulatedClock clock(0);
TaskQueueForTest queue("rtcp");
NiceMock<MockTransport> outgoing_transport;
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
auto* rtcp_transceiver = new RtcpTransceiver(config);
@@ -131,9 +141,11 @@ TEST(RtcpTransceiverTest, CanBeDestroyedWithoutBlocking) {
}
TEST(RtcpTransceiverTest, MaySendPacketsAfterDestructor) { // i.e. Be careful!
+ SimulatedClock clock(0);
NiceMock<MockTransport> outgoing_transport; // Must outlive queue below.
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
auto* rtcp_transceiver = new RtcpTransceiver(config);
@@ -162,9 +174,11 @@ rtc::CopyOnWriteBuffer CreateSenderReport(uint32_t ssrc, uint32_t rtp_time) {
TEST(RtcpTransceiverTest, DoesntPostToRtcpObserverAfterCallToRemove) {
const uint32_t kRemoteSsrc = 1234;
+ SimulatedClock clock(0);
MockTransport null_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &null_transport;
config.task_queue = queue.Get();
RtcpTransceiver rtcp_transceiver(config);
@@ -189,9 +203,11 @@ TEST(RtcpTransceiverTest, DoesntPostToRtcpObserverAfterCallToRemove) {
TEST(RtcpTransceiverTest, RemoveMediaReceiverRtcpObserverIsNonBlocking) {
const uint32_t kRemoteSsrc = 1234;
+ SimulatedClock clock(0);
MockTransport null_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &null_transport;
config.task_queue = queue.Get();
RtcpTransceiver rtcp_transceiver(config);
@@ -213,9 +229,11 @@ TEST(RtcpTransceiverTest, RemoveMediaReceiverRtcpObserverIsNonBlocking) {
}
TEST(RtcpTransceiverTest, CanCallSendCompoundPacketFromAnyThread) {
+ SimulatedClock clock(0);
MockTransport outgoing_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
@@ -242,9 +260,11 @@ TEST(RtcpTransceiverTest, CanCallSendCompoundPacketFromAnyThread) {
}
TEST(RtcpTransceiverTest, DoesntSendPacketsAfterStopCallback) {
+ SimulatedClock clock(0);
NiceMock<MockTransport> outgoing_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
config.schedule_periodic_compound_packets = true;
@@ -263,9 +283,11 @@ TEST(RtcpTransceiverTest, DoesntSendPacketsAfterStopCallback) {
TEST(RtcpTransceiverTest, SendsCombinedRtcpPacketOnTaskQueue) {
static constexpr uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
MockTransport outgoing_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
@@ -300,9 +322,11 @@ TEST(RtcpTransceiverTest, SendsCombinedRtcpPacketOnTaskQueue) {
TEST(RtcpTransceiverTest, SendFrameIntraRequestDefaultsToNewRequest) {
static constexpr uint32_t kSenderSsrc = 12345;
+ SimulatedClock clock(0);
MockTransport outgoing_transport;
TaskQueueForTest queue("rtcp");
RtcpTransceiverConfig config;
+ config.clock = &clock;
config.feedback_ssrc = kSenderSsrc;
config.outgoing_transport = &outgoing_transport;
config.task_queue = queue.Get();
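Taken together, these test updates show that RtcpTransceiverConfig is now expected to carry a clock alongside the transport and task queue; every test above supplies one. Below is a minimal sketch of that setup outside the test fixture, assuming only the config fields exercised in the hunks above (the transport pointer is a placeholder provided by the caller):

#include "api/call/transport.h"
#include "modules/rtp_rtcp/source/rtcp_transceiver.h"
#include "rtc_base/task_queue_for_test.h"
#include "system_wrappers/include/clock.h"

void BuildTransceiver(webrtc::Transport* outgoing_transport) {
  webrtc::SimulatedClock clock(/*initial_time_us=*/0);
  webrtc::TaskQueueForTest queue("rtcp");
  webrtc::RtcpTransceiverConfig config;
  config.clock = &clock;  // Supplied by every test in this change.
  config.outgoing_transport = outgoing_transport;
  config.task_queue = queue.Get();
  webrtc::RtcpTransceiver rtcp_transceiver(config);
  // The transceiver posts its work to `queue` and reads time from `clock`;
  // both outlive it here because it is destroyed first.
}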
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
index cba594dc6f..8f0cb349bc 100644
--- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
+++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
@@ -47,14 +47,14 @@ RtpDependencyDescriptorReader::RtpDependencyDescriptorReader(
uint32_t RtpDependencyDescriptorReader::ReadBits(size_t bit_count) {
uint32_t value = 0;
- if (!buffer_.ReadBits(&value, bit_count))
+ if (!buffer_.ReadBits(bit_count, value))
parsing_failed_ = true;
return value;
}
uint32_t RtpDependencyDescriptorReader::ReadNonSymmetric(size_t num_values) {
uint32_t value = 0;
- if (!buffer_.ReadNonSymmetric(&value, num_values))
+ if (!buffer_.ReadNonSymmetric(num_values, value))
parsing_failed_ = true;
return value;
}
diff --git a/modules/rtp_rtcp/source/rtp_header_extension_map.cc b/modules/rtp_rtcp/source/rtp_header_extension_map.cc
index aebe884c0f..0b5ba474c7 100644
--- a/modules/rtp_rtcp/source/rtp_header_extension_map.cc
+++ b/modules/rtp_rtcp/source/rtp_header_extension_map.cc
@@ -34,6 +34,7 @@ constexpr ExtensionInfo CreateExtensionInfo() {
constexpr ExtensionInfo kExtensions[] = {
CreateExtensionInfo<TransmissionOffset>(),
CreateExtensionInfo<AudioLevel>(),
+ CreateExtensionInfo<CsrcAudioLevel>(),
CreateExtensionInfo<AbsoluteSendTime>(),
CreateExtensionInfo<AbsoluteCaptureTimeExtension>(),
CreateExtensionInfo<VideoOrientation>(),
diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.cc b/modules/rtp_rtcp/source/rtp_header_extensions.cc
index 1c3073e90d..1dd4f54759 100644
--- a/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -13,6 +13,7 @@
#include <string.h>
#include <cmath>
+#include <cstdint>
#include <limits>
#include "modules/rtp_rtcp/include/rtp_cvo.h"
@@ -186,6 +187,60 @@ bool AudioLevel::Write(rtc::ArrayView<uint8_t> data,
return true;
}
+// An RTP Header Extension for Mixer-to-Client Audio Level Indication
+//
+// https://tools.ietf.org/html/rfc6465
+//
+// The form of the audio level extension block:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=2 |0| level 1 |0| level 2 |0| level 3 |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// Sample Audio Level Encoding Using the One-Byte Header Format
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=3 |0| level 1 |0| level 2 |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |0| level 3 | 0 (pad) | ... |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// Sample Audio Level Encoding Using the Two-Byte Header Format
+constexpr RTPExtensionType CsrcAudioLevel::kId;
+constexpr uint8_t CsrcAudioLevel::kMaxValueSizeBytes;
+constexpr const char CsrcAudioLevel::kUri[];
+
+bool CsrcAudioLevel::Parse(rtc::ArrayView<const uint8_t> data,
+ std::vector<uint8_t>* csrc_audio_levels) {
+ if (data.size() > kRtpCsrcSize) {
+ return false;
+ }
+ csrc_audio_levels->resize(data.size());
+ for (size_t i = 0; i < data.size(); i++) {
+ (*csrc_audio_levels)[i] = data[i] & 0x7F;
+ }
+ return true;
+}
+
+size_t CsrcAudioLevel::ValueSize(
+ rtc::ArrayView<const uint8_t> csrc_audio_levels) {
+ return csrc_audio_levels.size();
+}
+
+bool CsrcAudioLevel::Write(rtc::ArrayView<uint8_t> data,
+ rtc::ArrayView<const uint8_t> csrc_audio_levels) {
+ RTC_CHECK_LE(csrc_audio_levels.size(), kRtpCsrcSize);
+ if (csrc_audio_levels.size() != data.size()) {
+ return false;
+ }
+ for (size_t i = 0; i < csrc_audio_levels.size(); i++) {
+ data[i] = csrc_audio_levels[i] & 0x7F;
+ }
+ return true;
+}
+
// From RFC 5450: Transmission Time Offsets in RTP Streams.
//
// The transmission time is signaled to the receiver in-band using the
diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.h b/modules/rtp_rtcp/source/rtp_header_extensions.h
index f6e7a579ab..b47824afdb 100644
--- a/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -14,6 +14,7 @@
#include <stdint.h>
#include <string>
+#include <vector>
#include "api/array_view.h"
#include "api/rtp_headers.h"
@@ -77,6 +78,20 @@ class AudioLevel {
uint8_t audio_level);
};
+class CsrcAudioLevel {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionCsrcAudioLevel;
+ static constexpr uint8_t kMaxValueSizeBytes = 15;
+ static constexpr const char kUri[] =
+ "urn:ietf:params:rtp-hdrext:csrc-audio-level";
+
+ static bool Parse(rtc::ArrayView<const uint8_t> data,
+ std::vector<uint8_t>* csrc_audio_levels);
+ static size_t ValueSize(rtc::ArrayView<const uint8_t> csrc_audio_levels);
+ static bool Write(rtc::ArrayView<uint8_t> data,
+ rtc::ArrayView<const uint8_t> csrc_audio_levels);
+};
+
class TransmissionOffset {
public:
using value_type = int32_t;
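For context, a short usage sketch of the new extension, following the registration pattern the unit tests in this change use for other extensions; the extension id (1) and the level values are arbitrary placeholders, and Write() masks each entry to the 7-bit range RFC 6465 defines:

#include <cstdint>
#include <vector>

#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"

void AttachCsrcAudioLevels() {
  webrtc::RtpPacketToSend::ExtensionManager extensions;
  extensions.Register<webrtc::CsrcAudioLevel>(/*id=*/1);
  webrtc::RtpPacketToSend packet(&extensions);
  // One level per contributing source, 0 (loudest) to 127 (silence),
  // at most kRtpCsrcSize (15) entries as enforced by Write().
  std::vector<uint8_t> levels = {30, 45, 127};
  packet.SetExtension<webrtc::CsrcAudioLevel>(levels);
}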
diff --git a/modules/rtp_rtcp/source/rtp_packet.cc b/modules/rtp_rtcp/source/rtp_packet.cc
index 84769d0f4b..dcf773952b 100644
--- a/modules/rtp_rtcp/source/rtp_packet.cc
+++ b/modules/rtp_rtcp/source/rtp_packet.cc
@@ -27,6 +27,7 @@ constexpr size_t kFixedHeaderSize = 12;
constexpr uint8_t kRtpVersion = 2;
constexpr uint16_t kOneByteExtensionProfileId = 0xBEDE;
constexpr uint16_t kTwoByteExtensionProfileId = 0x1000;
+constexpr uint16_t kTwobyteExtensionProfileIdAppBitsFilter = 0xfff0;
constexpr size_t kOneByteExtensionHeaderLength = 1;
constexpr size_t kTwoByteExtensionHeaderLength = 2;
constexpr size_t kDefaultPacketSize = 1500;
@@ -70,8 +71,8 @@ RtpPacket::RtpPacket(const ExtensionManager* extensions, size_t capacity)
RtpPacket::~RtpPacket() {}
-void RtpPacket::IdentifyExtensions(const ExtensionManager& extensions) {
- extensions_ = extensions;
+void RtpPacket::IdentifyExtensions(ExtensionManager extensions) {
+ extensions_ = std::move(extensions);
}
bool RtpPacket::Parse(const uint8_t* buffer, size_t buffer_size) {
@@ -111,8 +112,6 @@ std::vector<uint32_t> RtpPacket::Csrcs() const {
}
void RtpPacket::CopyHeaderFrom(const RtpPacket& packet) {
- RTC_DCHECK_GE(capacity(), packet.headers_size());
-
marker_ = packet.marker_;
payload_type_ = packet.payload_type_;
sequence_number_ = packet.sequence_number_;
@@ -186,6 +185,7 @@ void RtpPacket::ZeroMutableExtensions() {
break;
}
case RTPExtensionType::kRtpExtensionAudioLevel:
+ case RTPExtensionType::kRtpExtensionCsrcAudioLevel:
case RTPExtensionType::kRtpExtensionAbsoluteCaptureTime:
case RTPExtensionType::kRtpExtensionColorSpace:
case RTPExtensionType::kRtpExtensionGenericFrameDescriptor00:
@@ -501,7 +501,8 @@ bool RtpPacket::ParseBuffer(const uint8_t* buffer, size_t size) {
return false;
}
if (profile != kOneByteExtensionProfileId &&
- profile != kTwoByteExtensionProfileId) {
+ (profile & kTwobyteExtensionProfileIdAppBitsFilter) !=
+ kTwoByteExtensionProfileId) {
RTC_LOG(LS_WARNING) << "Unsupported rtp extension " << profile;
} else {
size_t extension_header_length = profile == kOneByteExtensionProfileId
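The new mask reflects the two-byte header form from RFC 8285, where the profile is the fixed pattern 0x100 in the upper twelve bits plus four application-specific bits, so 0x1000 through 0x100F should all be parsed as two-byte extensions. A small stand-alone illustration of the accept check, with the constants copied from this hunk:

#include <cstdint>

constexpr uint16_t kOneByteExtensionProfileId = 0xBEDE;
constexpr uint16_t kTwoByteExtensionProfileId = 0x1000;
constexpr uint16_t kTwobyteExtensionProfileIdAppBitsFilter = 0xfff0;

// Mirrors the parsing change: the one-byte profile must match exactly, the
// two-byte profile matches after the app bits in the low nibble are masked.
bool IsSupportedExtensionProfile(uint16_t profile) {
  return profile == kOneByteExtensionProfileId ||
         (profile & kTwobyteExtensionProfileIdAppBitsFilter) ==
             kTwoByteExtensionProfileId;
}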
diff --git a/modules/rtp_rtcp/source/rtp_packet.h b/modules/rtp_rtcp/source/rtp_packet.h
index aa854f35ab..81ca02c851 100644
--- a/modules/rtp_rtcp/source/rtp_packet.h
+++ b/modules/rtp_rtcp/source/rtp_packet.h
@@ -51,7 +51,7 @@ class RtpPacket {
bool Parse(rtc::CopyOnWriteBuffer packet);
// Maps extensions id to their types.
- void IdentifyExtensions(const ExtensionManager& extensions);
+ void IdentifyExtensions(ExtensionManager extensions);
// Header.
bool Marker() const { return marker_; }
@@ -114,6 +114,11 @@ class RtpPacket {
bool HasExtension() const;
bool HasExtension(ExtensionType type) const;
+ // Returns whether there is an associated id for the extension and thus it is
+ // possible to set the extension.
+ template <typename Extension>
+ bool IsRegistered() const;
+
template <typename Extension, typename FirstValue, typename... Values>
bool GetExtension(FirstValue, Values...) const;
@@ -207,6 +212,11 @@ bool RtpPacket::HasExtension() const {
return HasExtension(Extension::kId);
}
+template <typename Extension>
+bool RtpPacket::IsRegistered() const {
+ return extensions_.IsRegistered(Extension::kId);
+}
+
template <typename Extension, typename FirstValue, typename... Values>
bool RtpPacket::GetExtension(FirstValue first, Values... values) const {
auto raw = FindExtension(Extension::kId);
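IdentifyExtensions() now takes its ExtensionManager by value and moves it into place, the usual sink-parameter idiom: callers that keep their map pay one copy at the call site, while callers that are done with it can std::move() it in. A caller-side sketch under that assumption (the id is a placeholder):

#include <utility>

#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"

void ConfigurePacket(webrtc::RtpPacket& packet) {
  webrtc::RtpPacket::ExtensionManager extensions;
  extensions.RegisterByUri(/*id=*/3, webrtc::TransmissionOffset::kUri);
  packet.IdentifyExtensions(std::move(extensions));  // Moved, not copied.
}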
diff --git a/modules/rtp_rtcp/source/rtp_packet_received.cc b/modules/rtp_rtcp/source/rtp_packet_received.cc
index feadee1db1..6b2cc76981 100644
--- a/modules/rtp_rtcp/source/rtp_packet_received.cc
+++ b/modules/rtp_rtcp/source/rtp_packet_received.cc
@@ -21,8 +21,10 @@
namespace webrtc {
RtpPacketReceived::RtpPacketReceived() = default;
-RtpPacketReceived::RtpPacketReceived(const ExtensionManager* extensions)
- : RtpPacket(extensions) {}
+RtpPacketReceived::RtpPacketReceived(
+ const ExtensionManager* extensions,
+ webrtc::Timestamp arrival_time /*= webrtc::Timestamp::MinusInfinity()*/)
+ : RtpPacket(extensions), arrival_time_(arrival_time) {}
RtpPacketReceived::RtpPacketReceived(const RtpPacketReceived& packet) = default;
RtpPacketReceived::RtpPacketReceived(RtpPacketReceived&& packet) = default;
diff --git a/modules/rtp_rtcp/source/rtp_packet_received.h b/modules/rtp_rtcp/source/rtp_packet_received.h
index e1e1309594..431d3f52be 100644
--- a/modules/rtp_rtcp/source/rtp_packet_received.h
+++ b/modules/rtp_rtcp/source/rtp_packet_received.h
@@ -14,10 +14,12 @@
#include <utility>
+#include "absl/base/attributes.h"
#include "api/array_view.h"
#include "api/ref_counted_base.h"
#include "api/rtp_headers.h"
#include "api/scoped_refptr.h"
+#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
namespace webrtc {
@@ -27,7 +29,9 @@ namespace webrtc {
class RtpPacketReceived : public RtpPacket {
public:
RtpPacketReceived();
- explicit RtpPacketReceived(const ExtensionManager* extensions);
+ explicit RtpPacketReceived(
+ const ExtensionManager* extensions,
+ webrtc::Timestamp arrival_time = webrtc::Timestamp::MinusInfinity());
RtpPacketReceived(const RtpPacketReceived& packet);
RtpPacketReceived(RtpPacketReceived&& packet);
@@ -42,8 +46,17 @@ class RtpPacketReceived : public RtpPacket {
  // Time in the local time base, as close as possible to when the packet
  // arrived on the network.
- int64_t arrival_time_ms() const { return arrival_time_ms_; }
- void set_arrival_time_ms(int64_t time) { arrival_time_ms_ = time; }
+ webrtc::Timestamp arrival_time() const { return arrival_time_; }
+ void set_arrival_time(webrtc::Timestamp time) { arrival_time_ = time; }
+
+ ABSL_DEPRECATED("Use arrival_time() instead")
+ int64_t arrival_time_ms() const {
+ return arrival_time_.IsMinusInfinity() ? -1 : arrival_time_.ms();
+ }
+ ABSL_DEPRECATED("Use set_arrival_time() instead")
+ void set_arrival_time_ms(int64_t time) {
+ arrival_time_ = webrtc::Timestamp::Millis(time);
+ }
// Flag if packet was recovered via RTX or FEC.
bool recovered() const { return recovered_; }
@@ -64,7 +77,7 @@ class RtpPacketReceived : public RtpPacket {
}
private:
- int64_t arrival_time_ms_ = 0;
+ webrtc::Timestamp arrival_time_ = Timestamp::MinusInfinity();
int payload_type_frequency_ = 0;
bool recovered_ = false;
rtc::scoped_refptr<rtc::RefCountedBase> additional_data_;
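Callers migrate from the millisecond getter/setter to the unit-safe Timestamp pair; the deprecated shims above keep old code compiling in the meantime. A minimal before/after sketch (the helper function is hypothetical):

#include <cstdint>

#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"

void TagArrival(webrtc::RtpPacketReceived& packet, int64_t arrival_time_ms) {
  // Before: packet.set_arrival_time_ms(arrival_time_ms);
  packet.set_arrival_time(webrtc::Timestamp::Millis(arrival_time_ms));
}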
diff --git a/modules/rtp_rtcp/source/rtp_packet_to_send.h b/modules/rtp_rtcp/source/rtp_packet_to_send.h
index 2411deac49..12341ef6cf 100644
--- a/modules/rtp_rtcp/source/rtp_packet_to_send.h
+++ b/modules/rtp_rtcp/source/rtp_packet_to_send.h
@@ -59,14 +59,14 @@ class RtpPacketToSend : public RtpPacket {
void set_retransmitted_sequence_number(uint16_t sequence_number) {
retransmitted_sequence_number_ = sequence_number;
}
- absl::optional<uint16_t> retransmitted_sequence_number() {
+ absl::optional<uint16_t> retransmitted_sequence_number() const {
return retransmitted_sequence_number_;
}
void set_allow_retransmission(bool allow_retransmission) {
allow_retransmission_ = allow_retransmission;
}
- bool allow_retransmission() { return allow_retransmission_; }
+ bool allow_retransmission() const { return allow_retransmission_; }
// An application can attach arbitrary data to an RTP packet using
// `additional_data`. The additional data does not affect WebRTC processing.
diff --git a/modules/rtp_rtcp/source/rtp_packet_unittest.cc b/modules/rtp_rtcp/source/rtp_packet_unittest.cc
index f7f21af41d..533e509d33 100644
--- a/modules/rtp_rtcp/source/rtp_packet_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_packet_unittest.cc
@@ -354,6 +354,35 @@ TEST(RtpPacketTest, CreateWithMaxSizeHeaderExtension) {
EXPECT_EQ(read, kValue);
}
+TEST(RtpPacketTest, SetsRegisteredExtension) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register<TransmissionOffset>(kTransmissionOffsetExtensionId);
+ RtpPacketToSend packet(&extensions);
+
+ EXPECT_TRUE(packet.IsRegistered<TransmissionOffset>());
+ EXPECT_FALSE(packet.HasExtension<TransmissionOffset>());
+
+ // Try to set the extensions.
+ EXPECT_TRUE(packet.SetExtension<TransmissionOffset>(kTimeOffset));
+
+ EXPECT_TRUE(packet.HasExtension<TransmissionOffset>());
+ EXPECT_EQ(packet.GetExtension<TransmissionOffset>(), kTimeOffset);
+}
+
+TEST(RtpPacketTest, FailsToSetUnregisteredExtension) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register<TransmissionOffset>(kTransmissionOffsetExtensionId);
+ RtpPacketToSend packet(&extensions);
+
+ EXPECT_FALSE(packet.IsRegistered<TransportSequenceNumber>());
+ EXPECT_FALSE(packet.HasExtension<TransportSequenceNumber>());
+
+ EXPECT_FALSE(packet.SetExtension<TransportSequenceNumber>(42));
+
+ EXPECT_FALSE(packet.HasExtension<TransportSequenceNumber>());
+ EXPECT_EQ(packet.GetExtension<TransportSequenceNumber>(), absl::nullopt);
+}
+
TEST(RtpPacketTest, SetReservedExtensionsAfterPayload) {
const size_t kPayloadSize = 4;
RtpPacketToSend::ExtensionManager extensions;
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_config.h b/modules/rtp_rtcp/source/rtp_rtcp_config.h
index 6863c4c353..66caadd578 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_config.h
+++ b/modules/rtp_rtcp/source/rtp_rtcp_config.h
@@ -11,13 +11,15 @@
#ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
#define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
+#include "api/units/time_delta.h"
+
// Configuration file for RTP utilities (RTPSender, RTPReceiver ...)
namespace webrtc {
-enum { kDefaultMaxReorderingThreshold = 50 }; // In sequence numbers.
-enum { kRtcpMaxNackFields = 253 };
+constexpr int kDefaultMaxReorderingThreshold = 50;  // In sequence numbers.
+constexpr int kRtcpMaxNackFields = 253;
-enum { RTCP_SEND_BEFORE_KEY_FRAME_MS = 100 };
-enum { RTCP_MAX_REPORT_BLOCKS = 31 }; // RFC 3550 page 37
+constexpr TimeDelta RTCP_SEND_BEFORE_KEY_FRAME = TimeDelta::Millis(100);
+constexpr int RTCP_MAX_REPORT_BLOCKS = 31; // RFC 3550 page 37
} // namespace webrtc
#endif // MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index fa4af1dedc..3f985e213a 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -21,7 +21,9 @@
#include "api/transport/field_trial_based_config.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/ntp_time.h"
@@ -58,7 +60,8 @@ std::unique_ptr<RtpRtcp> RtpRtcp::DEPRECATED_Create(
}
ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
- : rtcp_sender_(configuration),
+ : rtcp_sender_(
+ RTCPSender::Configuration::FromRtpRtcpConfiguration(configuration)),
rtcp_receiver_(configuration, this),
clock_(configuration.clock),
last_bitrate_process_time_(clock_->TimeInMilliseconds()),
@@ -336,9 +339,7 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() {
int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) {
if (rtcp_sender_.Sending() != sending) {
// Sends RTCP BYE when going from true to false
- if (rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending) != 0) {
- RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE";
- }
+ rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending);
}
return 0;
}
@@ -380,7 +381,16 @@ bool ModuleRtpRtcpImpl::OnSendingRtpFrame(uint32_t timestamp,
if (!Sending())
return false;
- rtcp_sender_.SetLastRtpTime(timestamp, capture_time_ms, payload_type);
+  // TODO(bugs.webrtc.org/12873): Migrate this method and its users to use
+ // optional Timestamps.
+ absl::optional<Timestamp> capture_time;
+ if (capture_time_ms > 0) {
+ capture_time = Timestamp::Millis(capture_time_ms);
+ }
+ absl::optional<int> payload_type_optional;
+ if (payload_type >= 0)
+ payload_type_optional = payload_type;
+ rtcp_sender_.SetLastRtpTime(timestamp, capture_time, payload_type_optional);
// Make sure an RTCP report isn't queued behind a key frame.
if (rtcp_sender_.TimeToSendRTCPReport(force_sender_report))
rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
@@ -685,6 +695,11 @@ void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) {
rtcp_receiver_.SetRemoteSSRC(ssrc);
}
+void ModuleRtpRtcpImpl::SetLocalSsrc(uint32_t local_ssrc) {
+ rtcp_receiver_.set_local_media_ssrc(local_ssrc);
+ rtcp_sender_.SetSsrc(local_ssrc);
+}
+
RtpSendRates ModuleRtpRtcpImpl::GetSendRates() const {
return rtp_sender_->packet_sender.GetSendRates();
}
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 5bcabc57b1..b0e0b41c48 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -63,6 +63,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
size_t incoming_packet_length) override;
void SetRemoteSSRC(uint32_t ssrc) override;
+ void SetLocalSsrc(uint32_t ssrc) override;
// Sender part.
void RegisterSendPayloadFrequency(int payload_type,
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
index 78ccf9907f..77054576a8 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
@@ -19,11 +19,17 @@
#include <string>
#include <utility>
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
#include "api/transport/field_trial_based_config.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/time_utils.h"
#include "system_wrappers/include/ntp_time.h"
#ifdef _WIN32
@@ -37,6 +43,22 @@ const int64_t kRtpRtcpMaxIdleTimeProcessMs = 5;
const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
constexpr TimeDelta kRttUpdateInterval = TimeDelta::Millis(1000);
+
+RTCPSender::Configuration AddRtcpSendEvaluationCallback(
+ RTCPSender::Configuration config,
+ std::function<void(TimeDelta)> send_evaluation_callback) {
+ config.schedule_next_rtcp_send_evaluation_function =
+ std::move(send_evaluation_callback);
+ return config;
+}
+
+int DelayMillisForDuration(TimeDelta duration) {
+  // TimeDelta::ms() sometimes rounds downwards, which would lead to sleeping
+  // for too little time. Account for this, unless |duration| is exactly
+  // representable in milliseconds.
+ return (duration.us() + rtc::kNumMillisecsPerSec - 1) /
+ rtc::kNumMicrosecsPerMillisec;
+}
} // namespace
ModuleRtpRtcpImpl2::RtpSenderContext::RtpSenderContext(
@@ -55,7 +77,11 @@ void ModuleRtpRtcpImpl2::RtpSenderContext::AssignSequenceNumber(
ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration)
: worker_queue_(TaskQueueBase::Current()),
- rtcp_sender_(configuration),
+ rtcp_sender_(AddRtcpSendEvaluationCallback(
+ RTCPSender::Configuration::FromRtpRtcpConfiguration(configuration),
+ [this](TimeDelta duration) {
+ ScheduleRtcpSendEvaluation(duration);
+ })),
rtcp_receiver_(configuration, this),
clock_(configuration.clock),
last_rtt_process_time_(clock_->TimeInMilliseconds()),
@@ -69,6 +95,7 @@ ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration)
rtt_ms_(0) {
RTC_DCHECK(worker_queue_);
process_thread_checker_.Detach();
+ packet_sequence_checker_.Detach();
if (!configuration.receiver_only) {
rtp_sender_ = std::make_unique<RtpSenderContext>(configuration);
// Make sure rtcp sender use same timestamp offset as rtp sender.
@@ -135,11 +162,6 @@ void ModuleRtpRtcpImpl2::Process() {
rtcp_sender_.SetTargetBitrate(target_bitrate);
}
}
-
- // TODO(bugs.webrtc.org/11581): Run this on a separate set of delayed tasks
- // based off of next_time_to_send_rtcp_ in RTCPSender.
- if (rtcp_sender_.TimeToSendRTCPReport())
- rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
}
void ModuleRtpRtcpImpl2::SetRtxSendStatus(int mode) {
@@ -169,6 +191,7 @@ absl::optional<uint32_t> ModuleRtpRtcpImpl2::FlexfecSsrc() const {
void ModuleRtpRtcpImpl2::IncomingRtcpPacket(const uint8_t* rtcp_packet,
const size_t length) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
rtcp_receiver_.IncomingPacket(rtcp_packet, length);
}
@@ -219,6 +242,12 @@ RtpState ModuleRtpRtcpImpl2::GetRtxState() const {
return rtp_sender_->packet_generator.GetRtxRtpState();
}
+uint32_t ModuleRtpRtcpImpl2::local_media_ssrc() const {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ RTC_DCHECK_EQ(rtcp_receiver_.local_media_ssrc(), rtcp_sender_.SSRC());
+ return rtcp_receiver_.local_media_ssrc();
+}
+
void ModuleRtpRtcpImpl2::SetRid(const std::string& rid) {
if (rtp_sender_) {
rtp_sender_->packet_generator.SetRid(rid);
@@ -286,9 +315,7 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() {
int32_t ModuleRtpRtcpImpl2::SetSendingStatus(const bool sending) {
if (rtcp_sender_.Sending() != sending) {
// Sends RTCP BYE when going from true to false
- if (rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending) != 0) {
- RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE";
- }
+ rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending);
}
return 0;
}
@@ -330,7 +357,16 @@ bool ModuleRtpRtcpImpl2::OnSendingRtpFrame(uint32_t timestamp,
if (!Sending())
return false;
- rtcp_sender_.SetLastRtpTime(timestamp, capture_time_ms, payload_type);
+  // TODO(bugs.webrtc.org/12873): Migrate this method and its users to use
+ // optional Timestamps.
+ absl::optional<Timestamp> capture_time;
+ if (capture_time_ms > 0) {
+ capture_time = Timestamp::Millis(capture_time_ms);
+ }
+ absl::optional<int> payload_type_optional;
+ if (payload_type >= 0)
+ payload_type_optional = payload_type;
+ rtcp_sender_.SetLastRtpTime(timestamp, capture_time, payload_type_optional);
// Make sure an RTCP report isn't queued behind a key frame.
if (rtcp_sender_.TimeToSendRTCPReport(force_sender_report))
rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
@@ -652,8 +688,15 @@ void ModuleRtpRtcpImpl2::SetRemoteSSRC(const uint32_t ssrc) {
rtcp_receiver_.SetRemoteSSRC(ssrc);
}
+void ModuleRtpRtcpImpl2::SetLocalSsrc(uint32_t local_ssrc) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ rtcp_receiver_.set_local_media_ssrc(local_ssrc);
+ rtcp_sender_.SetSsrc(local_ssrc);
+}
+
RtpSendRates ModuleRtpRtcpImpl2::GetSendRates() const {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ // Typically called on the `rtp_transport_queue_` owned by an
+ // RtpTransportControllerSendInterface instance.
return rtp_sender_->packet_sender.GetSendRates();
}
@@ -746,4 +789,60 @@ void ModuleRtpRtcpImpl2::PeriodicUpdate() {
rtcp_receiver_.NotifyTmmbrUpdated();
}
+// RTC_RUN_ON(worker_queue_);
+void ModuleRtpRtcpImpl2::MaybeSendRtcp() {
+ if (rtcp_sender_.TimeToSendRTCPReport())
+ rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
+}
+
+// TODO(bugs.webrtc.org/12889): Consider removing this function when the issue
+// is resolved.
+// RTC_RUN_ON(worker_queue_);
+void ModuleRtpRtcpImpl2::MaybeSendRtcpAtOrAfterTimestamp(
+ Timestamp execution_time) {
+ Timestamp now = clock_->CurrentTime();
+ if (now >= execution_time) {
+ MaybeSendRtcp();
+ return;
+ }
+
+ RTC_DLOG(LS_WARNING)
+ << "BUGBUG: Task queue scheduled delayed call too early.";
+
+ ScheduleMaybeSendRtcpAtOrAfterTimestamp(execution_time, execution_time - now);
+}
+
+void ModuleRtpRtcpImpl2::ScheduleRtcpSendEvaluation(TimeDelta duration) {
+ // We end up here under various sequences including the worker queue, and
+ // the RTCPSender lock is held.
+  // We're assuming that because RTCPSender executes on sequences other than
+  // the worker queue it was created on, external synchronization is present
+  // and removes this activity before destruction.
+ if (duration.IsZero()) {
+ worker_queue_->PostTask(ToQueuedTask(task_safety_, [this] {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ MaybeSendRtcp();
+ }));
+ } else {
+ Timestamp execution_time = clock_->CurrentTime() + duration;
+ ScheduleMaybeSendRtcpAtOrAfterTimestamp(execution_time, duration);
+ }
+}
+
+void ModuleRtpRtcpImpl2::ScheduleMaybeSendRtcpAtOrAfterTimestamp(
+ Timestamp execution_time,
+ TimeDelta duration) {
+ // We end up here under various sequences including the worker queue, and
+ // the RTCPSender lock is held.
+ // See note in ScheduleRtcpSendEvaluation about why |worker_queue_| can be
+ // accessed.
+ worker_queue_->PostDelayedTask(
+ ToQueuedTask(task_safety_,
+ [this, execution_time] {
+ RTC_DCHECK_RUN_ON(worker_queue_);
+ MaybeSendRtcpAtOrAfterTimestamp(execution_time);
+ }),
+ DelayMillisForDuration(duration));
+}
+
} // namespace webrtc
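Two details of the new scheduling path deserve a note. DelayMillisForDuration() rounds the delay up to whole milliseconds so a PostDelayedTask() never fires before the requested instant, and MaybeSendRtcpAtOrAfterTimestamp() re-posts if the task queue still runs the task early. Since microseconds-per-millisecond and milliseconds-per-second are both 1000, the helper is plain ceiling division; below is a small self-contained check of that arithmetic, not the production helper itself:

#include <cassert>
#include <cstdint>

// Ceiling division of microseconds into milliseconds, as the helper above does.
int CeilToMillis(int64_t duration_us) {
  constexpr int64_t kMicrosPerMilli = 1000;
  return static_cast<int>((duration_us + kMicrosPerMilli - 1) / kMicrosPerMilli);
}

int main() {
  assert(CeilToMillis(2000) == 2);  // Exactly representable: unchanged.
  assert(CeilToMillis(2001) == 3);  // Partial millisecond: rounded up, never down.
  assert(CeilToMillis(1) == 1);
  return 0;
}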
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h
index 00f6ff161d..849cc42c5e 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h
@@ -23,6 +23,7 @@
#include "api/rtp_headers.h"
#include "api/sequence_checker.h"
#include "api/task_queue/task_queue_base.h"
+#include "api/units/time_delta.h"
#include "api/video/video_bitrate_allocation.h"
#include "modules/include/module_fec_types.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
@@ -32,7 +33,6 @@
#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "modules/rtp_rtcp/source/rtp_packet_history.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
-#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
#include "rtc_base/gtest_prod_util.h"
@@ -40,6 +40,8 @@
#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/thread_annotations.h"
namespace webrtc {
@@ -77,6 +79,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
void SetRemoteSSRC(uint32_t ssrc) override;
+ void SetLocalSsrc(uint32_t local_ssrc) override;
+
// Sender part.
void RegisterSendPayloadFrequency(int payload_type,
int payload_frequency) override;
@@ -110,6 +114,11 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
uint32_t SSRC() const override { return rtcp_sender_.SSRC(); }
+ // Semantically identical to `SSRC()` but must be called on the packet
+ // delivery thread/tq and returns the ssrc that maps to
+ // RtpRtcpInterface::Configuration::local_media_ssrc.
+ uint32_t local_media_ssrc() const;
+
void SetRid(const std::string& rid) override;
void SetMid(const std::string& mid) override;
@@ -193,7 +202,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
int64_t ExpectedRetransmissionTimeMs() const override;
// Force a send of an RTCP packet.
- // Normal SR and RR are triggered via the process function.
+ // Normal SR and RR are triggered via the task queue that's current when this
+ // object is created.
int32_t SendRTCP(RTCPPacketType rtcpPacketType) override;
void GetSendStreamDataCounters(
@@ -282,11 +292,28 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
// Returns true if the module is configured to store packets.
bool StorePackets() const;
+ // Used from RtcpSenderMediator to maybe send rtcp.
+ void MaybeSendRtcp() RTC_RUN_ON(worker_queue_);
+
+ // Called when |rtcp_sender_| informs of the next RTCP instant. The method may
+ // be called on various sequences, and is called under a RTCPSenderLock.
+ void ScheduleRtcpSendEvaluation(TimeDelta duration);
+
+ // Helper method combating too early delayed calls from task queues.
+ // TODO(bugs.webrtc.org/12889): Consider removing this function when the issue
+ // is resolved.
+ void MaybeSendRtcpAtOrAfterTimestamp(Timestamp execution_time)
+ RTC_RUN_ON(worker_queue_);
+
+ // Schedules a call to MaybeSendRtcpAtOrAfterTimestamp delayed by |duration|.
+ void ScheduleMaybeSendRtcpAtOrAfterTimestamp(Timestamp execution_time,
+ TimeDelta duration);
+
TaskQueueBase* const worker_queue_;
RTC_NO_UNIQUE_ADDRESS SequenceChecker process_thread_checker_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_;
std::unique_ptr<RtpSenderContext> rtp_sender_;
-
RTCPSender rtcp_sender_;
RTCPReceiver rtcp_receiver_;
@@ -308,6 +335,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
// The processed RTT from RtcpRttStats.
mutable Mutex mutex_rtt_;
int64_t rtt_ms_ RTC_GUARDED_BY(mutex_rtt_);
+
+ RTC_NO_UNIQUE_ADDRESS ScopedTaskSafety task_safety_;
};
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc
index 40a002a116..5deb12d465 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc
@@ -10,18 +10,22 @@
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include <deque>
#include <map>
#include <memory>
#include <set>
+#include <utility>
#include "absl/types/optional.h"
#include "api/transport/field_trial_based_config.h"
+#include "api/units/time_delta.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/nack.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
+#include "rtc_base/logging.h"
#include "rtc_base/rate_limiter.h"
#include "rtc_base/strings/string_builder.h"
#include "test/gmock.h"
@@ -31,6 +35,7 @@
#include "test/run_loop.h"
#include "test/time_controller/simulated_time_controller.h"
+using ::testing::AllOf;
using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Field;
@@ -41,15 +46,24 @@ using ::testing::SizeIs;
namespace webrtc {
namespace {
-const uint32_t kSenderSsrc = 0x12345;
-const uint32_t kReceiverSsrc = 0x23456;
-const int64_t kOneWayNetworkDelayMs = 100;
-const uint8_t kBaseLayerTid = 0;
-const uint8_t kHigherLayerTid = 1;
-const uint16_t kSequenceNumber = 100;
-const uint8_t kPayloadType = 100;
-const int kWidth = 320;
-const int kHeight = 100;
+constexpr uint32_t kSenderSsrc = 0x12345;
+constexpr uint32_t kReceiverSsrc = 0x23456;
+constexpr TimeDelta kOneWayNetworkDelay = TimeDelta::Millis(100);
+constexpr uint8_t kBaseLayerTid = 0;
+constexpr uint8_t kHigherLayerTid = 1;
+constexpr uint16_t kSequenceNumber = 100;
+constexpr uint8_t kPayloadType = 100;
+constexpr int kWidth = 320;
+constexpr int kHeight = 100;
+constexpr int kCaptureTimeMsToRtpTimestamp = 90; // 90 kHz clock.
+constexpr TimeDelta kDefaultReportInterval = TimeDelta::Millis(1000);
+
+// RTP header extension ids.
+enum : int {
+ kAbsoluteSendTimeExtensionId = 1,
+ kTransportSequenceNumberExtensionId,
+ kTransmissionOffsetExtensionId,
+};
class RtcpRttStatsTestImpl : public RtcpRttStats {
public:
@@ -61,53 +75,79 @@ class RtcpRttStatsTestImpl : public RtcpRttStats {
int64_t rtt_ms_;
};
-class SendTransport : public Transport {
+// TODO(bugs.webrtc.org/11581): remove inheritance once the ModuleRtpRtcpImpl2
+// Module/ProcessThread dependency is gone.
+class SendTransport : public Transport,
+ public sim_time_impl::SimulatedSequenceRunner {
public:
- SendTransport()
+ SendTransport(TimeDelta delay, GlobalSimulatedTimeController* time_controller)
: receiver_(nullptr),
- time_controller_(nullptr),
- delay_ms_(0),
+ time_controller_(time_controller),
+ delay_(delay),
rtp_packets_sent_(0),
- rtcp_packets_sent_(0) {}
+ rtcp_packets_sent_(0),
+ last_packet_(&header_extensions_) {
+ time_controller_->Register(this);
+ }
+
+ ~SendTransport() { time_controller_->Unregister(this); }
void SetRtpRtcpModule(ModuleRtpRtcpImpl2* receiver) { receiver_ = receiver; }
- void SimulateNetworkDelay(int64_t delay_ms, TimeController* time_controller) {
- time_controller_ = time_controller;
- delay_ms_ = delay_ms;
- }
+ void SimulateNetworkDelay(TimeDelta delay) { delay_ = delay; }
bool SendRtp(const uint8_t* data,
size_t len,
const PacketOptions& options) override {
- RTPHeader header;
- std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::CreateForTest());
- EXPECT_TRUE(parser->Parse(static_cast<const uint8_t*>(data), len, &header));
+ EXPECT_TRUE(last_packet_.Parse(data, len));
++rtp_packets_sent_;
- last_rtp_header_ = header;
return true;
}
bool SendRtcp(const uint8_t* data, size_t len) override {
test::RtcpPacketParser parser;
parser.Parse(data, len);
last_nack_list_ = parser.nack()->packet_ids();
-
- if (time_controller_) {
- time_controller_->AdvanceTime(TimeDelta::Millis(delay_ms_));
- }
- EXPECT_TRUE(receiver_);
- receiver_->IncomingRtcpPacket(data, len);
+ Timestamp current_time = time_controller_->GetClock()->CurrentTime();
+ Timestamp delivery_time = current_time + delay_;
+ rtcp_packets_.push_back(
+ Packet{delivery_time, std::vector<uint8_t>(data, data + len)});
++rtcp_packets_sent_;
+ RunReady(current_time);
return true;
}
+ // sim_time_impl::SimulatedSequenceRunner
+ Timestamp GetNextRunTime() const override {
+ if (!rtcp_packets_.empty())
+ return rtcp_packets_.front().send_time;
+ return Timestamp::PlusInfinity();
+ }
+ void RunReady(Timestamp at_time) override {
+ while (!rtcp_packets_.empty() &&
+ rtcp_packets_.front().send_time <= at_time) {
+ Packet packet = std::move(rtcp_packets_.front());
+ rtcp_packets_.pop_front();
+ EXPECT_TRUE(receiver_);
+ receiver_->IncomingRtcpPacket(packet.data.data(), packet.data.size());
+ }
+ }
+ TaskQueueBase* GetAsTaskQueue() override {
+ return reinterpret_cast<TaskQueueBase*>(this);
+ }
+
size_t NumRtcpSent() { return rtcp_packets_sent_; }
ModuleRtpRtcpImpl2* receiver_;
- TimeController* time_controller_;
- int64_t delay_ms_;
+ GlobalSimulatedTimeController* const time_controller_;
+ TimeDelta delay_;
int rtp_packets_sent_;
size_t rtcp_packets_sent_;
- RTPHeader last_rtp_header_;
std::vector<uint16_t> last_nack_list_;
+ RtpHeaderExtensionMap header_extensions_;
+ RtpPacketReceived last_packet_;
+ struct Packet {
+ Timestamp send_time;
+ std::vector<uint8_t> data;
+ };
+ std::deque<Packet> rtcp_packets_;
};
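The reworked SendTransport no longer advances time from inside SendRtcp(); it queues each RTCP packet with a delivery timestamp and, as a SimulatedSequenceRunner registered with the time controller, forwards the packet to the receiving module once simulated time crosses that instant. That is why the tests below follow SendRTCP()/SendNACK() calls with AdvanceTime(kOneWayNetworkDelay), roughly in this shape (a sketch reusing the fixture members defined in this file):

// Sender emits an SR now; it is held in SendTransport::rtcp_packets_.
ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0));
// Simulated time reaches the delivery instant; RunReady() forwards the packet
// to receiver_.impl_->IncomingRtcpPacket().
AdvanceTime(kOneWayNetworkDelay);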
struct TestConfig {
@@ -147,29 +187,43 @@ class FieldTrialConfig : public WebRtcKeyValueConfig {
double max_padding_factor_;
};
-class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
+class RtpRtcpModule : public RtcpPacketTypeCounterObserver,
+ public SendPacketObserver {
public:
- RtpRtcpModule(TimeController* time_controller,
+ struct SentPacket {
+ SentPacket(uint16_t packet_id, int64_t capture_time_ms, uint32_t ssrc)
+ : packet_id(packet_id), capture_time_ms(capture_time_ms), ssrc(ssrc) {}
+ uint16_t packet_id;
+ int64_t capture_time_ms;
+ uint32_t ssrc;
+ };
+
+ RtpRtcpModule(GlobalSimulatedTimeController* time_controller,
+ ProcessThread* process_thread,
bool is_sender,
const FieldTrialConfig& trials)
- : is_sender_(is_sender),
+ : time_controller_(time_controller),
+ is_sender_(is_sender),
trials_(trials),
+ process_thread_(process_thread),
receive_statistics_(
ReceiveStatistics::Create(time_controller->GetClock())),
- time_controller_(time_controller) {
+ transport_(kOneWayNetworkDelay, time_controller) {
CreateModuleImpl();
- transport_.SimulateNetworkDelay(kOneWayNetworkDelayMs, time_controller);
}
+ ~RtpRtcpModule() { process_thread_->DeRegisterModule(impl_.get()); }
+
+ TimeController* const time_controller_;
const bool is_sender_;
const FieldTrialConfig& trials_;
+ ProcessThread* const process_thread_;
RtcpPacketTypeCounter packets_sent_;
RtcpPacketTypeCounter packets_received_;
std::unique_ptr<ReceiveStatistics> receive_statistics_;
SendTransport transport_;
RtcpRttStatsTestImpl rtt_stats_;
std::unique_ptr<ModuleRtpRtcpImpl2> impl_;
- int rtcp_report_interval_ms_ = 0;
void RtcpPacketTypesCounterUpdated(
uint32_t ssrc,
@@ -177,6 +231,16 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
counter_map_[ssrc] = packet_counter;
}
+ void OnSendPacket(uint16_t packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) override {
+ last_sent_packet_.emplace(packet_id, capture_time_ms, ssrc);
+ }
+
+ absl::optional<SentPacket> last_sent_packet() const {
+ return last_sent_packet_;
+ }
+
RtcpPacketTypeCounter RtcpSent() {
// RTCP counters for remote SSRC.
return counter_map_[is_sender_ ? kReceiverSsrc : kSenderSsrc];
@@ -187,14 +251,22 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
return counter_map_[impl_->SSRC()];
}
int RtpSent() { return transport_.rtp_packets_sent_; }
- uint16_t LastRtpSequenceNumber() {
- return transport_.last_rtp_header_.sequenceNumber;
- }
+ uint16_t LastRtpSequenceNumber() { return last_packet().SequenceNumber(); }
std::vector<uint16_t> LastNackListSent() {
return transport_.last_nack_list_;
}
- void SetRtcpReportIntervalAndReset(int rtcp_report_interval_ms) {
- rtcp_report_interval_ms_ = rtcp_report_interval_ms;
+ void SetRtcpReportIntervalAndReset(TimeDelta rtcp_report_interval) {
+ rtcp_report_interval_ = rtcp_report_interval;
+ CreateModuleImpl();
+ }
+ const RtpPacketReceived& last_packet() { return transport_.last_packet_; }
+ void RegisterHeaderExtension(absl::string_view uri, int id) {
+ impl_->RegisterRtpHeaderExtension(uri, id);
+ transport_.header_extensions_.RegisterByUri(id, uri);
+ transport_.last_packet_.IdentifyExtensions(transport_.header_extensions_);
+ }
+ void ReinintWithFec(VideoFecGenerator* fec_generator) {
+ fec_generator_ = fec_generator;
CreateModuleImpl();
}
@@ -207,19 +279,25 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
config.receive_statistics = receive_statistics_.get();
config.rtcp_packet_type_counter_observer = this;
config.rtt_stats = &rtt_stats_;
- config.rtcp_report_interval_ms = rtcp_report_interval_ms_;
+ config.rtcp_report_interval_ms = rtcp_report_interval_.ms();
config.local_media_ssrc = is_sender_ ? kSenderSsrc : kReceiverSsrc;
config.need_rtp_packet_infos = true;
config.non_sender_rtt_measurement = true;
config.field_trials = &trials_;
-
+ config.send_packet_observer = this;
+ config.fec_generator = fec_generator_;
+ if (impl_)
+ process_thread_->DeRegisterModule(impl_.get());
impl_.reset(new ModuleRtpRtcpImpl2(config));
+ process_thread_->RegisterModule(impl_.get(), RTC_FROM_HERE);
impl_->SetRemoteSSRC(is_sender_ ? kReceiverSsrc : kSenderSsrc);
impl_->SetRTCPStatus(RtcpMode::kCompound);
}
- TimeController* const time_controller_;
std::map<uint32_t, RtcpPacketTypeCounter> counter_map_;
+ absl::optional<SentPacket> last_sent_packet_;
+ VideoFecGenerator* fec_generator_ = nullptr;
+ TimeDelta rtcp_report_interval_ = kDefaultReportInterval;
};
} // namespace
@@ -228,12 +306,20 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam<TestConfig> {
RtpRtcpImpl2Test()
: time_controller_(Timestamp::Micros(133590000000000)),
field_trials_(FieldTrialConfig::GetFromTestConfig(GetParam())),
+ process_thread_(
+ time_controller_.CreateProcessThread("RtpRtcpImpl2Test")),
sender_(&time_controller_,
+ process_thread_.get(),
/*is_sender=*/true,
field_trials_),
receiver_(&time_controller_,
+ process_thread_.get(),
/*is_sender=*/false,
- field_trials_) {}
+ field_trials_) {
+ process_thread_->Start();
+ }
+
+ ~RtpRtcpImpl2Test() { process_thread_->Stop(); }
void SetUp() override {
// Send module.
@@ -256,20 +342,50 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam<TestConfig> {
receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get());
}
- void AdvanceTimeMs(int64_t milliseconds) {
- time_controller_.AdvanceTime(TimeDelta::Millis(milliseconds));
+ void AdvanceTime(TimeDelta duration) {
+ time_controller_.AdvanceTime(duration);
+ }
+
+ void ReinitWithFec(VideoFecGenerator* fec_generator,
+ absl::optional<int> red_payload_type) {
+ sender_.ReinintWithFec(fec_generator);
+ EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true));
+ sender_.impl_->SetSendingMediaStatus(true);
+ sender_.impl_->SetSequenceNumber(kSequenceNumber);
+ sender_.impl_->SetStorePacketsStatus(true, 100);
+ receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get());
+
+ RTPSenderVideo::Config video_config;
+ video_config.clock = time_controller_.GetClock();
+ video_config.rtp_sender = sender_.impl_->RtpSender();
+ video_config.field_trials = &field_trials_;
+ video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead();
+ video_config.fec_type = fec_generator->GetFecType();
+ video_config.red_payload_type = red_payload_type;
+ sender_video_ = std::make_unique<RTPSenderVideo>(video_config);
}
GlobalSimulatedTimeController time_controller_;
FieldTrialConfig field_trials_;
+ std::unique_ptr<ProcessThread> process_thread_;
RtpRtcpModule sender_;
std::unique_ptr<RTPSenderVideo> sender_video_;
RtpRtcpModule receiver_;
bool SendFrame(const RtpRtcpModule* module,
RTPSenderVideo* sender,
+ uint8_t tid) {
+ int64_t now_ms = time_controller_.GetClock()->TimeInMilliseconds();
+ return SendFrame(
+ module, sender, tid,
+ static_cast<uint32_t>(now_ms * kCaptureTimeMsToRtpTimestamp), now_ms);
+ }
+
+ bool SendFrame(const RtpRtcpModule* module,
+ RTPSenderVideo* sender,
uint8_t tid,
- uint32_t rtp_timestamp) {
+ uint32_t rtp_timestamp,
+ int64_t capture_time_ms) {
RTPVideoHeaderVP8 vp8_header = {};
vp8_header.temporalIdx = tid;
RTPVideoHeader rtp_video_header;
@@ -288,9 +404,9 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam<TestConfig> {
const uint8_t payload[100] = {0};
bool success = module->impl_->OnSendingRtpFrame(0, 0, kPayloadType, true);
- success &=
- sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8,
- rtp_timestamp, 0, payload, rtp_video_header, 0);
+ success &= sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8,
+ rtp_timestamp, capture_time_ms, payload,
+ rtp_video_header, 0);
return success;
}
@@ -311,17 +427,17 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam<TestConfig> {
TEST_P(RtpRtcpImpl2Test, RetransmitsAllLayers) {
// Send frames.
EXPECT_EQ(0, sender_.RtpSent());
- EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid,
- /*timestamp=*/0)); // kSequenceNumber
- EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kHigherLayerTid,
- /*timestamp=*/0)); // kSequenceNumber + 1
- EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kNoTemporalIdx,
- /*timestamp=*/0)); // kSequenceNumber + 2
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(),
+ kBaseLayerTid)); // kSequenceNumber
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(),
+ kHigherLayerTid)); // kSequenceNumber + 1
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(),
+ kNoTemporalIdx)); // kSequenceNumber + 2
EXPECT_EQ(3, sender_.RtpSent());
EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber());
// Min required delay until retransmit = 5 + RTT ms (RTT = 0).
- AdvanceTimeMs(5);
+ AdvanceTime(TimeDelta::Millis(5));
// Frame with kBaseLayerTid re-sent.
IncomingRtcpNack(&sender_, kSequenceNumber);
@@ -346,14 +462,14 @@ TEST_P(RtpRtcpImpl2Test, Rtt) {
receiver_.receive_statistics_->OnRtpPacket(packet);
// Send Frame before sending an SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
// Sender module should send an SR.
EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
+ AdvanceTime(kOneWayNetworkDelay);
// Receiver module should send a RR with a response to the last received SR.
- AdvanceTimeMs(1000);
EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
+ AdvanceTime(kOneWayNetworkDelay);
// Verify RTT.
int64_t rtt;
@@ -362,10 +478,10 @@ TEST_P(RtpRtcpImpl2Test, Rtt) {
int64_t max_rtt;
EXPECT_EQ(
0, sender_.impl_->RTT(kReceiverSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt));
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs, rtt, 1);
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs, avg_rtt, 1);
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs, min_rtt, 1);
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs, max_rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), avg_rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), min_rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), max_rtt, 1);
// No RTT from other ssrc.
EXPECT_EQ(-1, sender_.impl_->RTT(kReceiverSsrc + 1, &rtt, &avg_rtt, &min_rtt,
@@ -374,11 +490,11 @@ TEST_P(RtpRtcpImpl2Test, Rtt) {
// Verify RTT from rtt_stats config.
EXPECT_EQ(0, sender_.rtt_stats_.LastProcessedRtt());
EXPECT_EQ(0, sender_.impl_->rtt_ms());
- AdvanceTimeMs(1000);
+ AdvanceTime(TimeDelta::Millis(1000));
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.rtt_stats_.LastProcessedRtt(),
- 1);
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.impl_->rtt_ms(), 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(),
+ sender_.rtt_stats_.LastProcessedRtt(), 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), sender_.impl_->rtt_ms(), 1);
}
TEST_P(RtpRtcpImpl2Test, RttForReceiverOnly) {
@@ -386,40 +502,35 @@ TEST_P(RtpRtcpImpl2Test, RttForReceiverOnly) {
EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
// Sender module should send a response to the last received RTRR (DLRR).
- AdvanceTimeMs(1000);
+ AdvanceTime(TimeDelta::Millis(1000));
// Send Frame before sending a SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
// Verify RTT.
EXPECT_EQ(0, receiver_.rtt_stats_.LastProcessedRtt());
EXPECT_EQ(0, receiver_.impl_->rtt_ms());
- AdvanceTimeMs(1000);
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs,
+ AdvanceTime(TimeDelta::Millis(1000));
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(),
receiver_.rtt_stats_.LastProcessedRtt(), 1);
- EXPECT_NEAR(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms(), 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), receiver_.impl_->rtt_ms(), 1);
}
TEST_P(RtpRtcpImpl2Test, NoSrBeforeMedia) {
// Ignore fake transport delays in this test.
- sender_.transport_.SimulateNetworkDelay(0, &time_controller_);
- receiver_.transport_.SimulateNetworkDelay(0, &time_controller_);
-
- sender_.impl_->Process();
- EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
+ sender_.transport_.SimulateNetworkDelay(TimeDelta::Millis(0));
+ receiver_.transport_.SimulateNetworkDelay(TimeDelta::Millis(0));
+  // Move ahead to the instant an RTCP report is expected.
// Verify no SR is sent before media has been sent, RR should still be sent
// from the receiving module though.
- AdvanceTimeMs(2000);
+ AdvanceTime(kDefaultReportInterval / 2);
int64_t current_time = time_controller_.GetClock()->TimeInMilliseconds();
- sender_.impl_->Process();
- receiver_.impl_->Process();
EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
EXPECT_EQ(receiver_.RtcpSent().first_packet_time_ms, current_time);
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ // RTCP should be triggered by the RTP send.
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, current_time);
}
@@ -433,6 +544,7 @@ TEST_P(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) {
const uint16_t kNackLength = 1;
uint16_t nack_list[kNackLength] = {123};
EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength));
+ AdvanceTime(kOneWayNetworkDelay);
EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets);
EXPECT_GT(receiver_.RtcpSent().first_packet_time_ms, -1);
@@ -490,8 +602,7 @@ TEST_P(RtpRtcpImpl2Test, SendsInitialNackList) {
uint16_t nack_list[kNackLength] = {123};
EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
// Send Frame before sending a compound RTCP that starts with SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
@@ -503,8 +614,7 @@ TEST_P(RtpRtcpImpl2Test, SendsExtendedNackList) {
uint16_t nack_list[kNackLength] = {123};
EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
// Send Frame before sending a compound RTCP that starts with SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
@@ -523,33 +633,32 @@ TEST_P(RtpRtcpImpl2Test, SendsExtendedNackList) {
}
TEST_P(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) {
- sender_.transport_.SimulateNetworkDelay(0, &time_controller_);
+ sender_.transport_.SimulateNetworkDelay(TimeDelta::Millis(0));
// Send module sends a NACK.
const uint16_t kNackLength = 2;
uint16_t nack_list[kNackLength] = {123, 125};
EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
// Send Frame before sending a compound RTCP that starts with SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125));
// Same list not re-send, rtt interval has not passed.
- const int kStartupRttMs = 100;
- AdvanceTimeMs(kStartupRttMs);
+ const TimeDelta kStartupRtt = TimeDelta::Millis(100);
+ AdvanceTime(kStartupRtt);
EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
// Rtt interval passed, full list sent.
- AdvanceTimeMs(1);
+ AdvanceTime(TimeDelta::Millis(1));
EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
EXPECT_EQ(2U, sender_.RtcpSent().nack_packets);
EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125));
}
TEST_P(RtpRtcpImpl2Test, UniqueNackRequests) {
- receiver_.transport_.SimulateNetworkDelay(0, &time_controller_);
+ receiver_.transport_.SimulateNetworkDelay(TimeDelta::Millis(0));
EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets);
EXPECT_EQ(0U, receiver_.RtcpSent().nack_requests);
EXPECT_EQ(0U, receiver_.RtcpSent().unique_nack_requests);
@@ -571,8 +680,8 @@ TEST_P(RtpRtcpImpl2Test, UniqueNackRequests) {
EXPECT_EQ(100, sender_.RtcpReceived().UniqueNackRequestsInPercent());
// Receive module sends new request with duplicated packets.
- const int kStartupRttMs = 100;
- AdvanceTimeMs(kStartupRttMs + 1);
+ const TimeDelta kStartupRtt = TimeDelta::Millis(100);
+ AdvanceTime(kStartupRtt + TimeDelta::Millis(1));
const uint16_t kNackLength2 = 4;
uint16_t nack_list2[kNackLength2] = {11, 18, 20, 21};
EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list2, kNackLength2));
@@ -589,52 +698,43 @@ TEST_P(RtpRtcpImpl2Test, UniqueNackRequests) {
}
TEST_P(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) {
- const int kVideoReportInterval = 3000;
+ const TimeDelta kVideoReportInterval = TimeDelta::Millis(3000);
// Recreate sender impl with new configuration, and redo setup.
sender_.SetRtcpReportIntervalAndReset(kVideoReportInterval);
SetUp();
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
// Initial state
- sender_.impl_->Process();
EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1);
EXPECT_EQ(0u, sender_.transport_.NumRtcpSent());
// Move ahead to the last ms before a rtcp is expected, no action.
- AdvanceTimeMs(kVideoReportInterval / 2 - 1);
- sender_.impl_->Process();
+ AdvanceTime(kVideoReportInterval / 2 - TimeDelta::Millis(1));
EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1);
EXPECT_EQ(sender_.transport_.NumRtcpSent(), 0u);
// Move ahead to the first rtcp. Send RTCP.
- AdvanceTimeMs(1);
- sender_.impl_->Process();
+ AdvanceTime(TimeDelta::Millis(1));
EXPECT_GT(sender_.RtcpSent().first_packet_time_ms, -1);
EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u);
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
// Move ahead to the last possible second before second rtcp is expected.
- AdvanceTimeMs(kVideoReportInterval * 1 / 2 - 1);
- sender_.impl_->Process();
+ AdvanceTime(kVideoReportInterval * 1 / 2 - TimeDelta::Millis(1));
EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u);
// Move ahead into the range of second rtcp, the second rtcp may be sent.
- AdvanceTimeMs(1);
- sender_.impl_->Process();
+ AdvanceTime(TimeDelta::Millis(1));
EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u);
- AdvanceTimeMs(kVideoReportInterval / 2);
- sender_.impl_->Process();
+ AdvanceTime(kVideoReportInterval / 2);
EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u);
// Move out the range of second rtcp, the second rtcp must have been sent.
- AdvanceTimeMs(kVideoReportInterval / 2);
- sender_.impl_->Process();
+ AdvanceTime(kVideoReportInterval / 2);
EXPECT_EQ(sender_.transport_.NumRtcpSent(), 2u);
}
@@ -656,7 +756,7 @@ TEST_P(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) {
packet.set_first_packet_of_frame(true);
packet.SetMarker(true);
sender_.impl_->TrySendPacket(&packet, pacing_info);
- AdvanceTimeMs(1);
+ AdvanceTime(TimeDelta::Millis(1));
std::vector<RtpSequenceNumberMap::Info> seqno_info =
sender_.impl_->GetSentRtpPacketInfos(std::vector<uint16_t>{1});
@@ -681,7 +781,7 @@ TEST_P(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) {
packet.SetMarker(true);
sender_.impl_->TrySendPacket(&packet, pacing_info);
- AdvanceTimeMs(1);
+ AdvanceTime(TimeDelta::Millis(1));
seqno_info =
sender_.impl_->GetSentRtpPacketInfos(std::vector<uint16_t>{2, 3, 4});
@@ -708,10 +808,10 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsNotAvailable) {
// Checks that the sender report stats are available if an RTCP SR was sent.
TEST_P(RtpRtcpImpl2Test, SenderReportStatsAvailable) {
// Send a frame in order to send an SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
// Send an SR.
ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0));
+ AdvanceTime(kOneWayNetworkDelay);
EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Not(Eq(absl::nullopt)));
}
@@ -759,14 +859,15 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) {
TEST_P(RtpRtcpImpl2Test, SenderReportStatsCount) {
using SenderReportStats = RtpRtcpInterface::SenderReportStats;
// Send a frame in order to send an SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
// Send the first SR.
ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0));
+ AdvanceTime(kOneWayNetworkDelay);
EXPECT_THAT(receiver_.impl_->GetSenderReportStats(),
Optional(Field(&SenderReportStats::reports_count, Eq(1u))));
// Send the second SR.
ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0));
+ AdvanceTime(kOneWayNetworkDelay);
EXPECT_THAT(receiver_.impl_->GetSenderReportStats(),
Optional(Field(&SenderReportStats::reports_count, Eq(2u))));
}
@@ -775,10 +876,10 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsCount) {
// SR was sent.
TEST_P(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) {
// Send a frame in order to send an SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
// Send an SR.
ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0));
+ AdvanceTime(kOneWayNetworkDelay);
auto stats = receiver_.impl_->GetSenderReportStats();
ASSERT_THAT(stats, Not(Eq(absl::nullopt)));
EXPECT_TRUE(stats->last_arrival_timestamp.Valid());
@@ -789,14 +890,14 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) {
TEST_P(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) {
using SenderReportStats = RtpRtcpInterface::SenderReportStats;
// Send a frame in order to send an SR.
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
ASSERT_THAT(sender_.transport_.rtp_packets_sent_, Gt(0));
// Advance time otherwise the RTCP SR report will not include any packets
// generated by `SendFrame()`.
- AdvanceTimeMs(1);
+ AdvanceTime(TimeDelta::Millis(1));
// Send an SR.
ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0));
+ AdvanceTime(kOneWayNetworkDelay);
EXPECT_THAT(receiver_.impl_->GetSenderReportStats(),
Optional(AllOf(Field(&SenderReportStats::packets_sent, Gt(0u)),
Field(&SenderReportStats::bytes_sent, Gt(0u)))));
@@ -804,8 +905,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) {
TEST_P(RtpRtcpImpl2Test, SendingVideoAdvancesSequenceNumber) {
const uint16_t sequence_number = sender_.impl_->SequenceNumber();
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
ASSERT_THAT(sender_.transport_.rtp_packets_sent_, Gt(0));
EXPECT_EQ(sequence_number + 1, sender_.impl_->SequenceNumber());
}
@@ -813,8 +913,7 @@ TEST_P(RtpRtcpImpl2Test, SendingVideoAdvancesSequenceNumber) {
TEST_P(RtpRtcpImpl2Test, SequenceNumberNotAdvancedWhenNotSending) {
const uint16_t sequence_number = sender_.impl_->SequenceNumber();
sender_.impl_->SetSendingMediaStatus(false);
- EXPECT_FALSE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_FALSE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
ASSERT_THAT(sender_.transport_.rtp_packets_sent_, Eq(0));
EXPECT_EQ(sequence_number, sender_.impl_->SequenceNumber());
}
@@ -825,8 +924,7 @@ TEST_P(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) {
// Can't send padding before media.
EXPECT_THAT(sender_.impl_->GeneratePadding(kPaddingSize), SizeIs(0u));
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, /*timestamp=*/0));
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
// Padding is now ok.
EXPECT_THAT(sender_.impl_->GeneratePadding(kPaddingSize), SizeIs(Gt(0u)));
@@ -859,12 +957,12 @@ TEST_P(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) {
TEST_P(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) {
constexpr size_t kPaddingSize = 100;
- uint32_t kTimestamp = 123;
+ const uint32_t kTimestamp = 123;
- EXPECT_TRUE(
- SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, kTimestamp));
- EXPECT_EQ(sender_.transport_.last_rtp_header_.timestamp, kTimestamp);
- uint16_t media_seq = sender_.transport_.last_rtp_header_.sequenceNumber;
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid,
+ kTimestamp, /*capture_time_ms=*/0));
+ EXPECT_EQ(sender_.last_packet().Timestamp(), kTimestamp);
+ uint16_t media_seq = sender_.last_packet().SequenceNumber();
// Generate and send padding.
auto padding = sender_.impl_->GeneratePadding(kPaddingSize);
@@ -874,8 +972,125 @@ TEST_P(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) {
}
// Verify we sent a new packet, but with the same timestamp.
- EXPECT_NE(sender_.transport_.last_rtp_header_.sequenceNumber, media_seq);
- EXPECT_EQ(sender_.transport_.last_rtp_header_.timestamp, kTimestamp);
+ EXPECT_NE(sender_.last_packet().SequenceNumber(), media_seq);
+ EXPECT_EQ(sender_.last_packet().Timestamp(), kTimestamp);
+}
+
+TEST_P(RtpRtcpImpl2Test, AssignsTransportSequenceNumber) {
+ sender_.RegisterHeaderExtension(TransportSequenceNumber::kUri,
+ kTransportSequenceNumberExtensionId);
+
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
+ uint16_t first_transport_seq = 0;
+ EXPECT_TRUE(sender_.last_packet().GetExtension<TransportSequenceNumber>(
+ &first_transport_seq));
+
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
+ uint16_t second_transport_seq = 0;
+ EXPECT_TRUE(sender_.last_packet().GetExtension<TransportSequenceNumber>(
+ &second_transport_seq));
+
+ EXPECT_EQ(first_transport_seq + 1, second_transport_seq);
+}
+
+TEST_P(RtpRtcpImpl2Test, AssignsAbsoluteSendTime) {
+ sender_.RegisterHeaderExtension(AbsoluteSendTime::kUri,
+ kAbsoluteSendTimeExtensionId);
+
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
+ EXPECT_NE(sender_.last_packet().GetExtension<AbsoluteSendTime>(), 0u);
+}
+
+TEST_P(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) {
+ sender_.RegisterHeaderExtension(TransmissionOffset::kUri,
+ kTransmissionOffsetExtensionId);
+
+ constexpr TimeDelta kOffset = TimeDelta::Millis(100);
+ // Transmission offset is calculated from difference between capture time
+ // and send time.
+ int64_t capture_time_ms = time_controller_.GetClock()->TimeInMilliseconds();
+ time_controller_.AdvanceTime(kOffset);
+
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid,
+ /*timestamp=*/0, capture_time_ms));
+ EXPECT_EQ(sender_.last_packet().GetExtension<TransmissionOffset>(),
+ kOffset.ms() * kCaptureTimeMsToRtpTimestamp);
+}
+
+TEST_P(RtpRtcpImpl2Test, PropagatesSentPacketInfo) {
+ sender_.RegisterHeaderExtension(TransportSequenceNumber::kUri,
+ kTransportSequenceNumberExtensionId);
+ int64_t now_ms = time_controller_.GetClock()->TimeInMilliseconds();
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
+ EXPECT_THAT(
+ sender_.last_sent_packet(),
+ Optional(
+ AllOf(Field(&RtpRtcpModule::SentPacket::packet_id,
+ Eq(sender_.last_packet()
+ .GetExtension<TransportSequenceNumber>())),
+ Field(&RtpRtcpModule::SentPacket::capture_time_ms, Eq(now_ms)),
+ Field(&RtpRtcpModule::SentPacket::ssrc, Eq(kSenderSsrc)))));
+}
+
+TEST_P(RtpRtcpImpl2Test, GeneratesFlexfec) {
+ constexpr int kFlexfecPayloadType = 118;
+ constexpr uint32_t kFlexfecSsrc = 17;
+ const char kNoMid[] = "";
+ const std::vector<RtpExtension> kNoRtpExtensions;
+ const std::vector<RtpExtensionSize> kNoRtpExtensionSizes;
+
+ // Make sure FlexFec sequence numbers start at a different point than media.
+ const uint16_t fec_start_seq = sender_.impl_->SequenceNumber() + 100;
+ RtpState start_state;
+ start_state.sequence_number = fec_start_seq;
+ FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexfecSsrc, kSenderSsrc,
+ kNoMid, kNoRtpExtensions, kNoRtpExtensionSizes,
+ &start_state, time_controller_.GetClock());
+ ReinitWithFec(&flexfec_sender, /*red_payload_type=*/absl::nullopt);
+
+ // Parameters selected to generate a single FEC packet per media packet.
+ FecProtectionParams params;
+ params.fec_rate = 15;
+ params.max_fec_frames = 1;
+ params.fec_mask_type = kFecMaskRandom;
+ sender_.impl_->SetFecProtectionParams(params, params);
+
+ // Send a one packet frame, expect one media packet and one FEC packet.
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
+ ASSERT_THAT(sender_.transport_.rtp_packets_sent_, Eq(2));
+
+ const RtpPacketReceived& fec_packet = sender_.last_packet();
+ EXPECT_EQ(fec_packet.SequenceNumber(), fec_start_seq);
+ EXPECT_EQ(fec_packet.Ssrc(), kFlexfecSsrc);
+ EXPECT_EQ(fec_packet.PayloadType(), kFlexfecPayloadType);
+}
+
+TEST_P(RtpRtcpImpl2Test, GeneratesUlpfec) {
+ constexpr int kUlpfecPayloadType = 118;
+ constexpr int kRedPayloadType = 119;
+ UlpfecGenerator ulpfec_sender(kRedPayloadType, kUlpfecPayloadType,
+ time_controller_.GetClock());
+ ReinitWithFec(&ulpfec_sender, kRedPayloadType);
+
+ // Parameters selected to generate a single FEC packet per media packet.
+ FecProtectionParams params;
+ params.fec_rate = 15;
+ params.max_fec_frames = 1;
+ params.fec_mask_type = kFecMaskRandom;
+ sender_.impl_->SetFecProtectionParams(params, params);
+
+ // Send a one packet frame, expect one media packet and one FEC packet.
+ EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid));
+ ASSERT_THAT(sender_.transport_.rtp_packets_sent_, Eq(2));
+
+  // Ulpfec is sent on the media ssrc, sharing the sequence number series.
+ const RtpPacketReceived& fec_packet = sender_.last_packet();
+ EXPECT_EQ(fec_packet.SequenceNumber(), kSequenceNumber + 1);
+ EXPECT_EQ(fec_packet.Ssrc(), kSenderSsrc);
+  // The packets are encapsulated in RED packets; check that, and that the RED
+  // header (first byte of the payload) carries the desired FEC payload type.
+ EXPECT_EQ(fec_packet.PayloadType(), kRedPayloadType);
+ EXPECT_EQ(fec_packet.payload()[0], kUlpfecPayloadType);
}
INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
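The RED check at the end of GeneratesUlpfec relies on the RFC 2198 layout: for a
single-block RED payload the header is one byte, with the F bit (MSB) cleared
and the low seven bits carrying the payload type of the encapsulated block,
which is why the test reads payload()[0]. A minimal sketch of that check, not
the production RED parser; rtc::ArrayView is the only assumed dependency:

    #include <cstdint>

    #include "api/array_view.h"

    // Single-block RED payload: F bit (MSB) cleared, low 7 bits = payload
    // type of the encapsulated block (here the ULPFEC payload type).
    // Multi-block payloads are not handled in this sketch.
    bool ParseSingleBlockRedHeader(rtc::ArrayView<const uint8_t> red_payload,
                                   uint8_t* block_payload_type) {
      if (red_payload.empty() || (red_payload[0] & 0x80) != 0) {
        return false;
      }
      *block_payload_type = red_payload[0] & 0x7F;
      return true;
    }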
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index cc6b76c121..ac05584e18 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -72,11 +72,10 @@ class SendTransport : public Transport {
bool SendRtp(const uint8_t* data,
size_t len,
const PacketOptions& options) override {
- RTPHeader header;
- std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::CreateForTest());
- EXPECT_TRUE(parser->Parse(static_cast<const uint8_t*>(data), len, &header));
+ RtpPacket packet;
+ EXPECT_TRUE(packet.Parse(data, len));
++rtp_packets_sent_;
- last_rtp_header_ = header;
+ last_rtp_sequence_number_ = packet.SequenceNumber();
return true;
}
bool SendRtcp(const uint8_t* data, size_t len) override {
@@ -98,7 +97,7 @@ class SendTransport : public Transport {
int64_t delay_ms_;
int rtp_packets_sent_;
size_t rtcp_packets_sent_;
- RTPHeader last_rtp_header_;
+ uint16_t last_rtp_sequence_number_;
std::vector<uint16_t> last_nack_list_;
};
@@ -138,7 +137,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
}
int RtpSent() { return transport_.rtp_packets_sent_; }
uint16_t LastRtpSequenceNumber() {
- return transport_.last_rtp_header_.sequenceNumber;
+ return transport_.last_rtp_sequence_number_;
}
std::vector<uint16_t> LastNackListSent() {
return transport_.last_nack_list_;
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h
index 5ab48c9ad4..dd5744ec54 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h
+++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h
@@ -77,13 +77,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface {
RtcpRttStats* rtt_stats = nullptr;
RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr;
// Called on receipt of RTCP report block from remote side.
- // TODO(bugs.webrtc.org/10678): Remove RtcpStatisticsCallback in
- // favor of ReportBlockDataObserver.
// TODO(bugs.webrtc.org/10679): Consider whether we want to use
// only getters or only callbacks. If we decide on getters, the
// ReportBlockDataObserver should also be removed in favor of
// GetLatestReportBlockData().
- RtcpStatisticsCallback* rtcp_statistics_callback = nullptr;
RtcpCnameCallback* rtcp_cname_callback = nullptr;
ReportBlockDataObserver* report_block_data_observer = nullptr;
@@ -183,6 +180,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface {
virtual void SetRemoteSSRC(uint32_t ssrc) = 0;
+  // Called when the local ssrc changes (post initialization) so that receive
+  // streams can match the send side. Called on the packet receive thread/tq.
+ virtual void SetLocalSsrc(uint32_t ssrc) = 0;
+
// **************************************************************************
// Sender
// **************************************************************************
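The new SetLocalSsrc() hook above is meant to be driven from the receive path
when the locally used SSRC changes after setup. A hypothetical call site,
assuming only the interface declared in this hunk (the surrounding
receive-stream code and the helper name are illustrative, not from this patch):

    #include <cstdint>

    #include "absl/types/optional.h"
    #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"

    // Forwards a local SSRC change to the RTP/RTCP module, but only when the
    // value actually changed. Must run on the packet receive thread / task
    // queue, as required by the comment on SetLocalSsrc().
    class LocalSsrcForwarder {
     public:
      explicit LocalSsrcForwarder(webrtc::RtpRtcpInterface& rtp_rtcp)
          : rtp_rtcp_(rtp_rtcp) {}

      void OnLocalSsrcChanged(uint32_t new_local_ssrc) {
        if (last_local_ssrc_ == new_local_ssrc)
          return;  // No change, nothing to propagate.
        last_local_ssrc_ = new_local_ssrc;
        rtp_rtcp_.SetLocalSsrc(new_local_ssrc);
      }

     private:
      webrtc::RtpRtcpInterface& rtp_rtcp_;
      absl::optional<uint32_t> last_local_ssrc_;
    };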
diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc
index 8435e5f33d..80c319f4f2 100644
--- a/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/modules/rtp_rtcp/source/rtp_sender.cc
@@ -104,6 +104,7 @@ bool IsNonVolatile(RTPExtensionType type) {
switch (type) {
case kRtpExtensionTransmissionTimeOffset:
case kRtpExtensionAudioLevel:
+ case kRtpExtensionCsrcAudioLevel:
case kRtpExtensionAbsoluteSendTime:
case kRtpExtensionTransportSequenceNumber:
case kRtpExtensionTransportSequenceNumber02:
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.cc b/modules/rtp_rtcp/source/rtp_sender_audio.cc
index 8cf60aaecd..4d72211b7c 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -157,7 +157,7 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type,
return SendAudio(frame_type, payload_type, rtp_timestamp, payload_data,
payload_size,
// TODO(bugs.webrtc.org/10739) replace once plumbed.
- /*absolute_capture_timestamp_ms=*/0);
+ /*absolute_capture_timestamp_ms=*/-1);
}
bool RTPSenderAudio::SendAudio(AudioFrameType frame_type,
@@ -277,22 +277,26 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type,
packet->SetExtension<AudioLevel>(
frame_type == AudioFrameType::kAudioFrameSpeech, audio_level_dbov);
- // Send absolute capture time periodically in order to optimize and save
- // network traffic. Missing absolute capture times can be interpolated on the
- // receiving end if sending intervals are small enough.
- auto absolute_capture_time = absolute_capture_time_sender_.OnSendPacket(
- AbsoluteCaptureTimeSender::GetSource(packet->Ssrc(), packet->Csrcs()),
- packet->Timestamp(),
- // Replace missing value with 0 (invalid frequency), this will trigger
- // absolute capture time sending.
- encoder_rtp_timestamp_frequency.value_or(0),
- Int64MsToUQ32x32(absolute_capture_timestamp_ms + NtpOffsetMs()),
- /*estimated_capture_clock_offset=*/
- include_capture_clock_offset_ ? absl::make_optional(0) : absl::nullopt);
- if (absolute_capture_time) {
- // It also checks that extension was registered during SDP negotiation. If
- // not then setter won't do anything.
- packet->SetExtension<AbsoluteCaptureTimeExtension>(*absolute_capture_time);
+ if (absolute_capture_timestamp_ms > 0) {
+ // Send absolute capture time periodically in order to optimize and save
+ // network traffic. Missing absolute capture times can be interpolated on
+ // the receiving end if sending intervals are small enough.
+ auto absolute_capture_time = absolute_capture_time_sender_.OnSendPacket(
+ AbsoluteCaptureTimeSender::GetSource(packet->Ssrc(), packet->Csrcs()),
+ packet->Timestamp(),
+ // Replace missing value with 0 (invalid frequency), this will trigger
+ // absolute capture time sending.
+ encoder_rtp_timestamp_frequency.value_or(0),
+ Int64MsToUQ32x32(clock_->ConvertTimestampToNtpTimeInMilliseconds(
+ absolute_capture_timestamp_ms)),
+ /*estimated_capture_clock_offset=*/
+ include_capture_clock_offset_ ? absl::make_optional(0) : absl::nullopt);
+ if (absolute_capture_time) {
+      // It also checks that the extension was registered during SDP
+      // negotiation. If not, the setter won't do anything.
+ packet->SetExtension<AbsoluteCaptureTimeExtension>(
+ *absolute_capture_time);
+ }
}
uint8_t* payload = packet->AllocatePayload(payload_size);
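The reworked block above only emits the absolute-capture-time extension when a
positive timestamp is supplied, and the timestamp is now mapped through the
module clock's NTP conversion instead of a fixed NtpOffsetMs(). A hypothetical
caller under that contract; the helper name and wrapping are illustrative, only
RTPSenderAudio::SendAudio and webrtc::Clock are taken from the patch:

    #include <cstdint>

    #include "api/array_view.h"
    #include "modules/rtp_rtcp/source/rtp_sender_audio.h"
    #include "system_wrappers/include/clock.h"

    // The capture timestamp must come from the same Clock the sender uses so
    // that ConvertTimestampToNtpTimeInMilliseconds() maps it onto the right
    // NTP epoch; a non-positive value (the new -1 default) suppresses the
    // extension entirely.
    bool SendEncodedAudio(webrtc::RTPSenderAudio& sender_audio,
                          webrtc::Clock& clock,
                          webrtc::AudioFrameType frame_type,
                          int8_t payload_type,
                          uint32_t rtp_timestamp,
                          rtc::ArrayView<const uint8_t> payload) {
      const int64_t capture_time_ms = clock.CurrentTime().ms();
      return sender_audio.SendAudio(frame_type, payload_type, rtp_timestamp,
                                    payload.data(), payload.size(),
                                    capture_time_ms);
    }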
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.h b/modules/rtp_rtcp/source/rtp_sender_audio.h
index 57b9dd7ce6..6d61facc9a 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -51,6 +51,8 @@ class RTPSenderAudio {
const uint8_t* payload_data,
size_t payload_size);
+ // `absolute_capture_timestamp_ms` and `Clock::CurrentTime`
+ // should be using the same epoch.
bool SendAudio(AudioFrameType frame_type,
int8_t payload_type,
uint32_t rtp_timestamp,
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
index d75f4e8947..0221800ea8 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
@@ -19,7 +19,6 @@
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
-#include "modules/rtp_rtcp/source/time_util.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -167,8 +166,10 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAbsoluteCaptureTime) {
transport_.last_sent_packet()
.GetExtension<AbsoluteCaptureTimeExtension>();
EXPECT_TRUE(absolute_capture_time);
- EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp,
- Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs()));
+ EXPECT_EQ(
+ absolute_capture_time->absolute_capture_timestamp,
+ Int64MsToUQ32x32(fake_clock_.ConvertTimestampToNtpTimeInMilliseconds(
+ kAbsoluteCaptureTimestampMs)));
EXPECT_FALSE(
absolute_capture_time->estimated_capture_clock_offset.has_value());
}
@@ -201,8 +202,10 @@ TEST_F(RtpSenderAudioTest,
transport_.last_sent_packet()
.GetExtension<AbsoluteCaptureTimeExtension>();
EXPECT_TRUE(absolute_capture_time);
- EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp,
- Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs()));
+ EXPECT_EQ(
+ absolute_capture_time->absolute_capture_timestamp,
+ Int64MsToUQ32x32(fake_clock_.ConvertTimestampToNtpTimeInMilliseconds(
+ kAbsoluteCaptureTimestampMs)));
EXPECT_TRUE(
absolute_capture_time->estimated_capture_clock_offset.has_value());
EXPECT_EQ(0, *absolute_capture_time->estimated_capture_clock_offset);
diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.cc b/modules/rtp_rtcp/source/rtp_sender_egress.cc
index 48f536c6bb..126b89c8c8 100644
--- a/modules/rtp_rtcp/source/rtp_sender_egress.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_egress.cc
@@ -142,6 +142,9 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet,
RTC_DCHECK(packet->packet_type().has_value());
RTC_DCHECK(HasCorrectSsrc(*packet));
+ if (packet->packet_type() == RtpPacketMediaType::kRetransmission) {
+ RTC_DCHECK(packet->retransmitted_sequence_number().has_value());
+ }
const uint32_t packet_ssrc = packet->Ssrc();
const int64_t now_ms = clock_->TimeInMilliseconds();
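The new DCHECK above encodes a contract: any packet handed to SendPacket() as a
retransmission must already carry the original media packet's sequence number.
A minimal sketch of the caller side, using only setters that appear elsewhere
in this patch (the helper name is illustrative):

    #include <cstdint>

    #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
    #include "modules/rtp_rtcp/source/rtp_packet_to_send.h"

    // Mark `packet` as a retransmission of `original_sequence_number` before
    // handing it to RtpSenderEgress::SendPacket(); without the retransmitted
    // sequence number the DCHECK above fires in debug builds.
    void PrepareRetransmission(webrtc::RtpPacketToSend& packet,
                               uint16_t original_sequence_number) {
      packet.set_packet_type(webrtc::RtpPacketMediaType::kRetransmission);
      packet.set_retransmitted_sequence_number(original_sequence_number);
    }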
@@ -409,12 +412,34 @@ void RtpSenderEgress::AddPacketToTransportFeedback(
}
RtpPacketSendInfo packet_info;
- packet_info.ssrc = ssrc_;
packet_info.transport_sequence_number = packet_id;
- packet_info.rtp_sequence_number = packet.SequenceNumber();
+ packet_info.rtp_timestamp = packet.Timestamp();
packet_info.length = packet_size;
packet_info.pacing_info = pacing_info;
packet_info.packet_type = packet.packet_type();
+
+ switch (*packet_info.packet_type) {
+ case RtpPacketMediaType::kAudio:
+ case RtpPacketMediaType::kVideo:
+ packet_info.media_ssrc = ssrc_;
+ packet_info.rtp_sequence_number = packet.SequenceNumber();
+ break;
+ case RtpPacketMediaType::kRetransmission:
+      // For retransmissions, we want to remove the original media packet
+      // when the retransmit arrives - so populate that in the packet info.
+ packet_info.media_ssrc = ssrc_;
+ packet_info.rtp_sequence_number =
+ *packet.retransmitted_sequence_number();
+ break;
+ case RtpPacketMediaType::kPadding:
+ case RtpPacketMediaType::kForwardErrorCorrection:
+ // We're not interested in feedback about these packets being received
+ // or lost.
+ break;
+ }
+ // TODO(bugs.webrtc.org/12713): Remove once downstream usage is gone.
+ packet_info.ssrc = packet_info.media_ssrc.value_or(0);
+
transport_feedback_observer_->OnAddPacket(packet_info);
}
}
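The switch above decides which packets are worth tracking for transport
feedback: media packets and retransmissions report the media SSRC together with
the original media sequence number, while padding and FEC leave media_ssrc
unset. A sketch of a consumer under those assumptions; this is a hypothetical
observer, not one defined by this patch:

    #include <cstdint>
    #include <set>
    #include <utility>

    #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"

    // Tracks which original media packets are still in flight. Padding and
    // FEC carry no media_ssrc and are skipped; retransmissions map back to
    // the original sequence number, so feedback for the retransmit can retire
    // the original packet as well.
    class InFlightTracker : public webrtc::TransportFeedbackObserver {
     public:
      void OnAddPacket(const webrtc::RtpPacketSendInfo& info) override {
        if (!info.media_ssrc.has_value())
          return;
        in_flight_.emplace(*info.media_ssrc, info.rtp_sequence_number);
      }
      void OnTransportFeedback(
          const webrtc::rtcp::TransportFeedback& feedback) override {
        // Ack/loss handling would erase entries here (omitted in this sketch).
      }

     private:
      std::set<std::pair<uint32_t, uint16_t>> in_flight_;
    };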
diff --git a/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc
new file mode 100644
index 0000000000..4f3990cc3e
--- /dev/null
+++ b/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc
@@ -0,0 +1,982 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/call/transport.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
+#include "logging/rtc_event_log/mock/mock_rtc_event_log.h"
+#include "modules/rtp_rtcp/include/flexfec_sender.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet_history.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::_;
+using ::testing::Field;
+using ::testing::NiceMock;
+using ::testing::Optional;
+using ::testing::StrictMock;
+
+constexpr Timestamp kStartTime = Timestamp::Millis(123456789);
+constexpr int kDefaultPayloadType = 100;
+constexpr int kFlexfectPayloadType = 110;
+constexpr uint16_t kStartSequenceNumber = 33;
+constexpr uint32_t kSsrc = 725242;
+constexpr uint32_t kRtxSsrc = 12345;
+constexpr uint32_t kFlexFecSsrc = 23456;
+enum : int {
+ kTransportSequenceNumberExtensionId = 1,
+ kAbsoluteSendTimeExtensionId,
+ kTransmissionOffsetExtensionId,
+ kVideoTimingExtensionId,
+};
+
+struct TestConfig {
+ explicit TestConfig(bool with_overhead) : with_overhead(with_overhead) {}
+ bool with_overhead = false;
+};
+
+class MockSendPacketObserver : public SendPacketObserver {
+ public:
+ MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override));
+};
+
+class MockTransportFeedbackObserver : public TransportFeedbackObserver {
+ public:
+ MOCK_METHOD(void, OnAddPacket, (const RtpPacketSendInfo&), (override));
+ MOCK_METHOD(void,
+ OnTransportFeedback,
+ (const rtcp::TransportFeedback&),
+ (override));
+};
+
+class MockStreamDataCountersCallback : public StreamDataCountersCallback {
+ public:
+ MOCK_METHOD(void,
+ DataCountersUpdated,
+ (const StreamDataCounters& counters, uint32_t ssrc),
+ (override));
+};
+
+class MockSendSideDelayObserver : public SendSideDelayObserver {
+ public:
+ MOCK_METHOD(void,
+ SendSideDelayUpdated,
+ (int, int, uint64_t, uint32_t),
+ (override));
+};
+
+class FieldTrialConfig : public WebRtcKeyValueConfig {
+ public:
+ FieldTrialConfig() : overhead_enabled_(false) {}
+ ~FieldTrialConfig() override {}
+
+ void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; }
+
+ std::string Lookup(absl::string_view key) const override {
+ if (key == "WebRTC-SendSideBwe-WithOverhead") {
+ return overhead_enabled_ ? "Enabled" : "Disabled";
+ }
+ return "";
+ }
+
+ private:
+ bool overhead_enabled_;
+};
+
+struct TransmittedPacket {
+ TransmittedPacket(rtc::ArrayView<const uint8_t> data,
+ const PacketOptions& packet_options,
+ RtpHeaderExtensionMap* extensions)
+ : packet(extensions), options(packet_options) {
+ EXPECT_TRUE(packet.Parse(data));
+ }
+ RtpPacketReceived packet;
+ PacketOptions options;
+};
+
+class TestTransport : public Transport {
+ public:
+ explicit TestTransport(RtpHeaderExtensionMap* extensions)
+ : total_data_sent_(DataSize::Zero()), extensions_(extensions) {}
+ bool SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) override {
+ total_data_sent_ += DataSize::Bytes(length);
+ last_packet_.emplace(rtc::MakeArrayView(packet, length), options,
+ extensions_);
+ return true;
+ }
+
+ bool SendRtcp(const uint8_t*, size_t) override { RTC_CHECK_NOTREACHED(); }
+
+ absl::optional<TransmittedPacket> last_packet() { return last_packet_; }
+
+ private:
+ DataSize total_data_sent_;
+ absl::optional<TransmittedPacket> last_packet_;
+ RtpHeaderExtensionMap* const extensions_;
+};
+
+} // namespace
+
+class RtpSenderEgressTest : public ::testing::TestWithParam<TestConfig> {
+ protected:
+ RtpSenderEgressTest()
+ : time_controller_(kStartTime),
+ clock_(time_controller_.GetClock()),
+ transport_(&header_extensions_),
+ packet_history_(clock_, /*enable_rtx_padding_prioritization=*/true),
+ sequence_number_(kStartSequenceNumber) {
+ trials_.SetOverHeadEnabled(GetParam().with_overhead);
+ }
+
+ std::unique_ptr<RtpSenderEgress> CreateRtpSenderEgress() {
+ return std::make_unique<RtpSenderEgress>(DefaultConfig(), &packet_history_);
+ }
+
+ RtpRtcp::Configuration DefaultConfig() {
+ RtpRtcp::Configuration config;
+ config.clock = clock_;
+ config.outgoing_transport = &transport_;
+ config.local_media_ssrc = kSsrc;
+ config.rtx_send_ssrc = kRtxSsrc;
+ config.fec_generator = nullptr;
+ config.event_log = &mock_rtc_event_log_;
+ config.send_packet_observer = &send_packet_observer_;
+ config.rtp_stats_callback = &mock_rtp_stats_callback_;
+ config.transport_feedback_callback = &feedback_observer_;
+ config.populate_network2_timestamp = false;
+ config.field_trials = &trials_;
+ return config;
+ }
+
+ std::unique_ptr<RtpPacketToSend> BuildRtpPacket(bool marker_bit,
+ int64_t capture_time_ms) {
+ auto packet = std::make_unique<RtpPacketToSend>(&header_extensions_);
+ packet->SetSsrc(kSsrc);
+ packet->ReserveExtension<AbsoluteSendTime>();
+ packet->ReserveExtension<TransmissionOffset>();
+ packet->ReserveExtension<TransportSequenceNumber>();
+
+ packet->SetPayloadType(kDefaultPayloadType);
+ packet->set_packet_type(RtpPacketMediaType::kVideo);
+ packet->SetMarker(marker_bit);
+ packet->SetTimestamp(capture_time_ms * 90);
+ packet->set_capture_time_ms(capture_time_ms);
+ packet->SetSequenceNumber(sequence_number_++);
+ return packet;
+ }
+
+ std::unique_ptr<RtpPacketToSend> BuildRtpPacket() {
+ return BuildRtpPacket(/*marker_bit=*/true, clock_->CurrentTime().ms());
+ }
+
+ GlobalSimulatedTimeController time_controller_;
+ Clock* const clock_;
+ NiceMock<MockRtcEventLog> mock_rtc_event_log_;
+ NiceMock<MockStreamDataCountersCallback> mock_rtp_stats_callback_;
+ NiceMock<MockSendPacketObserver> send_packet_observer_;
+ NiceMock<MockTransportFeedbackObserver> feedback_observer_;
+ RtpHeaderExtensionMap header_extensions_;
+ TestTransport transport_;
+ RtpPacketHistory packet_history_;
+ FieldTrialConfig trials_;
+ uint16_t sequence_number_;
+};
+
+TEST_P(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) {
+ constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8;
+ constexpr size_t kPayloadSize = 1400;
+ const uint16_t kTransportSequenceNumber = 17;
+
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ const size_t expected_bytes = GetParam().with_overhead
+ ? kPayloadSize + kRtpOverheadBytesPerPacket
+ : kPayloadSize;
+
+ EXPECT_CALL(
+ feedback_observer_,
+ OnAddPacket(AllOf(
+ Field(&RtpPacketSendInfo::media_ssrc, kSsrc),
+ Field(&RtpPacketSendInfo::transport_sequence_number,
+ kTransportSequenceNumber),
+ Field(&RtpPacketSendInfo::rtp_sequence_number, kStartSequenceNumber),
+ Field(&RtpPacketSendInfo::length, expected_bytes),
+ Field(&RtpPacketSendInfo::pacing_info, PacedPacketInfo()))));
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
+ packet->AllocatePayload(kPayloadSize);
+
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ std::unique_ptr<RtpPacketToSend> media_packet = BuildRtpPacket();
+ media_packet->set_packet_type(RtpPacketMediaType::kVideo);
+ sender->SendPacket(media_packet.get(), PacedPacketInfo());
+ EXPECT_FALSE(transport_.last_packet()->options.is_retransmit);
+
+ std::unique_ptr<RtpPacketToSend> retransmission = BuildRtpPacket();
+ retransmission->set_packet_type(RtpPacketMediaType::kRetransmission);
+ retransmission->set_retransmitted_sequence_number(
+ media_packet->SequenceNumber());
+ sender->SendPacket(retransmission.get(), PacedPacketInfo());
+ EXPECT_TRUE(transport_.last_packet()->options.is_retransmit);
+}
+
+TEST_P(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ EXPECT_FALSE(transport_.last_packet()->options.included_in_feedback);
+ EXPECT_FALSE(transport_.last_packet()->options.included_in_allocation);
+}
+
+TEST_P(RtpSenderEgressTest,
+ SetsIncludedInFeedbackWhenTransportSequenceNumberExtensionIsRegistered) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ EXPECT_TRUE(transport_.last_packet()->options.included_in_feedback);
+}
+
+TEST_P(
+ RtpSenderEgressTest,
+ SetsIncludedInAllocationWhenTransportSequenceNumberExtensionIsRegistered) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ EXPECT_TRUE(transport_.last_packet()->options.included_in_allocation);
+}
+
+TEST_P(RtpSenderEgressTest,
+ SetsIncludedInAllocationWhenForcedAsPartOfAllocation) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ sender->ForceIncludeSendPacketsInAllocation(true);
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ EXPECT_FALSE(transport_.last_packet()->options.included_in_feedback);
+ EXPECT_TRUE(transport_.last_packet()->options.included_in_allocation);
+}
+
+TEST_P(RtpSenderEgressTest, OnSendSideDelayUpdated) {
+ StrictMock<MockSendSideDelayObserver> send_side_delay_observer;
+ RtpRtcpInterface::Configuration config = DefaultConfig();
+ config.send_side_delay_observer = &send_side_delay_observer;
+ auto sender = std::make_unique<RtpSenderEgress>(config, &packet_history_);
+
+ // Send packet with 10 ms send-side delay. The average, max and total should
+ // be 10 ms.
+ EXPECT_CALL(send_side_delay_observer,
+ SendSideDelayUpdated(10, 10, 10, kSsrc));
+ int64_t capture_time_ms = clock_->TimeInMilliseconds();
+ time_controller_.AdvanceTime(TimeDelta::Millis(10));
+ sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms).get(),
+ PacedPacketInfo());
+
+ // Send another packet with 20 ms delay. The average, max and total should be
+ // 15, 20 and 30 ms respectively.
+ EXPECT_CALL(send_side_delay_observer,
+ SendSideDelayUpdated(15, 20, 30, kSsrc));
+ capture_time_ms = clock_->TimeInMilliseconds();
+ time_controller_.AdvanceTime(TimeDelta::Millis(20));
+ sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms).get(),
+ PacedPacketInfo());
+
+ // Send another packet at the same time, which replaces the last packet.
+ // Since this packet has 0 ms delay, the average is now 5 ms and max is 10 ms.
+ // The total counter stays the same though.
+  // TODO(terelius): It is not clear that this is the right behavior.
+ EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(5, 10, 30, kSsrc));
+ capture_time_ms = clock_->TimeInMilliseconds();
+ sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms).get(),
+ PacedPacketInfo());
+
+ // Send a packet 1 second later. The earlier packets should have timed
+ // out, so both max and average should be the delay of this packet. The total
+ // keeps increasing.
+ time_controller_.AdvanceTime(TimeDelta::Seconds(1));
+ EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(1, 1, 31, kSsrc));
+ capture_time_ms = clock_->TimeInMilliseconds();
+ time_controller_.AdvanceTime(TimeDelta::Millis(1));
+ sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms).get(),
+ PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, WritesPacerExitToTimingExtension) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ header_extensions_.RegisterByUri(kVideoTimingExtensionId,
+ VideoTimingExtension::kUri);
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->SetExtension<VideoTimingExtension>(VideoSendTiming{});
+
+ const int kStoredTimeInMs = 100;
+ time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ ASSERT_TRUE(transport_.last_packet().has_value());
+
+ VideoSendTiming video_timing;
+ EXPECT_TRUE(
+ transport_.last_packet()->packet.GetExtension<VideoTimingExtension>(
+ &video_timing));
+ EXPECT_EQ(video_timing.pacer_exit_delta_ms, kStoredTimeInMs);
+}
+
+TEST_P(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) {
+ RtpRtcpInterface::Configuration rtp_config = DefaultConfig();
+ rtp_config.populate_network2_timestamp = true;
+ auto sender = std::make_unique<RtpSenderEgress>(rtp_config, &packet_history_);
+ header_extensions_.RegisterByUri(kVideoTimingExtensionId,
+ VideoTimingExtension::kUri);
+
+ const uint16_t kPacerExitMs = 1234u;
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ VideoSendTiming send_timing = {};
+ send_timing.pacer_exit_delta_ms = kPacerExitMs;
+ packet->SetExtension<VideoTimingExtension>(send_timing);
+
+ const int kStoredTimeInMs = 100;
+ time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ ASSERT_TRUE(transport_.last_packet().has_value());
+
+ VideoSendTiming video_timing;
+ EXPECT_TRUE(
+ transport_.last_packet()->packet.GetExtension<VideoTimingExtension>(
+ &video_timing));
+ EXPECT_EQ(video_timing.network2_timestamp_delta_ms, kStoredTimeInMs);
+ EXPECT_EQ(video_timing.pacer_exit_delta_ms, kPacerExitMs);
+}
+
+TEST_P(RtpSenderEgressTest, OnSendPacketUpdated) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ const uint16_t kTransportSequenceNumber = 1;
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(kTransportSequenceNumber,
+ clock_->TimeInMilliseconds(), kSsrc));
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ const uint16_t kTransportSequenceNumber = 1;
+ EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(0);
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
+ packet->set_packet_type(RtpPacketMediaType::kRetransmission);
+ packet->set_retransmitted_sequence_number(packet->SequenceNumber());
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, ReportsFecRate) {
+ constexpr int kNumPackets = 10;
+ constexpr TimeDelta kTimeBetweenPackets = TimeDelta::Millis(33);
+
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ DataSize total_fec_data_sent = DataSize::Zero();
+ // Send some packets, alternating between media and FEC.
+ for (size_t i = 0; i < kNumPackets; ++i) {
+ std::unique_ptr<RtpPacketToSend> media_packet = BuildRtpPacket();
+ media_packet->set_packet_type(RtpPacketMediaType::kVideo);
+ media_packet->SetPayloadSize(500);
+ sender->SendPacket(media_packet.get(), PacedPacketInfo());
+
+ std::unique_ptr<RtpPacketToSend> fec_packet = BuildRtpPacket();
+ fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
+ fec_packet->SetPayloadSize(123);
+ sender->SendPacket(fec_packet.get(), PacedPacketInfo());
+ total_fec_data_sent += DataSize::Bytes(fec_packet->size());
+
+ time_controller_.AdvanceTime(kTimeBetweenPackets);
+ }
+
+ EXPECT_NEAR(
+ (sender->GetSendRates()[RtpPacketMediaType::kForwardErrorCorrection])
+ .bps(),
+ (total_fec_data_sent / (kTimeBetweenPackets * kNumPackets)).bps(), 500);
+}
+
+TEST_P(RtpSenderEgressTest, BitrateCallbacks) {
+ class MockBitrateStaticsObserver : public BitrateStatisticsObserver {
+ public:
+ MOCK_METHOD(void, Notify, (uint32_t, uint32_t, uint32_t), (override));
+ } observer;
+
+ RtpRtcpInterface::Configuration config = DefaultConfig();
+ config.send_bitrate_observer = &observer;
+ auto sender = std::make_unique<RtpSenderEgress>(config, &packet_history_);
+
+ // Simulate kNumPackets sent with kPacketInterval intervals, with the
+ // number of packets selected so that we fill (but don't overflow) the one
+ // second averaging window.
+ const TimeDelta kWindowSize = TimeDelta::Seconds(1);
+ const TimeDelta kPacketInterval = TimeDelta::Millis(20);
+ const int kNumPackets = (kWindowSize - kPacketInterval) / kPacketInterval;
+
+ DataSize total_data_sent = DataSize::Zero();
+
+  // Send all but one of the packets, expect a call for each packet but don't
+ // verify bitrate yet (noisy measurements in the beginning).
+ for (int i = 0; i < kNumPackets; ++i) {
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->SetPayloadSize(500);
+ // Mark all packets as retransmissions - will cause total and retransmission
+ // rates to be equal.
+ packet->set_packet_type(RtpPacketMediaType::kRetransmission);
+ packet->set_retransmitted_sequence_number(packet->SequenceNumber());
+ total_data_sent += DataSize::Bytes(packet->size());
+
+ EXPECT_CALL(observer, Notify(_, _, kSsrc))
+ .WillOnce([&](uint32_t total_bitrate_bps,
+ uint32_t retransmission_bitrate_bps, uint32_t /*ssrc*/) {
+ TimeDelta window_size = i * kPacketInterval + TimeDelta::Millis(1);
+ // If there is just a single data point, there is no well defined
+ // averaging window so a bitrate of zero will be reported.
+ const double expected_bitrate_bps =
+ i == 0 ? 0.0 : (total_data_sent / window_size).bps();
+ EXPECT_NEAR(total_bitrate_bps, expected_bitrate_bps, 500);
+ EXPECT_NEAR(retransmission_bitrate_bps, expected_bitrate_bps, 500);
+ });
+
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(kPacketInterval);
+ }
+}
+
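With kWindowSize = 1 s and kPacketInterval = 20 ms, the loop above runs for
kNumPackets = (1000 - 20) / 20 = 49 iterations; the last packet is sent at
48 * 20 = 960 ms and the trailing AdvanceTime() leaves the clock at 980 ms, so
the whole run stays inside the one-second averaging window that the comment
about filling but not overflowing refers to.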
+TEST_P(RtpSenderEgressTest, DoesNotPutNotRetransmittablePacketsInHistory) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ packet_history_.SetStorePacketsStatus(
+ RtpPacketHistory::StorageMode::kStoreAndCull, 10);
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->set_allow_retransmission(false);
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ EXPECT_FALSE(
+ packet_history_.GetPacketState(packet->SequenceNumber()).has_value());
+}
+
+TEST_P(RtpSenderEgressTest, PutsRetransmittablePacketsInHistory) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ packet_history_.SetStorePacketsStatus(
+ RtpPacketHistory::StorageMode::kStoreAndCull, 10);
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->set_allow_retransmission(true);
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+ EXPECT_THAT(
+ packet_history_.GetPacketState(packet->SequenceNumber()),
+ Optional(
+ Field(&RtpPacketHistory::PacketState::pending_transmission, false)));
+}
+
+TEST_P(RtpSenderEgressTest, DoesNotPutNonMediaInHistory) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ packet_history_.SetStorePacketsStatus(
+ RtpPacketHistory::StorageMode::kStoreAndCull, 10);
+
+ // Non-media packets, even when marked as retransmittable, are not put into
+ // the packet history.
+ std::unique_ptr<RtpPacketToSend> retransmission = BuildRtpPacket();
+ retransmission->set_allow_retransmission(true);
+ retransmission->set_packet_type(RtpPacketMediaType::kRetransmission);
+ retransmission->set_retransmitted_sequence_number(
+ retransmission->SequenceNumber());
+ sender->SendPacket(retransmission.get(), PacedPacketInfo());
+ EXPECT_FALSE(packet_history_.GetPacketState(retransmission->SequenceNumber())
+ .has_value());
+
+ std::unique_ptr<RtpPacketToSend> fec = BuildRtpPacket();
+ fec->set_allow_retransmission(true);
+ fec->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
+ sender->SendPacket(fec.get(), PacedPacketInfo());
+ EXPECT_FALSE(
+ packet_history_.GetPacketState(fec->SequenceNumber()).has_value());
+
+ std::unique_ptr<RtpPacketToSend> padding = BuildRtpPacket();
+ padding->set_allow_retransmission(true);
+ padding->set_packet_type(RtpPacketMediaType::kPadding);
+ sender->SendPacket(padding.get(), PacedPacketInfo());
+ EXPECT_FALSE(
+ packet_history_.GetPacketState(padding->SequenceNumber()).has_value());
+}
+
+TEST_P(RtpSenderEgressTest, UpdatesSendStatusOfRetransmittedPackets) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ packet_history_.SetStorePacketsStatus(
+ RtpPacketHistory::StorageMode::kStoreAndCull, 10);
+
+ // Send a packet, putting it in the history.
+ std::unique_ptr<RtpPacketToSend> media_packet = BuildRtpPacket();
+ media_packet->set_allow_retransmission(true);
+ sender->SendPacket(media_packet.get(), PacedPacketInfo());
+ EXPECT_THAT(
+ packet_history_.GetPacketState(media_packet->SequenceNumber()),
+ Optional(
+ Field(&RtpPacketHistory::PacketState::pending_transmission, false)));
+
+ // Simulate a retransmission, marking the packet as pending.
+ std::unique_ptr<RtpPacketToSend> retransmission =
+ packet_history_.GetPacketAndMarkAsPending(media_packet->SequenceNumber());
+ retransmission->set_retransmitted_sequence_number(
+ media_packet->SequenceNumber());
+ retransmission->set_packet_type(RtpPacketMediaType::kRetransmission);
+ EXPECT_THAT(packet_history_.GetPacketState(media_packet->SequenceNumber()),
+ Optional(Field(
+ &RtpPacketHistory::PacketState::pending_transmission, true)));
+
+ // Simulate packet leaving pacer, the packet should be marked as non-pending.
+ sender->SendPacket(retransmission.get(), PacedPacketInfo());
+ EXPECT_THAT(
+ packet_history_.GetPacketState(media_packet->SequenceNumber()),
+ Optional(
+ Field(&RtpPacketHistory::PacketState::pending_transmission, false)));
+}
+
+TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacks) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ const RtpPacketCounter kEmptyCounter;
+ RtpPacketCounter expected_transmitted_counter;
+ RtpPacketCounter expected_retransmission_counter;
+
+ // Send a media packet.
+ std::unique_ptr<RtpPacketToSend> media_packet = BuildRtpPacket();
+ media_packet->SetPayloadSize(6);
+ expected_transmitted_counter.packets += 1;
+ expected_transmitted_counter.payload_bytes += media_packet->payload_size();
+ expected_transmitted_counter.header_bytes += media_packet->headers_size();
+
+ EXPECT_CALL(
+ mock_rtp_stats_callback_,
+ DataCountersUpdated(AllOf(Field(&StreamDataCounters::transmitted,
+ expected_transmitted_counter),
+ Field(&StreamDataCounters::retransmitted,
+ expected_retransmission_counter),
+ Field(&StreamDataCounters::fec, kEmptyCounter)),
+ kSsrc));
+ sender->SendPacket(media_packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+
+ // Send a retransmission. Retransmissions are counted into both transmitted
+ // and retransmitted packet statistics.
+ std::unique_ptr<RtpPacketToSend> retransmission_packet = BuildRtpPacket();
+ retransmission_packet->set_packet_type(RtpPacketMediaType::kRetransmission);
+ retransmission_packet->set_retransmitted_sequence_number(
+ retransmission_packet->SequenceNumber());
+ media_packet->SetPayloadSize(7);
+ expected_transmitted_counter.packets += 1;
+ expected_transmitted_counter.payload_bytes +=
+ retransmission_packet->payload_size();
+ expected_transmitted_counter.header_bytes +=
+ retransmission_packet->headers_size();
+
+ expected_retransmission_counter.packets += 1;
+ expected_retransmission_counter.payload_bytes +=
+ retransmission_packet->payload_size();
+ expected_retransmission_counter.header_bytes +=
+ retransmission_packet->headers_size();
+
+ EXPECT_CALL(
+ mock_rtp_stats_callback_,
+ DataCountersUpdated(AllOf(Field(&StreamDataCounters::transmitted,
+ expected_transmitted_counter),
+ Field(&StreamDataCounters::retransmitted,
+ expected_retransmission_counter),
+ Field(&StreamDataCounters::fec, kEmptyCounter)),
+ kSsrc));
+ sender->SendPacket(retransmission_packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+
+ // Send a padding packet.
+ std::unique_ptr<RtpPacketToSend> padding_packet = BuildRtpPacket();
+ padding_packet->set_packet_type(RtpPacketMediaType::kPadding);
+ padding_packet->SetPadding(224);
+ expected_transmitted_counter.packets += 1;
+ expected_transmitted_counter.padding_bytes += padding_packet->padding_size();
+ expected_transmitted_counter.header_bytes += padding_packet->headers_size();
+
+ EXPECT_CALL(
+ mock_rtp_stats_callback_,
+ DataCountersUpdated(AllOf(Field(&StreamDataCounters::transmitted,
+ expected_transmitted_counter),
+ Field(&StreamDataCounters::retransmitted,
+ expected_retransmission_counter),
+ Field(&StreamDataCounters::fec, kEmptyCounter)),
+ kSsrc));
+ sender->SendPacket(padding_packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+}
+
+TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacksFec) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ const RtpPacketCounter kEmptyCounter;
+ RtpPacketCounter expected_transmitted_counter;
+ RtpPacketCounter expected_fec_counter;
+
+ // Send a media packet.
+ std::unique_ptr<RtpPacketToSend> media_packet = BuildRtpPacket();
+ media_packet->SetPayloadSize(6);
+ expected_transmitted_counter.packets += 1;
+ expected_transmitted_counter.payload_bytes += media_packet->payload_size();
+ expected_transmitted_counter.header_bytes += media_packet->headers_size();
+
+ EXPECT_CALL(
+ mock_rtp_stats_callback_,
+ DataCountersUpdated(
+ AllOf(Field(&StreamDataCounters::transmitted,
+ expected_transmitted_counter),
+ Field(&StreamDataCounters::retransmitted, kEmptyCounter),
+ Field(&StreamDataCounters::fec, expected_fec_counter)),
+ kSsrc));
+ sender->SendPacket(media_packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+
+  // Send an FEC packet. FEC is counted into both transmitted and FEC packet
+ // statistics.
+ std::unique_ptr<RtpPacketToSend> fec_packet = BuildRtpPacket();
+ fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
+ fec_packet->SetPayloadSize(6);
+ expected_transmitted_counter.packets += 1;
+ expected_transmitted_counter.payload_bytes += fec_packet->payload_size();
+ expected_transmitted_counter.header_bytes += fec_packet->headers_size();
+
+ expected_fec_counter.packets += 1;
+ expected_fec_counter.payload_bytes += fec_packet->payload_size();
+ expected_fec_counter.header_bytes += fec_packet->headers_size();
+
+ EXPECT_CALL(
+ mock_rtp_stats_callback_,
+ DataCountersUpdated(
+ AllOf(Field(&StreamDataCounters::transmitted,
+ expected_transmitted_counter),
+ Field(&StreamDataCounters::retransmitted, kEmptyCounter),
+ Field(&StreamDataCounters::fec, expected_fec_counter)),
+ kSsrc));
+ sender->SendPacket(fec_packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+}
+
+TEST_P(RtpSenderEgressTest, UpdatesDataCounters) {
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ const RtpPacketCounter kEmptyCounter;
+
+ // Send a media packet.
+ std::unique_ptr<RtpPacketToSend> media_packet = BuildRtpPacket();
+ media_packet->SetPayloadSize(6);
+ sender->SendPacket(media_packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+
+ // Send an RTX retransmission packet.
+ std::unique_ptr<RtpPacketToSend> rtx_packet = BuildRtpPacket();
+ rtx_packet->set_packet_type(RtpPacketMediaType::kRetransmission);
+ rtx_packet->SetSsrc(kRtxSsrc);
+ rtx_packet->SetPayloadSize(7);
+ rtx_packet->set_retransmitted_sequence_number(media_packet->SequenceNumber());
+ sender->SendPacket(rtx_packet.get(), PacedPacketInfo());
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+
+ StreamDataCounters rtp_stats;
+ StreamDataCounters rtx_stats;
+ sender->GetDataCounters(&rtp_stats, &rtx_stats);
+
+ EXPECT_EQ(rtp_stats.transmitted.packets, 1u);
+ EXPECT_EQ(rtp_stats.transmitted.payload_bytes, media_packet->payload_size());
+ EXPECT_EQ(rtp_stats.transmitted.padding_bytes, media_packet->padding_size());
+ EXPECT_EQ(rtp_stats.transmitted.header_bytes, media_packet->headers_size());
+ EXPECT_EQ(rtp_stats.retransmitted, kEmptyCounter);
+ EXPECT_EQ(rtp_stats.fec, kEmptyCounter);
+
+ // Retransmissions are counted both into transmitted and retransmitted
+ // packet counts.
+ EXPECT_EQ(rtx_stats.transmitted.packets, 1u);
+ EXPECT_EQ(rtx_stats.transmitted.payload_bytes, rtx_packet->payload_size());
+ EXPECT_EQ(rtx_stats.transmitted.padding_bytes, rtx_packet->padding_size());
+ EXPECT_EQ(rtx_stats.transmitted.header_bytes, rtx_packet->headers_size());
+ EXPECT_EQ(rtx_stats.retransmitted, rtx_stats.transmitted);
+ EXPECT_EQ(rtx_stats.fec, kEmptyCounter);
+}
+
+TEST_P(RtpSenderEgressTest, SendPacketUpdatesExtensions) {
+ header_extensions_.RegisterByUri(kVideoTimingExtensionId,
+ VideoTimingExtension::kUri);
+ header_extensions_.RegisterByUri(kAbsoluteSendTimeExtensionId,
+ AbsoluteSendTime::kUri);
+ header_extensions_.RegisterByUri(kTransmissionOffsetExtensionId,
+ TransmissionOffset::kUri);
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds());
+
+ const int32_t kDiffMs = 10;
+ time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs));
+
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+
+ RtpPacketReceived received_packet = transport_.last_packet()->packet;
+
+ EXPECT_EQ(received_packet.GetExtension<TransmissionOffset>(), kDiffMs * 90);
+
+ EXPECT_EQ(received_packet.GetExtension<AbsoluteSendTime>(),
+ AbsoluteSendTime::MsTo24Bits(clock_->TimeInMilliseconds()));
+
+ VideoSendTiming timing;
+ EXPECT_TRUE(received_packet.GetExtension<VideoTimingExtension>(&timing));
+ EXPECT_EQ(timing.pacer_exit_delta_ms, kDiffMs);
+}
+
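The AbsoluteSendTime comparison above relies on the abs-send-time wire format:
a 24-bit timestamp in seconds as 6.18 fixed point, wrapping roughly every 64
seconds. A rough sketch of the conversion MsTo24Bits is expected to perform
(the exact rounding is an assumption here):

    #include <cstdint>

    // Milliseconds -> 24-bit 6.18 fixed-point seconds, as used by the
    // abs-send-time header extension. One second corresponds to 1 << 18
    // units; only the low 24 bits are kept, so values wrap about every 64 s.
    uint32_t AbsSendTime24Bits(int64_t time_ms) {
      return static_cast<uint32_t>(((time_ms << 18) + 500) / 1000) &
             0x00FFFFFF;
    }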
+TEST_P(RtpSenderEgressTest, SendPacketSetsPacketOptions) {
+ const uint16_t kPacketId = 42;
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ std::unique_ptr<RtpPacketToSend> packet = BuildRtpPacket();
+ packet->SetExtension<TransportSequenceNumber>(kPacketId);
+ EXPECT_CALL(send_packet_observer_, OnSendPacket);
+ sender->SendPacket(packet.get(), PacedPacketInfo());
+
+ PacketOptions packet_options = transport_.last_packet()->options;
+
+ EXPECT_EQ(packet_options.packet_id, kPacketId);
+ EXPECT_TRUE(packet_options.included_in_allocation);
+ EXPECT_TRUE(packet_options.included_in_feedback);
+ EXPECT_FALSE(packet_options.is_retransmit);
+
+ // Send another packet as retransmission, verify options are populated.
+ std::unique_ptr<RtpPacketToSend> retransmission = BuildRtpPacket();
+ retransmission->SetExtension<TransportSequenceNumber>(kPacketId + 1);
+ retransmission->set_packet_type(RtpPacketMediaType::kRetransmission);
+ retransmission->set_retransmitted_sequence_number(packet->SequenceNumber());
+ sender->SendPacket(retransmission.get(), PacedPacketInfo());
+ EXPECT_TRUE(transport_.last_packet()->options.is_retransmit);
+}
+
+TEST_P(RtpSenderEgressTest, SendPacketUpdatesStats) {
+ const size_t kPayloadSize = 1000;
+ StrictMock<MockSendSideDelayObserver> send_side_delay_observer;
+
+ const rtc::ArrayView<const RtpExtensionSize> kNoRtpHeaderExtensionSizes;
+ FlexfecSender flexfec(kFlexfectPayloadType, kFlexFecSsrc, kSsrc, /*mid=*/"",
+ /*header_extensions=*/{}, kNoRtpHeaderExtensionSizes,
+ /*rtp_state=*/nullptr, time_controller_.GetClock());
+ RtpRtcpInterface::Configuration config = DefaultConfig();
+ config.fec_generator = &flexfec;
+ config.send_side_delay_observer = &send_side_delay_observer;
+ auto sender = std::make_unique<RtpSenderEgress>(config, &packet_history_);
+
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ const int64_t capture_time_ms = clock_->TimeInMilliseconds();
+
+ std::unique_ptr<RtpPacketToSend> video_packet = BuildRtpPacket();
+ video_packet->set_packet_type(RtpPacketMediaType::kVideo);
+ video_packet->SetPayloadSize(kPayloadSize);
+ video_packet->SetExtension<TransportSequenceNumber>(1);
+
+ std::unique_ptr<RtpPacketToSend> rtx_packet = BuildRtpPacket();
+ rtx_packet->SetSsrc(kRtxSsrc);
+ rtx_packet->set_packet_type(RtpPacketMediaType::kRetransmission);
+ rtx_packet->set_retransmitted_sequence_number(video_packet->SequenceNumber());
+ rtx_packet->SetPayloadSize(kPayloadSize);
+ rtx_packet->SetExtension<TransportSequenceNumber>(2);
+
+ std::unique_ptr<RtpPacketToSend> fec_packet = BuildRtpPacket();
+ fec_packet->SetSsrc(kFlexFecSsrc);
+ fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
+ fec_packet->SetPayloadSize(kPayloadSize);
+ fec_packet->SetExtension<TransportSequenceNumber>(3);
+
+ const int64_t kDiffMs = 25;
+ time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs));
+
+ EXPECT_CALL(send_side_delay_observer,
+ SendSideDelayUpdated(kDiffMs, kDiffMs, kDiffMs, kSsrc));
+ EXPECT_CALL(
+ send_side_delay_observer,
+ SendSideDelayUpdated(kDiffMs, kDiffMs, 2 * kDiffMs, kFlexFecSsrc));
+
+ EXPECT_CALL(send_packet_observer_, OnSendPacket(1, capture_time_ms, kSsrc));
+
+ sender->SendPacket(video_packet.get(), PacedPacketInfo());
+
+ // Send packet observer not called for padding/retransmissions.
+ EXPECT_CALL(send_packet_observer_, OnSendPacket(2, _, _)).Times(0);
+ sender->SendPacket(rtx_packet.get(), PacedPacketInfo());
+
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(3, capture_time_ms, kFlexFecSsrc));
+ sender->SendPacket(fec_packet.get(), PacedPacketInfo());
+
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+ StreamDataCounters rtp_stats;
+ StreamDataCounters rtx_stats;
+ sender->GetDataCounters(&rtp_stats, &rtx_stats);
+ EXPECT_EQ(rtp_stats.transmitted.packets, 2u);
+ EXPECT_EQ(rtp_stats.fec.packets, 1u);
+ EXPECT_EQ(rtx_stats.retransmitted.packets, 1u);
+}
+
+TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRetransmission) {
+ const uint16_t kTransportSequenceNumber = 17;
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+ std::unique_ptr<RtpPacketToSend> retransmission = BuildRtpPacket();
+ retransmission->set_packet_type(RtpPacketMediaType::kRetransmission);
+ retransmission->SetExtension<TransportSequenceNumber>(
+ kTransportSequenceNumber);
+ uint16_t retransmitted_seq = retransmission->SequenceNumber() - 2;
+ retransmission->set_retransmitted_sequence_number(retransmitted_seq);
+
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ EXPECT_CALL(
+ feedback_observer_,
+ OnAddPacket(AllOf(
+ Field(&RtpPacketSendInfo::media_ssrc, kSsrc),
+ Field(&RtpPacketSendInfo::rtp_sequence_number, retransmitted_seq),
+ Field(&RtpPacketSendInfo::transport_sequence_number,
+ kTransportSequenceNumber))));
+ sender->SendPacket(retransmission.get(), PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) {
+ const uint16_t kTransportSequenceNumber = 17;
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ std::unique_ptr<RtpPacketToSend> rtx_retransmission = BuildRtpPacket();
+ rtx_retransmission->SetSsrc(kRtxSsrc);
+ rtx_retransmission->SetExtension<TransportSequenceNumber>(
+ kTransportSequenceNumber);
+ rtx_retransmission->set_packet_type(RtpPacketMediaType::kRetransmission);
+ uint16_t rtx_retransmitted_seq = rtx_retransmission->SequenceNumber() - 2;
+ rtx_retransmission->set_retransmitted_sequence_number(rtx_retransmitted_seq);
+
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ EXPECT_CALL(
+ feedback_observer_,
+ OnAddPacket(AllOf(
+ Field(&RtpPacketSendInfo::media_ssrc, kSsrc),
+ Field(&RtpPacketSendInfo::rtp_sequence_number, rtx_retransmitted_seq),
+ Field(&RtpPacketSendInfo::transport_sequence_number,
+ kTransportSequenceNumber))));
+ sender->SendPacket(rtx_retransmission.get(), PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, TransportFeedbackObserverPadding) {
+ const uint16_t kTransportSequenceNumber = 17;
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+ std::unique_ptr<RtpPacketToSend> padding = BuildRtpPacket();
+ padding->SetPadding(224);
+ padding->set_packet_type(RtpPacketMediaType::kPadding);
+ padding->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
+
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ EXPECT_CALL(
+ feedback_observer_,
+ OnAddPacket(AllOf(Field(&RtpPacketSendInfo::media_ssrc, absl::nullopt),
+ Field(&RtpPacketSendInfo::transport_sequence_number,
+ kTransportSequenceNumber))));
+ sender->SendPacket(padding.get(), PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) {
+ const uint16_t kTransportSequenceNumber = 17;
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ std::unique_ptr<RtpPacketToSend> rtx_padding = BuildRtpPacket();
+ rtx_padding->SetPadding(224);
+ rtx_padding->SetSsrc(kRtxSsrc);
+ rtx_padding->set_packet_type(RtpPacketMediaType::kPadding);
+ rtx_padding->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
+
+ std::unique_ptr<RtpSenderEgress> sender = CreateRtpSenderEgress();
+ EXPECT_CALL(
+ feedback_observer_,
+ OnAddPacket(AllOf(Field(&RtpPacketSendInfo::media_ssrc, absl::nullopt),
+ Field(&RtpPacketSendInfo::transport_sequence_number,
+ kTransportSequenceNumber))));
+ sender->SendPacket(rtx_padding.get(), PacedPacketInfo());
+}
+
+TEST_P(RtpSenderEgressTest, TransportFeedbackObserverFec) {
+ const uint16_t kTransportSequenceNumber = 17;
+ header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId,
+ TransportSequenceNumber::kUri);
+
+ std::unique_ptr<RtpPacketToSend> fec_packet = BuildRtpPacket();
+ fec_packet->SetSsrc(kFlexFecSsrc);
+ fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
+ fec_packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
+
+ const rtc::ArrayView<const RtpExtensionSize> kNoRtpHeaderExtensionSizes;
+ FlexfecSender flexfec(kFlexfectPayloadType, kFlexFecSsrc, kSsrc, /*mid=*/"",
+ /*header_extensions=*/{}, kNoRtpHeaderExtensionSizes,
+ /*rtp_state=*/nullptr, time_controller_.GetClock());
+ RtpRtcpInterface::Configuration config = DefaultConfig();
+ config.fec_generator = &flexfec;
+ auto sender = std::make_unique<RtpSenderEgress>(config, &packet_history_);
+ EXPECT_CALL(
+ feedback_observer_,
+ OnAddPacket(AllOf(Field(&RtpPacketSendInfo::media_ssrc, absl::nullopt),
+ Field(&RtpPacketSendInfo::transport_sequence_number,
+ kTransportSequenceNumber))));
+ sender->SendPacket(fec_packet.get(), PacedPacketInfo());
+}
+
+INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
+ RtpSenderEgressTest,
+ ::testing::Values(TestConfig(false),
+ TestConfig(true)));
+
+} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index 709f96198c..e9be016143 100644
--- a/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -22,14 +22,12 @@
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
-#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "modules/rtp_rtcp/source/video_fec_generator.h"
@@ -68,20 +66,19 @@ const uint16_t kSeqNum = 33;
const uint32_t kSsrc = 725242;
const uint32_t kRtxSsrc = 12345;
const uint32_t kFlexFecSsrc = 45678;
-const uint16_t kTransportSequenceNumber = 1;
const uint64_t kStartTime = 123456789;
const size_t kMaxPaddingSize = 224u;
const uint8_t kPayloadData[] = {47, 11, 32, 93, 89};
const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
-const char kNoRid[] = "";
-const char kNoMid[] = "";
+const size_t kMaxPaddingLength = 224; // Value taken from rtp_sender.cc.
+const uint32_t kTimestampTicksPerMs = 90; // 90kHz clock.
using ::testing::_;
using ::testing::AllOf;
using ::testing::AtLeast;
using ::testing::Contains;
using ::testing::Each;
-using ::testing::ElementsAreArray;
+using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Field;
using ::testing::Gt;
@@ -92,62 +89,6 @@ using ::testing::Pointee;
using ::testing::Property;
using ::testing::Return;
using ::testing::SizeIs;
-using ::testing::StrictMock;
-
-uint64_t ConvertMsToAbsSendTime(int64_t time_ms) {
- return (((time_ms << 18) + 500) / 1000) & 0x00ffffff;
-}
-
-class LoopbackTransportTest : public webrtc::Transport {
- public:
- LoopbackTransportTest() : total_bytes_sent_(0) {
- receivers_extensions_.Register<TransmissionOffset>(
- kTransmissionTimeOffsetExtensionId);
- receivers_extensions_.Register<AbsoluteSendTime>(
- kAbsoluteSendTimeExtensionId);
- receivers_extensions_.Register<TransportSequenceNumber>(
- kTransportSequenceNumberExtensionId);
- receivers_extensions_.Register<VideoOrientation>(kVideoRotationExtensionId);
- receivers_extensions_.Register<AudioLevel>(kAudioLevelExtensionId);
- receivers_extensions_.Register<VideoTimingExtension>(
- kVideoTimingExtensionId);
- receivers_extensions_.Register<RtpMid>(kMidExtensionId);
- receivers_extensions_.Register<RtpGenericFrameDescriptorExtension00>(
- kGenericDescriptorId);
- receivers_extensions_.Register<RtpStreamId>(kRidExtensionId);
- receivers_extensions_.Register<RepairedRtpStreamId>(
- kRepairedRidExtensionId);
- }
-
- bool SendRtp(const uint8_t* data,
- size_t len,
- const PacketOptions& options) override {
- last_options_ = options;
- total_bytes_sent_ += len;
- sent_packets_.push_back(RtpPacketReceived(&receivers_extensions_));
- EXPECT_TRUE(sent_packets_.back().Parse(data, len));
- return true;
- }
- bool SendRtcp(const uint8_t* data, size_t len) override { return false; }
- const RtpPacketReceived& last_sent_packet() { return sent_packets_.back(); }
- int packets_sent() { return sent_packets_.size(); }
-
- size_t total_bytes_sent_;
- PacketOptions last_options_;
- std::vector<RtpPacketReceived> sent_packets_;
-
- private:
- RtpHeaderExtensionMap receivers_extensions_;
-};
-
-MATCHER_P(SameRtcEventTypeAs, value, "") {
- return value == arg->GetType();
-}
-
-struct TestConfig {
- explicit TestConfig(bool with_overhead) : with_overhead(with_overhead) {}
- bool with_overhead = false;
-};
class MockRtpPacketPacer : public RtpPacketSender {
public:
@@ -160,133 +101,11 @@ class MockRtpPacketPacer : public RtpPacketSender {
(override));
};
-class MockSendSideDelayObserver : public SendSideDelayObserver {
- public:
- MOCK_METHOD(void,
- SendSideDelayUpdated,
- (int, int, uint64_t, uint32_t),
- (override));
-};
-
-class MockSendPacketObserver : public SendPacketObserver {
- public:
- MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override));
-};
-
-class MockTransportFeedbackObserver : public TransportFeedbackObserver {
- public:
- MOCK_METHOD(void, OnAddPacket, (const RtpPacketSendInfo&), (override));
- MOCK_METHOD(void,
- OnTransportFeedback,
- (const rtcp::TransportFeedback&),
- (override));
-};
-
-class StreamDataTestCallback : public StreamDataCountersCallback {
- public:
- StreamDataTestCallback()
- : StreamDataCountersCallback(), ssrc_(0), counters_() {}
- ~StreamDataTestCallback() override = default;
-
- void DataCountersUpdated(const StreamDataCounters& counters,
- uint32_t ssrc) override {
- ssrc_ = ssrc;
- counters_ = counters;
- }
-
- uint32_t ssrc_;
- StreamDataCounters counters_;
-
- void MatchPacketCounter(const RtpPacketCounter& expected,
- const RtpPacketCounter& actual) {
- EXPECT_EQ(expected.payload_bytes, actual.payload_bytes);
- EXPECT_EQ(expected.header_bytes, actual.header_bytes);
- EXPECT_EQ(expected.padding_bytes, actual.padding_bytes);
- EXPECT_EQ(expected.packets, actual.packets);
- }
-
- void Matches(uint32_t ssrc, const StreamDataCounters& counters) {
- EXPECT_EQ(ssrc, ssrc_);
- MatchPacketCounter(counters.transmitted, counters_.transmitted);
- MatchPacketCounter(counters.retransmitted, counters_.retransmitted);
- EXPECT_EQ(counters.fec.packets, counters_.fec.packets);
- }
-};
-
-class TaskQueuePacketSender : public RtpPacketSender {
- public:
- TaskQueuePacketSender(TimeController* time_controller,
- std::unique_ptr<RtpPacketSender> packet_sender)
- : time_controller_(time_controller),
- packet_sender_(std::move(packet_sender)),
- queue_(time_controller_->CreateTaskQueueFactory()->CreateTaskQueue(
- "PacerQueue",
- TaskQueueFactory::Priority::NORMAL)) {}
-
- void EnqueuePackets(
- std::vector<std::unique_ptr<RtpPacketToSend>> packets) override {
- queue_->PostTask(ToQueuedTask([sender = packet_sender_.get(),
- packets_ = std::move(packets)]() mutable {
- sender->EnqueuePackets(std::move(packets_));
- }));
- // Trigger task we just enqueued to be executed by updating the simulated
- // time controller.
- time_controller_->AdvanceTime(TimeDelta::Zero());
- }
-
- TaskQueueBase* task_queue() const { return queue_.get(); }
-
- TimeController* const time_controller_;
- std::unique_ptr<RtpPacketSender> packet_sender_;
- std::unique_ptr<TaskQueueBase, TaskQueueDeleter> queue_;
-};
-
-// Mimics ModuleRtpRtcp::RtpSenderContext.
-// TODO(sprang): Split up unit tests and test these components individually
-// wherever possible.
-struct RtpSenderContext : public SequenceNumberAssigner {
- RtpSenderContext(const RtpRtcpInterface::Configuration& config,
- TimeController* time_controller)
- : time_controller_(time_controller),
- packet_history_(config.clock, config.enable_rtx_padding_prioritization),
- packet_sender_(config, &packet_history_),
- pacer_(time_controller,
- std::make_unique<RtpSenderEgress::NonPacedPacketSender>(
- &packet_sender_,
- this)),
- packet_generator_(config,
- &packet_history_,
- config.paced_sender ? config.paced_sender : &pacer_) {
- }
- void AssignSequenceNumber(RtpPacketToSend* packet) override {
- packet_generator_.AssignSequenceNumber(packet);
- }
- // Inject packet straight into RtpSenderEgress without passing through the
- // pacer, but while still running on the pacer task queue.
- void InjectPacket(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& packet_info) {
- pacer_.task_queue()->PostTask(
- ToQueuedTask([sender_ = &packet_sender_, packet_ = std::move(packet),
- packet_info]() mutable {
- sender_->SendPacket(packet_.get(), packet_info);
- }));
- time_controller_->AdvanceTime(TimeDelta::Zero());
- }
- TimeController* time_controller_;
- RtpPacketHistory packet_history_;
- RtpSenderEgress packet_sender_;
- TaskQueuePacketSender pacer_;
- RTPSender packet_generator_;
-};
-
class FieldTrialConfig : public WebRtcKeyValueConfig {
public:
- FieldTrialConfig()
- : overhead_enabled_(false),
- max_padding_factor_(1200) {}
+ FieldTrialConfig() : max_padding_factor_(1200) {}
~FieldTrialConfig() override {}
- void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; }
void SetMaxPaddingFactor(double factor) { max_padding_factor_ = factor; }
std::string Lookup(absl::string_view key) const override {
@@ -295,20 +114,17 @@ class FieldTrialConfig : public WebRtcKeyValueConfig {
rtc::SimpleStringBuilder ssb(string_buf);
ssb << "factor:" << max_padding_factor_;
return ssb.str();
- } else if (key == "WebRTC-SendSideBwe-WithOverhead") {
- return overhead_enabled_ ? "Enabled" : "Disabled";
}
return "";
}
private:
- bool overhead_enabled_;
double max_padding_factor_;
};
} // namespace
-class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
+class RtpSenderTest : public ::testing::Test {
protected:
RtpSenderTest()
: time_controller_(Timestamp::Millis(kStartTime)),
@@ -323,80 +139,65 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
nullptr,
clock_),
kMarkerBit(true) {
- field_trials_.SetOverHeadEnabled(GetParam().with_overhead);
}
- void SetUp() override { SetUpRtpSender(true, false, false); }
+ void SetUp() override { SetUpRtpSender(true, false, nullptr); }
- RTPSender* rtp_sender() {
- RTC_DCHECK(rtp_sender_context_);
- return &rtp_sender_context_->packet_generator_;
- }
-
- RtpSenderEgress* rtp_egress() {
- RTC_DCHECK(rtp_sender_context_);
- return &rtp_sender_context_->packet_sender_;
- }
-
- void SetUpRtpSender(bool pacer,
- bool populate_network2,
- bool always_send_mid_and_rid) {
- SetUpRtpSender(pacer, populate_network2, always_send_mid_and_rid,
- &flexfec_sender_);
- }
-
- void SetUpRtpSender(bool pacer,
- bool populate_network2,
+ void SetUpRtpSender(bool populate_network2,
bool always_send_mid_and_rid,
VideoFecGenerator* fec_generator) {
+ RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ config.fec_generator = fec_generator;
+ config.populate_network2_timestamp = populate_network2;
+ config.always_send_mid_and_rid = always_send_mid_and_rid;
+ CreateSender(config);
+ }
+
+ RtpRtcpInterface::Configuration GetDefaultConfig() {
RtpRtcpInterface::Configuration config;
config.clock = clock_;
- config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
config.rtx_send_ssrc = kRtxSsrc;
- config.fec_generator = fec_generator;
config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- config.paced_sender = pacer ? &mock_paced_sender_ : nullptr;
- config.populate_network2_timestamp = populate_network2;
- config.rtp_stats_callback = &rtp_stats_callback_;
- config.always_send_mid_and_rid = always_send_mid_and_rid;
+ config.paced_sender = &mock_paced_sender_;
config.field_trials = &field_trials_;
+ return config;
+ }
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
- rtp_sender()->SetSequenceNumber(kSeqNum);
- rtp_sender()->SetTimestampOffset(0);
+ void CreateSender(const RtpRtcpInterface::Configuration& config) {
+ packet_history_ = std::make_unique<RtpPacketHistory>(
+ config.clock, config.enable_rtx_padding_prioritization);
+ rtp_sender_ = std::make_unique<RTPSender>(config, packet_history_.get(),
+ config.paced_sender);
+ rtp_sender_->SetSequenceNumber(kSeqNum);
+ rtp_sender_->SetTimestampOffset(0);
}
GlobalSimulatedTimeController time_controller_;
Clock* const clock_;
NiceMock<MockRtcEventLog> mock_rtc_event_log_;
MockRtpPacketPacer mock_paced_sender_;
- StrictMock<MockSendPacketObserver> send_packet_observer_;
- StrictMock<MockTransportFeedbackObserver> feedback_observer_;
RateLimiter retransmission_rate_limiter_;
FlexfecSender flexfec_sender_;
- std::unique_ptr<RtpSenderContext> rtp_sender_context_;
+ std::unique_ptr<RtpPacketHistory> packet_history_;
+ std::unique_ptr<RTPSender> rtp_sender_;
- LoopbackTransportTest transport_;
const bool kMarkerBit;
FieldTrialConfig field_trials_;
- StreamDataTestCallback rtp_stats_callback_;
std::unique_ptr<RtpPacketToSend> BuildRtpPacket(int payload_type,
bool marker_bit,
uint32_t timestamp,
int64_t capture_time_ms) {
- auto packet = rtp_sender()->AllocatePacket();
+ auto packet = rtp_sender_->AllocatePacket();
packet->SetPayloadType(payload_type);
packet->set_packet_type(RtpPacketMediaType::kVideo);
packet->SetMarker(marker_bit);
packet->SetTimestamp(timestamp);
packet->set_capture_time_ms(capture_time_ms);
- EXPECT_TRUE(rtp_sender()->AssignSequenceNumber(packet.get()));
+ EXPECT_TRUE(rtp_sender_->AssignSequenceNumber(packet.get()));
return packet;
}
@@ -409,8 +210,8 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
packet->set_allow_retransmission(true);
// Packet should be stored in a send bucket.
- EXPECT_TRUE(rtp_sender()->SendToNetwork(
- std::make_unique<RtpPacketToSend>(*packet)));
+ EXPECT_TRUE(
+ rtp_sender_->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
return packet;
}
@@ -420,16 +221,15 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
// because of lack of capacity for the media packet, or for an rtx packet
// containing the media packet.
return SendPacket(kCaptureTimeMs,
- /*payload_length=*/rtp_sender()->MaxRtpPacketSize() -
- rtp_sender()->ExpectedPerPacketOverhead());
+ /*payload_length=*/rtp_sender_->MaxRtpPacketSize() -
+ rtp_sender_->ExpectedPerPacketOverhead());
}
size_t GenerateAndSendPadding(size_t target_size_bytes) {
size_t generated_bytes = 0;
- for (auto& packet :
- rtp_sender()->GeneratePadding(target_size_bytes, true)) {
+ for (auto& packet : rtp_sender_->GeneratePadding(target_size_bytes, true)) {
generated_bytes += packet->payload_size() + packet->padding_size();
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
+ rtp_sender_->SendToNetwork(std::move(packet));
}
return generated_bytes;
}
@@ -442,64 +242,56 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
// RTX needs to be able to read the source packets from the packet store.
// Pick a number of packets to store big enough for any unit test.
constexpr uint16_t kNumberOfPacketsToStore = 100;
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, kNumberOfPacketsToStore);
- rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload);
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
+ rtp_sender_->SetRtxPayloadType(kRtxPayload, kPayload);
+ rtp_sender_->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
}
// Enable sending of the MID header extension for both the primary SSRC and
// the RTX SSRC.
void EnableMidSending(const std::string& mid) {
- rtp_sender()->RegisterRtpHeaderExtension(RtpMid::kUri, kMidExtensionId);
- rtp_sender()->SetMid(mid);
+ rtp_sender_->RegisterRtpHeaderExtension(RtpMid::kUri, kMidExtensionId);
+ rtp_sender_->SetMid(mid);
}
// Enable sending of the RSID header extension for the primary SSRC and the
// RRSID header extension for the RTX SSRC.
void EnableRidSending(const std::string& rid) {
- rtp_sender()->RegisterRtpHeaderExtension(RtpStreamId::kUri,
- kRidExtensionId);
- rtp_sender()->RegisterRtpHeaderExtension(RepairedRtpStreamId::kUri,
- kRepairedRidExtensionId);
- rtp_sender()->SetRid(rid);
+ rtp_sender_->RegisterRtpHeaderExtension(RtpStreamId::kUri, kRidExtensionId);
+ rtp_sender_->RegisterRtpHeaderExtension(RepairedRtpStreamId::kUri,
+ kRepairedRidExtensionId);
+ rtp_sender_->SetRid(rid);
}
};
-// TODO(pbos): Move tests over from WithoutPacer to RtpSenderTest as this is our
-// default code path.
-class RtpSenderTestWithoutPacer : public RtpSenderTest {
- public:
- void SetUp() override { SetUpRtpSender(false, false, false); }
-};
-
-TEST_P(RtpSenderTestWithoutPacer, AllocatePacketSetCsrc) {
+TEST_F(RtpSenderTest, AllocatePacketSetCsrc) {
// Configure rtp_sender with csrc.
std::vector<uint32_t> csrcs;
csrcs.push_back(0x23456789);
- rtp_sender()->SetCsrcs(csrcs);
+ rtp_sender_->SetCsrcs(csrcs);
- auto packet = rtp_sender()->AllocatePacket();
+ auto packet = rtp_sender_->AllocatePacket();
ASSERT_TRUE(packet);
- EXPECT_EQ(rtp_sender()->SSRC(), packet->Ssrc());
+ EXPECT_EQ(rtp_sender_->SSRC(), packet->Ssrc());
EXPECT_EQ(csrcs, packet->Csrcs());
}
-TEST_P(RtpSenderTestWithoutPacer, AllocatePacketReserveExtensions) {
+TEST_F(RtpSenderTest, AllocatePacketReserveExtensions) {
// Configure rtp_sender with extensions.
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransmissionOffset::kUri, kTransmissionTimeOffsetExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
AbsoluteSendTime::kUri, kAbsoluteSendTimeExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(AudioLevel::kUri,
- kAudioLevelExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(AudioLevel::kUri,
+ kAudioLevelExtensionId));
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
VideoOrientation::kUri, kVideoRotationExtensionId));
- auto packet = rtp_sender()->AllocatePacket();
+ auto packet = rtp_sender_->AllocatePacket();
ASSERT_TRUE(packet);
// Preallocate BWE extensions RtpSender set itself.
@@ -511,953 +303,284 @@ TEST_P(RtpSenderTestWithoutPacer, AllocatePacketReserveExtensions) {
EXPECT_FALSE(packet->HasExtension<VideoOrientation>());
}
-TEST_P(RtpSenderTest, PaddingAlwaysAllowedOnAudio) {
- MockTransport transport;
- RtpRtcpInterface::Configuration config;
+TEST_F(RtpSenderTest, PaddingAlwaysAllowedOnAudio) {
+ RtpRtcpInterface::Configuration config = GetDefaultConfig();
config.audio = true;
- config.clock = clock_;
- config.outgoing_transport = &transport;
- config.paced_sender = &mock_paced_sender_;
- config.local_media_ssrc = kSsrc;
- config.event_log = &mock_rtc_event_log_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- rtp_sender()->SetTimestampOffset(0);
-
- std::unique_ptr<RtpPacketToSend> audio_packet =
- rtp_sender()->AllocatePacket();
+ CreateSender(config);
+
+ std::unique_ptr<RtpPacketToSend> audio_packet = rtp_sender_->AllocatePacket();
// Padding on audio stream allowed regardless of marker in the last packet.
audio_packet->SetMarker(false);
audio_packet->SetPayloadType(kPayload);
- rtp_sender()->AssignSequenceNumber(audio_packet.get());
+ rtp_sender_->AssignSequenceNumber(audio_packet.get());
const size_t kPaddingSize = 59;
- EXPECT_CALL(transport, SendRtp(_, kPaddingSize + kRtpHeaderSize, _))
- .WillOnce(Return(true));
+
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(AllOf(
+ Pointee(Property(&RtpPacketToSend::packet_type,
+ RtpPacketMediaType::kPadding)),
+ Pointee(Property(&RtpPacketToSend::padding_size, kPaddingSize))))));
EXPECT_EQ(kPaddingSize, GenerateAndSendPadding(kPaddingSize));
// Requested padding size is too small, will send a larger one.
const size_t kMinPaddingSize = 50;
- EXPECT_CALL(transport, SendRtp(_, kMinPaddingSize + kRtpHeaderSize, _))
- .WillOnce(Return(true));
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(
+ AllOf(Pointee(Property(&RtpPacketToSend::packet_type,
+ RtpPacketMediaType::kPadding)),
+ Pointee(Property(&RtpPacketToSend::padding_size,
+ kMinPaddingSize))))));
EXPECT_EQ(kMinPaddingSize, GenerateAndSendPadding(kMinPaddingSize - 5));
}
-TEST_P(RtpSenderTestWithoutPacer,
- TransportFeedbackObserverGetsCorrectByteCount) {
- constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8;
- constexpr size_t kPayloadSize = 1400;
-
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.transport_feedback_callback = &feedback_observer_;
- config.event_log = &mock_rtc_event_log_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- config.field_trials = &field_trials_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
-
- const size_t expected_bytes = GetParam().with_overhead
- ? kPayloadSize + kRtpOverheadBytesPerPacket
- : kPayloadSize;
-
- EXPECT_CALL(feedback_observer_,
- OnAddPacket(AllOf(
- Field(&RtpPacketSendInfo::ssrc, rtp_sender()->SSRC()),
- Field(&RtpPacketSendInfo::transport_sequence_number,
- kTransportSequenceNumber),
- Field(&RtpPacketSendInfo::rtp_sequence_number,
- rtp_sender()->SequenceNumber()),
- Field(&RtpPacketSendInfo::length, expected_bytes),
- Field(&RtpPacketSendInfo::pacing_info, PacedPacketInfo()))))
- .Times(1);
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(),
- kRtpOverheadBytesPerPacket);
- SendPacket(clock_->TimeInMilliseconds(), kPayloadSize);
-}
-
-TEST_P(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.transport_feedback_callback = &feedback_observer_;
- config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
-
- EXPECT_CALL(send_packet_observer_,
- OnSendPacket(kTransportSequenceNumber, _, _))
- .Times(1);
-
- EXPECT_CALL(feedback_observer_,
- OnAddPacket(AllOf(
- Field(&RtpPacketSendInfo::ssrc, rtp_sender()->SSRC()),
- Field(&RtpPacketSendInfo::transport_sequence_number,
- kTransportSequenceNumber),
- Field(&RtpPacketSendInfo::rtp_sequence_number,
- rtp_sender()->SequenceNumber()),
- Field(&RtpPacketSendInfo::pacing_info, PacedPacketInfo()))))
- .Times(1);
-
- SendGenericPacket();
-
- const auto& packet = transport_.last_sent_packet();
- uint16_t transport_seq_no;
- ASSERT_TRUE(packet.GetExtension<TransportSequenceNumber>(&transport_seq_no));
- EXPECT_EQ(kTransportSequenceNumber, transport_seq_no);
- EXPECT_EQ(transport_.last_options_.packet_id, transport_seq_no);
- EXPECT_TRUE(transport_.last_options_.included_in_allocation);
-}
-
-TEST_P(RtpSenderTestWithoutPacer, PacketOptionsNoRetransmission) {
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.transport_feedback_callback = &feedback_observer_;
- config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- SendGenericPacket();
-
- EXPECT_FALSE(transport_.last_options_.is_retransmit);
-}
-
-TEST_P(RtpSenderTestWithoutPacer,
- SetsIncludedInFeedbackWhenTransportSequenceNumberExtensionIsRegistered) {
- SetUpRtpSender(false, false, false);
- rtp_sender()->RegisterRtpHeaderExtension(TransportSequenceNumber::kUri,
- kTransportSequenceNumberExtensionId);
- EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- SendGenericPacket();
- EXPECT_TRUE(transport_.last_options_.included_in_feedback);
-}
-
-TEST_P(
- RtpSenderTestWithoutPacer,
- SetsIncludedInAllocationWhenTransportSequenceNumberExtensionIsRegistered) {
- SetUpRtpSender(false, false, false);
- rtp_sender()->RegisterRtpHeaderExtension(TransportSequenceNumber::kUri,
- kTransportSequenceNumberExtensionId);
- EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- SendGenericPacket();
- EXPECT_TRUE(transport_.last_options_.included_in_allocation);
-}
-
-TEST_P(RtpSenderTestWithoutPacer,
- SetsIncludedInAllocationWhenForcedAsPartOfAllocation) {
- SetUpRtpSender(false, false, false);
- rtp_egress()->ForceIncludeSendPacketsInAllocation(true);
- SendGenericPacket();
- EXPECT_FALSE(transport_.last_options_.included_in_feedback);
- EXPECT_TRUE(transport_.last_options_.included_in_allocation);
-}
-
-TEST_P(RtpSenderTestWithoutPacer, DoesnSetIncludedInAllocationByDefault) {
- SetUpRtpSender(false, false, false);
- SendGenericPacket();
- EXPECT_FALSE(transport_.last_options_.included_in_feedback);
- EXPECT_FALSE(transport_.last_options_.included_in_allocation);
-}
-
-TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
- StrictMock<MockSendSideDelayObserver> send_side_delay_observer_;
-
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.send_side_delay_observer = &send_side_delay_observer_;
- config.event_log = &mock_rtc_event_log_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
-
- const uint8_t kPayloadType = 127;
- const absl::optional<VideoCodecType> kCodecType =
- VideoCodecType::kVideoCodecGeneric;
-
- const uint32_t kCaptureTimeMsToRtpTimestamp = 90; // 90 kHz clock
- RTPVideoHeader video_header;
-
- // Send packet with 10 ms send-side delay. The average, max and total should
- // be 10 ms.
- EXPECT_CALL(send_side_delay_observer_,
- SendSideDelayUpdated(10, 10, 10, kSsrc))
- .Times(1);
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- time_controller_.AdvanceTime(TimeDelta::Millis(10));
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, video_header,
- kDefaultExpectedRetransmissionTimeMs));
-
- // Send another packet with 20 ms delay. The average, max and total should be
- // 15, 20 and 30 ms respectively.
- EXPECT_CALL(send_side_delay_observer_,
- SendSideDelayUpdated(15, 20, 30, kSsrc))
- .Times(1);
- time_controller_.AdvanceTime(TimeDelta::Millis(10));
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, video_header,
- kDefaultExpectedRetransmissionTimeMs));
-
- // Send another packet at the same time, which replaces the last packet.
- // Since this packet has 0 ms delay, the average is now 5 ms and max is 10 ms.
- // The total counter stays the same though.
- // TODO(terelius): Is is not clear that this is the right behavior.
- EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(5, 10, 30, kSsrc))
- .Times(1);
- capture_time_ms = clock_->TimeInMilliseconds();
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, video_header,
- kDefaultExpectedRetransmissionTimeMs));
-
- // Send a packet 1 second later. The earlier packets should have timed
- // out, so both max and average should be the delay of this packet. The total
- // keeps increasing.
- time_controller_.AdvanceTime(TimeDelta::Millis(1000));
- capture_time_ms = clock_->TimeInMilliseconds();
- time_controller_.AdvanceTime(TimeDelta::Millis(1));
- EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(1, 1, 31, kSsrc))
- .Times(1);
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp,
- capture_time_ms, kPayloadData, video_header,
- kDefaultExpectedRetransmissionTimeMs));
-}
-
-TEST_P(RtpSenderTestWithoutPacer, OnSendPacketUpdated) {
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
- EXPECT_CALL(send_packet_observer_,
- OnSendPacket(kTransportSequenceNumber, _, _))
- .Times(1);
-
- SendGenericPacket();
-}
-
-TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) {
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.paced_sender = &mock_paced_sender_;
- config.local_media_ssrc = kSsrc;
- config.transport_feedback_callback = &feedback_observer_;
- config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- rtp_sender()->SetSequenceNumber(kSeqNum);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
-
- EXPECT_CALL(send_packet_observer_,
- OnSendPacket(kTransportSequenceNumber, _, _))
- .Times(1);
- EXPECT_CALL(feedback_observer_,
- OnAddPacket(AllOf(
- Field(&RtpPacketSendInfo::ssrc, rtp_sender()->SSRC()),
- Field(&RtpPacketSendInfo::transport_sequence_number,
- kTransportSequenceNumber),
- Field(&RtpPacketSendInfo::rtp_sequence_number,
- rtp_sender()->SequenceNumber()),
- Field(&RtpPacketSendInfo::pacing_info, PacedPacketInfo()))))
- .Times(1);
+TEST_F(RtpSenderTest, SendToNetworkForwardsPacketsToPacer) {
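+  // SendToNetwork() is expected to forward the packet to the pacer.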
+ auto packet = BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, 0);
+ int64_t now_ms = clock_->TimeInMilliseconds();
EXPECT_CALL(
mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
+ EnqueuePackets(ElementsAre(AllOf(
Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
- auto packet = SendGenericPacket();
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- // Transport sequence number is set by PacketRouter, before SendPacket().
- packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- uint16_t transport_seq_no;
+ Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)),
+ Pointee(Property(&RtpPacketToSend::capture_time_ms, now_ms))))));
EXPECT_TRUE(
- transport_.last_sent_packet().GetExtension<TransportSequenceNumber>(
- &transport_seq_no));
- EXPECT_EQ(kTransportSequenceNumber, transport_seq_no);
- EXPECT_EQ(transport_.last_options_.packet_id, transport_seq_no);
+ rtp_sender_->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
}
-TEST_P(RtpSenderTest, WritesPacerExitToTimingExtension) {
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+TEST_F(RtpSenderTest, ReSendPacketForwardsPacketsToPacer) {
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- VideoTimingExtension::kUri, kVideoTimingExtensionId));
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- auto packet = rtp_sender()->AllocatePacket();
- packet->SetPayloadType(kPayload);
- packet->SetMarker(true);
- packet->SetTimestamp(kTimestamp);
- packet->set_capture_time_ms(capture_time_ms);
- const VideoSendTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true};
- packet->SetExtension<VideoTimingExtension>(kVideoTiming);
- EXPECT_TRUE(rtp_sender()->AssignSequenceNumber(packet.get()));
- size_t packet_size = packet->size();
-
- const int kStoredTimeInMs = 100;
- packet->set_packet_type(RtpPacketMediaType::kVideo);
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ auto packet = BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, now_ms);
+ uint16_t seq_no = packet->SequenceNumber();
packet->set_allow_retransmission(true);
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets(Contains(Pointee(Property(
- &RtpPacketToSend::Ssrc, kSsrc)))));
- EXPECT_TRUE(
- rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(1, transport_.packets_sent());
- EXPECT_EQ(packet_size, transport_.last_sent_packet().size());
-
- VideoSendTiming video_timing;
- EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
- &video_timing));
- EXPECT_EQ(kStoredTimeInMs, video_timing.pacer_exit_delta_ms);
-}
-
-TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithPacer) {
- SetUpRtpSender(/*pacer=*/true, /*populate_network2=*/true, false);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- VideoTimingExtension::kUri, kVideoTimingExtensionId));
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- auto packet = rtp_sender()->AllocatePacket();
- packet->SetPayloadType(kPayload);
- packet->SetMarker(true);
- packet->SetTimestamp(kTimestamp);
- packet->set_capture_time_ms(capture_time_ms);
- const uint16_t kPacerExitMs = 1234u;
- const VideoSendTiming kVideoTiming = {0u, 0u, 0u, kPacerExitMs, 0u, 0u, true};
- packet->SetExtension<VideoTimingExtension>(kVideoTiming);
- EXPECT_TRUE(rtp_sender()->AssignSequenceNumber(packet.get()));
- size_t packet_size = packet->size();
-
- const int kStoredTimeInMs = 100;
+ packet_history_->PutRtpPacket(std::move(packet), now_ms);
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->set_allow_retransmission(true);
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets(Contains(Pointee(Property(
- &RtpPacketToSend::Ssrc, kSsrc)))));
- EXPECT_TRUE(
- rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- EXPECT_EQ(1, transport_.packets_sent());
- EXPECT_EQ(packet_size, transport_.last_sent_packet().size());
-
- VideoSendTiming video_timing;
- EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
- &video_timing));
- EXPECT_EQ(kStoredTimeInMs, video_timing.network2_timestamp_delta_ms);
- EXPECT_EQ(kPacerExitMs, video_timing.pacer_exit_delta_ms);
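+  // ReSendPacket() should fetch the packet from the history and enqueue it
+  // on the pacer, marked as a retransmission.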
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(AllOf(
+ Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
+ Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)),
+ Pointee(Property(&RtpPacketToSend::capture_time_ms, now_ms)),
+ Pointee(Property(&RtpPacketToSend::packet_type,
+ RtpPacketMediaType::kRetransmission))))));
+ EXPECT_TRUE(rtp_sender_->ReSendPacket(seq_no));
}
-TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithoutPacer) {
- SetUpRtpSender(/*pacer=*/false, /*populate_network2=*/true, false);
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- VideoTimingExtension::kUri, kVideoTimingExtensionId));
- auto packet = rtp_sender()->AllocatePacket();
- packet->SetMarker(true);
- packet->set_capture_time_ms(clock_->TimeInMilliseconds());
- const VideoSendTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true};
- packet->SetExtension<VideoTimingExtension>(kVideoTiming);
- packet->set_allow_retransmission(true);
- EXPECT_TRUE(rtp_sender()->AssignSequenceNumber(packet.get()));
- packet->set_packet_type(RtpPacketMediaType::kVideo);
-
- const int kPropagateTimeMs = 10;
- time_controller_.AdvanceTime(TimeDelta::Millis(kPropagateTimeMs));
+// This test sends 1 regular video packet, then 4 padding packets, and then
+// 1 more regular packet.
+TEST_F(RtpSenderTest, SendPadding) {
+ constexpr int kNumPaddingPackets = 4;
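+  // The initial media packet is expected to be forwarded to the pacer.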
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets);
+ std::unique_ptr<RtpPacketToSend> media_packet =
+ SendPacket(/*capture_time_ms=*/clock_->TimeInMilliseconds(),
+ /*payload_size=*/100);
+
+ // Wait 50 ms before generating each padding packet.
+ for (int i = 0; i < kNumPaddingPackets; ++i) {
+ time_controller_.AdvanceTime(TimeDelta::Millis(50));
+ const size_t kPaddingTargetBytes = 100; // Request 100 bytes of padding.
+
+    // Padding should be sent on the media SSRC, with a continuous sequence
+    // number range. Its size will be forced to the maximum padding size and
+    // its timestamp shall be that of the last media packet.
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(AllOf(
+ Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
+ Pointee(Property(&RtpPacketToSend::SequenceNumber,
+ media_packet->SequenceNumber() + i + 1)),
+ Pointee(Property(&RtpPacketToSend::padding_size,
+ kMaxPaddingLength)),
+ Pointee(Property(&RtpPacketToSend::Timestamp,
+ media_packet->Timestamp()))))));
+ std::vector<std::unique_ptr<RtpPacketToSend>> padding_packets =
+ rtp_sender_->GeneratePadding(kPaddingTargetBytes,
+ /*media_has_been_sent=*/true);
+ ASSERT_THAT(padding_packets, SizeIs(1));
+ rtp_sender_->SendToNetwork(std::move(padding_packets[0]));
+ }
- EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet)));
+ // Send a regular video packet again.
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(AllOf(
+ Pointee(Property(
+ &RtpPacketToSend::SequenceNumber,
+ media_packet->SequenceNumber() + kNumPaddingPackets + 1)),
+ Pointee(Property(&RtpPacketToSend::Timestamp,
+ Gt(media_packet->Timestamp())))))));
- EXPECT_EQ(1, transport_.packets_sent());
- absl::optional<VideoSendTiming> video_timing =
- transport_.last_sent_packet().GetExtension<VideoTimingExtension>();
- ASSERT_TRUE(video_timing);
- EXPECT_EQ(kPropagateTimeMs, video_timing->network2_timestamp_delta_ms);
+ std::unique_ptr<RtpPacketToSend> next_media_packet =
+ SendPacket(/*capture_time_ms=*/clock_->TimeInMilliseconds(),
+ /*payload_size=*/100);
}
-TEST_P(RtpSenderTest, TrafficSmoothingWithExtensions) {
- EXPECT_CALL(mock_rtc_event_log_,
- LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)));
-
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransmissionOffset::kUri, kTransmissionTimeOffsetExtensionId));
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- AbsoluteSendTime::kUri, kAbsoluteSendTimeExtensionId));
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- auto packet =
- BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, capture_time_ms);
- size_t packet_size = packet->size();
+TEST_F(RtpSenderTest, NoPaddingAsFirstPacketWithoutBweExtensions) {
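+  // Without BWE header extensions registered, no padding can be generated
+  // before the first media packet has been sent.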
+ EXPECT_THAT(rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/false),
+ IsEmpty());
- const int kStoredTimeInMs = 100;
- EXPECT_CALL(
- mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
- Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->set_allow_retransmission(true);
- EXPECT_TRUE(
- rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- EXPECT_EQ(0, transport_.packets_sent());
- time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- // Process send bucket. Packet should now be sent.
- EXPECT_EQ(1, transport_.packets_sent());
- EXPECT_EQ(packet_size, transport_.last_sent_packet().size());
-
- webrtc::RTPHeader rtp_header;
- transport_.last_sent_packet().GetHeader(&rtp_header);
-
- // Verify transmission time offset.
- EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
- uint64_t expected_send_time =
- ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
- EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
+ // Don't send padding before media even with RTX.
+ EnableRtx();
+ EXPECT_THAT(rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/false),
+ IsEmpty());
}
-TEST_P(RtpSenderTest, TrafficSmoothingRetransmits) {
- EXPECT_CALL(mock_rtc_event_log_,
- LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)));
-
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransmissionOffset::kUri, kTransmissionTimeOffsetExtensionId));
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- AbsoluteSendTime::kUri, kAbsoluteSendTimeExtensionId));
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- auto packet =
- BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, capture_time_ms);
- size_t packet_size = packet->size();
-
- // Packet should be stored in a send bucket.
- EXPECT_CALL(
- mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
- Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->set_allow_retransmission(true);
- EXPECT_TRUE(
- rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- // Immediately process send bucket and send packet.
- rtp_sender_context_->InjectPacket(std::make_unique<RtpPacketToSend>(*packet),
- PacedPacketInfo());
-
- EXPECT_EQ(1, transport_.packets_sent());
+TEST_F(RtpSenderTest, AllowPaddingAsFirstPacketOnRtxWithTransportCc) {
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
+ TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
- // Retransmit packet.
- const int kStoredTimeInMs = 100;
- time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
+  // Padding can't be sent as the first packet on the media SSRC since we
+  // don't know what payload type to assign.
+ EXPECT_THAT(rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/false),
+ IsEmpty());
- EXPECT_CALL(mock_rtc_event_log_,
- LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)));
- packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- packet->set_retransmitted_sequence_number(kSeqNum);
- EXPECT_CALL(
- mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
- Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
- EXPECT_EQ(static_cast<int>(packet_size), rtp_sender()->ReSendPacket(kSeqNum));
- EXPECT_EQ(1, transport_.packets_sent());
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- // Process send bucket. Packet should now be sent.
- EXPECT_EQ(2, transport_.packets_sent());
- EXPECT_EQ(packet_size, transport_.last_sent_packet().size());
-
- webrtc::RTPHeader rtp_header;
- transport_.last_sent_packet().GetHeader(&rtp_header);
-
- // Verify transmission time offset.
- EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
- uint64_t expected_send_time =
- ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
- EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
+  // With transport-cc, padding can be sent as first packet on the RTX SSRC.
+ EnableRtx();
+ EXPECT_THAT(rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/false),
+ Not(IsEmpty()));
}
-// This test sends 1 regular video packet, then 4 padding packets, and then
-// 1 more regular packet.
-TEST_P(RtpSenderTest, SendPadding) {
- // Make all (non-padding) packets go to send queue.
- EXPECT_CALL(mock_rtc_event_log_,
- LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)))
- .Times(1 + 4 + 1);
-
- uint16_t seq_num = kSeqNum;
- uint32_t timestamp = kTimestamp;
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- size_t rtp_header_len = kRtpHeaderSize;
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransmissionOffset::kUri, kTransmissionTimeOffsetExtensionId));
- rtp_header_len += 4; // 4 bytes extension.
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+TEST_F(RtpSenderTest, AllowPaddingAsFirstPacketOnRtxWithAbsSendTime) {
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
AbsoluteSendTime::kUri, kAbsoluteSendTimeExtensionId));
- rtp_header_len += 4; // 4 bytes extension.
- rtp_header_len += 4; // 4 extra bytes common to all extension headers.
-
- webrtc::RTPHeader rtp_header;
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- auto packet =
- BuildRtpPacket(kPayload, kMarkerBit, timestamp, capture_time_ms);
- const uint32_t media_packet_timestamp = timestamp;
- size_t packet_size = packet->size();
- int total_packets_sent = 0;
- const int kStoredTimeInMs = 100;
+  // Padding can't be sent as the first packet on the media SSRC since we
+  // don't know what payload type to assign.
+ EXPECT_THAT(rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/false),
+ IsEmpty());
- // Packet should be stored in a send bucket.
- EXPECT_CALL(
- mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
- Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->set_allow_retransmission(true);
- EXPECT_TRUE(
- rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- EXPECT_EQ(total_packets_sent, transport_.packets_sent());
- time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs));
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- ++seq_num;
-
- // Packet should now be sent. This test doesn't verify the regular video
- // packet, since it is tested in another test.
- EXPECT_EQ(++total_packets_sent, transport_.packets_sent());
- timestamp += 90 * kStoredTimeInMs;
-
- // Send padding 4 times, waiting 50 ms between each.
- for (int i = 0; i < 4; ++i) {
- const int kPaddingPeriodMs = 50;
- const size_t kPaddingBytes = 100;
- const size_t kMaxPaddingLength = 224; // Value taken from rtp_sender.cc.
- // Padding will be forced to full packets.
- EXPECT_EQ(kMaxPaddingLength, GenerateAndSendPadding(kPaddingBytes));
-
- // Process send bucket. Padding should now be sent.
- EXPECT_EQ(++total_packets_sent, transport_.packets_sent());
- EXPECT_EQ(kMaxPaddingLength + rtp_header_len,
- transport_.last_sent_packet().size());
-
- transport_.last_sent_packet().GetHeader(&rtp_header);
- EXPECT_EQ(kMaxPaddingLength, rtp_header.paddingLength);
-
- // Verify sequence number and timestamp. The timestamp should be the same
- // as the last media packet.
- EXPECT_EQ(seq_num++, rtp_header.sequenceNumber);
- EXPECT_EQ(media_packet_timestamp, rtp_header.timestamp);
- // Verify transmission time offset.
- int offset = timestamp - media_packet_timestamp;
- EXPECT_EQ(offset, rtp_header.extension.transmissionTimeOffset);
- uint64_t expected_send_time =
- ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
- EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
- time_controller_.AdvanceTime(TimeDelta::Millis(kPaddingPeriodMs));
- timestamp += 90 * kPaddingPeriodMs;
- }
-
- // Send a regular video packet again.
- capture_time_ms = clock_->TimeInMilliseconds();
- packet = BuildRtpPacket(kPayload, kMarkerBit, timestamp, capture_time_ms);
- packet_size = packet->size();
-
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->set_allow_retransmission(true);
- EXPECT_CALL(
- mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
- Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, seq_num))))));
- EXPECT_TRUE(
- rtp_sender()->SendToNetwork(std::make_unique<RtpPacketToSend>(*packet)));
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- // Process send bucket.
- EXPECT_EQ(++total_packets_sent, transport_.packets_sent());
- EXPECT_EQ(packet_size, transport_.last_sent_packet().size());
- transport_.last_sent_packet().GetHeader(&rtp_header);
-
- // Verify sequence number and timestamp.
- EXPECT_EQ(seq_num, rtp_header.sequenceNumber);
- EXPECT_EQ(timestamp, rtp_header.timestamp);
- // Verify transmission time offset. This packet is sent without delay.
- EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset);
- uint64_t expected_send_time =
- ConvertMsToAbsSendTime(clock_->TimeInMilliseconds());
- EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
+  // With abs-send-time, padding can be sent as first packet on the RTX SSRC.
+ EnableRtx();
+ EXPECT_THAT(rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/false),
+ Not(IsEmpty()));
}
-TEST_P(RtpSenderTest, OnSendPacketUpdated) {
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
-
- EXPECT_CALL(send_packet_observer_,
- OnSendPacket(kTransportSequenceNumber, _, _))
- .Times(1);
+TEST_F(RtpSenderTest, UpdatesTimestampsOnPlainRtxPadding) {
+ EnableRtx();
+  // Timestamps are set based on capture time in RtpSenderTest.
+ const int64_t start_time = clock_->TimeInMilliseconds();
+ const uint32_t start_timestamp = start_time * kTimestampTicksPerMs;
+ // Start by sending one media packet.
EXPECT_CALL(
mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
- Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
- auto packet = SendGenericPacket();
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- EXPECT_EQ(1, transport_.packets_sent());
-}
-
-TEST_P(RtpSenderTest, OnSendPacketNotUpdatedForRetransmits) {
- EXPECT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ EnqueuePackets(ElementsAre(AllOf(
+ Pointee(Property(&RtpPacketToSend::padding_size, 0u)),
+ Pointee(Property(&RtpPacketToSend::Timestamp, start_timestamp)),
+ Pointee(Property(&RtpPacketToSend::capture_time_ms, start_time))))));
+ std::unique_ptr<RtpPacketToSend> media_packet =
+ SendPacket(start_time, /*payload_size=*/600);
+
+ // Advance time before sending padding.
+ const TimeDelta kTimeDiff = TimeDelta::Millis(17);
+ time_controller_.AdvanceTime(kTimeDiff);
+
+ // Timestamps on padding should be offset from the sent media.
+ EXPECT_THAT(
+ rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/true),
+ Each(AllOf(
+ Pointee(Property(&RtpPacketToSend::padding_size, kMaxPaddingLength)),
+ Pointee(Property(
+ &RtpPacketToSend::Timestamp,
+ start_timestamp + (kTimestampTicksPerMs * kTimeDiff.ms()))),
+ Pointee(Property(&RtpPacketToSend::capture_time_ms,
+ start_time + kTimeDiff.ms())))));
+}
+
+TEST_F(RtpSenderTest, KeepsTimestampsOnPayloadPadding) {
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
-
- EXPECT_CALL(send_packet_observer_, OnSendPacket(_, _, _)).Times(0);
+ EnableRtx();
+  // Timestamps are set based on capture time in RtpSenderTest.
+ const int64_t start_time = clock_->TimeInMilliseconds();
+ const uint32_t start_timestamp = start_time * kTimestampTicksPerMs;
+ const size_t kPayloadSize = 600;
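+  // RTX packets prepend the 2-byte original sequence number to the payload.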
+ const size_t kRtxHeaderSize = 2;
+  // Start by sending one media packet and putting it in the packet history.
EXPECT_CALL(
mock_paced_sender_,
- EnqueuePackets(Contains(AllOf(
- Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)),
- Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum))))));
- auto packet = SendGenericPacket();
- packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- packet->SetExtension<TransportSequenceNumber>(kTransportSequenceNumber);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- EXPECT_EQ(1, transport_.packets_sent());
- EXPECT_TRUE(transport_.last_options_.is_retransmit);
-}
-
-TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) {
- const uint8_t kPayloadType = 127;
- const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
- uint8_t payload[] = {47, 11, 32, 93, 89};
-
- // Send keyframe
- RTPVideoHeader video_header;
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, video_header,
- kDefaultExpectedRetransmissionTimeMs));
-
- auto sent_payload = transport_.last_sent_packet().payload();
- uint8_t generic_header = sent_payload[0];
- EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit);
- EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit);
- EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(payload));
-
- // Send delta frame
- payload[0] = 13;
- payload[1] = 42;
- payload[4] = 13;
-
- video_header.frame_type = VideoFrameType::kVideoFrameDelta;
- ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, video_header,
- kDefaultExpectedRetransmissionTimeMs));
-
- sent_payload = transport_.last_sent_packet().payload();
- generic_header = sent_payload[0];
- EXPECT_FALSE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit);
- EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit);
- EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(payload));
-}
-
-TEST_P(RtpSenderTestWithoutPacer, SendRawVideo) {
- const uint8_t kPayloadType = 111;
- const uint8_t payload[] = {11, 22, 33, 44, 55};
-
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
-
- // Send a frame.
- RTPVideoHeader video_header;
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, absl::nullopt, 1234,
- 4321, payload, video_header,
- kDefaultExpectedRetransmissionTimeMs));
-
- auto sent_payload = transport_.last_sent_packet().payload();
- EXPECT_THAT(sent_payload, ElementsAreArray(payload));
-}
-
-TEST_P(RtpSenderTest, SendFlexfecPackets) {
- constexpr uint32_t kTimestamp = 1234;
- constexpr int kMediaPayloadType = 127;
- constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
- constexpr int kFlexfecPayloadType = 118;
- const std::vector<RtpExtension> kNoRtpExtensions;
- const std::vector<RtpExtensionSize> kNoRtpExtensionSizes;
- FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid,
- kNoRtpExtensions, kNoRtpExtensionSizes,
- nullptr /* rtp_state */, clock_);
-
- // Reset |rtp_sender_| to use FlexFEC.
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.paced_sender = &mock_paced_sender_;
- config.local_media_ssrc = kSsrc;
- config.fec_generator = &flexfec_sender_;
- config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- config.field_trials = &field_trials_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- rtp_sender()->SetSequenceNumber(kSeqNum);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
-
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.fec_type = flexfec_sender.GetFecType();
- video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
-
- // Parameters selected to generate a single FEC packet per media packet.
- FecProtectionParams params;
- params.fec_rate = 15;
- params.max_fec_frames = 1;
- params.fec_mask_type = kFecMaskRandom;
- flexfec_sender.SetProtectionParameters(params, params);
-
- uint16_t flexfec_seq_num;
- RTPVideoHeader video_header;
-
- std::unique_ptr<RtpPacketToSend> media_packet;
- std::unique_ptr<RtpPacketToSend> fec_packet;
-
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- for (auto& packet : packets) {
- if (packet->packet_type() == RtpPacketMediaType::kVideo) {
- EXPECT_EQ(packet->Ssrc(), kSsrc);
- EXPECT_EQ(packet->SequenceNumber(), kSeqNum);
- media_packet = std::move(packet);
-
- // Simulate RtpSenderEgress adding packet to fec generator.
- flexfec_sender.AddPacketAndGenerateFec(*media_packet);
- auto fec_packets = flexfec_sender.GetFecPackets();
- EXPECT_EQ(fec_packets.size(), 1u);
- fec_packet = std::move(fec_packets[0]);
- EXPECT_EQ(fec_packet->packet_type(),
- RtpPacketMediaType::kForwardErrorCorrection);
- EXPECT_EQ(fec_packet->Ssrc(), kFlexFecSsrc);
- } else {
- EXPECT_EQ(packet->packet_type(),
- RtpPacketMediaType::kForwardErrorCorrection);
- fec_packet = std::move(packet);
- EXPECT_EQ(fec_packet->Ssrc(), kFlexFecSsrc);
- }
- }
- });
-
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(),
- kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs));
- ASSERT_TRUE(media_packet != nullptr);
- ASSERT_TRUE(fec_packet != nullptr);
-
- flexfec_seq_num = fec_packet->SequenceNumber();
- rtp_sender_context_->InjectPacket(std::move(media_packet), PacedPacketInfo());
- rtp_sender_context_->InjectPacket(std::move(fec_packet), PacedPacketInfo());
-
- ASSERT_EQ(2, transport_.packets_sent());
- const RtpPacketReceived& sent_media_packet = transport_.sent_packets_[0];
- EXPECT_EQ(kMediaPayloadType, sent_media_packet.PayloadType());
- EXPECT_EQ(kSeqNum, sent_media_packet.SequenceNumber());
- EXPECT_EQ(kSsrc, sent_media_packet.Ssrc());
- const RtpPacketReceived& sent_flexfec_packet = transport_.sent_packets_[1];
- EXPECT_EQ(kFlexfecPayloadType, sent_flexfec_packet.PayloadType());
- EXPECT_EQ(flexfec_seq_num, sent_flexfec_packet.SequenceNumber());
- EXPECT_EQ(kFlexFecSsrc, sent_flexfec_packet.Ssrc());
-}
-
-TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) {
- constexpr uint32_t kTimestamp = 1234;
- constexpr int kMediaPayloadType = 127;
- constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
- constexpr int kFlexfecPayloadType = 118;
- const std::vector<RtpExtension> kNoRtpExtensions;
- const std::vector<RtpExtensionSize> kNoRtpExtensionSizes;
- FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid,
- kNoRtpExtensions, kNoRtpExtensionSizes,
- nullptr /* rtp_state */, clock_);
-
- // Reset |rtp_sender_| to use FlexFEC.
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.fec_generator = &flexfec_sender;
- config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- config.field_trials = &field_trials_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- rtp_sender()->SetSequenceNumber(kSeqNum);
-
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.fec_type = flexfec_sender.GetFecType();
- video_config.fec_overhead_bytes = flexfec_sender_.MaxPacketOverhead();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
-
- // Parameters selected to generate a single FEC packet per media packet.
- FecProtectionParams params;
- params.fec_rate = 15;
- params.max_fec_frames = 1;
- params.fec_mask_type = kFecMaskRandom;
- rtp_egress()->SetFecProtectionParameters(params, params);
-
- EXPECT_CALL(mock_rtc_event_log_,
- LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing)))
- .Times(2);
- RTPVideoHeader video_header;
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(),
- kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs));
-
- ASSERT_EQ(2, transport_.packets_sent());
- const RtpPacketReceived& media_packet = transport_.sent_packets_[0];
- EXPECT_EQ(kMediaPayloadType, media_packet.PayloadType());
- EXPECT_EQ(kSsrc, media_packet.Ssrc());
- const RtpPacketReceived& flexfec_packet = transport_.sent_packets_[1];
- EXPECT_EQ(kFlexfecPayloadType, flexfec_packet.PayloadType());
- EXPECT_EQ(kFlexFecSsrc, flexfec_packet.Ssrc());
+ EnqueuePackets(ElementsAre(AllOf(
+ Pointee(Property(&RtpPacketToSend::padding_size, 0u)),
+ Pointee(Property(&RtpPacketToSend::Timestamp, start_timestamp)),
+ Pointee(Property(&RtpPacketToSend::capture_time_ms, start_time))))));
+ std::unique_ptr<RtpPacketToSend> media_packet =
+ SendPacket(start_time, kPayloadSize);
+ packet_history_->PutRtpPacket(std::move(media_packet), start_time);
+
+ // Advance time before sending padding.
+ const TimeDelta kTimeDiff = TimeDelta::Millis(17);
+ time_controller_.AdvanceTime(kTimeDiff);
+
+ // Timestamps on payload padding should match the original media packet.
+ EXPECT_THAT(
+ rtp_sender_->GeneratePadding(/*target_size_bytes=*/100,
+ /*media_has_been_sent=*/true),
+ Each(AllOf(
+ Pointee(Property(&RtpPacketToSend::padding_size, 0u)),
+ Pointee(Property(&RtpPacketToSend::payload_size,
+ kPayloadSize + kRtxHeaderSize)),
+ Pointee(Property(&RtpPacketToSend::Timestamp, start_timestamp)),
+ Pointee(Property(&RtpPacketToSend::capture_time_ms, start_time)))));
}
// Test that the MID header extension is included on sent packets when
// configured.
-TEST_P(RtpSenderTestWithoutPacer, MidIncludedOnSentPackets) {
+TEST_F(RtpSenderTest, MidIncludedOnSentPackets) {
const char kMid[] = "mid";
-
EnableMidSending(kMid);
- // Send a couple packets.
+ // Send a couple of packets; expect both to have the MID set.
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(
+ Property(&RtpPacketToSend::GetExtension<RtpMid>, kMid)))))
+ .Times(2);
SendGenericPacket();
SendGenericPacket();
-
- // Expect both packets to have the MID set.
- ASSERT_EQ(2u, transport_.sent_packets_.size());
- for (const RtpPacketReceived& packet : transport_.sent_packets_) {
- std::string mid;
- ASSERT_TRUE(packet.GetExtension<RtpMid>(&mid));
- EXPECT_EQ(kMid, mid);
- }
}
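The rewritten expectations in these tests all use the same GoogleMock composition: the pacer mock receives a vector of std::unique_ptr<RtpPacketToSend>, which is matched element-wise with ElementsAre, dereferenced with Pointee, and inspected with Property/AllOf. A minimal standalone sketch of that pattern with toy types (not the WebRTC classes):

// Toy example of the matcher composition used above; ToyPacket stands in for
// RtpPacketToSend and is not part of this change.
#include <cstdint>
#include <memory>
#include <vector>
#include "gmock/gmock.h"
#include "gtest/gtest.h"

using ::testing::AllOf;
using ::testing::ElementsAre;
using ::testing::Pointee;
using ::testing::Property;

class ToyPacket {
 public:
  ToyPacket(uint32_t ssrc, uint16_t seq) : ssrc_(ssrc), seq_(seq) {}
  uint32_t Ssrc() const { return ssrc_; }
  uint16_t SequenceNumber() const { return seq_; }

 private:
  uint32_t ssrc_;
  uint16_t seq_;
};

TEST(MatcherSketch, MatchesVectorOfUniquePtrs) {
  std::vector<std::unique_ptr<ToyPacket>> packets;
  packets.push_back(std::make_unique<ToyPacket>(/*ssrc=*/0x1234, /*seq=*/7));

  // Dereference each element with Pointee, then check accessors via Property.
  EXPECT_THAT(packets, ElementsAre(Pointee(AllOf(
                           Property(&ToyPacket::Ssrc, 0x1234u),
                           Property(&ToyPacket::SequenceNumber, 7)))));
}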
-TEST_P(RtpSenderTestWithoutPacer, RidIncludedOnSentPackets) {
+TEST_F(RtpSenderTest, RidIncludedOnSentPackets) {
const char kRid[] = "f";
-
EnableRidSending(kRid);
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(Property(
+ &RtpPacketToSend::GetExtension<RtpStreamId>, kRid)))));
SendGenericPacket();
-
- ASSERT_EQ(1u, transport_.sent_packets_.size());
- const RtpPacketReceived& packet = transport_.sent_packets_[0];
- std::string rid;
- ASSERT_TRUE(packet.GetExtension<RtpStreamId>(&rid));
- EXPECT_EQ(kRid, rid);
}
-TEST_P(RtpSenderTestWithoutPacer, RidIncludedOnRtxSentPackets) {
+TEST_F(RtpSenderTest, RidIncludedOnRtxSentPackets) {
const char kRid[] = "f";
-
EnableRtx();
EnableRidSending(kRid);
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::GetExtension<RtpStreamId>, kRid),
+ Property(&RtpPacketToSend::HasExtension<RepairedRtpStreamId>,
+ false))))))
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ packet_history_->PutRtpPacket(std::move(packets[0]),
+ clock_->TimeInMilliseconds());
+ });
SendGenericPacket();
- ASSERT_EQ(1u, transport_.sent_packets_.size());
- const RtpPacketReceived& packet = transport_.sent_packets_[0];
- std::string rid;
- ASSERT_TRUE(packet.GetExtension<RtpStreamId>(&rid));
- EXPECT_EQ(kRid, rid);
- rid = kNoRid;
- EXPECT_FALSE(packet.HasExtension<RepairedRtpStreamId>());
-
- uint16_t packet_id = packet.SequenceNumber();
- rtp_sender()->ReSendPacket(packet_id);
- ASSERT_EQ(2u, transport_.sent_packets_.size());
- const RtpPacketReceived& rtx_packet = transport_.sent_packets_[1];
- ASSERT_TRUE(rtx_packet.GetExtension<RepairedRtpStreamId>(&rid));
- EXPECT_EQ(kRid, rid);
- EXPECT_FALSE(rtx_packet.HasExtension<RtpStreamId>());
+
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::GetExtension<RepairedRtpStreamId>, kRid),
+ Property(&RtpPacketToSend::HasExtension<RtpStreamId>, false))))));
+ rtp_sender_->ReSendPacket(kSeqNum);
}
-TEST_P(RtpSenderTestWithoutPacer, MidAndRidNotIncludedOnSentPacketsAfterAck) {
+TEST_F(RtpSenderTest, MidAndRidNotIncludedOnSentPacketsAfterAck) {
const char kMid[] = "mid";
const char kRid[] = "f";
@@ -1465,53 +588,48 @@ TEST_P(RtpSenderTestWithoutPacer, MidAndRidNotIncludedOnSentPacketsAfterAck) {
EnableRidSending(kRid);
// This first packet should include both MID and RID.
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::GetExtension<RtpMid>, kMid),
+ Property(&RtpPacketToSend::GetExtension<RtpStreamId>, kRid))))));
auto first_built_packet = SendGenericPacket();
-
- rtp_sender()->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber());
+ rtp_sender_->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber());
// The second packet should include neither since an ack was received.
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::HasExtension<RtpMid>, false),
+ Property(&RtpPacketToSend::HasExtension<RtpStreamId>, false))))));
SendGenericPacket();
-
- ASSERT_EQ(2u, transport_.sent_packets_.size());
-
- const RtpPacketReceived& first_packet = transport_.sent_packets_[0];
- std::string mid, rid;
- ASSERT_TRUE(first_packet.GetExtension<RtpMid>(&mid));
- EXPECT_EQ(kMid, mid);
- ASSERT_TRUE(first_packet.GetExtension<RtpStreamId>(&rid));
- EXPECT_EQ(kRid, rid);
-
- const RtpPacketReceived& second_packet = transport_.sent_packets_[1];
- EXPECT_FALSE(second_packet.HasExtension<RtpMid>());
- EXPECT_FALSE(second_packet.HasExtension<RtpStreamId>());
}
-TEST_P(RtpSenderTestWithoutPacer,
- MidAndRidAlwaysIncludedOnSentPacketsWhenConfigured) {
- SetUpRtpSender(false, false, /*always_send_mid_and_rid=*/true);
+TEST_F(RtpSenderTest, MidAndRidAlwaysIncludedOnSentPacketsWhenConfigured) {
+ SetUpRtpSender(false, /*always_send_mid_and_rid=*/true, nullptr);
const char kMid[] = "mid";
const char kRid[] = "f";
EnableMidSending(kMid);
EnableRidSending(kRid);
// Send two media packets: one before and one after the ack.
- auto first_packet = SendGenericPacket();
- rtp_sender()->OnReceivedAckOnSsrc(first_packet->SequenceNumber());
- SendGenericPacket();
-
// Due to the configuration, both sent packets should contain MID and RID.
- ASSERT_EQ(2u, transport_.sent_packets_.size());
- for (const RtpPacketReceived& packet : transport_.sent_packets_) {
- EXPECT_EQ(packet.GetExtension<RtpMid>(), kMid);
- EXPECT_EQ(packet.GetExtension<RtpStreamId>(), kRid);
- }
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(
+ AllOf(Property(&RtpPacketToSend::GetExtension<RtpMid>, kMid),
+ Property(&RtpPacketToSend::GetExtension<RtpStreamId>, kRid))))))
+ .Times(2);
+ auto first_built_packet = SendGenericPacket();
+ rtp_sender_->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber());
+ SendGenericPacket();
}
// Test that the first RTX packet includes both MID and RRID even if the packet
// being retransmitted did not have MID or RID. The MID and RID are needed on
// the first packets for a given SSRC, and RTX packets are sent on a separate
// SSRC.
-TEST_P(RtpSenderTestWithoutPacer, MidAndRidIncludedOnFirstRtxPacket) {
+TEST_F(RtpSenderTest, MidAndRidIncludedOnFirstRtxPacket) {
const char kMid[] = "mid";
const char kRid[] = "f";
@@ -1520,30 +638,32 @@ TEST_P(RtpSenderTestWithoutPacer, MidAndRidIncludedOnFirstRtxPacket) {
EnableRidSending(kRid);
// This first packet will include both MID and RID.
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets);
auto first_built_packet = SendGenericPacket();
- rtp_sender()->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber());
+ rtp_sender_->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber());
- // The second packet will include neither since an ack was received.
+ // The second packet will include neither since an ack was received; put
+ // it in the packet history for retransmission.
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1)))
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ packet_history_->PutRtpPacket(std::move(packets[0]),
+ clock_->TimeInMilliseconds());
+ });
auto second_built_packet = SendGenericPacket();
// The first RTX packet should include MID and RRID.
- ASSERT_LT(0,
- rtp_sender()->ReSendPacket(second_built_packet->SequenceNumber()));
-
- ASSERT_EQ(3u, transport_.sent_packets_.size());
-
- const RtpPacketReceived& rtx_packet = transport_.sent_packets_[2];
- std::string mid, rrid;
- ASSERT_TRUE(rtx_packet.GetExtension<RtpMid>(&mid));
- EXPECT_EQ(kMid, mid);
- ASSERT_TRUE(rtx_packet.GetExtension<RepairedRtpStreamId>(&rrid));
- EXPECT_EQ(kRid, rrid);
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::GetExtension<RtpMid>, kMid),
+ Property(&RtpPacketToSend::GetExtension<RepairedRtpStreamId>,
+ kRid))))));
+ rtp_sender_->ReSendPacket(second_built_packet->SequenceNumber());
}
// Test that RTX packets sent after receiving an ACK on the RTX SSRC do
// not include either MID or RRID, even if the packet being retransmitted
// had a MID or RID.
-TEST_P(RtpSenderTestWithoutPacer, MidAndRidNotIncludedOnRtxPacketsAfterAck) {
+TEST_F(RtpSenderTest, MidAndRidNotIncludedOnRtxPacketsAfterAck) {
const char kMid[] = "mid";
const char kRid[] = "f";
@@ -1552,41 +672,44 @@ TEST_P(RtpSenderTestWithoutPacer, MidAndRidNotIncludedOnRtxPacketsAfterAck) {
EnableRidSending(kRid);
// This first packet will include both MID and RID.
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1)))
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ packet_history_->PutRtpPacket(std::move(packets[0]),
+ clock_->TimeInMilliseconds());
+ });
auto first_built_packet = SendGenericPacket();
- rtp_sender()->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber());
+ rtp_sender_->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber());
// The second packet will include neither since an ack was received.
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1)))
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ packet_history_->PutRtpPacket(std::move(packets[0]),
+ clock_->TimeInMilliseconds());
+ });
auto second_built_packet = SendGenericPacket();
// The first RTX packet will include MID and RRID.
- ASSERT_LT(0,
- rtp_sender()->ReSendPacket(second_built_packet->SequenceNumber()));
-
- ASSERT_EQ(3u, transport_.sent_packets_.size());
- const RtpPacketReceived& first_rtx_packet = transport_.sent_packets_[2];
-
- rtp_sender()->OnReceivedAckOnRtxSsrc(first_rtx_packet.SequenceNumber());
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1)))
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ rtp_sender_->OnReceivedAckOnRtxSsrc(packets[0]->SequenceNumber());
+ packet_history_->MarkPacketAsSent(
+ *packets[0]->retransmitted_sequence_number());
+ });
+ rtp_sender_->ReSendPacket(second_built_packet->SequenceNumber());
// The second and third RTX packets should not include MID nor RRID.
- ASSERT_LT(0,
- rtp_sender()->ReSendPacket(first_built_packet->SequenceNumber()));
- ASSERT_LT(0,
- rtp_sender()->ReSendPacket(second_built_packet->SequenceNumber()));
-
- ASSERT_EQ(5u, transport_.sent_packets_.size());
-
- const RtpPacketReceived& second_rtx_packet = transport_.sent_packets_[3];
- EXPECT_FALSE(second_rtx_packet.HasExtension<RtpMid>());
- EXPECT_FALSE(second_rtx_packet.HasExtension<RepairedRtpStreamId>());
-
- const RtpPacketReceived& third_rtx_packet = transport_.sent_packets_[4];
- EXPECT_FALSE(third_rtx_packet.HasExtension<RtpMid>());
- EXPECT_FALSE(third_rtx_packet.HasExtension<RepairedRtpStreamId>());
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::HasExtension<RtpMid>, false),
+ Property(&RtpPacketToSend::HasExtension<RepairedRtpStreamId>,
+ false))))))
+ .Times(2);
+ rtp_sender_->ReSendPacket(first_built_packet->SequenceNumber());
+ rtp_sender_->ReSendPacket(second_built_packet->SequenceNumber());
}
-TEST_P(RtpSenderTestWithoutPacer,
- MidAndRidAlwaysIncludedOnRtxPacketsWhenConfigured) {
- SetUpRtpSender(false, false, /*always_send_mid_and_rid=*/true);
+TEST_F(RtpSenderTest, MidAndRidAlwaysIncludedOnRtxPacketsWhenConfigured) {
+ SetUpRtpSender(false, /*always_send_mid_and_rid=*/true, nullptr);
const char kMid[] = "mid";
const char kRid[] = "f";
EnableRtx();
@@ -1594,63 +717,68 @@ TEST_P(RtpSenderTestWithoutPacer,
EnableRidSending(kRid);
// Send two media packets: one before and one after the ack.
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(
+ AllOf(Property(&RtpPacketToSend::GetExtension<RtpMid>, kMid),
+ Property(&RtpPacketToSend::GetExtension<RtpStreamId>, kRid))))))
+ .Times(2)
+ .WillRepeatedly(
+ [&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ packet_history_->PutRtpPacket(std::move(packets[0]),
+ clock_->TimeInMilliseconds());
+ });
auto media_packet1 = SendGenericPacket();
- rtp_sender()->OnReceivedAckOnSsrc(media_packet1->SequenceNumber());
+ rtp_sender_->OnReceivedAckOnSsrc(media_packet1->SequenceNumber());
auto media_packet2 = SendGenericPacket();
// Send three RTX packets with different combinations of orders w.r.t. the
// media and RTX acks.
- ASSERT_LT(0, rtp_sender()->ReSendPacket(media_packet2->SequenceNumber()));
- ASSERT_EQ(3u, transport_.sent_packets_.size());
- rtp_sender()->OnReceivedAckOnRtxSsrc(
- transport_.sent_packets_[2].SequenceNumber());
- ASSERT_LT(0, rtp_sender()->ReSendPacket(media_packet1->SequenceNumber()));
- ASSERT_LT(0, rtp_sender()->ReSendPacket(media_packet2->SequenceNumber()));
-
// Due to the configuration, all sent packets should contain MID
// and either RID (media) or RRID (RTX).
- ASSERT_EQ(5u, transport_.sent_packets_.size());
- for (const auto& packet : transport_.sent_packets_) {
- EXPECT_EQ(packet.GetExtension<RtpMid>(), kMid);
- }
- for (size_t i = 0; i < 2; ++i) {
- const RtpPacketReceived& packet = transport_.sent_packets_[i];
- EXPECT_EQ(packet.GetExtension<RtpStreamId>(), kRid);
- }
- for (size_t i = 2; i < transport_.sent_packets_.size(); ++i) {
- const RtpPacketReceived& packet = transport_.sent_packets_[i];
- EXPECT_EQ(packet.GetExtension<RepairedRtpStreamId>(), kRid);
- }
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::GetExtension<RtpMid>, kMid),
+ Property(&RtpPacketToSend::GetExtension<RepairedRtpStreamId>,
+ kRid))))))
+ .Times(3)
+ .WillRepeatedly(
+ [&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ rtp_sender_->OnReceivedAckOnRtxSsrc(packets[0]->SequenceNumber());
+ packet_history_->MarkPacketAsSent(
+ *packets[0]->retransmitted_sequence_number());
+ });
+ rtp_sender_->ReSendPacket(media_packet2->SequenceNumber());
+ rtp_sender_->ReSendPacket(media_packet1->SequenceNumber());
+ rtp_sender_->ReSendPacket(media_packet2->SequenceNumber());
}
// Test that if the RtpState indicates an ACK has been received on that SSRC
// then neither the MID nor RID header extensions will be sent.
-TEST_P(RtpSenderTestWithoutPacer,
- MidAndRidNotIncludedOnSentPacketsAfterRtpStateRestored) {
+TEST_F(RtpSenderTest, MidAndRidNotIncludedOnSentPacketsAfterRtpStateRestored) {
const char kMid[] = "mid";
const char kRid[] = "f";
EnableMidSending(kMid);
EnableRidSending(kRid);
- RtpState state = rtp_sender()->GetRtpState();
+ RtpState state = rtp_sender_->GetRtpState();
EXPECT_FALSE(state.ssrc_has_acked);
state.ssrc_has_acked = true;
- rtp_sender()->SetRtpState(state);
+ rtp_sender_->SetRtpState(state);
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::HasExtension<RtpMid>, false),
+ Property(&RtpPacketToSend::HasExtension<RtpStreamId>, false))))));
SendGenericPacket();
-
- ASSERT_EQ(1u, transport_.sent_packets_.size());
- const RtpPacketReceived& packet = transport_.sent_packets_[0];
- EXPECT_FALSE(packet.HasExtension<RtpMid>());
- EXPECT_FALSE(packet.HasExtension<RtpStreamId>());
}
// Test that if the RTX RtpState indicates an ACK has been received on that
// RTX SSRC then neither the MID nor RRID header extensions will be sent on
// RTX packets.
-TEST_P(RtpSenderTestWithoutPacer,
- MidAndRridNotIncludedOnRtxPacketsAfterRtpStateRestored) {
+TEST_F(RtpSenderTest, MidAndRridNotIncludedOnRtxPacketsAfterRtpStateRestored) {
const char kMid[] = "mid";
const char kRid[] = "f";
@@ -1658,756 +786,255 @@ TEST_P(RtpSenderTestWithoutPacer,
EnableMidSending(kMid);
EnableRidSending(kRid);
- RtpState rtx_state = rtp_sender()->GetRtxRtpState();
+ RtpState rtx_state = rtp_sender_->GetRtxRtpState();
EXPECT_FALSE(rtx_state.ssrc_has_acked);
rtx_state.ssrc_has_acked = true;
- rtp_sender()->SetRtxRtpState(rtx_state);
+ rtp_sender_->SetRtxRtpState(rtx_state);
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1)))
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ packet_history_->PutRtpPacket(std::move(packets[0]),
+ clock_->TimeInMilliseconds());
+ });
auto built_packet = SendGenericPacket();
- ASSERT_LT(0, rtp_sender()->ReSendPacket(built_packet->SequenceNumber()));
-
- ASSERT_EQ(2u, transport_.sent_packets_.size());
- const RtpPacketReceived& rtx_packet = transport_.sent_packets_[1];
- EXPECT_FALSE(rtx_packet.HasExtension<RtpMid>());
- EXPECT_FALSE(rtx_packet.HasExtension<RepairedRtpStreamId>());
-}
-
-TEST_P(RtpSenderTest, FecOverheadRate) {
- constexpr uint32_t kTimestamp = 1234;
- constexpr int kMediaPayloadType = 127;
- constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
- constexpr int kFlexfecPayloadType = 118;
- const std::vector<RtpExtension> kNoRtpExtensions;
- const std::vector<RtpExtensionSize> kNoRtpExtensionSizes;
- FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid,
- kNoRtpExtensions, kNoRtpExtensionSizes,
- nullptr /* rtp_state */, clock_);
-
- // Reset |rtp_sender_| to use this FlexFEC instance.
- SetUpRtpSender(false, false, false, &flexfec_sender);
-
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.fec_type = flexfec_sender.GetFecType();
- video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
- // Parameters selected to generate a single FEC packet per media packet.
- FecProtectionParams params;
- params.fec_rate = 15;
- params.max_fec_frames = 1;
- params.fec_mask_type = kFecMaskRandom;
- rtp_egress()->SetFecProtectionParameters(params, params);
-
- constexpr size_t kNumMediaPackets = 10;
- constexpr size_t kNumFecPackets = kNumMediaPackets;
- constexpr int64_t kTimeBetweenPacketsMs = 10;
- for (size_t i = 0; i < kNumMediaPackets; ++i) {
- RTPVideoHeader video_header;
-
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- EXPECT_TRUE(rtp_sender_video.SendVideo(
- kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(),
- kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs));
-
- time_controller_.AdvanceTime(TimeDelta::Millis(kTimeBetweenPacketsMs));
- }
- constexpr size_t kRtpHeaderLength = 12;
- constexpr size_t kFlexfecHeaderLength = 20;
- constexpr size_t kGenericCodecHeaderLength = 1;
- constexpr size_t kPayloadLength = sizeof(kPayloadData);
- constexpr size_t kPacketLength = kRtpHeaderLength + kFlexfecHeaderLength +
- kGenericCodecHeaderLength + kPayloadLength;
-
- EXPECT_NEAR(
- kNumFecPackets * kPacketLength * 8 /
- (kNumFecPackets * kTimeBetweenPacketsMs / 1000.0f),
- rtp_egress()
- ->GetSendRates()[RtpPacketMediaType::kForwardErrorCorrection]
- .bps<double>(),
- 500);
-}
-
-TEST_P(RtpSenderTest, BitrateCallbacks) {
- class TestCallback : public BitrateStatisticsObserver {
- public:
- TestCallback()
- : BitrateStatisticsObserver(),
- num_calls_(0),
- ssrc_(0),
- total_bitrate_(0),
- retransmit_bitrate_(0) {}
- ~TestCallback() override = default;
-
- void Notify(uint32_t total_bitrate,
- uint32_t retransmit_bitrate,
- uint32_t ssrc) override {
- ++num_calls_;
- ssrc_ = ssrc;
- total_bitrate_ = total_bitrate;
- retransmit_bitrate_ = retransmit_bitrate;
- }
-
- uint32_t num_calls_;
- uint32_t ssrc_;
- uint32_t total_bitrate_;
- uint32_t retransmit_bitrate_;
- } callback;
-
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.send_bitrate_observer = &callback;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
-
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
- const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
- const uint8_t kPayloadType = 127;
-
- // Simulate kNumPackets sent with kPacketInterval ms intervals, with the
- // number of packets selected so that we fill (but don't overflow) the one
- // second averaging window.
- const uint32_t kWindowSizeMs = 1000;
- const uint32_t kPacketInterval = 20;
- const uint32_t kNumPackets =
- (kWindowSizeMs - kPacketInterval) / kPacketInterval;
- // Overhead = 12 bytes RTP header + 1 byte generic header.
- const uint32_t kPacketOverhead = 13;
-
- uint8_t payload[] = {47, 11, 32, 93, 89};
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 1);
- uint32_t ssrc = rtp_sender()->SSRC();
-
- // Send a few frames.
- RTPVideoHeader video_header;
- for (uint32_t i = 0; i < kNumPackets; ++i) {
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- ASSERT_TRUE(rtp_sender_video.SendVideo(
- kPayloadType, kCodecType, 1234, 4321, payload, video_header,
- kDefaultExpectedRetransmissionTimeMs));
- time_controller_.AdvanceTime(TimeDelta::Millis(kPacketInterval));
- }
-
- // We get one call for every stats updated, thus two calls since both the
- // stream stats and the retransmit stats are updated once.
- EXPECT_EQ(kNumPackets, callback.num_calls_);
- EXPECT_EQ(ssrc, callback.ssrc_);
- const uint32_t kTotalPacketSize = kPacketOverhead + sizeof(payload);
- // Bitrate measured over delta between last and first timestamp, plus one.
- const uint32_t kExpectedWindowMs = (kNumPackets - 1) * kPacketInterval + 1;
- const uint32_t kExpectedBitsAccumulated = kTotalPacketSize * kNumPackets * 8;
- const uint32_t kExpectedRateBps =
- (kExpectedBitsAccumulated * 1000 + (kExpectedWindowMs / 2)) /
- kExpectedWindowMs;
- EXPECT_EQ(kExpectedRateBps, callback.total_bitrate_);
-}
-TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
- const uint8_t kPayloadType = 127;
- const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
- FieldTrialBasedConfig field_trials;
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.field_trials = &field_trials;
- RTPSenderVideo rtp_sender_video(video_config);
- uint8_t payload[] = {47, 11, 32, 93, 89};
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 1);
- uint32_t ssrc = rtp_sender()->SSRC();
-
- // Send a frame.
- RTPVideoHeader video_header;
- video_header.frame_type = VideoFrameType::kVideoFrameKey;
- ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, video_header,
- kDefaultExpectedRetransmissionTimeMs));
- StreamDataCounters expected;
- expected.transmitted.payload_bytes = 6;
- expected.transmitted.header_bytes = 12;
- expected.transmitted.padding_bytes = 0;
- expected.transmitted.packets = 1;
- expected.retransmitted.payload_bytes = 0;
- expected.retransmitted.header_bytes = 0;
- expected.retransmitted.padding_bytes = 0;
- expected.retransmitted.packets = 0;
- expected.fec.packets = 0;
- rtp_stats_callback_.Matches(ssrc, expected);
-
- // Retransmit a frame.
- uint16_t seqno = rtp_sender()->SequenceNumber() - 1;
- rtp_sender()->ReSendPacket(seqno);
- expected.transmitted.payload_bytes = 12;
- expected.transmitted.header_bytes = 24;
- expected.transmitted.packets = 2;
- expected.retransmitted.payload_bytes = 6;
- expected.retransmitted.header_bytes = 12;
- expected.retransmitted.padding_bytes = 0;
- expected.retransmitted.packets = 1;
- rtp_stats_callback_.Matches(ssrc, expected);
-
- // Send padding.
- GenerateAndSendPadding(kMaxPaddingSize);
- expected.transmitted.payload_bytes = 12;
- expected.transmitted.header_bytes = 36;
- expected.transmitted.padding_bytes = kMaxPaddingSize;
- expected.transmitted.packets = 3;
- rtp_stats_callback_.Matches(ssrc, expected);
-}
-
-TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacksUlpfec) {
- const uint8_t kRedPayloadType = 96;
- const uint8_t kUlpfecPayloadType = 97;
- const uint8_t kPayloadType = 127;
- const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
-
- UlpfecGenerator ulpfec_generator(kRedPayloadType, kUlpfecPayloadType, clock_);
- SetUpRtpSender(false, false, false, &ulpfec_generator);
- RTPSenderVideo::Config video_config;
- video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
- video_config.field_trials = &field_trials_;
- video_config.red_payload_type = kRedPayloadType;
- video_config.fec_type = ulpfec_generator.GetFecType();
- video_config.fec_overhead_bytes = ulpfec_generator.MaxPacketOverhead();
- RTPSenderVideo rtp_sender_video(video_config);
- uint8_t payload[] = {47, 11, 32, 93, 89};
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 1);
- uint32_t ssrc = rtp_sender()->SSRC();
-
- RTPVideoHeader video_header;
- StreamDataCounters expected;
-
- // Send ULPFEC.
- FecProtectionParams fec_params;
- fec_params.fec_mask_type = kFecMaskRandom;
- fec_params.fec_rate = 1;
- fec_params.max_fec_frames = 1;
- rtp_egress()->SetFecProtectionParameters(fec_params, fec_params);
- video_header.frame_type = VideoFrameType::kVideoFrameDelta;
- ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321,
- payload, video_header,
- kDefaultExpectedRetransmissionTimeMs));
- expected.transmitted.payload_bytes = 28;
- expected.transmitted.header_bytes = 24;
- expected.transmitted.packets = 2;
- expected.fec.packets = 1;
- rtp_stats_callback_.Matches(ssrc, expected);
-}
-
-TEST_P(RtpSenderTestWithoutPacer, BytesReportedCorrectly) {
- const uint8_t kPayloadType = 127;
- const size_t kPayloadSize = 1400;
- rtp_sender()->SetRtxPayloadType(kPayloadType - 1, kPayloadType);
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
-
- SendPacket(clock_->TimeInMilliseconds(), kPayloadSize);
- // Will send 2 full-size padding packets.
- GenerateAndSendPadding(1);
- GenerateAndSendPadding(1);
-
- StreamDataCounters rtp_stats;
- StreamDataCounters rtx_stats;
- rtp_egress()->GetDataCounters(&rtp_stats, &rtx_stats);
-
- // Payload
- EXPECT_GT(rtp_stats.first_packet_time_ms, -1);
- EXPECT_EQ(rtp_stats.transmitted.payload_bytes, kPayloadSize);
- EXPECT_EQ(rtp_stats.transmitted.header_bytes, 12u);
- EXPECT_EQ(rtp_stats.transmitted.padding_bytes, 0u);
- EXPECT_EQ(rtx_stats.transmitted.payload_bytes, 0u);
- EXPECT_EQ(rtx_stats.transmitted.header_bytes, 24u);
- EXPECT_EQ(rtx_stats.transmitted.padding_bytes, 2 * kMaxPaddingSize);
-
- EXPECT_EQ(rtp_stats.transmitted.TotalBytes(),
- rtp_stats.transmitted.payload_bytes +
- rtp_stats.transmitted.header_bytes +
- rtp_stats.transmitted.padding_bytes);
- EXPECT_EQ(rtx_stats.transmitted.TotalBytes(),
- rtx_stats.transmitted.payload_bytes +
- rtx_stats.transmitted.header_bytes +
- rtx_stats.transmitted.padding_bytes);
-
- EXPECT_EQ(
- transport_.total_bytes_sent_,
- rtp_stats.transmitted.TotalBytes() + rtx_stats.transmitted.TotalBytes());
+ EXPECT_CALL(
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(AllOf(
+ Property(&RtpPacketToSend::HasExtension<RtpMid>, false),
+ Property(&RtpPacketToSend::HasExtension<RtpStreamId>, false))))));
+ ASSERT_LT(0, rtp_sender_->ReSendPacket(built_packet->SequenceNumber()));
}
-TEST_P(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) {
+TEST_F(RtpSenderTest, RespectsNackBitrateLimit) {
const int32_t kPacketSize = 1400;
const int32_t kNumPackets = 30;
retransmission_rate_limiter_.SetMaxRate(kPacketSize * kNumPackets * 8);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, kNumPackets);
- const uint16_t kStartSequenceNumber = rtp_sender()->SequenceNumber();
+ const uint16_t kStartSequenceNumber = rtp_sender_->SequenceNumber();
std::vector<uint16_t> sequence_numbers;
for (int32_t i = 0; i < kNumPackets; ++i) {
sequence_numbers.push_back(kStartSequenceNumber + i);
time_controller_.AdvanceTime(TimeDelta::Millis(1));
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1)))
+ .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ packet_history_->PutRtpPacket(std::move(packets[0]),
+ clock_->TimeInMilliseconds());
+ });
SendPacket(clock_->TimeInMilliseconds(), kPacketSize);
}
- EXPECT_EQ(kNumPackets, transport_.packets_sent());
time_controller_.AdvanceTime(TimeDelta::Millis(1000 - kNumPackets));
// Resending should work - brings the bandwidth up to the limit.
// NACK bitrate is capped to the same bitrate as the encoder, since the max
// protection overhead is 50% (see MediaOptimization::SetTargetRates).
- rtp_sender()->OnReceivedNack(sequence_numbers, 0);
- EXPECT_EQ(kNumPackets * 2, transport_.packets_sent());
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets(ElementsAre(Pointee(Property(
+ &RtpPacketToSend::packet_type,
+ RtpPacketMediaType::kRetransmission)))))
+ .Times(kNumPackets)
+ .WillRepeatedly(
+ [&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ for (const auto& packet : packets) {
+ packet_history_->MarkPacketAsSent(
+ *packet->retransmitted_sequence_number());
+ }
+ });
+ rtp_sender_->OnReceivedNack(sequence_numbers, 0);
// Must be at least 5ms in between retransmission attempts.
time_controller_.AdvanceTime(TimeDelta::Millis(5));
// Resending should not work, bandwidth exceeded.
- rtp_sender()->OnReceivedNack(sequence_numbers, 0);
- EXPECT_EQ(kNumPackets * 2, transport_.packets_sent());
+ EXPECT_CALL(mock_paced_sender_, EnqueuePackets).Times(0);
+ rtp_sender_->OnReceivedNack(sequence_numbers, 0);
}
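For reference, the budget in RespectsNackBitrateLimit works out to exactly one full round of retransmissions, which is why the second NACK round a few milliseconds later is rejected by the rate limiter. A small arithmetic sketch using the test's values (constant names are illustrative only):

// Arithmetic behind RespectsNackBitrateLimit: the configured retransmission
// rate equals one full resend round per second.
#include <cstdint>

constexpr int32_t kPacketSizeBytes = 1400;
constexpr int32_t kNumPackets = 30;
// Rate limit as configured in the test: all packets' bits, per second.
constexpr int64_t kMaxRateBps =
    static_cast<int64_t>(kPacketSizeBytes) * kNumPackets * 8;

// One NACK round retransmits kNumPackets * kPacketSizeBytes * 8 bits, which
// consumes the whole per-second budget, so a second round shortly afterwards
// is rejected.
static_assert(kMaxRateBps == 336000, "budget equals one full resend round");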
-TEST_P(RtpSenderTest, UpdatingCsrcsUpdatedOverhead) {
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
+TEST_F(RtpSenderTest, UpdatingCsrcsUpdatedOverhead) {
+ RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ config.rtx_send_ssrc = {};
+ CreateSender(config);
// Base RTP overhead is 12B.
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 12u);
// Adding two csrcs adds 2*4 bytes to the header.
- rtp_sender()->SetCsrcs({1, 2});
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 20u);
+ rtp_sender_->SetCsrcs({1, 2});
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 20u);
}
-TEST_P(RtpSenderTest, OnOverheadChanged) {
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
+TEST_F(RtpSenderTest, OnOverheadChanged) {
+ RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ config.rtx_send_ssrc = {};
+ CreateSender(config);
// Base RTP overhead is 12B.
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 12u);
- rtp_sender()->RegisterRtpHeaderExtension(TransmissionOffset::kUri,
- kTransmissionTimeOffsetExtensionId);
+ rtp_sender_->RegisterRtpHeaderExtension(TransmissionOffset::kUri,
+ kTransmissionTimeOffsetExtensionId);
// TransmissionTimeOffset extension has a size of 3B, but with the addition
// of header index and rounding to 4 byte boundary we end up with 20B total.
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 20u);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 20u);
}
-TEST_P(RtpSenderTest, CountMidOnlyUntilAcked) {
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
+TEST_F(RtpSenderTest, CountMidOnlyUntilAcked) {
+ RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ config.rtx_send_ssrc = {};
+ CreateSender(config);
// Base RTP overhead is 12B.
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 12u);
- rtp_sender()->RegisterRtpHeaderExtension(RtpMid::kUri, kMidExtensionId);
- rtp_sender()->RegisterRtpHeaderExtension(RtpStreamId::kUri, kRidExtensionId);
+ rtp_sender_->RegisterRtpHeaderExtension(RtpMid::kUri, kMidExtensionId);
+ rtp_sender_->RegisterRtpHeaderExtension(RtpStreamId::kUri, kRidExtensionId);
// Counted only if set.
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
- rtp_sender()->SetMid("foo");
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 36u);
- rtp_sender()->SetRid("bar");
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 52u);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 12u);
+ rtp_sender_->SetMid("foo");
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 36u);
+ rtp_sender_->SetRid("bar");
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 52u);
// Ack received, mid/rid no longer sent.
- rtp_sender()->OnReceivedAckOnSsrc(0);
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
+ rtp_sender_->OnReceivedAckOnSsrc(0);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 12u);
}
-TEST_P(RtpSenderTest, DontCountVolatileExtensionsIntoOverhead) {
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.retransmission_rate_limiter = &retransmission_rate_limiter_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
+TEST_F(RtpSenderTest, DontCountVolatileExtensionsIntoOverhead) {
+ RtpRtcpInterface::Configuration config = GetDefaultConfig();
+ config.rtx_send_ssrc = {};
+ CreateSender(config);
// Base RTP overhead is 12B.
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
-
- rtp_sender()->RegisterRtpHeaderExtension(InbandComfortNoiseExtension::kUri,
- 1);
- rtp_sender()->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::kUri,
- 2);
- rtp_sender()->RegisterRtpHeaderExtension(VideoOrientation::kUri, 3);
- rtp_sender()->RegisterRtpHeaderExtension(PlayoutDelayLimits::kUri, 4);
- rtp_sender()->RegisterRtpHeaderExtension(VideoContentTypeExtension::kUri, 5);
- rtp_sender()->RegisterRtpHeaderExtension(VideoTimingExtension::kUri, 6);
- rtp_sender()->RegisterRtpHeaderExtension(RepairedRtpStreamId::kUri, 7);
- rtp_sender()->RegisterRtpHeaderExtension(ColorSpaceExtension::kUri, 8);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 12u);
+
+ rtp_sender_->RegisterRtpHeaderExtension(InbandComfortNoiseExtension::kUri, 1);
+ rtp_sender_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::kUri,
+ 2);
+ rtp_sender_->RegisterRtpHeaderExtension(VideoOrientation::kUri, 3);
+ rtp_sender_->RegisterRtpHeaderExtension(PlayoutDelayLimits::kUri, 4);
+ rtp_sender_->RegisterRtpHeaderExtension(VideoContentTypeExtension::kUri, 5);
+ rtp_sender_->RegisterRtpHeaderExtension(VideoTimingExtension::kUri, 6);
+ rtp_sender_->RegisterRtpHeaderExtension(RepairedRtpStreamId::kUri, 7);
+ rtp_sender_->RegisterRtpHeaderExtension(ColorSpaceExtension::kUri, 8);
// Still only 12B counted since we can't count on the above being sent.
- EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u);
-}
-
-TEST_P(RtpSenderTest, SendPacketMatchesVideo) {
- std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->set_packet_type(RtpPacketMediaType::kVideo);
-
- // Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kSsrc);
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 1);
-}
-
-TEST_P(RtpSenderTest, SendPacketMatchesAudio) {
- std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->set_packet_type(RtpPacketMediaType::kAudio);
-
- // Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kSsrc);
- packet->set_packet_type(RtpPacketMediaType::kAudio);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 1);
-}
-
-TEST_P(RtpSenderTest, SendPacketMatchesRetransmissions) {
- std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->set_packet_type(RtpPacketMediaType::kRetransmission);
-
- // Verify sent with correct SSRC (non-RTX).
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kSsrc);
- packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 1);
-
- // RTX retransmission.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kRtxSsrc);
- packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 2);
-}
-
-TEST_P(RtpSenderTest, SendPacketMatchesPadding) {
- std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->set_packet_type(RtpPacketMediaType::kPadding);
-
- // Verify sent with correct SSRC (non-RTX).
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kSsrc);
- packet->set_packet_type(RtpPacketMediaType::kPadding);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 1);
-
- // RTX padding.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kRtxSsrc);
- packet->set_packet_type(RtpPacketMediaType::kPadding);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 2);
-}
-
-TEST_P(RtpSenderTest, SendPacketMatchesFlexfec) {
- std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
-
- // Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kFlexFecSsrc);
- packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 1);
-}
-
-TEST_P(RtpSenderTest, SendPacketMatchesUlpfec) {
- std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
-
- // Verify sent with correct SSRC.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetSsrc(kSsrc);
- packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_EQ(transport_.packets_sent(), 1);
+ EXPECT_EQ(rtp_sender_->ExpectedPerPacketOverhead(), 12u);
}
-TEST_P(RtpSenderTest, SendPacketHandlesRetransmissionHistory) {
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+TEST_F(RtpSenderTest, SendPacketHandlesRetransmissionHistory) {
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, 10);
// Ignore calls to EnqueuePackets() for this test.
EXPECT_CALL(mock_paced_sender_, EnqueuePackets).WillRepeatedly(Return());
- // Build a media packet and send it.
+ // Build a media packet and put it in the packet history.
std::unique_ptr<RtpPacketToSend> packet =
BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
const uint16_t media_sequence_number = packet->SequenceNumber();
- packet->set_packet_type(RtpPacketMediaType::kVideo);
packet->set_allow_retransmission(true);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
+ packet_history_->PutRtpPacket(std::move(packet),
+ clock_->TimeInMilliseconds());
- // Simulate retransmission request.
+ // Simulate successful retransmission request.
time_controller_.AdvanceTime(TimeDelta::Millis(30));
- EXPECT_GT(rtp_sender()->ReSendPacket(media_sequence_number), 0);
+ EXPECT_THAT(rtp_sender_->ReSendPacket(media_sequence_number), Gt(0));
// Packet already pending, retransmission not allowed.
time_controller_.AdvanceTime(TimeDelta::Millis(30));
- EXPECT_EQ(rtp_sender()->ReSendPacket(media_sequence_number), 0);
-
- // Packet exiting pacer, mark as not longer pending.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- EXPECT_NE(packet->SequenceNumber(), media_sequence_number);
- packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- packet->SetSsrc(kRtxSsrc);
- packet->set_retransmitted_sequence_number(media_sequence_number);
- packet->set_allow_retransmission(false);
- uint16_t seq_no = packet->SequenceNumber();
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
+ EXPECT_THAT(rtp_sender_->ReSendPacket(media_sequence_number), Eq(0));
+
+ // Simulate the packet exiting the pacer, mark it as no longer pending.
+ packet_history_->MarkPacketAsSent(media_sequence_number);
// Retransmissions allowed again.
time_controller_.AdvanceTime(TimeDelta::Millis(30));
- EXPECT_GT(rtp_sender()->ReSendPacket(media_sequence_number), 0);
-
- // Retransmission of RTX packet should not be allowed.
- EXPECT_EQ(rtp_sender()->ReSendPacket(seq_no), 0);
+ EXPECT_THAT(rtp_sender_->ReSendPacket(media_sequence_number), Gt(0));
}
-TEST_P(RtpSenderTest, SendPacketUpdatesExtensions) {
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransmissionOffset::kUri, kTransmissionTimeOffsetExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- AbsoluteSendTime::kUri, kAbsoluteSendTimeExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- VideoTimingExtension::kUri, kVideoTimingExtensionId));
+TEST_F(RtpSenderTest, MarksRetransmittedPackets) {
+ packet_history_->SetStorePacketsStatus(
+ RtpPacketHistory::StorageMode::kStoreAndCull, 10);
+ // Build a media packet and put it in the packet history.
std::unique_ptr<RtpPacketToSend> packet =
BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds());
-
- const int32_t kDiffMs = 10;
- time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs));
-
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- const RtpPacketReceived& received_packet = transport_.last_sent_packet();
-
- EXPECT_EQ(received_packet.GetExtension<TransmissionOffset>(), kDiffMs * 90);
-
- EXPECT_EQ(received_packet.GetExtension<AbsoluteSendTime>(),
- AbsoluteSendTime::MsTo24Bits(clock_->TimeInMilliseconds()));
-
- VideoSendTiming timing;
- EXPECT_TRUE(received_packet.GetExtension<VideoTimingExtension>(&timing));
- EXPECT_EQ(timing.pacer_exit_delta_ms, kDiffMs);
-}
-
-TEST_P(RtpSenderTest, SendPacketSetsPacketOptions) {
- const uint16_t kPacketId = 42;
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
- std::unique_ptr<RtpPacketToSend> packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetExtension<TransportSequenceNumber>(kPacketId);
-
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- EXPECT_CALL(send_packet_observer_, OnSendPacket);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
-
- EXPECT_EQ(transport_.last_options_.packet_id, kPacketId);
- EXPECT_TRUE(transport_.last_options_.included_in_allocation);
- EXPECT_TRUE(transport_.last_options_.included_in_feedback);
- EXPECT_FALSE(transport_.last_options_.is_retransmit);
-
- // Send another packet as retransmission, verify options are populated.
- packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- packet->SetExtension<TransportSequenceNumber>(kPacketId + 1);
- packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- EXPECT_TRUE(transport_.last_options_.is_retransmit);
-}
-
-TEST_P(RtpSenderTest, SendPacketUpdatesStats) {
- const size_t kPayloadSize = 1000;
-
- StrictMock<MockSendSideDelayObserver> send_side_delay_observer;
-
- RtpRtcpInterface::Configuration config;
- config.clock = clock_;
- config.outgoing_transport = &transport_;
- config.local_media_ssrc = kSsrc;
- config.rtx_send_ssrc = kRtxSsrc;
- config.fec_generator = &flexfec_sender_;
- config.send_side_delay_observer = &send_side_delay_observer;
- config.event_log = &mock_rtc_event_log_;
- config.send_packet_observer = &send_packet_observer_;
- rtp_sender_context_ =
- std::make_unique<RtpSenderContext>(config, &time_controller_);
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
- TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
-
- const int64_t capture_time_ms = clock_->TimeInMilliseconds();
-
- std::unique_ptr<RtpPacketToSend> video_packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- video_packet->set_packet_type(RtpPacketMediaType::kVideo);
- video_packet->SetPayloadSize(kPayloadSize);
- video_packet->SetExtension<TransportSequenceNumber>(1);
-
- std::unique_ptr<RtpPacketToSend> rtx_packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- rtx_packet->SetSsrc(kRtxSsrc);
- rtx_packet->set_packet_type(RtpPacketMediaType::kRetransmission);
- rtx_packet->SetPayloadSize(kPayloadSize);
- rtx_packet->SetExtension<TransportSequenceNumber>(2);
-
- std::unique_ptr<RtpPacketToSend> fec_packet =
- BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
- fec_packet->SetSsrc(kFlexFecSsrc);
- fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection);
- fec_packet->SetPayloadSize(kPayloadSize);
- fec_packet->SetExtension<TransportSequenceNumber>(3);
-
- const int64_t kDiffMs = 25;
- time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs));
+ const uint16_t media_sequence_number = packet->SequenceNumber();
+ packet->set_allow_retransmission(true);
+ packet_history_->PutRtpPacket(std::move(packet),
+ clock_->TimeInMilliseconds());
- EXPECT_CALL(send_side_delay_observer,
- SendSideDelayUpdated(kDiffMs, kDiffMs, kDiffMs, kSsrc));
+ // Expect a retransmission packet that records the sequence number of the
+ // packet it retransmits.
EXPECT_CALL(
- send_side_delay_observer,
- SendSideDelayUpdated(kDiffMs, kDiffMs, 2 * kDiffMs, kFlexFecSsrc));
-
- EXPECT_CALL(send_packet_observer_, OnSendPacket(1, capture_time_ms, kSsrc));
-
- rtp_sender_context_->InjectPacket(std::move(video_packet), PacedPacketInfo());
-
- // Send packet observer not called for padding/retransmissions.
- EXPECT_CALL(send_packet_observer_, OnSendPacket(2, _, _)).Times(0);
- rtp_sender_context_->InjectPacket(std::move(rtx_packet), PacedPacketInfo());
-
- EXPECT_CALL(send_packet_observer_,
- OnSendPacket(3, capture_time_ms, kFlexFecSsrc));
- rtp_sender_context_->InjectPacket(std::move(fec_packet), PacedPacketInfo());
-
- StreamDataCounters rtp_stats;
- StreamDataCounters rtx_stats;
- rtp_egress()->GetDataCounters(&rtp_stats, &rtx_stats);
- EXPECT_EQ(rtp_stats.transmitted.packets, 2u);
- EXPECT_EQ(rtp_stats.fec.packets, 1u);
- EXPECT_EQ(rtx_stats.retransmitted.packets, 1u);
+ mock_paced_sender_,
+ EnqueuePackets(ElementsAre(AllOf(
+ Pointee(Property(&RtpPacketToSend::packet_type,
+ RtpPacketMediaType::kRetransmission)),
+ Pointee(Property(&RtpPacketToSend::retransmitted_sequence_number,
+ Eq(media_sequence_number)))))));
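+  // ReSendPacket() returns a value > 0 on success, 0 if the packet is no
+  // longer in the packet history, and a negative value on failure.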
+ EXPECT_THAT(rtp_sender_->ReSendPacket(media_sequence_number), Gt(0));
}
-TEST_P(RtpSenderTest, GeneratedPaddingHasBweExtensions) {
+TEST_F(RtpSenderTest, GeneratedPaddingHasBweExtensions) {
// Min requested size in order to use RTX payload.
const size_t kMinPaddingSize = 50;
+ EnableRtx();
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
- rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 1);
-
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransmissionOffset::kUri, kTransmissionTimeOffsetExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
AbsoluteSendTime::kUri, kAbsoluteSendTimeExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
- // Send a payload packet first, to enable padding and populate the packet
- // history.
+ // Put a packet in the history, in order to facilitate payload padding.
std::unique_ptr<RtpPacketToSend> packet =
BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds());
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kMinPaddingSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
- EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
+ packet_history_->PutRtpPacket(std::move(packet),
+ clock_->TimeInMilliseconds());
// Generate a plain padding packet, check that extensions are registered.
std::vector<std::unique_ptr<RtpPacketToSend>> generated_packets =
- rtp_sender()->GeneratePadding(/*target_size_bytes=*/1, true);
+ rtp_sender_->GeneratePadding(/*target_size_bytes=*/1, true);
ASSERT_THAT(generated_packets, SizeIs(1));
auto& plain_padding = generated_packets.front();
EXPECT_GT(plain_padding->padding_size(), 0u);
EXPECT_TRUE(plain_padding->HasExtension<TransportSequenceNumber>());
EXPECT_TRUE(plain_padding->HasExtension<AbsoluteSendTime>());
EXPECT_TRUE(plain_padding->HasExtension<TransmissionOffset>());
-
- // Verify all header extensions have been written.
- rtp_sender_context_->InjectPacket(std::move(plain_padding),
- PacedPacketInfo());
- const auto& sent_plain_padding = transport_.last_sent_packet();
- EXPECT_TRUE(sent_plain_padding.HasExtension<TransportSequenceNumber>());
- EXPECT_TRUE(sent_plain_padding.HasExtension<AbsoluteSendTime>());
- EXPECT_TRUE(sent_plain_padding.HasExtension<TransmissionOffset>());
- webrtc::RTPHeader rtp_header;
- sent_plain_padding.GetHeader(&rtp_header);
- EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
- EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
- EXPECT_TRUE(rtp_header.extension.hasTransportSequenceNumber);
+ EXPECT_GT(plain_padding->padding_size(), 0u);
   // Generate a payload padding packet, check that extensions are registered.
- generated_packets = rtp_sender()->GeneratePadding(kMinPaddingSize, true);
+ generated_packets = rtp_sender_->GeneratePadding(kMinPaddingSize, true);
ASSERT_EQ(generated_packets.size(), 1u);
auto& payload_padding = generated_packets.front();
EXPECT_EQ(payload_padding->padding_size(), 0u);
EXPECT_TRUE(payload_padding->HasExtension<TransportSequenceNumber>());
EXPECT_TRUE(payload_padding->HasExtension<AbsoluteSendTime>());
EXPECT_TRUE(payload_padding->HasExtension<TransmissionOffset>());
-
- // Verify all header extensions have been written.
- rtp_sender_context_->InjectPacket(std::move(payload_padding),
- PacedPacketInfo());
- const auto& sent_payload_padding = transport_.last_sent_packet();
- EXPECT_TRUE(sent_payload_padding.HasExtension<TransportSequenceNumber>());
- EXPECT_TRUE(sent_payload_padding.HasExtension<AbsoluteSendTime>());
- EXPECT_TRUE(sent_payload_padding.HasExtension<TransmissionOffset>());
- sent_payload_padding.GetHeader(&rtp_header);
- EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
- EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
- EXPECT_TRUE(rtp_header.extension.hasTransportSequenceNumber);
+ EXPECT_GT(payload_padding->payload_size(), 0u);
}
-TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) {
+TEST_F(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) {
// Min requested size in order to use RTX payload.
const size_t kMinPaddingSize = 50;
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
- rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+ rtp_sender_->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
+ rtp_sender_->SetRtxPayloadType(kRtxPayload, kPayload);
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, 1);
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
const size_t kPayloadPacketSize = kMinPaddingSize;
@@ -2416,15 +1043,13 @@ TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) {
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kPayloadPacketSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
-
- // Send a dummy video packet so it ends up in the packet history.
- EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
+ packet_history_->PutRtpPacket(std::move(packet),
+ clock_->TimeInMilliseconds());
// Generated padding has large enough budget that the video packet should be
// retransmitted as padding.
std::vector<std::unique_ptr<RtpPacketToSend>> generated_packets =
- rtp_sender()->GeneratePadding(kMinPaddingSize, true);
+ rtp_sender_->GeneratePadding(kMinPaddingSize, true);
ASSERT_EQ(generated_packets.size(), 1u);
auto& padding_packet = generated_packets.front();
EXPECT_EQ(padding_packet->packet_type(), RtpPacketMediaType::kPadding);
@@ -2437,7 +1062,7 @@ TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) {
size_t padding_bytes_generated = 0;
generated_packets =
- rtp_sender()->GeneratePadding(kPaddingBytesRequested, true);
+ rtp_sender_->GeneratePadding(kPaddingBytesRequested, true);
EXPECT_EQ(generated_packets.size(), 1u);
for (auto& packet : generated_packets) {
EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding);
@@ -2450,17 +1075,17 @@ TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) {
EXPECT_EQ(padding_bytes_generated, kMaxPaddingSize);
}
-TEST_P(RtpSenderTest, LimitsPayloadPaddingSize) {
+TEST_F(RtpSenderTest, LimitsPayloadPaddingSize) {
// Limit RTX payload padding to 2x target size.
const double kFactor = 2.0;
field_trials_.SetMaxPaddingFactor(kFactor);
- SetUpRtpSender(true, false, false);
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
- rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+ SetUpRtpSender(false, false, nullptr);
+ rtp_sender_->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
+ rtp_sender_->SetRtxPayloadType(kRtxPayload, kPayload);
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, 1);
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
// Send a dummy video packet so it ends up in the packet history.
@@ -2470,8 +1095,8 @@ TEST_P(RtpSenderTest, LimitsPayloadPaddingSize) {
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kPayloadPacketSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
- EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
+ packet_history_->PutRtpPacket(std::move(packet),
+ clock_->TimeInMilliseconds());
// Smallest target size that will result in the sent packet being returned as
// padding.
@@ -2481,26 +1106,26 @@ TEST_P(RtpSenderTest, LimitsPayloadPaddingSize) {
// Generated padding has large enough budget that the video packet should be
// retransmitted as padding.
EXPECT_THAT(
- rtp_sender()->GeneratePadding(kMinTargerSizeForPayload, true),
+ rtp_sender_->GeneratePadding(kMinTargerSizeForPayload, true),
AllOf(Not(IsEmpty()),
Each(Pointee(Property(&RtpPacketToSend::padding_size, Eq(0u))))));
// If payload padding is > 2x requested size, plain padding is returned
// instead.
EXPECT_THAT(
- rtp_sender()->GeneratePadding(kMinTargerSizeForPayload - 1, true),
+ rtp_sender_->GeneratePadding(kMinTargerSizeForPayload - 1, true),
AllOf(Not(IsEmpty()),
Each(Pointee(Property(&RtpPacketToSend::padding_size, Gt(0u))))));
}
-TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) {
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+TEST_F(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) {
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, 1);
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransmissionOffset::kUri, kTransmissionTimeOffsetExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
AbsoluteSendTime::kUri, kAbsoluteSendTimeExtensionId));
- ASSERT_TRUE(rtp_sender()->RegisterRtpHeaderExtension(
+ ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(
TransportSequenceNumber::kUri, kTransportSequenceNumberExtensionId));
const size_t kPayloadPacketSize = 1234;
@@ -2511,8 +1136,8 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) {
packet->set_allow_retransmission(true);
packet->SetPayloadSize(kPayloadPacketSize);
packet->set_packet_type(RtpPacketMediaType::kVideo);
- EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1);
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
+ packet_history_->PutRtpPacket(std::move(packet),
+ clock_->TimeInMilliseconds());
// Payload padding not available without RTX, only generate plain padding on
// the media SSRC.
@@ -2524,7 +1149,7 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) {
(kPaddingBytesRequested + kMaxPaddingSize - 1) / kMaxPaddingSize;
size_t padding_bytes_generated = 0;
std::vector<std::unique_ptr<RtpPacketToSend>> padding_packets =
- rtp_sender()->GeneratePadding(kPaddingBytesRequested, true);
+ rtp_sender_->GeneratePadding(kPaddingBytesRequested, true);
EXPECT_EQ(padding_packets.size(), kExpectedNumPaddingPackets);
for (auto& packet : padding_packets) {
EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding);
@@ -2535,21 +1160,13 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) {
EXPECT_TRUE(packet->HasExtension<TransportSequenceNumber>());
EXPECT_TRUE(packet->HasExtension<AbsoluteSendTime>());
EXPECT_TRUE(packet->HasExtension<TransmissionOffset>());
-
- // Verify all header extensions are received.
- rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo());
- webrtc::RTPHeader rtp_header;
- transport_.last_sent_packet().GetHeader(&rtp_header);
- EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
- EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
- EXPECT_TRUE(rtp_header.extension.hasTransportSequenceNumber);
}
EXPECT_EQ(padding_bytes_generated,
kExpectedNumPaddingPackets * kMaxPaddingSize);
}
-TEST_P(RtpSenderTest, SupportsPadding) {
+TEST_F(RtpSenderTest, SupportsPadding) {
bool kSendingMediaStats[] = {true, false};
bool kEnableRedundantPayloads[] = {true, false};
absl::string_view kBweExtensionUris[] = {
@@ -2558,197 +1175,127 @@ TEST_P(RtpSenderTest, SupportsPadding) {
const int kExtensionsId = 7;
for (bool sending_media : kSendingMediaStats) {
- rtp_sender()->SetSendingMediaStatus(sending_media);
+ rtp_sender_->SetSendingMediaStatus(sending_media);
for (bool redundant_payloads : kEnableRedundantPayloads) {
int rtx_mode = kRtxRetransmitted;
if (redundant_payloads) {
rtx_mode |= kRtxRedundantPayloads;
}
- rtp_sender()->SetRtxStatus(rtx_mode);
+ rtp_sender_->SetRtxStatus(rtx_mode);
for (auto extension_uri : kBweExtensionUris) {
- EXPECT_FALSE(rtp_sender()->SupportsPadding());
- rtp_sender()->RegisterRtpHeaderExtension(extension_uri, kExtensionsId);
+ EXPECT_FALSE(rtp_sender_->SupportsPadding());
+ rtp_sender_->RegisterRtpHeaderExtension(extension_uri, kExtensionsId);
if (!sending_media) {
- EXPECT_FALSE(rtp_sender()->SupportsPadding());
+ EXPECT_FALSE(rtp_sender_->SupportsPadding());
} else {
- EXPECT_TRUE(rtp_sender()->SupportsPadding());
+ EXPECT_TRUE(rtp_sender_->SupportsPadding());
if (redundant_payloads) {
- EXPECT_TRUE(rtp_sender()->SupportsRtxPayloadPadding());
+ EXPECT_TRUE(rtp_sender_->SupportsRtxPayloadPadding());
} else {
- EXPECT_FALSE(rtp_sender()->SupportsRtxPayloadPadding());
+ EXPECT_FALSE(rtp_sender_->SupportsRtxPayloadPadding());
}
}
- rtp_sender()->DeregisterRtpHeaderExtension(extension_uri);
- EXPECT_FALSE(rtp_sender()->SupportsPadding());
+ rtp_sender_->DeregisterRtpHeaderExtension(extension_uri);
+ EXPECT_FALSE(rtp_sender_->SupportsPadding());
}
}
}
}
-TEST_P(RtpSenderTest, SetsCaptureTimeAndPopulatesTransmissionOffset) {
- rtp_sender()->RegisterRtpHeaderExtension(TransmissionOffset::kUri,
- kTransmissionTimeOffsetExtensionId);
-
- rtp_sender()->SetSendingMediaStatus(true);
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
- rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
-
- const int64_t kMissingCaptureTimeMs = 0;
- const uint32_t kTimestampTicksPerMs = 90;
- const int64_t kOffsetMs = 10;
-
- auto packet =
- BuildRtpPacket(kPayload, kMarkerBit, clock_->TimeInMilliseconds(),
- kMissingCaptureTimeMs);
- packet->set_packet_type(RtpPacketMediaType::kVideo);
- packet->ReserveExtension<TransmissionOffset>();
- packet->AllocatePayload(sizeof(kPayloadData));
-
- std::unique_ptr<RtpPacketToSend> packet_to_pace;
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- EXPECT_EQ(packets.size(), 1u);
- EXPECT_GT(packets[0]->capture_time_ms(), 0);
- packet_to_pace = std::move(packets[0]);
- });
+TEST_F(RtpSenderTest, SetsCaptureTimeOnRtxRetransmissions) {
+ EnableRtx();
+ // Put a packet in the packet history, with current time as capture time.
+ const int64_t start_time_ms = clock_->TimeInMilliseconds();
+ std::unique_ptr<RtpPacketToSend> packet =
+ BuildRtpPacket(kPayload, kMarkerBit, start_time_ms,
+ /*capture_time_ms=*/start_time_ms);
packet->set_allow_retransmission(true);
- EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet)));
-
- time_controller_.AdvanceTime(TimeDelta::Millis(kOffsetMs));
-
- rtp_sender_context_->InjectPacket(std::move(packet_to_pace),
- PacedPacketInfo());
-
- EXPECT_EQ(1, transport_.packets_sent());
- absl::optional<int32_t> transmission_time_extension =
- transport_.sent_packets_.back().GetExtension<TransmissionOffset>();
- ASSERT_TRUE(transmission_time_extension.has_value());
- EXPECT_EQ(*transmission_time_extension, kOffsetMs * kTimestampTicksPerMs);
-
- // Retransmit packet. The RTX packet should get the same capture time as the
- // original packet, so offset is delta from original packet to now.
- time_controller_.AdvanceTime(TimeDelta::Millis(kOffsetMs));
-
- std::unique_ptr<RtpPacketToSend> rtx_packet_to_pace;
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- EXPECT_GT(packets[0]->capture_time_ms(), 0);
- rtx_packet_to_pace = std::move(packets[0]);
- });
+ packet_history_->PutRtpPacket(std::move(packet), start_time_ms);
- EXPECT_GT(rtp_sender()->ReSendPacket(kSeqNum), 0);
- rtp_sender_context_->InjectPacket(std::move(rtx_packet_to_pace),
- PacedPacketInfo());
+ // Advance time, request an RTX retransmission. Capture timestamp should be
+ // preserved.
+ time_controller_.AdvanceTime(TimeDelta::Millis(10));
- EXPECT_EQ(2, transport_.packets_sent());
- transmission_time_extension =
- transport_.sent_packets_.back().GetExtension<TransmissionOffset>();
- ASSERT_TRUE(transmission_time_extension.has_value());
- EXPECT_EQ(*transmission_time_extension, 2 * kOffsetMs * kTimestampTicksPerMs);
+ EXPECT_CALL(mock_paced_sender_,
+ EnqueuePackets(ElementsAre(Pointee(Property(
+ &RtpPacketToSend::capture_time_ms, start_time_ms)))));
+ EXPECT_GT(rtp_sender_->ReSendPacket(kSeqNum), 0);
}
-TEST_P(RtpSenderTestWithoutPacer, ClearHistoryOnSequenceNumberCange) {
- const int64_t kRtt = 10;
-
- rtp_sender()->SetSendingMediaStatus(true);
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
- rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- rtp_sender_context_->packet_history_.SetRtt(kRtt);
+TEST_F(RtpSenderTest, ClearHistoryOnSequenceNumberChange) {
+ EnableRtx();
- // Send a packet and record its sequence numbers.
- SendGenericPacket();
- ASSERT_EQ(1u, transport_.sent_packets_.size());
- const uint16_t packet_seqence_number =
- transport_.sent_packets_.back().SequenceNumber();
+ // Put a packet in the packet history.
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ std::unique_ptr<RtpPacketToSend> packet =
+ BuildRtpPacket(kPayload, kMarkerBit, now_ms, now_ms);
+ packet->set_allow_retransmission(true);
+ packet_history_->PutRtpPacket(std::move(packet), now_ms);
- // Advance time and make sure it can be retransmitted, even if we try to set
- // the ssrc the what it already is.
- rtp_sender()->SetSequenceNumber(rtp_sender()->SequenceNumber());
- time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
- EXPECT_GT(rtp_sender()->ReSendPacket(packet_seqence_number), 0);
+ EXPECT_TRUE(packet_history_->GetPacketState(kSeqNum));
- // Change the sequence number, then move the time and try to retransmit again.
- // The old packet should now be gone.
- rtp_sender()->SetSequenceNumber(rtp_sender()->SequenceNumber() - 1);
- time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
- EXPECT_EQ(rtp_sender()->ReSendPacket(packet_seqence_number), 0);
+  // Update the sequence number of the RTP module and verify that the packet
+  // has been removed from the history.
+ rtp_sender_->SetSequenceNumber(rtp_sender_->SequenceNumber() - 1);
+ EXPECT_FALSE(packet_history_->GetPacketState(kSeqNum));
}
-TEST_P(RtpSenderTest, IgnoresNackAfterDisablingMedia) {
+TEST_F(RtpSenderTest, IgnoresNackAfterDisablingMedia) {
const int64_t kRtt = 10;
- rtp_sender()->SetSendingMediaStatus(true);
- rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads);
- rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
- RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- rtp_sender_context_->packet_history_.SetRtt(kRtt);
-
- // Send a packet so it is in the packet history.
- std::unique_ptr<RtpPacketToSend> packet_to_pace;
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- packet_to_pace = std::move(packets[0]);
- });
-
- SendGenericPacket();
- rtp_sender_context_->InjectPacket(std::move(packet_to_pace),
- PacedPacketInfo());
+ EnableRtx();
+ packet_history_->SetRtt(kRtt);
- ASSERT_EQ(1u, transport_.sent_packets_.size());
+ // Put a packet in the history.
+ const int64_t start_time_ms = clock_->TimeInMilliseconds();
+ std::unique_ptr<RtpPacketToSend> packet =
+ BuildRtpPacket(kPayload, kMarkerBit, start_time_ms,
+ /*capture_time_ms=*/start_time_ms);
+ packet->set_allow_retransmission(true);
+ packet_history_->PutRtpPacket(std::move(packet), start_time_ms);
- // Disable media sending and try to retransmit the packet, it should fail.
- rtp_sender()->SetSendingMediaStatus(false);
- time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
- EXPECT_LT(rtp_sender()->ReSendPacket(kSeqNum), 0);
+ // Disable media sending and try to retransmit the packet, it should fail.
+ rtp_sender_->SetSendingMediaStatus(false);
+ time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
+ EXPECT_LT(rtp_sender_->ReSendPacket(kSeqNum), 0);
}
-TEST_P(RtpSenderTest, DoesntFecProtectRetransmissions) {
+TEST_F(RtpSenderTest, DoesntFecProtectRetransmissions) {
   // Set up retransmission without RTX, so that a plain copy of the old packet
   // is re-sent instead.
const int64_t kRtt = 10;
- rtp_sender()->SetSendingMediaStatus(true);
- rtp_sender()->SetRtxStatus(kRtxOff);
- rtp_sender_context_->packet_history_.SetStorePacketsStatus(
+ rtp_sender_->SetSendingMediaStatus(true);
+ rtp_sender_->SetRtxStatus(kRtxOff);
+ packet_history_->SetStorePacketsStatus(
RtpPacketHistory::StorageMode::kStoreAndCull, 10);
- rtp_sender_context_->packet_history_.SetRtt(kRtt);
+ packet_history_->SetRtt(kRtt);
- // Send a packet so it is in the packet history, make sure to mark it for
- // FEC protection.
- std::unique_ptr<RtpPacketToSend> packet_to_pace;
- EXPECT_CALL(mock_paced_sender_, EnqueuePackets)
- .WillOnce([&](std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
- packet_to_pace = std::move(packets[0]);
- });
-
- SendGenericPacket();
- packet_to_pace->set_fec_protect_packet(true);
- rtp_sender_context_->InjectPacket(std::move(packet_to_pace),
- PacedPacketInfo());
-
- ASSERT_EQ(1u, transport_.sent_packets_.size());
+ // Put a fec protected packet in the history.
+ const int64_t start_time_ms = clock_->TimeInMilliseconds();
+ std::unique_ptr<RtpPacketToSend> packet =
+ BuildRtpPacket(kPayload, kMarkerBit, start_time_ms,
+ /*capture_time_ms=*/start_time_ms);
+ packet->set_allow_retransmission(true);
+ packet->set_fec_protect_packet(true);
+ packet_history_->PutRtpPacket(std::move(packet), start_time_ms);
   // Re-send the packet; the retransmitted packet should not have the FEC
   // protection flag set.
EXPECT_CALL(mock_paced_sender_,
- EnqueuePackets(Each(Pointee(
+ EnqueuePackets(ElementsAre(Pointee(
Property(&RtpPacketToSend::fec_protect_packet, false)))));
time_controller_.AdvanceTime(TimeDelta::Millis(kRtt));
- EXPECT_GT(rtp_sender()->ReSendPacket(kSeqNum), 0);
+ EXPECT_GT(rtp_sender_->ReSendPacket(kSeqNum), 0);
}
-TEST_P(RtpSenderTest, MarksPacketsWithKeyframeStatus) {
+TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) {
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
video_config.clock = clock_;
- video_config.rtp_sender = rtp_sender();
+ video_config.rtp_sender = rtp_sender_.get();
video_config.field_trials = &field_trials;
RTPSenderVideo rtp_sender_video(video_config);
@@ -2791,14 +1338,4 @@ TEST_P(RtpSenderTest, MarksPacketsWithKeyframeStatus) {
}
}
-INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
- RtpSenderTest,
- ::testing::Values(TestConfig{false},
- TestConfig{true}));
-
-INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
- RtpSenderTestWithoutPacer,
- ::testing::Values(TestConfig{false},
- TestConfig{true}));
-
} // namespace webrtc
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc
index 3b992dc4eb..4919e3ebf4 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -169,8 +169,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config)
absolute_capture_time_sender_(config.clock),
frame_transformer_delegate_(
config.frame_transformer
- ? new rtc::RefCountedObject<
- RTPSenderVideoFrameTransformerDelegate>(
+ ? rtc::make_ref_counted<RTPSenderVideoFrameTransformerDelegate>(
this,
config.frame_transformer,
rtp_sender_->SSRC(),
@@ -362,7 +361,8 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
if (video_header.generic) {
bool extension_is_set = false;
- if (video_structure_ != nullptr) {
+ if (packet->IsRegistered<RtpDependencyDescriptorExtension>() &&
+ video_structure_ != nullptr) {
DependencyDescriptor descriptor;
descriptor.first_packet_in_frame = first_packet;
descriptor.last_packet_in_frame = last_packet;
@@ -408,7 +408,8 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
}
// Do not use generic frame descriptor when dependency descriptor is stored.
- if (!extension_is_set) {
+ if (packet->IsRegistered<RtpGenericFrameDescriptorExtension00>() &&
+ !extension_is_set) {
RtpGenericFrameDescriptor generic_descriptor;
generic_descriptor.SetFirstPacketInSubFrame(first_packet);
generic_descriptor.SetLastPacketInSubFrame(last_packet);
@@ -438,7 +439,8 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
}
}
- if (first_packet &&
+ if (packet->IsRegistered<RtpVideoLayersAllocationExtension>() &&
+ first_packet &&
send_allocation_ != SendVideoLayersAllocation::kDontSend &&
(video_header.frame_type == VideoFrameType::kVideoFrameKey ||
PacketWillLikelyBeRequestedForRestransmitionIfLost(video_header))) {
@@ -524,7 +526,8 @@ bool RTPSenderVideo::SendVideo(
AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(),
single_packet->Csrcs()),
single_packet->Timestamp(), kVideoPayloadTypeFrequency,
- Int64MsToUQ32x32(single_packet->capture_time_ms() + NtpOffsetMs()),
+ Int64MsToUQ32x32(
+ clock_->ConvertTimestampToNtpTimeInMilliseconds(capture_time_ms)),
/*estimated_capture_clock_offset=*/
include_capture_clock_offset_ ? estimated_capture_clock_offset_ms
: absl::nullopt);
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.h b/modules/rtp_rtcp/source/rtp_sender_video.h
index 06f3d20014..ba8d7e8360 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -89,6 +89,7 @@ class RTPSenderVideo {
virtual ~RTPSenderVideo();
// expected_retransmission_time_ms.has_value() -> retransmission allowed.
+  // `capture_time_ms` and `clock::CurrentTime` should use the same epoch.
   // Calls to this method are assumed to be externally serialized.
// |estimated_capture_clock_offset_ms| is an estimated clock offset between
// this sender and the original capturer, for this video packet. See
diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
index 074b64086a..23e66bf757 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
@@ -129,9 +129,10 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame(
std::unique_ptr<TransformableFrameInterface> frame) {
MutexLock lock(&sender_lock_);
- // The encoder queue gets destroyed after the sender; as long as the sender is
- // alive, it's safe to post.
- if (!sender_)
+ // The encoder queue normally gets destroyed after the sender;
+ // however, it might still be null by the time a previously queued frame
+ // arrives.
+ if (!sender_ || !encoder_queue_)
return;
rtc::scoped_refptr<RTPSenderVideoFrameTransformerDelegate> delegate = this;
encoder_queue_->PostTask(ToQueuedTask(
diff --git a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
index 55bafdc790..ea727828cc 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
@@ -34,7 +34,6 @@
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h"
-#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/rate_limiter.h"
#include "rtc_base/task_queue_for_test.h"
@@ -1054,8 +1053,10 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) {
packet.GetExtension<AbsoluteCaptureTimeExtension>();
if (absolute_capture_time) {
++packets_with_abs_capture_time;
- EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp,
- Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs()));
+ EXPECT_EQ(
+ absolute_capture_time->absolute_capture_timestamp,
+ Int64MsToUQ32x32(fake_clock_.ConvertTimestampToNtpTimeInMilliseconds(
+ kAbsoluteCaptureTimestampMs)));
EXPECT_FALSE(
absolute_capture_time->estimated_capture_clock_offset.has_value());
}
@@ -1092,8 +1093,10 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) {
packet.GetExtension<AbsoluteCaptureTimeExtension>();
if (absolute_capture_time) {
++packets_with_abs_capture_time;
- EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp,
- Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs()));
+ EXPECT_EQ(
+ absolute_capture_time->absolute_capture_timestamp,
+ Int64MsToUQ32x32(fake_clock_.ConvertTimestampToNtpTimeInMilliseconds(
+ kAbsoluteCaptureTimestampMs)));
EXPECT_EQ(kExpectedCaptureClockOffset,
absolute_capture_time->estimated_capture_clock_offset);
}
@@ -1158,6 +1161,55 @@ TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) {
EXPECT_EQ(received_delay, kExpectedDelay);
}
+TEST_P(RtpSenderVideoTest, SendGenericVideo) {
+ const uint8_t kPayloadType = 127;
+ const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric;
+ const uint8_t kPayload[] = {47, 11, 32, 93, 89};
+
+ // Send keyframe.
+ RTPVideoHeader video_header;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, kCodecType, 1234, 4321,
+ kPayload, video_header,
+ absl::nullopt));
+
+ rtc::ArrayView<const uint8_t> sent_payload =
+ transport_.last_sent_packet().payload();
+ uint8_t generic_header = sent_payload[0];
+ EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit);
+ EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit);
+ EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(kPayload));
+
+ // Send delta frame.
+ const uint8_t kDeltaPayload[] = {13, 42, 32, 93, 13};
+ video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+ ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, kCodecType, 1234, 4321,
+ kDeltaPayload, video_header,
+ absl::nullopt));
+
+  sent_payload = transport_.last_sent_packet().payload();
+ generic_header = sent_payload[0];
+ EXPECT_FALSE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit);
+ EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit);
+ EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(kDeltaPayload));
+}
+
+TEST_P(RtpSenderVideoTest, SendRawVideo) {
+ const uint8_t kPayloadType = 111;
+ const uint8_t kPayload[] = {11, 22, 33, 44, 55};
+
+ // Send a frame.
+ RTPVideoHeader video_header;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, absl::nullopt, 1234,
+ 4321, kPayload, video_header,
+ absl::nullopt));
+
+ rtc::ArrayView<const uint8_t> sent_payload =
+ transport_.last_sent_packet().payload();
+ EXPECT_THAT(sent_payload, ElementsAreArray(kPayload));
+}
+
INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
RtpSenderVideoTest,
::testing::Bool());
diff --git a/modules/rtp_rtcp/source/rtp_utility.cc b/modules/rtp_rtcp/source/rtp_utility.cc
index a22785faca..d7df1830ce 100644
--- a/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/modules/rtp_rtcp/source/rtp_utility.cc
@@ -364,6 +364,10 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
header->extension.hasTransmissionTimeOffset = true;
break;
}
+ case kRtpExtensionCsrcAudioLevel: {
+ RTC_LOG(LS_WARNING) << "Csrc audio level extension not supported";
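+      // Stop parsing the remaining extension elements in this header.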
+ return;
+ }
case kRtpExtensionAudioLevel: {
if (len != 0) {
RTC_LOG(LS_WARNING) << "Incorrect audio level len: " << len;
diff --git a/modules/rtp_rtcp/source/source_tracker_unittest.cc b/modules/rtp_rtcp/source/source_tracker_unittest.cc
index 32f9f4b2a3..8514e8462d 100644
--- a/modules/rtp_rtcp/source/source_tracker_unittest.cc
+++ b/modules/rtp_rtcp/source/source_tracker_unittest.cc
@@ -111,7 +111,7 @@ class SourceTrackerRandomTest
packet_infos.emplace_back(GenerateSsrc(), GenerateCsrcs(),
GenerateRtpTimestamp(), GenerateAudioLevel(),
GenerateAbsoluteCaptureTime(),
- GenerateReceiveTimeMs());
+ GenerateReceiveTime());
}
return RtpPacketInfos(std::move(packet_infos));
@@ -192,8 +192,9 @@ class SourceTrackerRandomTest
return value;
}
- int64_t GenerateReceiveTimeMs() {
- return std::uniform_int_distribution<int64_t>()(generator_);
+ Timestamp GenerateReceiveTime() {
+ return Timestamp::Micros(
+ std::uniform_int_distribution<int64_t>()(generator_));
}
const uint32_t ssrcs_count_;
@@ -239,78 +240,156 @@ TEST(SourceTrackerTest, StartEmpty) {
EXPECT_THAT(tracker.GetSources(), IsEmpty());
}
-TEST(SourceTrackerTest, OnFrameDeliveredRecordsSources) {
+TEST(SourceTrackerTest, OnFrameDeliveredRecordsSourcesDistinctSsrcs) {
+ constexpr uint32_t kSsrc1 = 10;
+ constexpr uint32_t kSsrc2 = 11;
+ constexpr uint32_t kCsrcs0 = 20;
+ constexpr uint32_t kCsrcs1 = 21;
+ constexpr uint32_t kCsrcs2 = 22;
+ constexpr uint32_t kRtpTimestamp0 = 40;
+ constexpr uint32_t kRtpTimestamp1 = 50;
+ constexpr absl::optional<uint8_t> kAudioLevel0 = 50;
+ constexpr absl::optional<uint8_t> kAudioLevel1 = 20;
+ constexpr absl::optional<AbsoluteCaptureTime> kAbsoluteCaptureTime =
+ AbsoluteCaptureTime{/*absolute_capture_timestamp=*/12,
+ /*estimated_capture_clock_offset=*/absl::nullopt};
+ constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60);
+ constexpr Timestamp kReceiveTime1 = Timestamp::Millis(70);
+
+ SimulatedClock clock(1000000000000ULL);
+ SourceTracker tracker(&clock);
+
+ tracker.OnFrameDelivered(RtpPacketInfos(
+ {RtpPacketInfo(kSsrc1, {kCsrcs0, kCsrcs1}, kRtpTimestamp0, kAudioLevel0,
+ kAbsoluteCaptureTime, kReceiveTime0),
+ RtpPacketInfo(kSsrc2, {kCsrcs2}, kRtpTimestamp1, kAudioLevel1,
+ kAbsoluteCaptureTime, kReceiveTime1)}));
+
+ int64_t timestamp_ms = clock.TimeInMilliseconds();
+ constexpr RtpSource::Extensions extensions0 = {kAudioLevel0,
+ kAbsoluteCaptureTime};
+ constexpr RtpSource::Extensions extensions1 = {kAudioLevel1,
+ kAbsoluteCaptureTime};
+
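+  // GetSources() returns the most recently delivered packet's SSRC and CSRCs
+  // first, followed by the sources of earlier packets.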
+ EXPECT_THAT(tracker.GetSources(),
+ ElementsAre(RtpSource(timestamp_ms, kSsrc2, RtpSourceType::SSRC,
+ kRtpTimestamp1, extensions1),
+ RtpSource(timestamp_ms, kCsrcs2, RtpSourceType::CSRC,
+ kRtpTimestamp1, extensions1),
+ RtpSource(timestamp_ms, kSsrc1, RtpSourceType::SSRC,
+ kRtpTimestamp0, extensions0),
+ RtpSource(timestamp_ms, kCsrcs1, RtpSourceType::CSRC,
+ kRtpTimestamp0, extensions0),
+ RtpSource(timestamp_ms, kCsrcs0, RtpSourceType::CSRC,
+ kRtpTimestamp0, extensions0)));
+}
+
+TEST(SourceTrackerTest, OnFrameDeliveredRecordsSourcesSameSsrc) {
constexpr uint32_t kSsrc = 10;
constexpr uint32_t kCsrcs0 = 20;
constexpr uint32_t kCsrcs1 = 21;
- constexpr uint32_t kRtpTimestamp = 40;
- constexpr absl::optional<uint8_t> kAudioLevel = 50;
+ constexpr uint32_t kCsrcs2 = 22;
+ constexpr uint32_t kRtpTimestamp0 = 40;
+ constexpr uint32_t kRtpTimestamp1 = 45;
+ constexpr uint32_t kRtpTimestamp2 = 50;
+ constexpr absl::optional<uint8_t> kAudioLevel0 = 50;
+ constexpr absl::optional<uint8_t> kAudioLevel1 = 20;
+ constexpr absl::optional<uint8_t> kAudioLevel2 = 10;
constexpr absl::optional<AbsoluteCaptureTime> kAbsoluteCaptureTime =
AbsoluteCaptureTime{/*absolute_capture_timestamp=*/12,
/*estimated_capture_clock_offset=*/absl::nullopt};
- constexpr int64_t kReceiveTimeMs = 60;
+ constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60);
+ constexpr Timestamp kReceiveTime1 = Timestamp::Millis(70);
+ constexpr Timestamp kReceiveTime2 = Timestamp::Millis(80);
SimulatedClock clock(1000000000000ULL);
SourceTracker tracker(&clock);
tracker.OnFrameDelivered(RtpPacketInfos(
- {RtpPacketInfo(kSsrc, {kCsrcs0, kCsrcs1}, kRtpTimestamp, kAudioLevel,
- kAbsoluteCaptureTime, kReceiveTimeMs)}));
+ {RtpPacketInfo(kSsrc, {kCsrcs0, kCsrcs1}, kRtpTimestamp0, kAudioLevel0,
+ kAbsoluteCaptureTime, kReceiveTime0),
+ RtpPacketInfo(kSsrc, {kCsrcs2}, kRtpTimestamp1, kAudioLevel1,
+ kAbsoluteCaptureTime, kReceiveTime1),
+ RtpPacketInfo(kSsrc, {kCsrcs0}, kRtpTimestamp2, kAudioLevel2,
+ kAbsoluteCaptureTime, kReceiveTime2)}));
int64_t timestamp_ms = clock.TimeInMilliseconds();
- constexpr RtpSource::Extensions extensions = {kAudioLevel,
- kAbsoluteCaptureTime};
+ constexpr RtpSource::Extensions extensions0 = {kAudioLevel0,
+ kAbsoluteCaptureTime};
+ constexpr RtpSource::Extensions extensions1 = {kAudioLevel1,
+ kAbsoluteCaptureTime};
+ constexpr RtpSource::Extensions extensions2 = {kAudioLevel2,
+ kAbsoluteCaptureTime};
EXPECT_THAT(tracker.GetSources(),
ElementsAre(RtpSource(timestamp_ms, kSsrc, RtpSourceType::SSRC,
- kRtpTimestamp, extensions),
- RtpSource(timestamp_ms, kCsrcs1, RtpSourceType::CSRC,
- kRtpTimestamp, extensions),
+ kRtpTimestamp2, extensions2),
RtpSource(timestamp_ms, kCsrcs0, RtpSourceType::CSRC,
- kRtpTimestamp, extensions)));
+ kRtpTimestamp2, extensions2),
+ RtpSource(timestamp_ms, kCsrcs2, RtpSourceType::CSRC,
+ kRtpTimestamp1, extensions1),
+ RtpSource(timestamp_ms, kCsrcs1, RtpSourceType::CSRC,
+ kRtpTimestamp0, extensions0)));
}
TEST(SourceTrackerTest, OnFrameDeliveredUpdatesSources) {
- constexpr uint32_t kSsrc = 10;
+ constexpr uint32_t kSsrc1 = 10;
+ constexpr uint32_t kSsrc2 = 11;
constexpr uint32_t kCsrcs0 = 20;
constexpr uint32_t kCsrcs1 = 21;
constexpr uint32_t kCsrcs2 = 22;
constexpr uint32_t kRtpTimestamp0 = 40;
constexpr uint32_t kRtpTimestamp1 = 41;
+ constexpr uint32_t kRtpTimestamp2 = 42;
constexpr absl::optional<uint8_t> kAudioLevel0 = 50;
constexpr absl::optional<uint8_t> kAudioLevel1 = absl::nullopt;
+ constexpr absl::optional<uint8_t> kAudioLevel2 = 10;
constexpr absl::optional<AbsoluteCaptureTime> kAbsoluteCaptureTime0 =
AbsoluteCaptureTime{12, 34};
constexpr absl::optional<AbsoluteCaptureTime> kAbsoluteCaptureTime1 =
AbsoluteCaptureTime{56, 78};
- constexpr int64_t kReceiveTimeMs0 = 60;
- constexpr int64_t kReceiveTimeMs1 = 61;
+ constexpr absl::optional<AbsoluteCaptureTime> kAbsoluteCaptureTime2 =
+ AbsoluteCaptureTime{89, 90};
+ constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60);
+ constexpr Timestamp kReceiveTime1 = Timestamp::Millis(61);
+ constexpr Timestamp kReceiveTime2 = Timestamp::Millis(62);
+
+ constexpr RtpSource::Extensions extensions0 = {kAudioLevel0,
+ kAbsoluteCaptureTime0};
+ constexpr RtpSource::Extensions extensions1 = {kAudioLevel1,
+ kAbsoluteCaptureTime1};
+ constexpr RtpSource::Extensions extensions2 = {kAudioLevel2,
+ kAbsoluteCaptureTime2};
SimulatedClock clock(1000000000000ULL);
SourceTracker tracker(&clock);
tracker.OnFrameDelivered(RtpPacketInfos(
- {RtpPacketInfo(kSsrc, {kCsrcs0, kCsrcs1}, kRtpTimestamp0, kAudioLevel0,
- kAbsoluteCaptureTime0, kReceiveTimeMs0)}));
+ {RtpPacketInfo(kSsrc1, {kCsrcs0, kCsrcs1}, kRtpTimestamp0, kAudioLevel0,
+ kAbsoluteCaptureTime0, kReceiveTime0)}));
int64_t timestamp_ms_0 = clock.TimeInMilliseconds();
+ EXPECT_THAT(
+ tracker.GetSources(),
+ ElementsAre(RtpSource(timestamp_ms_0, kSsrc1, RtpSourceType::SSRC,
+ kRtpTimestamp0, extensions0),
+ RtpSource(timestamp_ms_0, kCsrcs1, RtpSourceType::CSRC,
+ kRtpTimestamp0, extensions0),
+ RtpSource(timestamp_ms_0, kCsrcs0, RtpSourceType::CSRC,
+ kRtpTimestamp0, extensions0)));
- clock.AdvanceTimeMilliseconds(17);
+ // Deliver packets with updated sources.
+ clock.AdvanceTimeMilliseconds(17);
tracker.OnFrameDelivered(RtpPacketInfos(
- {RtpPacketInfo(kSsrc, {kCsrcs0, kCsrcs2}, kRtpTimestamp1, kAudioLevel1,
- kAbsoluteCaptureTime1, kReceiveTimeMs1)}));
+ {RtpPacketInfo(kSsrc1, {kCsrcs0, kCsrcs2}, kRtpTimestamp1, kAudioLevel1,
+ kAbsoluteCaptureTime1, kReceiveTime1)}));
int64_t timestamp_ms_1 = clock.TimeInMilliseconds();
- constexpr RtpSource::Extensions extensions0 = {kAudioLevel0,
- kAbsoluteCaptureTime0};
- constexpr RtpSource::Extensions extensions1 = {kAudioLevel1,
- kAbsoluteCaptureTime1};
-
EXPECT_THAT(
tracker.GetSources(),
- ElementsAre(RtpSource(timestamp_ms_1, kSsrc, RtpSourceType::SSRC,
+ ElementsAre(RtpSource(timestamp_ms_1, kSsrc1, RtpSourceType::SSRC,
kRtpTimestamp1, extensions1),
RtpSource(timestamp_ms_1, kCsrcs2, RtpSourceType::CSRC,
kRtpTimestamp1, extensions1),
@@ -318,6 +397,27 @@ TEST(SourceTrackerTest, OnFrameDeliveredUpdatesSources) {
kRtpTimestamp1, extensions1),
RtpSource(timestamp_ms_0, kCsrcs1, RtpSourceType::CSRC,
kRtpTimestamp0, extensions0)));
+
+  // Deliver more packets with updated csrcs and a new ssrc.
+ clock.AdvanceTimeMilliseconds(17);
+ tracker.OnFrameDelivered(RtpPacketInfos(
+ {RtpPacketInfo(kSsrc2, {kCsrcs0}, kRtpTimestamp2, kAudioLevel2,
+ kAbsoluteCaptureTime2, kReceiveTime2)}));
+
+ int64_t timestamp_ms_2 = clock.TimeInMilliseconds();
+
+ EXPECT_THAT(
+ tracker.GetSources(),
+ ElementsAre(RtpSource(timestamp_ms_2, kSsrc2, RtpSourceType::SSRC,
+ kRtpTimestamp2, extensions2),
+ RtpSource(timestamp_ms_2, kCsrcs0, RtpSourceType::CSRC,
+ kRtpTimestamp2, extensions2),
+ RtpSource(timestamp_ms_1, kSsrc1, RtpSourceType::SSRC,
+ kRtpTimestamp1, extensions1),
+ RtpSource(timestamp_ms_1, kCsrcs2, RtpSourceType::CSRC,
+ kRtpTimestamp1, extensions1),
+ RtpSource(timestamp_ms_0, kCsrcs1, RtpSourceType::CSRC,
+ kRtpTimestamp0, extensions0)));
}
TEST(SourceTrackerTest, TimedOutSourcesAreRemoved) {
@@ -333,21 +433,21 @@ TEST(SourceTrackerTest, TimedOutSourcesAreRemoved) {
AbsoluteCaptureTime{12, 34};
constexpr absl::optional<AbsoluteCaptureTime> kAbsoluteCaptureTime1 =
AbsoluteCaptureTime{56, 78};
- constexpr int64_t kReceiveTimeMs0 = 60;
- constexpr int64_t kReceiveTimeMs1 = 61;
+ constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60);
+ constexpr Timestamp kReceiveTime1 = Timestamp::Millis(61);
SimulatedClock clock(1000000000000ULL);
SourceTracker tracker(&clock);
tracker.OnFrameDelivered(RtpPacketInfos(
{RtpPacketInfo(kSsrc, {kCsrcs0, kCsrcs1}, kRtpTimestamp0, kAudioLevel0,
- kAbsoluteCaptureTime0, kReceiveTimeMs0)}));
+ kAbsoluteCaptureTime0, kReceiveTime0)}));
clock.AdvanceTimeMilliseconds(17);
tracker.OnFrameDelivered(RtpPacketInfos(
{RtpPacketInfo(kSsrc, {kCsrcs0, kCsrcs2}, kRtpTimestamp1, kAudioLevel1,
- kAbsoluteCaptureTime1, kReceiveTimeMs1)}));
+ kAbsoluteCaptureTime1, kReceiveTime1)}));
int64_t timestamp_ms_1 = clock.TimeInMilliseconds();
diff --git a/modules/rtp_rtcp/source/time_util.cc b/modules/rtp_rtcp/source/time_util.cc
index b5b4f8bd98..fe0cfea11f 100644
--- a/modules/rtp_rtcp/source/time_util.cc
+++ b/modules/rtp_rtcp/source/time_util.cc
@@ -17,48 +17,6 @@
#include "rtc_base/time_utils.h"
namespace webrtc {
-namespace {
-
-int64_t NtpOffsetMsCalledOnce() {
- constexpr int64_t kNtpJan1970Sec = 2208988800;
- int64_t clock_time = rtc::TimeMillis();
- int64_t utc_time = rtc::TimeUTCMillis();
- return utc_time - clock_time + kNtpJan1970Sec * rtc::kNumMillisecsPerSec;
-}
-
-} // namespace
-
-int64_t NtpOffsetMs() {
- // Calculate the offset once.
- static int64_t ntp_offset_ms = NtpOffsetMsCalledOnce();
- return ntp_offset_ms;
-}
-
-NtpTime TimeMicrosToNtp(int64_t time_us) {
- // Since this doesn't return a wallclock time, but only NTP representation
- // of rtc::TimeMillis() clock, the exact offset doesn't matter.
- // To simplify conversions between NTP and RTP time, this offset is
- // limited to milliseconds in resolution.
- int64_t time_ntp_us = time_us + NtpOffsetMs() * 1000;
- RTC_DCHECK_GE(time_ntp_us, 0); // Time before year 1900 is unsupported.
-
- // TODO(danilchap): Convert both seconds and fraction together using int128
- // when that type is easily available.
- // Currently conversion is done separetly for seconds and fraction of a second
- // to avoid overflow.
-
- // Convert seconds to uint32 through uint64 for well-defined cast.
- // Wrap around (will happen in 2036) is expected for ntp time.
- uint32_t ntp_seconds =
- static_cast<uint64_t>(time_ntp_us / rtc::kNumMicrosecsPerSec);
-
- // Scale fractions of the second to ntp resolution.
- constexpr int64_t kNtpInSecond = 1LL << 32;
- int64_t us_fractions = time_ntp_us % rtc::kNumMicrosecsPerSec;
- uint32_t ntp_fractions =
- us_fractions * kNtpInSecond / rtc::kNumMicrosecsPerSec;
- return NtpTime(ntp_seconds, ntp_fractions);
-}
uint32_t SaturatedUsToCompactNtp(int64_t us) {
constexpr uint32_t kMaxCompactNtp = 0xFFFFFFFF;
diff --git a/modules/rtp_rtcp/source/time_util.h b/modules/rtp_rtcp/source/time_util.h
index 94b914310c..c883e5ca38 100644
--- a/modules/rtp_rtcp/source/time_util.h
+++ b/modules/rtp_rtcp/source/time_util.h
@@ -17,20 +17,6 @@
namespace webrtc {
-// Converts time obtained using rtc::TimeMicros to ntp format.
-// TimeMicrosToNtp guarantees difference of the returned values matches
-// difference of the passed values.
-// As a result TimeMicrosToNtp(rtc::TimeMicros()) doesn't guarantee to match
-// system time.
-// However, TimeMicrosToNtp Guarantees that returned NtpTime will be offsetted
-// from rtc::TimeMicros() by integral number of milliseconds.
-// Use NtpOffsetMs() to get that offset value.
-NtpTime TimeMicrosToNtp(int64_t time_us);
-
-// Difference between Ntp time and local relative time returned by
-// rtc::TimeMicros()
-int64_t NtpOffsetMs();
-
// Helper function for compact ntp representation:
// RFC 3550, Section 4. Time Format.
// Wallclock time is represented using the timestamp format of
diff --git a/modules/rtp_rtcp/source/time_util_unittest.cc b/modules/rtp_rtcp/source/time_util_unittest.cc
index 4b469bb956..6ff55dda55 100644
--- a/modules/rtp_rtcp/source/time_util_unittest.cc
+++ b/modules/rtp_rtcp/source/time_util_unittest.cc
@@ -9,34 +9,10 @@
*/
#include "modules/rtp_rtcp/source/time_util.h"
-#include "rtc_base/fake_clock.h"
-#include "rtc_base/time_utils.h"
-#include "system_wrappers/include/clock.h"
#include "test/gtest.h"
namespace webrtc {
-TEST(TimeUtilTest, TimeMicrosToNtpDoesntChangeBetweenRuns) {
- rtc::ScopedFakeClock clock;
- // TimeMicrosToNtp is not pure: it behave differently between different
- // execution of the program, but should behave same during same execution.
- const int64_t time_us = 12345;
- clock.SetTime(Timestamp::Micros(2));
- NtpTime time_ntp = TimeMicrosToNtp(time_us);
- clock.SetTime(Timestamp::Micros(time_us));
- EXPECT_EQ(TimeMicrosToNtp(time_us), time_ntp);
- clock.SetTime(Timestamp::Micros(1000000));
- EXPECT_EQ(TimeMicrosToNtp(time_us), time_ntp);
-}
-
-TEST(TimeUtilTest, TimeMicrosToNtpKeepsIntervals) {
- rtc::ScopedFakeClock clock;
- NtpTime time_ntp1 = TimeMicrosToNtp(rtc::TimeMicros());
- clock.AdvanceTime(TimeDelta::Millis(20));
- NtpTime time_ntp2 = TimeMicrosToNtp(rtc::TimeMicros());
- EXPECT_EQ(time_ntp2.ToMs() - time_ntp1.ToMs(), 20);
-}
-
TEST(TimeUtilTest, CompactNtp) {
const uint32_t kNtpSec = 0x12345678;
const uint32_t kNtpFrac = 0x23456789;
diff --git a/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc b/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc
index 2aebbead68..49f483dad6 100644
--- a/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc
+++ b/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc
@@ -24,6 +24,11 @@ namespace {
// Maximum number of media packets that can be protected in one batch.
constexpr size_t kMaxMediaPackets = 48;
+// Maximum number of media packets tracked by the FEC decoder.
+// Maintain a tracking window sufficiently larger than |kMaxMediaPackets|
+// to account for packet reordering in the pacer/network.
+constexpr size_t kMaxTrackedMediaPackets = 4 * kMaxMediaPackets;
+
// Maximum number of FEC packets stored inside ForwardErrorCorrection.
constexpr size_t kMaxFecPackets = kMaxMediaPackets;
@@ -51,7 +56,7 @@ size_t UlpfecHeaderSize(size_t packet_mask_size) {
} // namespace
UlpfecHeaderReader::UlpfecHeaderReader()
- : FecHeaderReader(kMaxMediaPackets, kMaxFecPackets) {}
+ : FecHeaderReader(kMaxTrackedMediaPackets, kMaxFecPackets) {}
UlpfecHeaderReader::~UlpfecHeaderReader() = default;
diff --git a/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc b/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc
index 016df6e834..53d363de67 100644
--- a/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc
+++ b/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc
@@ -392,7 +392,7 @@ TEST_F(UlpfecReceiverTest, PacketNotDroppedTooEarly) {
delayed_fec = fec_packets.front();
// Fill the FEC decoder. No packets should be dropped.
- const size_t kNumMediaPacketsBatch2 = 46;
+ const size_t kNumMediaPacketsBatch2 = 191;
std::list<AugmentedPacket*> augmented_media_packets_batch2;
ForwardErrorCorrection::PacketList media_packets_batch2;
for (size_t i = 0; i < kNumMediaPacketsBatch2; ++i) {
@@ -431,7 +431,7 @@ TEST_F(UlpfecReceiverTest, PacketDroppedWhenTooOld) {
delayed_fec = fec_packets.front();
// Fill the FEC decoder and force the last packet to be dropped.
- const size_t kNumMediaPacketsBatch2 = 48;
+ const size_t kNumMediaPacketsBatch2 = 192;
std::list<AugmentedPacket*> augmented_media_packets_batch2;
ForwardErrorCorrection::PacketList media_packets_batch2;
for (size_t i = 0; i < kNumMediaPacketsBatch2; ++i) {
diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h
index a7573993f7..3d7cb3291d 100644
--- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h
+++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h
@@ -25,7 +25,7 @@ class VideoRtpDepacketizerVp8 : public VideoRtpDepacketizer {
public:
VideoRtpDepacketizerVp8() = default;
VideoRtpDepacketizerVp8(const VideoRtpDepacketizerVp8&) = delete;
- VideoRtpDepacketizerVp8& operator=(VideoRtpDepacketizerVp8&) = delete;
+ VideoRtpDepacketizerVp8& operator=(const VideoRtpDepacketizerVp8&) = delete;
~VideoRtpDepacketizerVp8() override = default;
// Parses vp8 rtp payload descriptor.
diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc
index a719d7ab12..be05009807 100644
--- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc
+++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc
@@ -40,12 +40,12 @@ constexpr int kFailedToParse = 0;
bool ParsePictureId(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
uint32_t picture_id;
uint32_t m_bit;
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&m_bit, 1));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(1, m_bit));
if (m_bit) {
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&picture_id, 15));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(15, picture_id));
vp9->max_picture_id = kMaxTwoBytePictureId;
} else {
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&picture_id, 7));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(7, picture_id));
vp9->max_picture_id = kMaxOneBytePictureId;
}
vp9->picture_id = picture_id;
@@ -60,10 +60,10 @@ bool ParsePictureId(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
//
bool ParseLayerInfoCommon(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
uint32_t t, u_bit, s, d_bit;
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&t, 3));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&u_bit, 1));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&s, 3));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&d_bit, 1));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(3, t));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(1, u_bit));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(3, s));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(1, d_bit));
vp9->temporal_idx = t;
vp9->temporal_up_switch = u_bit ? true : false;
if (s >= kMaxSpatialLayers)
@@ -84,7 +84,7 @@ bool ParseLayerInfoCommon(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
bool ParseLayerInfoNonFlexibleMode(rtc::BitBuffer* parser,
RTPVideoHeaderVP9* vp9) {
uint8_t tl0picidx;
- RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&tl0picidx));
+ RETURN_FALSE_ON_ERROR(parser->ReadUInt8(tl0picidx));
vp9->tl0_pic_idx = tl0picidx;
return true;
}
@@ -117,8 +117,8 @@ bool ParseRefIndices(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
return false;
uint32_t p_diff;
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&p_diff, 7));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&n_bit, 1));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(7, p_diff));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(1, n_bit));
vp9->pid_diff[vp9->num_ref_pics] = p_diff;
uint32_t scaled_pid = vp9->picture_id;
@@ -154,9 +154,9 @@ bool ParseRefIndices(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
//
bool ParseSsData(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
uint32_t n_s, y_bit, g_bit;
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&n_s, 3));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&y_bit, 1));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&g_bit, 1));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(3, n_s));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(1, y_bit));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(1, g_bit));
RETURN_FALSE_ON_ERROR(parser->ConsumeBits(3));
vp9->num_spatial_layers = n_s + 1;
vp9->spatial_layer_resolution_present = y_bit ? true : false;
@@ -164,20 +164,20 @@ bool ParseSsData(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
if (y_bit) {
for (size_t i = 0; i < vp9->num_spatial_layers; ++i) {
- RETURN_FALSE_ON_ERROR(parser->ReadUInt16(&vp9->width[i]));
- RETURN_FALSE_ON_ERROR(parser->ReadUInt16(&vp9->height[i]));
+ RETURN_FALSE_ON_ERROR(parser->ReadUInt16(vp9->width[i]));
+ RETURN_FALSE_ON_ERROR(parser->ReadUInt16(vp9->height[i]));
}
}
if (g_bit) {
uint8_t n_g;
- RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&n_g));
+ RETURN_FALSE_ON_ERROR(parser->ReadUInt8(n_g));
vp9->gof.num_frames_in_gof = n_g;
}
for (size_t i = 0; i < vp9->gof.num_frames_in_gof; ++i) {
uint32_t t, u_bit, r;
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&t, 3));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&u_bit, 1));
- RETURN_FALSE_ON_ERROR(parser->ReadBits(&r, 2));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(3, t));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(1, u_bit));
+ RETURN_FALSE_ON_ERROR(parser->ReadBits(2, r));
RETURN_FALSE_ON_ERROR(parser->ConsumeBits(2));
vp9->gof.temporal_idx[i] = t;
vp9->gof.temporal_up_switch[i] = u_bit ? true : false;
@@ -185,7 +185,7 @@ bool ParseSsData(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) {
for (uint8_t p = 0; p < vp9->gof.num_ref_pics[i]; ++p) {
uint8_t p_diff;
- RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&p_diff));
+ RETURN_FALSE_ON_ERROR(parser->ReadUInt8(p_diff));
vp9->gof.pid_diff[i][p] = p_diff;
}
}
@@ -214,7 +214,7 @@ int VideoRtpDepacketizerVp9::ParseRtpPayload(
// Parse mandatory first byte of payload descriptor.
rtc::BitBuffer parser(rtp_payload.data(), rtp_payload.size());
uint8_t first_byte;
- if (!parser.ReadUInt8(&first_byte)) {
+ if (!parser.ReadUInt8(first_byte)) {
RTC_LOG(LS_ERROR) << "Payload length is zero.";
return kFailedToParse;
}
diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h
index c622cbc75e..4bb358a15f 100644
--- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h
+++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h
@@ -25,7 +25,7 @@ class VideoRtpDepacketizerVp9 : public VideoRtpDepacketizer {
public:
VideoRtpDepacketizerVp9() = default;
VideoRtpDepacketizerVp9(const VideoRtpDepacketizerVp9&) = delete;
- VideoRtpDepacketizerVp9& operator=(VideoRtpDepacketizerVp9&) = delete;
+ VideoRtpDepacketizerVp9& operator=(const VideoRtpDepacketizerVp9&) = delete;
~VideoRtpDepacketizerVp9() override = default;
// Parses vp9 rtp payload descriptor.
diff --git a/modules/utility/source/process_thread_impl.cc b/modules/utility/source/process_thread_impl.cc
index dc2a0066e9..73fc23400b 100644
--- a/modules/utility/source/process_thread_impl.cc
+++ b/modules/utility/source/process_thread_impl.cc
@@ -48,7 +48,6 @@ ProcessThreadImpl::ProcessThreadImpl(const char* thread_name)
ProcessThreadImpl::~ProcessThreadImpl() {
RTC_DCHECK(thread_checker_.IsCurrent());
- RTC_DCHECK(!thread_.get());
RTC_DCHECK(!stop_);
while (!delayed_tasks_.empty()) {
@@ -72,8 +71,8 @@ void ProcessThreadImpl::Delete() {
// Doesn't need locking, because the contending thread isn't running.
void ProcessThreadImpl::Start() RTC_NO_THREAD_SAFETY_ANALYSIS {
RTC_DCHECK(thread_checker_.IsCurrent());
- RTC_DCHECK(!thread_.get());
- if (thread_.get())
+ RTC_DCHECK(thread_.empty());
+ if (!thread_.empty())
return;
RTC_DCHECK(!stop_);
@@ -81,14 +80,18 @@ void ProcessThreadImpl::Start() RTC_NO_THREAD_SAFETY_ANALYSIS {
for (ModuleCallback& m : modules_)
m.module->ProcessThreadAttached(this);
- thread_.reset(
- new rtc::PlatformThread(&ProcessThreadImpl::Run, this, thread_name_));
- thread_->Start();
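+  // Spawn a joinable worker thread that keeps calling Process() until it
+  // returns false; Stop() joins the thread via thread_.Finalize().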
+ thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ CurrentTaskQueueSetter set_current(this);
+ while (Process()) {
+ }
+ },
+ thread_name_);
}
void ProcessThreadImpl::Stop() {
RTC_DCHECK(thread_checker_.IsCurrent());
- if (!thread_.get())
+ if (thread_.empty())
return;
{
@@ -98,9 +101,7 @@ void ProcessThreadImpl::Stop() {
}
wake_up_.Set();
-
- thread_->Stop();
- thread_.reset();
+ thread_.Finalize();
StopNoLocks();
}
@@ -108,7 +109,7 @@ void ProcessThreadImpl::Stop() {
// No locking needed, since this is called after the contending thread is
// stopped.
void ProcessThreadImpl::StopNoLocks() RTC_NO_THREAD_SAFETY_ANALYSIS {
- RTC_DCHECK(!thread_);
+ RTC_DCHECK(thread_.empty());
stop_ = false;
for (ModuleCallback& m : modules_)
@@ -179,6 +180,7 @@ void ProcessThreadImpl::PostDelayedTask(std::unique_ptr<QueuedTask> task,
void ProcessThreadImpl::RegisterModule(Module* module,
const rtc::Location& from) {
+ TRACE_EVENT0("webrtc", "ProcessThreadImpl::RegisterModule");
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK(module) << from.ToString();
@@ -199,7 +201,7 @@ void ProcessThreadImpl::RegisterModule(Module* module,
// Now that we know the module isn't in the list, we'll call out to notify
// the module that it's attached to the worker thread. We don't hold
// the lock while we make this call.
- if (thread_.get())
+ if (!thread_.empty())
module->ProcessThreadAttached(this);
{
@@ -227,14 +229,6 @@ void ProcessThreadImpl::DeRegisterModule(Module* module) {
module->ProcessThreadAttached(nullptr);
}
-// static
-void ProcessThreadImpl::Run(void* obj) {
- ProcessThreadImpl* impl = static_cast<ProcessThreadImpl*>(obj);
- CurrentTaskQueueSetter set_current(impl);
- while (impl->Process()) {
- }
-}
-
bool ProcessThreadImpl::Process() {
TRACE_EVENT1("webrtc", "ProcessThreadImpl", "name", thread_name_);
int64_t now = rtc::TimeMillis();
diff --git a/modules/utility/source/process_thread_impl.h b/modules/utility/source/process_thread_impl.h
index b83994cef8..b667bfc68a 100644
--- a/modules/utility/source/process_thread_impl.h
+++ b/modules/utility/source/process_thread_impl.h
@@ -45,7 +45,6 @@ class ProcessThreadImpl : public ProcessThread {
void DeRegisterModule(Module* module) override;
protected:
- static void Run(void* obj);
bool Process();
private:
@@ -97,8 +96,7 @@ class ProcessThreadImpl : public ProcessThread {
SequenceChecker thread_checker_;
rtc::Event wake_up_;
- // TODO(pbos): Remove unique_ptr and stop recreating the thread.
- std::unique_ptr<rtc::PlatformThread> thread_;
+ rtc::PlatformThread thread_;
ModuleList modules_ RTC_GUARDED_BY(mutex_);
// Set to true when calling Process, to allow reentrant calls to WakeUp.
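
As a rough illustration of the thread-ownership change above, a self-contained sketch of holding a joinable rtc::PlatformThread by value instead of through std::unique_ptr; the Worker class and its loop are placeholders, not the ProcessThreadImpl internals.

#include <atomic>

#include "rtc_base/platform_thread.h"

class Worker {
 public:
  void Start() {
    if (!thread_.empty())  // A value-type PlatformThread reports empty().
      return;
    running_ = true;
    thread_ = rtc::PlatformThread::SpawnJoinable(
        [this] {
          while (running_.load()) {
            // One unit of work per iteration.
          }
        },
        "ExampleWorker",
        rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh));
  }

  void Stop() {
    if (thread_.empty())
      return;
    running_ = false;
    thread_.Finalize();  // Joins the thread; replaces Stop() + reset().
  }

 private:
  std::atomic<bool> running_{false};
  rtc::PlatformThread thread_;
};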
diff --git a/modules/video_capture/linux/device_info_linux.cc b/modules/video_capture/linux/device_info_linux.cc
index b3c9766029..cde3b86d5c 100644
--- a/modules/video_capture/linux/device_info_linux.cc
+++ b/modules/video_capture/linux/device_info_linux.cc
@@ -116,7 +116,7 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
memset(deviceNameUTF8, 0, deviceNameLength);
memcpy(cameraName, cap.card, sizeof(cap.card));
- if (deviceNameLength >= strlen(cameraName)) {
+ if (deviceNameLength > strlen(cameraName)) {
memcpy(deviceNameUTF8, cameraName, strlen(cameraName));
} else {
RTC_LOG(LS_INFO) << "buffer passed is too small";
@@ -126,7 +126,7 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
if (cap.bus_info[0] != 0)  // may not be available in all drivers

{
// copy device id
- if (deviceUniqueIdUTF8Length >= strlen((const char*)cap.bus_info)) {
+ if (deviceUniqueIdUTF8Length > strlen((const char*)cap.bus_info)) {
memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
memcpy(deviceUniqueIdUTF8, cap.bus_info,
strlen((const char*)cap.bus_info));
@@ -146,7 +146,7 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
const int32_t deviceUniqueIdUTF8Length =
(int32_t)strlen((char*)deviceUniqueIdUTF8);
- if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength) {
+ if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
RTC_LOG(LS_INFO) << "Device name too long";
return -1;
}
diff --git a/modules/video_capture/linux/video_capture_linux.cc b/modules/video_capture/linux/video_capture_linux.cc
index 504565f512..10f9713ec3 100644
--- a/modules/video_capture/linux/video_capture_linux.cc
+++ b/modules/video_capture/linux/video_capture_linux.cc
@@ -34,8 +34,7 @@ namespace webrtc {
namespace videocapturemodule {
rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
const char* deviceUniqueId) {
- rtc::scoped_refptr<VideoCaptureModuleV4L2> implementation(
- new rtc::RefCountedObject<VideoCaptureModuleV4L2>());
+ auto implementation = rtc::make_ref_counted<VideoCaptureModuleV4L2>();
if (implementation->Init(deviceUniqueId) != 0)
return nullptr;
@@ -241,12 +240,15 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
}
// start capture thread;
- if (!_captureThread) {
+ if (_captureThread.empty()) {
quit_ = false;
- _captureThread.reset(
- new rtc::PlatformThread(VideoCaptureModuleV4L2::CaptureThread, this,
- "CaptureThread", rtc::kHighPriority));
- _captureThread->Start();
+ _captureThread = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (CaptureProcess()) {
+ }
+ },
+ "CaptureThread",
+ rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh));
}
// Needed to start UVC camera - from the uvcview application
@@ -262,14 +264,13 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
}
int32_t VideoCaptureModuleV4L2::StopCapture() {
- if (_captureThread) {
+ if (!_captureThread.empty()) {
{
MutexLock lock(&capture_lock_);
quit_ = true;
}
- // Make sure the capture thread stop stop using the critsect.
- _captureThread->Stop();
- _captureThread.reset();
+ // Make sure the capture thread stops using the mutex.
+ _captureThread.Finalize();
}
MutexLock lock(&capture_lock_);
@@ -357,11 +358,6 @@ bool VideoCaptureModuleV4L2::CaptureStarted() {
return _captureStarted;
}
-void VideoCaptureModuleV4L2::CaptureThread(void* obj) {
- VideoCaptureModuleV4L2* capture = static_cast<VideoCaptureModuleV4L2*>(obj);
- while (capture->CaptureProcess()) {
- }
-}
bool VideoCaptureModuleV4L2::CaptureProcess() {
int retVal = 0;
fd_set rSet;
diff --git a/modules/video_capture/linux/video_capture_linux.h b/modules/video_capture/linux/video_capture_linux.h
index ddb5d5ba87..fa06d72b8d 100644
--- a/modules/video_capture/linux/video_capture_linux.h
+++ b/modules/video_capture/linux/video_capture_linux.h
@@ -41,8 +41,7 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl {
bool AllocateVideoBuffers();
bool DeAllocateVideoBuffers();
- // TODO(pbos): Stop using unique_ptr and resetting the thread.
- std::unique_ptr<rtc::PlatformThread> _captureThread;
+ rtc::PlatformThread _captureThread;
Mutex capture_lock_;
bool quit_ RTC_GUARDED_BY(capture_lock_);
int32_t _deviceId;
diff --git a/modules/video_capture/windows/device_info_ds.cc b/modules/video_capture/windows/device_info_ds.cc
index 97f61f7845..e3833bc8d8 100644
--- a/modules/video_capture/windows/device_info_ds.cc
+++ b/modules/video_capture/windows/device_info_ds.cc
@@ -213,7 +213,7 @@ IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8,
uint32_t productUniqueIdUTF8Length) {
const int32_t deviceUniqueIdUTF8Length = (int32_t)strlen(
(char*)deviceUniqueIdUTF8); // UTF8 is also NULL terminated
- if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength) {
+ if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
RTC_LOG(LS_INFO) << "Device name too long";
return NULL;
}
@@ -306,7 +306,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
const int32_t deviceUniqueIdUTF8Length =
(int32_t)strlen((char*)deviceUniqueIdUTF8);
- if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength) {
+ if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
RTC_LOG(LS_INFO) << "Device name too long";
return -1;
}
@@ -568,7 +568,7 @@ void DeviceInfoDS::GetProductId(const char* devicePath,
// Find the second occurrence.
pos = strchr(pos + 1, '&');
uint32_t bytesToCopy = (uint32_t)(pos - startPos);
- if (pos && (bytesToCopy <= productUniqueIdUTF8Length) &&
+ if (pos && (bytesToCopy < productUniqueIdUTF8Length) &&
bytesToCopy <= kVideoCaptureProductIdLength) {
strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length,
(char*)startPos, bytesToCopy);
diff --git a/modules/video_capture/windows/video_capture_ds.cc b/modules/video_capture/windows/video_capture_ds.cc
index 6dca74750c..1a1e51934d 100644
--- a/modules/video_capture/windows/video_capture_ds.cc
+++ b/modules/video_capture/windows/video_capture_ds.cc
@@ -57,7 +57,7 @@ VideoCaptureDS::~VideoCaptureDS() {
int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) {
const int32_t nameLength = (int32_t)strlen((char*)deviceUniqueIdUTF8);
- if (nameLength > kVideoCaptureUniqueNameLength)
+ if (nameLength >= kVideoCaptureUniqueNameLength)
return -1;
// Store the device name
diff --git a/modules/video_capture/windows/video_capture_factory_windows.cc b/modules/video_capture/windows/video_capture_factory_windows.cc
index ea9d31add9..34cc982d7e 100644
--- a/modules/video_capture/windows/video_capture_factory_windows.cc
+++ b/modules/video_capture/windows/video_capture_factory_windows.cc
@@ -27,8 +27,7 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
return nullptr;
// TODO(tommi): Use Media Foundation implementation for Vista and up.
- rtc::scoped_refptr<VideoCaptureDS> capture(
- new rtc::RefCountedObject<VideoCaptureDS>());
+ auto capture = rtc::make_ref_counted<VideoCaptureDS>();
if (capture->Init(device_id) != 0) {
return nullptr;
}
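
A minimal sketch of the rtc::make_ref_counted pattern adopted in the capture factories above; the Widget type is a placeholder and the header locations are assumptions.

#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counted_object.h"

// Placeholder ref-counted type; the real call sites use VideoCaptureDS and
// VideoCaptureModuleV4L2.
class Widget : public rtc::RefCountInterface {
 public:
  explicit Widget(int id) : id_(id) {}
  int id() const { return id_; }

 private:
  const int id_;
};

rtc::scoped_refptr<Widget> CreateWidget(int id) {
  // Before: rtc::scoped_refptr<Widget> w(new rtc::RefCountedObject<Widget>(id));
  // After: the helper instantiates RefCountedObject<Widget> and forwards args.
  return rtc::make_ref_counted<Widget>(id);
}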
diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn
index 153a4b532e..b28517e8be 100644
--- a/modules/video_coding/BUILD.gn
+++ b/modules/video_coding/BUILD.gn
@@ -274,6 +274,7 @@ rtc_library("video_coding_legacy") {
"../../api:rtp_headers",
"../../api:rtp_packet_info",
"../../api:sequence_checker",
+ "../../api/units:timestamp",
"../../api/video:encoded_image",
"../../api/video:video_frame",
"../../api/video:video_frame_type",
@@ -466,6 +467,15 @@ rtc_library("webrtc_libvpx_interface") {
}
}
+rtc_library("mock_libvpx_interface") {
+ testonly = true
+ sources = [ "codecs/interface/mock_libvpx_interface.h" ]
+ deps = [
+ ":webrtc_libvpx_interface",
+ "../../test:test_support",
+ ]
+}
+
# This target includes the internal SW codec.
rtc_library("webrtc_vp8") {
visibility = [ "*" ]
@@ -583,6 +593,7 @@ rtc_library("webrtc_vp9") {
":webrtc_libvpx_interface",
":webrtc_vp9_helpers",
"../../api:fec_controller_api",
+ "../../api:refcountedbase",
"../../api:scoped_refptr",
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
@@ -677,15 +688,6 @@ if (rtc_include_tests) {
]
}
- rtc_library("mock_libvpx_interface") {
- testonly = true
- sources = [ "codecs/interface/mock_libvpx_interface.h" ]
- deps = [
- ":webrtc_libvpx_interface",
- "../../test:test_support",
- ]
- }
-
rtc_library("simulcast_test_fixture_impl") {
testonly = true
sources = [
diff --git a/modules/video_coding/codecs/av1/av1_svc_config.cc b/modules/video_coding/codecs/av1/av1_svc_config.cc
index 1e61477b78..b15443c563 100644
--- a/modules/video_coding/codecs/av1/av1_svc_config.cc
+++ b/modules/video_coding/codecs/av1/av1_svc_config.cc
@@ -51,8 +51,9 @@ bool SetAv1SvcConfig(VideoCodec& video_codec) {
if (info.num_spatial_layers == 1) {
SpatialLayer& spatial_layer = video_codec.spatialLayers[0];
spatial_layer.minBitrate = video_codec.minBitrate;
- spatial_layer.targetBitrate = video_codec.startBitrate;
spatial_layer.maxBitrate = video_codec.maxBitrate;
+ spatial_layer.targetBitrate =
+ (video_codec.minBitrate + video_codec.maxBitrate) / 2;
return true;
}
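
A short worked example of the single-layer target above, using the min/max values from the updated unit test (100 kbps and 500 kbps).

#include <cstdint>

// For one AV1 spatial layer the target bitrate is now the midpoint of the
// configured bounds instead of a copy of startBitrate.
uint32_t SingleLayerTargetBitrateKbps(uint32_t min_kbps, uint32_t max_kbps) {
  return (min_kbps + max_kbps) / 2;
}

// SingleLayerTargetBitrateKbps(100, 500) == 300, which keeps
// min <= target <= max, matching the relaxed expectations in the test below.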
diff --git a/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc b/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc
index 02ded1c70d..e6035328da 100644
--- a/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc
+++ b/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc
@@ -97,19 +97,21 @@ TEST(Av1SvcConfigTest, SetsNumberOfTemporalLayers) {
EXPECT_EQ(video_codec.spatialLayers[0].numberOfTemporalLayers, 3);
}
-TEST(Av1SvcConfigTest, CopiesBitrateForSingleSpatialLayer) {
+TEST(Av1SvcConfigTest, CopiesMinMaxBitrateForSingleSpatialLayer) {
VideoCodec video_codec;
video_codec.codecType = kVideoCodecAV1;
video_codec.SetScalabilityMode("L1T3");
video_codec.minBitrate = 100;
- video_codec.startBitrate = 200;
video_codec.maxBitrate = 500;
EXPECT_TRUE(SetAv1SvcConfig(video_codec));
EXPECT_EQ(video_codec.spatialLayers[0].minBitrate, 100u);
- EXPECT_EQ(video_codec.spatialLayers[0].targetBitrate, 200u);
EXPECT_EQ(video_codec.spatialLayers[0].maxBitrate, 500u);
+ EXPECT_LE(video_codec.spatialLayers[0].minBitrate,
+ video_codec.spatialLayers[0].targetBitrate);
+ EXPECT_LE(video_codec.spatialLayers[0].targetBitrate,
+ video_codec.spatialLayers[0].maxBitrate);
}
TEST(Av1SvcConfigTest, SetsBitratesForMultipleSpatialLayers) {
diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
index 8c82476b7a..3b5fdd78e2 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@@ -588,12 +588,26 @@ int32_t LibaomAv1Encoder::Encode(
// kNative. As a workaround to this, we perform ToI420() a second time.
// TODO(https://crbug.com/webrtc/12602): When Android buffers have a correct
// ToI420() implementation, remove this workaround.
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+                             frame.video_frame_buffer()->type())
+ << " image to I420. Can't encode frame.";
+ return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
+ }
if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
converted_buffer = converted_buffer->ToI420();
RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
converted_buffer->type() == VideoFrameBuffer::Type::kI420A);
}
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+                             frame.video_frame_buffer()->type())
+ << " image to I420. Can't encode frame.";
+ return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
+ }
prepped_input_frame = VideoFrame(converted_buffer, frame.timestamp(),
frame.render_time_ms(), frame.rotation());
}
@@ -671,8 +685,15 @@ int32_t LibaomAv1Encoder::Encode(
encoded_image.content_type_ = VideoContentType::UNSPECIFIED;
// If encoded image width/height info are added to aom_codec_cx_pkt_t,
// use those values in lieu of the values in frame.
- encoded_image._encodedHeight = frame.height();
- encoded_image._encodedWidth = frame.width();
+ if (svc_params_) {
+ int n = svc_params_->scaling_factor_num[layer_frame.SpatialId()];
+ int d = svc_params_->scaling_factor_den[layer_frame.SpatialId()];
+ encoded_image._encodedWidth = cfg_.g_w * n / d;
+ encoded_image._encodedHeight = cfg_.g_h * n / d;
+ } else {
+ encoded_image._encodedWidth = cfg_.g_w;
+ encoded_image._encodedHeight = cfg_.g_h;
+ }
encoded_image.timing_.flags = VideoSendTiming::kInvalid;
int qp = -1;
ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp);
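
A minimal sketch of the per-layer size computation above; the 320x180 input size is an illustrative assumption, while the 1/4, 1/2, 1/1 factors correspond to the L3T1 mode exercised by the new unit test.

// Mirrors the assignments above: each spatial layer reports the configured
// encoder size (cfg_.g_w x cfg_.g_h) scaled by its SVC scaling factor.
struct LayerSize {
  int width;
  int height;
};

LayerSize ScaledLayerSize(int encoder_width, int encoder_height,
                          int scaling_num, int scaling_den) {
  return {encoder_width * scaling_num / scaling_den,
          encoder_height * scaling_num / scaling_den};
}

// Example: a 320x180 configured size with L3T1 factors 1/4, 1/2 and 1/1
// yields encoded sizes of 80x45, 160x90 and 320x180 for the three layers.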
diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
index ea77e091af..96057a0ce2 100644
--- a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
+++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
@@ -25,6 +25,7 @@ namespace webrtc {
namespace {
using ::testing::ElementsAre;
+using ::testing::Field;
using ::testing::IsEmpty;
using ::testing::SizeIs;
@@ -135,5 +136,36 @@ TEST(LibaomAv1EncoderTest, EncoderInfoProvidesFpsAllocation) {
EXPECT_THAT(encoder_info.fps_allocation[3], IsEmpty());
}
+TEST(LibaomAv1EncoderTest, PopulatesEncodedFrameSize) {
+ std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_GT(codec_settings.width, 4);
+ // Configure encoder with 3 spatial layers.
+ codec_settings.SetScalabilityMode("L3T1");
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ using Frame = EncodedVideoFrameProducer::EncodedFrame;
+ std::vector<Frame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder).SetNumInputFrames(1).Encode();
+ EXPECT_THAT(
+ encoded_frames,
+ ElementsAre(
+ Field(&Frame::encoded_image,
+ AllOf(Field(&EncodedImage::_encodedWidth,
+ codec_settings.width / 4),
+ Field(&EncodedImage::_encodedHeight,
+ codec_settings.height / 4))),
+ Field(&Frame::encoded_image,
+ AllOf(Field(&EncodedImage::_encodedWidth,
+ codec_settings.width / 2),
+ Field(&EncodedImage::_encodedHeight,
+ codec_settings.height / 2))),
+ Field(&Frame::encoded_image,
+ AllOf(Field(&EncodedImage::_encodedWidth, codec_settings.width),
+ Field(&EncodedImage::_encodedHeight,
+ codec_settings.height)))));
+}
+
} // namespace
} // namespace webrtc
diff --git a/modules/video_coding/codecs/h264/h264.cc b/modules/video_coding/codecs/h264/h264.cc
index 016d0aa538..14e1691153 100644
--- a/modules/video_coding/codecs/h264/h264.cc
+++ b/modules/video_coding/codecs/h264/h264.cc
@@ -17,6 +17,7 @@
#include "absl/types/optional.h"
#include "api/video_codecs/sdp_video_format.h"
#include "media/base/media_constants.h"
+#include "rtc_base/trace_event.h"
#if defined(WEBRTC_USE_H264)
#include "modules/video_coding/codecs/h264/h264_decoder_impl.h"
@@ -65,6 +66,7 @@ void DisableRtcUseH264() {
}
std::vector<SdpVideoFormat> SupportedH264Codecs() {
+ TRACE_EVENT0("webrtc", __func__);
if (!IsH264CodecSupported())
return std::vector<SdpVideoFormat>();
// We only support encoding Constrained Baseline Profile (CBP), but the
diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index 6f37b52fd8..83f9a77614 100644
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -54,6 +54,16 @@ enum H264DecoderImplEvent {
kH264DecoderEventMax = 16,
};
+struct ScopedPtrAVFreePacket {
+ void operator()(AVPacket* packet) { av_packet_free(&packet); }
+};
+typedef std::unique_ptr<AVPacket, ScopedPtrAVFreePacket> ScopedAVPacket;
+
+ScopedAVPacket MakeScopedAVPacket() {
+ ScopedAVPacket packet(av_packet_alloc());
+ return packet;
+}
+
} // namespace
int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
@@ -202,7 +212,7 @@ int32_t H264DecoderImpl::InitDecode(const VideoCodec* codec_settings,
// a pointer |this|.
av_context_->opaque = this;
- AVCodec* codec = avcodec_find_decoder(av_context_->codec_id);
+ const AVCodec* codec = avcodec_find_decoder(av_context_->codec_id);
if (!codec) {
// This is an indication that FFmpeg has not been initialized or it has not
// been compiled/initialized with the correct set of codecs.
@@ -261,21 +271,25 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
- AVPacket packet;
- av_init_packet(&packet);
+ ScopedAVPacket packet = MakeScopedAVPacket();
+ if (!packet) {
+ ReportError();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
// packet.data has a non-const type, but isn't modified by
// avcodec_send_packet.
- packet.data = const_cast<uint8_t*>(input_image.data());
+ packet->data = const_cast<uint8_t*>(input_image.data());
if (input_image.size() >
static_cast<size_t>(std::numeric_limits<int>::max())) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
- packet.size = static_cast<int>(input_image.size());
+ packet->size = static_cast<int>(input_image.size());
int64_t frame_timestamp_us = input_image.ntp_time_ms_ * 1000; // ms -> μs
av_context_->reordered_opaque = frame_timestamp_us;
- int result = avcodec_send_packet(av_context_.get(), &packet);
+ int result = avcodec_send_packet(av_context_.get(), packet.get());
+
if (result < 0) {
RTC_LOG(LS_ERROR) << "avcodec_send_packet error: " << result;
ReportError();
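
For completeness, the ScopedAVPacket helper above as a self-contained sketch; the DecodeOnePacket wrapper is illustrative only, and the FFmpeg include path follows a stock FFmpeg install.

#include <cstdint>
#include <memory>

extern "C" {
#include "libavcodec/avcodec.h"
}

// Deleter that releases an AVPacket allocated with av_packet_alloc().
struct ScopedPtrAVFreePacket {
  void operator()(AVPacket* packet) { av_packet_free(&packet); }
};
using ScopedAVPacket = std::unique_ptr<AVPacket, ScopedPtrAVFreePacket>;

ScopedAVPacket MakeScopedAVPacket() {
  return ScopedAVPacket(av_packet_alloc());
}

// The packet is freed on every return path, unlike the old stack AVPacket
// that relied on av_init_packet().
int DecodeOnePacket(AVCodecContext* context, const uint8_t* data, int size) {
  ScopedAVPacket packet = MakeScopedAVPacket();
  if (!packet)
    return -1;  // Allocation failure.
  packet->data = const_cast<uint8_t*>(data);  // Not modified by the decoder.
  packet->size = size;
  return avcodec_send_packet(context, packet.get());
}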
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 949c51bafa..733f00f5c0 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -445,7 +445,7 @@ int32_t H264EncoderImpl::Encode(
pictures_[i].iStride[0], pictures_[i].pData[1],
pictures_[i].iStride[1], pictures_[i].pData[2],
pictures_[i].iStride[2], configurations_[i].width,
- configurations_[i].height, libyuv::kFilterBilinear);
+ configurations_[i].height, libyuv::kFilterBox);
}
if (!configurations_[i].sending) {
diff --git a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
index 8d23c6d858..2332fcddfb 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
@@ -248,9 +248,8 @@ void MultiplexDecoderAdapter::MergeAlphaImages(
[yuv_buffer, alpha_buffer] {});
}
if (supports_augmenting_data_) {
- merged_buffer = rtc::scoped_refptr<webrtc::AugmentedVideoFrameBuffer>(
- new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
- merged_buffer, std::move(augmenting_data), augmenting_data_length));
+ merged_buffer = rtc::make_ref_counted<AugmentedVideoFrameBuffer>(
+ merged_buffer, std::move(augmenting_data), augmenting_data_length);
}
VideoFrame merged_image = VideoFrame::Builder()
diff --git a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
index 2e7b0cb3c0..7ecb24a87c 100644
--- a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
+++ b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
@@ -90,9 +90,9 @@ class TestMultiplexAdapter : public VideoCodecUnitTest,
for (int i = 0; i < 16; i++) {
data[i] = i;
}
- rtc::scoped_refptr<AugmentedVideoFrameBuffer> augmented_video_frame_buffer =
- new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
- video_buffer, std::move(data), 16);
+ auto augmented_video_frame_buffer =
+ rtc::make_ref_counted<AugmentedVideoFrameBuffer>(video_buffer,
+ std::move(data), 16);
return std::make_unique<VideoFrame>(
VideoFrame::Builder()
.set_video_frame_buffer(augmented_video_frame_buffer)
diff --git a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc
index 8076e40fd4..fa768927b0 100644
--- a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc
+++ b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc
@@ -301,7 +301,7 @@ TEST(VideoCodecTestLibvpx, MAYBE_ChangeFramerateVP8) {
{31, 30, 0.85, 0.84}, {31.5, 30.5, 0.86, 0.84}, {30.5, 29, 0.83, 0.78}};
#else
std::vector<QualityThresholds> quality_thresholds = {
- {31, 30, 0.87, 0.86}, {32, 31, 0.89, 0.86}, {32, 30, 0.87, 0.82}};
+ {31, 30, 0.87, 0.85}, {32, 31, 0.88, 0.85}, {32, 30, 0.87, 0.82}};
#endif
fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
}
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index a4918ae73d..23eadfc0db 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -650,6 +650,8 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe(
EncodedImage copied_image = encoded_image;
copied_image.SetEncodedData(buffer);
+ if (base_image.size())
+ copied_image._frameType = base_image._frameType;
// Replace previous EncodedImage for this spatial layer.
merged_encoded_frames_.at(spatial_idx) = std::move(copied_image);
diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/modules/video_coding/codecs/vp8/default_temporal_layers.cc
index e2d9b1ebd2..c84d9acb1c 100644
--- a/modules/video_coding/codecs/vp8/default_temporal_layers.cc
+++ b/modules/video_coding/codecs/vp8/default_temporal_layers.cc
@@ -265,7 +265,8 @@ DefaultTemporalLayers::DefaultTemporalLayers(int number_of_temporal_layers)
temporal_ids_(GetTemporalIds(num_layers_)),
temporal_pattern_(GetDependencyInfo(num_layers_)),
is_static_buffer_(DetermineStaticBuffers(temporal_pattern_)),
- pattern_idx_(kUninitializedPatternIndex) {
+ pattern_idx_(kUninitializedPatternIndex),
+ new_bitrates_bps_(std::vector<uint32_t>(num_layers_, 0u)) {
RTC_CHECK_GE(kMaxTemporalStreams, number_of_temporal_layers);
RTC_CHECK_GE(number_of_temporal_layers, 0);
RTC_CHECK_LE(number_of_temporal_layers, 4);
diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
index 64ad40ab76..a18ac40e7d 100644
--- a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
+++ b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc
@@ -687,6 +687,25 @@ TEST_F(TemporalLayersTest, KeyFrame) {
}
}
+TEST_F(TemporalLayersTest, SetsTlCountOnFirstConfigUpdate) {
+ // Create an instance and fetch config update without setting any rate.
+ constexpr int kNumLayers = 2;
+ DefaultTemporalLayers tl(kNumLayers);
+ Vp8EncoderConfig config = tl.UpdateConfiguration(0);
+
+ // Config should indicate correct number of temporal layers, but zero bitrate.
+ ASSERT_TRUE(config.temporal_layer_config.has_value());
+ EXPECT_EQ(config.temporal_layer_config->ts_number_layers,
+ uint32_t{kNumLayers});
+ std::array<uint32_t, Vp8EncoderConfig::TemporalLayerConfig::kMaxLayers>
+ kZeroRate = {};
+ EXPECT_EQ(config.temporal_layer_config->ts_target_bitrate, kZeroRate);
+
+ // On second call, no new update.
+ config = tl.UpdateConfiguration(0);
+ EXPECT_FALSE(config.temporal_layer_config.has_value());
+}
+
class TemporalLayersReferenceTest : public TemporalLayersTest,
public ::testing::WithParamInterface<int> {
public:
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
index 979ded9a63..9d6ffdba90 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
@@ -54,13 +54,9 @@ constexpr bool kIsArm = false;
#endif
absl::optional<LibvpxVp8Decoder::DeblockParams> DefaultDeblockParams() {
- if (kIsArm) {
- // For ARM, this is only called when deblocking is explicitly enabled, and
- // the default strength is set by the ctor.
- return LibvpxVp8Decoder::DeblockParams();
- }
- // For non-arm, don't use the explicit deblocking settings by default.
- return absl::nullopt;
+ return LibvpxVp8Decoder::DeblockParams(/*max_level=*/8,
+ /*degrade_qp=*/60,
+ /*min_qp=*/30);
}
absl::optional<LibvpxVp8Decoder::DeblockParams>
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
index 8d84b67ce3..60295e5d5d 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
@@ -42,9 +42,12 @@ class LibvpxVp8Decoder : public VideoDecoder {
const char* ImplementationName() const override;
struct DeblockParams {
- int max_level = 6; // Deblocking strength: [0, 16].
- int degrade_qp = 1; // If QP value is below, start lowering |max_level|.
- int min_qp = 0; // If QP value is below, turn off deblocking.
+ DeblockParams() : max_level(6), degrade_qp(1), min_qp(0) {}
+ DeblockParams(int max_level, int degrade_qp, int min_qp)
+ : max_level(max_level), degrade_qp(degrade_qp), min_qp(min_qp) {}
+ int max_level; // Deblocking strength: [0, 16].
+ int degrade_qp; // If QP value is below, start lowering |max_level|.
+ int min_qp; // If QP value is below, turn off deblocking.
};
private:
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
index 2411c1622e..e2849dbe6f 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -161,6 +161,18 @@ void ApplyVp8EncoderConfigToVpxConfig(const Vp8EncoderConfig& encoder_config,
}
}
+bool IsCompatibleVideoFrameBufferType(VideoFrameBuffer::Type left,
+ VideoFrameBuffer::Type right) {
+ if (left == VideoFrameBuffer::Type::kI420 ||
+ left == VideoFrameBuffer::Type::kI420A) {
+ // LibvpxVp8Encoder does not care about the alpha channel, I420A and I420
+ // are considered compatible.
+ return right == VideoFrameBuffer::Type::kI420 ||
+ right == VideoFrameBuffer::Type::kI420A;
+ }
+ return left == right;
+}
+
void SetRawImagePlanes(vpx_image_t* raw_image, VideoFrameBuffer* buffer) {
switch (buffer->type()) {
case VideoFrameBuffer::Type::kI420:
@@ -1324,6 +1336,13 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
converted_buffer = converted_buffer->ToI420();
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+                               buffer->type())
+ << " image to I420. Can't encode frame.";
+ return {};
+ }
RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
converted_buffer->type() == VideoFrameBuffer::Type::kI420A);
}
@@ -1376,9 +1395,8 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
}
scaled_buffer = mapped_scaled_buffer;
}
- RTC_DCHECK_EQ(scaled_buffer->type(), mapped_buffer->type())
- << "Scaled frames must have the same type as the mapped frame.";
- if (scaled_buffer->type() != mapped_buffer->type()) {
+ if (!IsCompatibleVideoFrameBufferType(scaled_buffer->type(),
+ mapped_buffer->type())) {
RTC_LOG(LS_ERROR) << "When scaling "
<< VideoFrameBufferTypeToString(buffer_to_scale->type())
<< ", the image was unexpectedly converted to "
@@ -1386,6 +1404,10 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
<< " instead of "
<< VideoFrameBufferTypeToString(mapped_buffer->type())
<< ". Can't encode frame.";
+ RTC_NOTREACHED() << "Scaled buffer type "
+ << VideoFrameBufferTypeToString(scaled_buffer->type())
+ << " is not compatible with mapped buffer type "
+ << VideoFrameBufferTypeToString(mapped_buffer->type());
return {};
}
SetRawImagePlanes(&raw_images_[i], scaled_buffer);
diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
index feb2339404..20ab027684 100644
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
@@ -1074,8 +1074,15 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image,
break;
}
default: {
- i010_copy =
- I010Buffer::Copy(*input_image.video_frame_buffer()->ToI420());
+ auto i420_buffer = input_image.video_frame_buffer()->ToI420();
+ if (!i420_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(
+ input_image.video_frame_buffer()->type())
+ << " image to I420. Can't encode frame.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ i010_copy = I010Buffer::Copy(*i420_buffer);
i010_buffer = i010_copy.get();
}
}
@@ -1914,6 +1921,12 @@ rtc::scoped_refptr<VideoFrameBuffer> LibvpxVp9Encoder::PrepareBufferForProfile0(
if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
converted_buffer = converted_buffer->ToI420();
+ if (!converted_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to convert "
+ << VideoFrameBufferTypeToString(buffer->type())
+ << " image to I420. Can't encode frame.";
+ return {};
+ }
RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
converted_buffer->type() == VideoFrameBuffer::Type::kI420A);
}
diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
index 4d0a6983ac..d1f58b1bb8 100644
--- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
+++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
@@ -15,7 +15,6 @@
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
-#include "rtc_base/ref_counted_object.h"
#include "vpx/vpx_codec.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vpx_frame_buffer.h"
@@ -68,7 +67,7 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) {
}
// Otherwise create one.
if (available_buffer == nullptr) {
- available_buffer = new rtc::RefCountedObject<Vp9FrameBuffer>();
+ available_buffer = new Vp9FrameBuffer();
allocated_buffers_.push_back(available_buffer);
if (allocated_buffers_.size() > max_num_buffers_) {
RTC_LOG(LS_WARNING)
diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
index d37a9fc0e2..bce10be4d9 100644
--- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
+++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h
@@ -16,9 +16,9 @@
#include <vector>
+#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "rtc_base/buffer.h"
-#include "rtc_base/ref_count.h"
#include "rtc_base/synchronization/mutex.h"
struct vpx_codec_ctx;
@@ -65,13 +65,14 @@ constexpr size_t kDefaultMaxNumBuffers = 68;
// vpx_codec_destroy(decoder_ctx);
class Vp9FrameBufferPool {
public:
- class Vp9FrameBuffer : public rtc::RefCountInterface {
+ class Vp9FrameBuffer final
+ : public rtc::RefCountedNonVirtual<Vp9FrameBuffer> {
public:
uint8_t* GetData();
size_t GetDataSize() const;
void SetSize(size_t size);
- virtual bool HasOneRef() const = 0;
+ using rtc::RefCountedNonVirtual<Vp9FrameBuffer>::HasOneRef;
private:
// Data as an easily resizable buffer.
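
For context, a minimal sketch of a class built on rtc::RefCountedNonVirtual, following the Vp9FrameBuffer change above; Blob and its members are placeholders.

#include <cstddef>
#include <cstdint>
#include <vector>

#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"

// Ref counting comes from the CRTP base, so no RefCountedObject<> wrapper is
// needed at the allocation site.
class Blob final : public rtc::RefCountedNonVirtual<Blob> {
 public:
  using rtc::RefCountedNonVirtual<Blob>::HasOneRef;  // Re-expose, as above.

  void SetSize(size_t size) { data_.resize(size); }
  size_t size() const { return data_.size(); }

 private:
  std::vector<uint8_t> data_;
};

void Example() {
  rtc::scoped_refptr<Blob> blob(new Blob());  // Plain new, as in GetFrameBuffer().
  blob->SetSize(64);
  bool unique = blob->HasOneRef();  // Non-virtual, inherited query.
  (void)unique;
}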
diff --git a/modules/video_coding/deprecated/nack_module.h b/modules/video_coding/deprecated/nack_module.h
index 0ad009b597..2fac6ce128 100644
--- a/modules/video_coding/deprecated/nack_module.h
+++ b/modules/video_coding/deprecated/nack_module.h
@@ -125,7 +125,7 @@ class DEPRECATED_NackModule : public Module {
const absl::optional<BackoffSettings> backoff_settings_;
};
-using NackModule = ABSL_DEPRECATED("") DEPRECATED_NackModule;
+using NackModule ABSL_DEPRECATED("") = DEPRECATED_NackModule;
} // namespace webrtc
diff --git a/modules/video_coding/generic_decoder.cc b/modules/video_coding/generic_decoder.cc
index 621fd73972..acb4307f3f 100644
--- a/modules/video_coding/generic_decoder.cc
+++ b/modules/video_coding/generic_decoder.cc
@@ -113,7 +113,8 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
if (!frameInfo) {
RTC_LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping "
- "this one.";
+ "frame with timestamp "
+ << decodedImage.timestamp();
return;
}
diff --git a/modules/video_coding/jitter_buffer_unittest.cc b/modules/video_coding/jitter_buffer_unittest.cc
index acfee8c6f7..752ceb835e 100644
--- a/modules/video_coding/jitter_buffer_unittest.cc
+++ b/modules/video_coding/jitter_buffer_unittest.cc
@@ -67,8 +67,7 @@ class TestBasicJitterBuffer : public ::testing::Test {
video_header.is_first_packet_in_frame = true;
video_header.frame_type = VideoFrameType::kVideoFrameDelta;
packet_.reset(new VCMPacket(data_, size_, rtp_header, video_header,
- /*ntp_time_ms=*/0,
- clock_->TimeInMilliseconds()));
+ /*ntp_time_ms=*/0, clock_->CurrentTime()));
}
VCMEncodedFrame* DecodeCompleteFrame() {
@@ -541,7 +540,7 @@ TEST_F(TestBasicJitterBuffer, TestReorderingWithPadding) {
video_header.codec = kVideoCodecGeneric;
video_header.frame_type = VideoFrameType::kEmptyFrame;
VCMPacket empty_packet(data_, 0, rtp_header, video_header,
- /*ntp_time_ms=*/0, clock_->TimeInMilliseconds());
+ /*ntp_time_ms=*/0, clock_->CurrentTime());
EXPECT_EQ(kOldPacket,
jitter_buffer_->InsertPacket(empty_packet, &retransmitted));
empty_packet.seqNum += 1;
diff --git a/modules/video_coding/packet.cc b/modules/video_coding/packet.cc
index 0c4a658b8f..324248ab36 100644
--- a/modules/video_coding/packet.cc
+++ b/modules/video_coding/packet.cc
@@ -34,7 +34,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
const RTPHeader& rtp_header,
const RTPVideoHeader& videoHeader,
int64_t ntp_time_ms,
- int64_t receive_time_ms)
+ Timestamp receive_time)
: payloadType(rtp_header.payloadType),
timestamp(rtp_header.timestamp),
ntp_time_ms_(ntp_time_ms),
@@ -47,7 +47,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
insertStartCode(videoHeader.codec == kVideoCodecH264 &&
videoHeader.is_first_packet_in_frame),
video_header(videoHeader),
- packet_info(rtp_header, receive_time_ms) {
+ packet_info(rtp_header, receive_time) {
if (is_first_packet_in_frame() && markerBit) {
completeNALU = kNaluComplete;
} else if (is_first_packet_in_frame()) {
diff --git a/modules/video_coding/packet.h b/modules/video_coding/packet.h
index f157e10898..9aa2d5ce08 100644
--- a/modules/video_coding/packet.h
+++ b/modules/video_coding/packet.h
@@ -17,6 +17,7 @@
#include "absl/types/optional.h"
#include "api/rtp_headers.h"
#include "api/rtp_packet_info.h"
+#include "api/units/timestamp.h"
#include "api/video/video_frame_type.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
@@ -41,7 +42,7 @@ class VCMPacket {
const RTPHeader& rtp_header,
const RTPVideoHeader& video_header,
int64_t ntp_time_ms,
- int64_t receive_time_ms);
+ Timestamp receive_time);
~VCMPacket();
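
A small sketch of the receive-time change above: callers now hand VCMPacket a strongly typed webrtc::Timestamp taken from the clock rather than a raw millisecond count. The MakeReceiveTime helper is illustrative.

#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"

// Before: int64_t receive_time_ms = clock->TimeInMilliseconds();
// After: a Timestamp, as used by the updated jitter buffer and VideoReceiver.
webrtc::Timestamp MakeReceiveTime(webrtc::Clock* clock) {
  return clock->CurrentTime();
}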
diff --git a/modules/video_coding/packet_buffer.cc b/modules/video_coding/packet_buffer.cc
index 1291f78e3a..c98ae00389 100644
--- a/modules/video_coding/packet_buffer.cc
+++ b/modules/video_coding/packet_buffer.cc
@@ -35,20 +35,13 @@ namespace webrtc {
namespace video_coding {
PacketBuffer::Packet::Packet(const RtpPacketReceived& rtp_packet,
- const RTPVideoHeader& video_header,
- int64_t receive_time_ms)
+ const RTPVideoHeader& video_header)
: marker_bit(rtp_packet.Marker()),
payload_type(rtp_packet.PayloadType()),
seq_num(rtp_packet.SequenceNumber()),
timestamp(rtp_packet.Timestamp()),
times_nacked(-1),
- video_header(video_header),
- packet_info(rtp_packet.Ssrc(),
- rtp_packet.Csrcs(),
- rtp_packet.Timestamp(),
- /*audio_level=*/absl::nullopt,
- rtp_packet.GetExtension<AbsoluteCaptureTimeExtension>(),
- receive_time_ms) {}
+ video_header(video_header) {}
PacketBuffer::PacketBuffer(size_t start_buffer_size, size_t max_buffer_size)
: max_size_(max_buffer_size),
diff --git a/modules/video_coding/packet_buffer.h b/modules/video_coding/packet_buffer.h
index c0cc752c3a..f4dbe31266 100644
--- a/modules/video_coding/packet_buffer.h
+++ b/modules/video_coding/packet_buffer.h
@@ -18,6 +18,7 @@
#include "absl/base/attributes.h"
#include "api/rtp_packet_info.h"
+#include "api/units/timestamp.h"
#include "api/video/encoded_image.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
@@ -33,8 +34,7 @@ class PacketBuffer {
struct Packet {
Packet() = default;
Packet(const RtpPacketReceived& rtp_packet,
- const RTPVideoHeader& video_header,
- int64_t receive_time_ms);
+ const RTPVideoHeader& video_header);
Packet(const Packet&) = delete;
Packet(Packet&&) = delete;
Packet& operator=(const Packet&) = delete;
@@ -63,8 +63,6 @@ class PacketBuffer {
rtc::CopyOnWriteBuffer video_payload;
RTPVideoHeader video_header;
-
- RtpPacketInfo packet_info;
};
struct InsertResult {
std::vector<std::unique_ptr<Packet>> packets;
diff --git a/modules/video_coding/rtp_frame_reference_finder.cc b/modules/video_coding/rtp_frame_reference_finder.cc
index a060f84777..a44b76bf15 100644
--- a/modules/video_coding/rtp_frame_reference_finder.cc
+++ b/modules/video_coding/rtp_frame_reference_finder.cc
@@ -142,31 +142,34 @@ T& RtpFrameReferenceFinderImpl::GetRefFinderAs() {
} // namespace internal
-RtpFrameReferenceFinder::RtpFrameReferenceFinder(
- OnCompleteFrameCallback* frame_callback)
- : RtpFrameReferenceFinder(frame_callback, 0) {}
+RtpFrameReferenceFinder::RtpFrameReferenceFinder()
+ : RtpFrameReferenceFinder(0) {}
RtpFrameReferenceFinder::RtpFrameReferenceFinder(
- OnCompleteFrameCallback* frame_callback,
int64_t picture_id_offset)
: picture_id_offset_(picture_id_offset),
- frame_callback_(frame_callback),
impl_(std::make_unique<internal::RtpFrameReferenceFinderImpl>()) {}
RtpFrameReferenceFinder::~RtpFrameReferenceFinder() = default;
-void RtpFrameReferenceFinder::ManageFrame(
+RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinder::ManageFrame(
std::unique_ptr<RtpFrameObject> frame) {
// If we have cleared past this frame, drop it.
if (cleared_to_seq_num_ != -1 &&
AheadOf<uint16_t>(cleared_to_seq_num_, frame->first_seq_num())) {
- return;
+ return {};
}
- HandOffFrames(impl_->ManageFrame(std::move(frame)));
+
+ auto frames = impl_->ManageFrame(std::move(frame));
+ AddPictureIdOffset(frames);
+ return frames;
}
-void RtpFrameReferenceFinder::PaddingReceived(uint16_t seq_num) {
- HandOffFrames(impl_->PaddingReceived(seq_num));
+RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinder::PaddingReceived(
+ uint16_t seq_num) {
+ auto frames = impl_->PaddingReceived(seq_num);
+ AddPictureIdOffset(frames);
+ return frames;
}
void RtpFrameReferenceFinder::ClearTo(uint16_t seq_num) {
@@ -174,14 +177,12 @@ void RtpFrameReferenceFinder::ClearTo(uint16_t seq_num) {
impl_->ClearTo(seq_num);
}
-void RtpFrameReferenceFinder::HandOffFrames(ReturnVector frames) {
+void RtpFrameReferenceFinder::AddPictureIdOffset(ReturnVector& frames) {
for (auto& frame : frames) {
frame->SetId(frame->Id() + picture_id_offset_);
for (size_t i = 0; i < frame->num_references; ++i) {
frame->references[i] += picture_id_offset_;
}
-
- frame_callback_->OnCompleteFrame(std::move(frame));
}
}
diff --git a/modules/video_coding/rtp_frame_reference_finder.h b/modules/video_coding/rtp_frame_reference_finder.h
index 3577ea8285..d2447773a3 100644
--- a/modules/video_coding/rtp_frame_reference_finder.h
+++ b/modules/video_coding/rtp_frame_reference_finder.h
@@ -20,47 +20,38 @@ namespace internal {
class RtpFrameReferenceFinderImpl;
} // namespace internal
-// A complete frame is a frame which has received all its packets and all its
-// references are known.
-class OnCompleteFrameCallback {
- public:
- virtual ~OnCompleteFrameCallback() {}
- virtual void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) = 0;
-};
-
class RtpFrameReferenceFinder {
public:
using ReturnVector = absl::InlinedVector<std::unique_ptr<RtpFrameObject>, 3>;
- explicit RtpFrameReferenceFinder(OnCompleteFrameCallback* frame_callback);
- explicit RtpFrameReferenceFinder(OnCompleteFrameCallback* frame_callback,
- int64_t picture_id_offset);
+ RtpFrameReferenceFinder();
+ explicit RtpFrameReferenceFinder(int64_t picture_id_offset);
~RtpFrameReferenceFinder();
- // Manage this frame until:
- // - We have all information needed to determine its references, after
- // which |frame_callback_| is called with the completed frame, or
- // - We have too many stashed frames (determined by |kMaxStashedFrames|)
- // so we drop this frame, or
- // - It gets cleared by ClearTo, which also means we drop it.
- void ManageFrame(std::unique_ptr<RtpFrameObject> frame);
+ // The RtpFrameReferenceFinder will hold onto the frame until:
+ // - the required information to determine its references has been received,
+  //   in which case it, and possibly other frames, are returned, or
+ // - There are too many stashed frames (determined by |kMaxStashedFrames|),
+ // in which case it gets dropped, or
+  // - It gets cleared by ClearTo, in which case it is dropped, or
+ // - The frame is old, in which case it also gets dropped.
+ ReturnVector ManageFrame(std::unique_ptr<RtpFrameObject> frame);
// Notifies that padding has been received, which the reference finder
// might need to calculate the references of a frame.
- void PaddingReceived(uint16_t seq_num);
+ ReturnVector PaddingReceived(uint16_t seq_num);
// Clear all stashed frames that include packets older than |seq_num|.
void ClearTo(uint16_t seq_num);
private:
- void HandOffFrames(ReturnVector frames);
+ void AddPictureIdOffset(ReturnVector& frames);
// How far frames have been cleared out of the buffer by RTP sequence number.
// A frame will be cleared if it contains a packet with a sequence number
// older than |cleared_to_seq_num_|.
int cleared_to_seq_num_ = -1;
const int64_t picture_id_offset_;
- OnCompleteFrameCallback* frame_callback_;
std::unique_ptr<internal::RtpFrameReferenceFinderImpl> impl_;
};
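
A rough usage sketch of the callback-free API above, mirroring the pattern in the updated unit tests; the ReceiverSketch class, the Deliver() hook, the enclosing namespace and the frame_object.h include path are assumptions.

#include <cstdint>
#include <memory>
#include <utility>

#include "modules/video_coding/frame_object.h"
#include "modules/video_coding/rtp_frame_reference_finder.h"

namespace webrtc {

class ReceiverSketch {
 public:
  void OnAssembledFrame(std::unique_ptr<RtpFrameObject> frame) {
    // ManageFrame() now returns the frames whose references are resolved
    // instead of invoking an OnCompleteFrameCallback.
    RtpFrameReferenceFinder::ReturnVector frames =
        reference_finder_.ManageFrame(std::move(frame));
    for (std::unique_ptr<RtpFrameObject>& complete : frames)
      Deliver(std::move(complete));
  }

  void OnPadding(uint16_t seq_num) {
    for (auto& complete : reference_finder_.PaddingReceived(seq_num))
      Deliver(std::move(complete));
  }

 private:
  void Deliver(std::unique_ptr<RtpFrameObject> frame);
  RtpFrameReferenceFinder reference_finder_;
};

}  // namespace webrtc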
diff --git a/modules/video_coding/rtp_frame_reference_finder_unittest.cc b/modules/video_coding/rtp_frame_reference_finder_unittest.cc
index 5141b496c0..a5b0fc49ce 100644
--- a/modules/video_coding/rtp_frame_reference_finder_unittest.cc
+++ b/modules/video_coding/rtp_frame_reference_finder_unittest.cc
@@ -60,28 +60,29 @@ std::unique_ptr<RtpFrameObject> CreateFrame(
}
} // namespace
-class TestRtpFrameReferenceFinder : public ::testing::Test,
- public OnCompleteFrameCallback {
+class TestRtpFrameReferenceFinder : public ::testing::Test {
protected:
TestRtpFrameReferenceFinder()
: rand_(0x8739211),
- reference_finder_(new RtpFrameReferenceFinder(this)),
+ reference_finder_(std::make_unique<RtpFrameReferenceFinder>()),
frames_from_callback_(FrameComp()) {}
uint16_t Rand() { return rand_.Rand<uint16_t>(); }
- void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override {
- int64_t pid = frame->Id();
- uint16_t sidx = *frame->SpatialIndex();
- auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
- if (frame_it != frames_from_callback_.end()) {
- ADD_FAILURE() << "Already received frame with (pid:sidx): (" << pid << ":"
- << sidx << ")";
- return;
+ void OnCompleteFrames(RtpFrameReferenceFinder::ReturnVector frames) {
+ for (auto& frame : frames) {
+ int64_t pid = frame->Id();
+ uint16_t sidx = *frame->SpatialIndex();
+ auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
+ if (frame_it != frames_from_callback_.end()) {
+ ADD_FAILURE() << "Already received frame with (pid:sidx): (" << pid
+ << ":" << sidx << ")";
+ return;
+ }
+
+ frames_from_callback_.insert(
+ std::make_pair(std::make_pair(pid, sidx), std::move(frame)));
}
-
- frames_from_callback_.insert(
- std::make_pair(std::make_pair(pid, sidx), std::move(frame)));
}
void InsertGeneric(uint16_t seq_num_start,
@@ -91,14 +92,18 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecGeneric,
RTPVideoTypeHeader());
- reference_finder_->ManageFrame(std::move(frame));
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
}
void InsertH264(uint16_t seq_num_start, uint16_t seq_num_end, bool keyframe) {
std::unique_ptr<RtpFrameObject> frame =
CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecH264,
RTPVideoTypeHeader());
- reference_finder_->ManageFrame(std::move(frame));
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
+ }
+
+ void InsertPadding(uint16_t seq_num) {
+ OnCompleteFrames(reference_finder_->PaddingReceived(seq_num));
}
// Check if a frame with picture id |pid| and spatial index |sidx| has been
@@ -165,7 +170,7 @@ TEST_F(TestRtpFrameReferenceFinder, PaddingPackets) {
InsertGeneric(sn, sn, true);
InsertGeneric(sn + 2, sn + 2, false);
EXPECT_EQ(1UL, frames_from_callback_.size());
- reference_finder_->PaddingReceived(sn + 1);
+ InsertPadding(sn + 1);
EXPECT_EQ(2UL, frames_from_callback_.size());
}
@@ -173,8 +178,8 @@ TEST_F(TestRtpFrameReferenceFinder, PaddingPacketsReordered) {
uint16_t sn = Rand();
InsertGeneric(sn, sn, true);
- reference_finder_->PaddingReceived(sn + 1);
- reference_finder_->PaddingReceived(sn + 4);
+ InsertPadding(sn + 1);
+ InsertPadding(sn + 4);
InsertGeneric(sn + 2, sn + 3, false);
EXPECT_EQ(2UL, frames_from_callback_.size());
@@ -186,12 +191,12 @@ TEST_F(TestRtpFrameReferenceFinder, PaddingPacketsReorderedMultipleKeyframes) {
uint16_t sn = Rand();
InsertGeneric(sn, sn, true);
- reference_finder_->PaddingReceived(sn + 1);
- reference_finder_->PaddingReceived(sn + 4);
+ InsertPadding(sn + 1);
+ InsertPadding(sn + 4);
InsertGeneric(sn + 2, sn + 3, false);
InsertGeneric(sn + 5, sn + 5, true);
- reference_finder_->PaddingReceived(sn + 6);
- reference_finder_->PaddingReceived(sn + 9);
+ InsertPadding(sn + 6);
+ InsertPadding(sn + 9);
InsertGeneric(sn + 7, sn + 8, false);
EXPECT_EQ(4UL, frames_from_callback_.size());
@@ -308,7 +313,7 @@ TEST_F(TestRtpFrameReferenceFinder, Av1FrameNoDependencyDescriptor) {
CreateFrame(/*seq_num_start=*/sn, /*seq_num_end=*/sn, /*keyframe=*/true,
kVideoCodecAV1, RTPVideoTypeHeader());
- reference_finder_->ManageFrame(std::move(frame));
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
ASSERT_EQ(1UL, frames_from_callback_.size());
CheckReferencesGeneric(sn);
diff --git a/modules/video_coding/svc/scalability_structure_simulcast.cc b/modules/video_coding/svc/scalability_structure_simulcast.cc
index 13751daea6..c236066736 100644
--- a/modules/video_coding/svc/scalability_structure_simulcast.cc
+++ b/modules/video_coding/svc/scalability_structure_simulcast.cc
@@ -137,7 +137,7 @@ ScalabilityStructureSimulcast::NextFrameConfig(bool restart) {
}
configs.emplace_back();
ScalableVideoController::LayerFrameConfig& config = configs.back();
- config.S(sid).T(0);
+ config.Id(current_pattern).S(sid).T(0);
if (can_reference_t0_frame_for_spatial_id_[sid]) {
config.ReferenceAndUpdate(BufferIndex(sid, /*tid=*/0));
@@ -155,7 +155,10 @@ ScalabilityStructureSimulcast::NextFrameConfig(bool restart) {
}
configs.emplace_back();
ScalableVideoController::LayerFrameConfig& config = configs.back();
- config.S(sid).T(1).Reference(BufferIndex(sid, /*tid=*/0));
+ config.Id(current_pattern)
+ .S(sid)
+ .T(1)
+ .Reference(BufferIndex(sid, /*tid=*/0));
// Save frame only if there is a higher temporal layer that may need it.
if (num_temporal_layers_ > 2) {
config.Update(BufferIndex(sid, /*tid=*/1));
@@ -171,7 +174,7 @@ ScalabilityStructureSimulcast::NextFrameConfig(bool restart) {
}
configs.emplace_back();
ScalableVideoController::LayerFrameConfig& config = configs.back();
- config.S(sid).T(2);
+ config.Id(current_pattern).S(sid).T(2);
if (can_reference_t1_frame_for_spatial_id_[sid]) {
config.Reference(BufferIndex(sid, /*tid=*/1));
} else {
@@ -184,12 +187,12 @@ ScalabilityStructureSimulcast::NextFrameConfig(bool restart) {
break;
}
- last_pattern_ = current_pattern;
return configs;
}
GenericFrameInfo ScalabilityStructureSimulcast::OnEncodeDone(
const LayerFrameConfig& config) {
+ last_pattern_ = static_cast<FramePattern>(config.Id());
if (config.TemporalId() == 1) {
can_reference_t1_frame_for_spatial_id_.set(config.SpatialId());
}
diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
index 20862d4939..b33e29695f 100644
--- a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
+++ b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
@@ -52,7 +52,7 @@ class BitstreamReader {
std::function<bool()> f_true,
std::function<bool()> f_false = [] { return true; }) {
uint32_t val;
- if (!buffer_->ReadBits(&val, 1)) {
+ if (!buffer_->ReadBits(1, val)) {
return false;
}
if (val != 0) {
@@ -63,7 +63,7 @@ class BitstreamReader {
absl::optional<bool> ReadBoolean() {
uint32_t val;
- if (!buffer_->ReadBits(&val, 1)) {
+ if (!buffer_->ReadBits(1, val)) {
return {};
}
return {val != 0};
@@ -76,7 +76,7 @@ class BitstreamReader {
// logged as warning, if provided.
bool VerifyNextBooleanIs(bool expected_val, absl::string_view error_msg) {
uint32_t val;
- if (!buffer_->ReadBits(&val, 1)) {
+ if (!buffer_->ReadBits(1, val)) {
return false;
}
if ((val != 0) != expected_val) {
@@ -100,7 +100,7 @@ class BitstreamReader {
RTC_DCHECK_LE(bits, 32);
RTC_DCHECK_LE(bits, sizeof(T) * 8);
uint32_t val;
- if (!buffer_->ReadBits(&val, bits)) {
+ if (!buffer_->ReadBits(bits, val)) {
return {};
}
return (static_cast<T>(val));
@@ -115,7 +115,7 @@ class BitstreamReader {
uint32_t expected_val,
absl::string_view error_msg) {
uint32_t val;
- if (!buffer_->ReadBits(&val, num_bits)) {
+ if (!buffer_->ReadBits(num_bits, val)) {
return false;
}
if (val != expected_val) {
@@ -134,11 +134,11 @@ class BitstreamReader {
template <typename T>
absl::optional<T> ReadSigned(int bits = sizeof(T) * 8) {
uint32_t sign;
- if (!buffer_->ReadBits(&sign, 1)) {
+ if (!buffer_->ReadBits(1, sign)) {
return {};
}
uint32_t val;
- if (!buffer_->ReadBits(&val, bits)) {
+ if (!buffer_->ReadBits(bits, val)) {
return {};
}
int64_t sign_val = val;
diff --git a/modules/video_coding/video_codec_initializer.cc b/modules/video_coding/video_codec_initializer.cc
index 90a02e0c2d..17ea66acb1 100644
--- a/modules/video_coding/video_codec_initializer.cc
+++ b/modules/video_coding/video_codec_initializer.cc
@@ -262,7 +262,11 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
break;
}
case kVideoCodecAV1:
- if (!SetAv1SvcConfig(video_codec)) {
+ if (SetAv1SvcConfig(video_codec)) {
+ for (size_t i = 0; i < config.spatial_layers.size(); ++i) {
+ video_codec.spatialLayers[i].active = config.spatial_layers[i].active;
+ }
+ } else {
RTC_LOG(LS_WARNING) << "Failed to configure svc bitrates for av1.";
}
break;
diff --git a/modules/video_coding/video_codec_initializer_unittest.cc b/modules/video_coding/video_codec_initializer_unittest.cc
index 1ea145e14f..6c1c2e7a38 100644
--- a/modules/video_coding/video_codec_initializer_unittest.cc
+++ b/modules/video_coding/video_codec_initializer_unittest.cc
@@ -74,13 +74,13 @@ class VideoCodecInitializerTest : public ::testing::Test {
config_.number_of_streams = num_spatial_streams;
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.numberOfTemporalLayers = num_temporal_streams;
- config_.encoder_specific_settings = new rtc::RefCountedObject<
+ config_.encoder_specific_settings = rtc::make_ref_counted<
webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
} else if (type == VideoCodecType::kVideoCodecVP9) {
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
vp9_settings.numberOfSpatialLayers = num_spatial_streams;
vp9_settings.numberOfTemporalLayers = num_temporal_streams;
- config_.encoder_specific_settings = new rtc::RefCountedObject<
+ config_.encoder_specific_settings = rtc::make_ref_counted<
webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
} else if (type != VideoCodecType::kVideoCodecMultiplex) {
ADD_FAILURE() << "Unexpected codec type: " << type;
@@ -426,4 +426,69 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) {
EXPECT_FALSE(codec_out_.spatialLayers[2].active);
}
+TEST_F(VideoCodecInitializerTest, Av1SingleSpatialLayerBitratesAreConsistent) {
+ VideoEncoderConfig config;
+ config.codec_type = VideoCodecType::kVideoCodecAV1;
+ std::vector<VideoStream> streams = {DefaultStream()};
+ streams[0].scalability_mode = "L1T2";
+
+ VideoCodec codec;
+ EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec));
+
+ EXPECT_GE(codec.spatialLayers[0].targetBitrate,
+ codec.spatialLayers[0].minBitrate);
+ EXPECT_LE(codec.spatialLayers[0].targetBitrate,
+ codec.spatialLayers[0].maxBitrate);
+}
+
+TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersBitratesAreConsistent) {
+ VideoEncoderConfig config;
+ config.codec_type = VideoCodecType::kVideoCodecAV1;
+ std::vector<VideoStream> streams = {DefaultStream()};
+ streams[0].scalability_mode = "L2T2";
+
+ VideoCodec codec;
+ EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec));
+
+ EXPECT_GE(codec.spatialLayers[0].targetBitrate,
+ codec.spatialLayers[0].minBitrate);
+ EXPECT_LE(codec.spatialLayers[0].targetBitrate,
+ codec.spatialLayers[0].maxBitrate);
+
+ EXPECT_GE(codec.spatialLayers[1].targetBitrate,
+ codec.spatialLayers[1].minBitrate);
+ EXPECT_LE(codec.spatialLayers[1].targetBitrate,
+ codec.spatialLayers[1].maxBitrate);
+}
+
+TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersActiveByDefault) {
+ VideoEncoderConfig config;
+ config.codec_type = VideoCodecType::kVideoCodecAV1;
+ std::vector<VideoStream> streams = {DefaultStream()};
+ streams[0].scalability_mode = "L2T2";
+ config.spatial_layers = {};
+
+ VideoCodec codec;
+ EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec));
+
+ EXPECT_TRUE(codec.spatialLayers[0].active);
+ EXPECT_TRUE(codec.spatialLayers[1].active);
+}
+
+TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersOneDeactivated) {
+ VideoEncoderConfig config;
+ config.codec_type = VideoCodecType::kVideoCodecAV1;
+ std::vector<VideoStream> streams = {DefaultStream()};
+ streams[0].scalability_mode = "L2T2";
+ config.spatial_layers.resize(2);
+ config.spatial_layers[0].active = true;
+ config.spatial_layers[1].active = false;
+
+ VideoCodec codec;
+ EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec));
+
+ EXPECT_TRUE(codec.spatialLayers[0].active);
+ EXPECT_FALSE(codec.spatialLayers[1].active);
+}
+
} // namespace webrtc
diff --git a/modules/video_coding/video_receiver.cc b/modules/video_coding/video_receiver.cc
index 74b04ac350..43dbc9f0b2 100644
--- a/modules/video_coding/video_receiver.cc
+++ b/modules/video_coding/video_receiver.cc
@@ -279,7 +279,7 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
// Callers don't provide any ntp time.
const VCMPacket packet(incomingPayload, payloadLength, rtp_header,
video_header, /*ntp_time_ms=*/0,
- clock_->TimeInMilliseconds());
+ clock_->CurrentTime());
int32_t ret = _receiver.InsertPacket(packet);
// TODO(holmer): Investigate if this somehow should use the key frame
diff --git a/net/dcsctp/BUILD.gn b/net/dcsctp/BUILD.gn
index 9f7f541695..8b38a65ca1 100644
--- a/net/dcsctp/BUILD.gn
+++ b/net/dcsctp/BUILD.gn
@@ -14,10 +14,13 @@ if (rtc_include_tests) {
deps = [
"../../test:test_main",
"common:dcsctp_common_unittests",
+ "fuzzers:dcsctp_fuzzers_unittests",
"packet:dcsctp_packet_unittests",
"public:dcsctp_public_unittests",
"rx:dcsctp_rx_unittests",
+ "socket:dcsctp_socket_unittests",
"timer:dcsctp_timer_unittests",
+ "tx:dcsctp_tx_unittests",
]
}
}
diff --git a/net/dcsctp/common/BUILD.gn b/net/dcsctp/common/BUILD.gn
index 72b3f505e5..6e99cdcef4 100644
--- a/net/dcsctp/common/BUILD.gn
+++ b/net/dcsctp/common/BUILD.gn
@@ -9,7 +9,10 @@
import("../../../webrtc.gni")
rtc_source_set("internal_types") {
- deps = [ "../public:strong_alias" ]
+ deps = [
+ "../public:strong_alias",
+ "../public:types",
+ ]
sources = [ "internal_types.h" ]
}
diff --git a/net/dcsctp/common/internal_types.h b/net/dcsctp/common/internal_types.h
index 4551fd17d3..b651d45d91 100644
--- a/net/dcsctp/common/internal_types.h
+++ b/net/dcsctp/common/internal_types.h
@@ -13,6 +13,7 @@
#include <utility>
#include "net/dcsctp/public/strong_alias.h"
+#include "net/dcsctp/public/types.h"
namespace dcsctp {
@@ -34,5 +35,16 @@ using ReconfigRequestSN = StrongAlias<class ReconfigRequestSNTag, uint32_t>;
// Verification Tag, used for packet validation.
using VerificationTag = StrongAlias<class VerificationTagTag, uint32_t>;
+// Tie Tag, used as a nonce when connecting.
+using TieTag = StrongAlias<class TieTagTag, uint64_t>;
+
+// Hasher for separated ordered/unordered stream identifiers.
+struct UnorderedStreamHash {
+ size_t operator()(const std::pair<IsUnordered, StreamID>& p) const {
+ return std::hash<IsUnordered::UnderlyingType>{}(*p.first) ^
+ (std::hash<StreamID::UnderlyingType>{}(*p.second) << 1);
+ }
+};
+
} // namespace dcsctp
#endif // NET_DCSCTP_COMMON_INTERNAL_TYPES_H_
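Editor's note on the new hasher: it lets hash-based containers be keyed directly on the (unordered flag, stream id) pair. The sketch below is illustrative only; the per-stream state type is invented, and it assumes the StrongAlias types compare with operator== as usual. It is not part of this change.

#include <cstdint>
#include <unordered_map>
#include <utility>

#include "net/dcsctp/common/internal_types.h"
#include "net/dcsctp/public/types.h"

namespace dcsctp {
// Hypothetical per-stream reassembly state; ordered and unordered delivery on
// the same StreamID get separate entries thanks to the pair key.
struct ExampleStreamState {
  uint32_t next_expected_ssn = 0;
};

using ExampleStreamMap = std::unordered_map<std::pair<IsUnordered, StreamID>,
                                            ExampleStreamState,
                                            UnorderedStreamHash>;

inline ExampleStreamState& GetOrCreateState(ExampleStreamMap& map,
                                            IsUnordered unordered,
                                            StreamID stream_id) {
  // operator[] default-constructs the state the first time a key is seen.
  return map[std::make_pair(unordered, stream_id)];
}
}  // namespace dcsctp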
diff --git a/net/dcsctp/fuzzers/BUILD.gn b/net/dcsctp/fuzzers/BUILD.gn
new file mode 100644
index 0000000000..9edbae44d7
--- /dev/null
+++ b/net/dcsctp/fuzzers/BUILD.gn
@@ -0,0 +1,50 @@
+# Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_library("dcsctp_fuzzers") {
+ testonly = true
+ deps = [
+ "../../../api:array_view",
+ "../../../rtc_base",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../common:math",
+ "../packet:chunk",
+ "../packet:error_cause",
+ "../packet:parameter",
+ "../public:socket",
+ "../public:types",
+ "../socket:dcsctp_socket",
+ ]
+ sources = [
+ "dcsctp_fuzzers.cc",
+ "dcsctp_fuzzers.h",
+ ]
+}
+
+if (rtc_include_tests) {
+ rtc_library("dcsctp_fuzzers_unittests") {
+ testonly = true
+
+ deps = [
+ ":dcsctp_fuzzers",
+ "../../../api:array_view",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ "../packet:sctp_packet",
+ "../public:socket",
+ "../socket:dcsctp_socket",
+ "../testing:testing_macros",
+ ]
+ sources = [ "dcsctp_fuzzers_test.cc" ]
+ }
+}
diff --git a/net/dcsctp/fuzzers/dcsctp_fuzzers.cc b/net/dcsctp/fuzzers/dcsctp_fuzzers.cc
new file mode 100644
index 0000000000..b4b6224ec4
--- /dev/null
+++ b/net/dcsctp/fuzzers/dcsctp_fuzzers.cc
@@ -0,0 +1,460 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/fuzzers/dcsctp_fuzzers.h"
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "net/dcsctp/common/math.h"
+#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h"
+#include "net/dcsctp/packet/chunk/data_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_common.h"
+#include "net/dcsctp/packet/chunk/shutdown_chunk.h"
+#include "net/dcsctp/packet/error_cause/protocol_violation_cause.h"
+#include "net/dcsctp/packet/error_cause/user_initiated_abort_cause.h"
+#include "net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h"
+#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h"
+#include "net/dcsctp/packet/parameter/state_cookie_parameter.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/socket/dcsctp_socket.h"
+#include "net/dcsctp/socket/state_cookie.h"
+#include "rtc_base/logging.h"
+
+namespace dcsctp {
+namespace dcsctp_fuzzers {
+namespace {
+static constexpr int kRandomValue = FuzzerCallbacks::kRandomValue;
+static constexpr size_t kMinInputLength = 5;
+static constexpr size_t kMaxInputLength = 1024;
+
+// A starting state for the socket, when fuzzing.
+enum class StartingState : int {
+ kConnectNotCalled,
+ // When this socket initiated Connect
+ kConnectCalled,
+ kReceivedInitAck,
+ kReceivedCookieAck,
+ // When this socket initiated Shutdown
+ kShutdownCalled,
+ kReceivedShutdownAck,
+ // When peer socket initiated Connect
+ kReceivedInit,
+ kReceivedCookieEcho,
+ // When peer initiated Shutdown
+ kReceivedShutdown,
+ kReceivedShutdownComplete,
+ kNumberOfStates,
+};
+
+// State about the current fuzzing iteration
+class FuzzState {
+ public:
+ explicit FuzzState(rtc::ArrayView<const uint8_t> data) : data_(data) {}
+
+ uint8_t GetByte() {
+ uint8_t value = 0;
+ if (offset_ < data_.size()) {
+ value = data_[offset_];
+ ++offset_;
+ }
+ return value;
+ }
+
+ TSN GetNextTSN() { return TSN(tsn_++); }
+ MID GetNextMID() { return MID(mid_++); }
+
+ bool empty() const { return offset_ >= data_.size(); }
+
+ private:
+ uint32_t tsn_ = kRandomValue;
+ uint32_t mid_ = 0;
+ rtc::ArrayView<const uint8_t> data_;
+ size_t offset_ = 0;
+};
+
+void SetSocketState(DcSctpSocketInterface& socket,
+ FuzzerCallbacks& socket_cb,
+ StartingState state) {
+ // We'll use another temporary peer socket for the establishment.
+ FuzzerCallbacks peer_cb;
+ DcSctpSocket peer("peer", peer_cb, nullptr, {});
+
+ switch (state) {
+ case StartingState::kConnectNotCalled:
+ return;
+ case StartingState::kConnectCalled:
+ socket.Connect();
+ return;
+ case StartingState::kReceivedInitAck:
+ socket.Connect();
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK
+ return;
+ case StartingState::kReceivedCookieAck:
+ socket.Connect();
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK
+ return;
+ case StartingState::kShutdownCalled:
+ socket.Connect();
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK
+ socket.Shutdown();
+ return;
+ case StartingState::kReceivedShutdownAck:
+ socket.Connect();
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK
+ socket.Shutdown();
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // SHUTDOWN
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN_ACK
+ return;
+ case StartingState::kReceivedInit:
+ peer.Connect();
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT
+ return;
+ case StartingState::kReceivedCookieEcho:
+ peer.Connect();
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT_ACK
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ECHO
+ return;
+ case StartingState::kReceivedShutdown:
+ socket.Connect();
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK
+ peer.Shutdown();
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN
+ return;
+ case StartingState::kReceivedShutdownComplete:
+ socket.Connect();
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK
+ peer.Shutdown();
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN
+ peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // SHUTDOWN_ACK
+ socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN_COMPLETE
+ return;
+ case StartingState::kNumberOfStates:
+ RTC_CHECK(false);
+ return;
+ }
+}
+
+void MakeDataChunk(FuzzState& state, SctpPacket::Builder& b) {
+ DataChunk::Options options;
+ options.is_unordered = IsUnordered(state.GetByte() != 0);
+ options.is_beginning = Data::IsBeginning(state.GetByte() != 0);
+ options.is_end = Data::IsEnd(state.GetByte() != 0);
+ b.Add(DataChunk(state.GetNextTSN(), StreamID(state.GetByte()),
+ SSN(state.GetByte()), PPID(53), std::vector<uint8_t>(10),
+ options));
+}
+
+void MakeInitChunk(FuzzState& state, SctpPacket::Builder& b) {
+ Parameters::Builder builder;
+ builder.Add(ForwardTsnSupportedParameter());
+
+ b.Add(InitChunk(VerificationTag(kRandomValue), 10000, 1000, 1000,
+ TSN(kRandomValue), builder.Build()));
+}
+
+void MakeInitAckChunk(FuzzState& state, SctpPacket::Builder& b) {
+ Parameters::Builder builder;
+ builder.Add(ForwardTsnSupportedParameter());
+
+ uint8_t state_cookie[] = {1, 2, 3, 4, 5};
+ // Include the (bogus) state cookie among the INIT_ACK parameters.
+ builder.Add(StateCookieParameter(state_cookie));
+
+ b.Add(InitAckChunk(VerificationTag(kRandomValue), 10000, 1000, 1000,
+ TSN(kRandomValue), builder.Build()));
+}
+
+void MakeSackChunk(FuzzState& state, SctpPacket::Builder& b) {
+ std::vector<SackChunk::GapAckBlock> gap_ack_blocks;
+ uint16_t last_end = 0;
+ while (gap_ack_blocks.size() < 20) {
+ uint8_t delta_start = state.GetByte();
+ if (delta_start < 0x80) {
+ break;
+ }
+ uint8_t delta_end = state.GetByte();
+
+ uint16_t start = last_end + delta_start;
+ uint16_t end = start + delta_end;
+ last_end = end;
+ gap_ack_blocks.emplace_back(start, end);
+ }
+
+ TSN cum_ack_tsn(kRandomValue + state.GetByte());
+ b.Add(SackChunk(cum_ack_tsn, 10000, std::move(gap_ack_blocks), {}));
+}
+
+void MakeHeartbeatRequestChunk(FuzzState& state, SctpPacket::Builder& b) {
+ uint8_t info[] = {1, 2, 3, 4, 5};
+ b.Add(HeartbeatRequestChunk(
+ Parameters::Builder().Add(HeartbeatInfoParameter(info)).Build()));
+}
+
+void MakeHeartbeatAckChunk(FuzzState& state, SctpPacket::Builder& b) {
+ std::vector<uint8_t> info(8);
+ b.Add(HeartbeatAckChunk(
+ Parameters::Builder().Add(HeartbeatInfoParameter(info)).Build()));
+}
+
+void MakeAbortChunk(FuzzState& state, SctpPacket::Builder& b) {
+ b.Add(AbortChunk(
+ /*filled_in_verification_tag=*/true,
+ Parameters::Builder().Add(UserInitiatedAbortCause("Fuzzing")).Build()));
+}
+
+void MakeErrorChunk(FuzzState& state, SctpPacket::Builder& b) {
+ b.Add(ErrorChunk(
+ Parameters::Builder().Add(ProtocolViolationCause("Fuzzing")).Build()));
+}
+
+void MakeCookieEchoChunk(FuzzState& state, SctpPacket::Builder& b) {
+ std::vector<uint8_t> cookie(StateCookie::kCookieSize);
+ b.Add(CookieEchoChunk(cookie));
+}
+
+void MakeCookieAckChunk(FuzzState& state, SctpPacket::Builder& b) {
+ b.Add(CookieAckChunk());
+}
+
+void MakeShutdownChunk(FuzzState& state, SctpPacket::Builder& b) {
+ b.Add(ShutdownChunk(state.GetNextTSN()));
+}
+
+void MakeShutdownAckChunk(FuzzState& state, SctpPacket::Builder& b) {
+ b.Add(ShutdownAckChunk());
+}
+
+void MakeShutdownCompleteChunk(FuzzState& state, SctpPacket::Builder& b) {
+ b.Add(ShutdownCompleteChunk(false));
+}
+
+void MakeReConfigChunk(FuzzState& state, SctpPacket::Builder& b) {
+ std::vector<StreamID> streams = {StreamID(state.GetByte())};
+ Parameters::Builder params_builder =
+ Parameters::Builder().Add(OutgoingSSNResetRequestParameter(
+ ReconfigRequestSN(kRandomValue), ReconfigRequestSN(kRandomValue),
+ state.GetNextTSN(), streams));
+ b.Add(ReConfigChunk(params_builder.Build()));
+}
+
+void MakeForwardTsnChunk(FuzzState& state, SctpPacket::Builder& b) {
+ std::vector<ForwardTsnChunk::SkippedStream> skipped_streams;
+ for (;;) {
+ uint8_t stream = state.GetByte();
+ if (skipped_streams.size() > 20 || stream < 0x80) {
+ break;
+ }
+ skipped_streams.emplace_back(StreamID(stream), SSN(state.GetByte()));
+ }
+ b.Add(ForwardTsnChunk(state.GetNextTSN(), std::move(skipped_streams)));
+}
+
+void MakeIDataChunk(FuzzState& state, SctpPacket::Builder& b) {
+ DataChunk::Options options;
+ options.is_unordered = IsUnordered(state.GetByte() != 0);
+ options.is_beginning = Data::IsBeginning(state.GetByte() != 0);
+ options.is_end = Data::IsEnd(state.GetByte() != 0);
+ b.Add(IDataChunk(state.GetNextTSN(), StreamID(state.GetByte()),
+ state.GetNextMID(), PPID(53), FSN(0),
+ std::vector<uint8_t>(10), options));
+}
+
+void MakeIForwardTsnChunk(FuzzState& state, SctpPacket::Builder& b) {
+ std::vector<ForwardTsnChunk::SkippedStream> skipped_streams;
+ for (;;) {
+ uint8_t stream = state.GetByte();
+ if (skipped_streams.size() > 20 || stream < 0x80) {
+ break;
+ }
+ skipped_streams.emplace_back(StreamID(stream), SSN(state.GetByte()));
+ }
+ b.Add(IForwardTsnChunk(state.GetNextTSN(), std::move(skipped_streams)));
+}
+
+class RandomFuzzedChunk : public Chunk {
+ public:
+ explicit RandomFuzzedChunk(FuzzState& state) : state_(state) {}
+
+ void SerializeTo(std::vector<uint8_t>& out) const override {
+ size_t bytes = state_.GetByte();
+ for (size_t i = 0; i < bytes; ++i) {
+ out.push_back(state_.GetByte());
+ }
+ }
+
+ std::string ToString() const override { return std::string("RANDOM_FUZZED"); }
+
+ private:
+ FuzzState& state_;
+};
+
+void MakeChunkWithRandomContent(FuzzState& state, SctpPacket::Builder& b) {
+ b.Add(RandomFuzzedChunk(state));
+}
+
+std::vector<uint8_t> GeneratePacket(FuzzState& state) {
+ DcSctpOptions options;
+ // Setting a fixed limit to not be dependent on the defaults, which may
+ // change.
+ options.mtu = 2048;
+ SctpPacket::Builder builder(VerificationTag(kRandomValue), options);
+
+ // The largest expected serialized chunk, as created by fuzzers.
+ static constexpr size_t kMaxChunkSize = 256;
+
+ for (int i = 0; i < 5 && builder.bytes_remaining() > kMaxChunkSize; ++i) {
+ switch (state.GetByte()) {
+ case 1:
+ MakeDataChunk(state, builder);
+ break;
+ case 2:
+ MakeInitChunk(state, builder);
+ break;
+ case 3:
+ MakeInitAckChunk(state, builder);
+ break;
+ case 4:
+ MakeSackChunk(state, builder);
+ break;
+ case 5:
+ MakeHeartbeatRequestChunk(state, builder);
+ break;
+ case 6:
+ MakeHeartbeatAckChunk(state, builder);
+ break;
+ case 7:
+ MakeAbortChunk(state, builder);
+ break;
+ case 8:
+ MakeErrorChunk(state, builder);
+ break;
+ case 9:
+ MakeCookieEchoChunk(state, builder);
+ break;
+ case 10:
+ MakeCookieAckChunk(state, builder);
+ break;
+ case 11:
+ MakeShutdownChunk(state, builder);
+ break;
+ case 12:
+ MakeShutdownAckChunk(state, builder);
+ break;
+ case 13:
+ MakeShutdownCompleteChunk(state, builder);
+ break;
+ case 14:
+ MakeReConfigChunk(state, builder);
+ break;
+ case 15:
+ MakeForwardTsnChunk(state, builder);
+ break;
+ case 16:
+ MakeIDataChunk(state, builder);
+ break;
+ case 17:
+ MakeIForwardTsnChunk(state, builder);
+ break;
+ case 18:
+ MakeChunkWithRandomContent(state, builder);
+ break;
+ default:
+ break;
+ }
+ }
+ std::vector<uint8_t> packet = builder.Build();
+ return packet;
+}
+} // namespace
+
+void FuzzSocket(DcSctpSocketInterface& socket,
+ FuzzerCallbacks& cb,
+ rtc::ArrayView<const uint8_t> data) {
+ if (data.size() < kMinInputLength || data.size() > kMaxInputLength) {
+ return;
+ }
+ if (data[0] >= static_cast<int>(StartingState::kNumberOfStates)) {
+ return;
+ }
+
+ // Set the socket to the specified valid starting state.
+ SetSocketState(socket, cb, static_cast<StartingState>(data[0]));
+
+ FuzzState state(data.subview(1));
+
+ while (!state.empty()) {
+ switch (state.GetByte()) {
+ case 1:
+ // Generate a valid SCTP packet (based on fuzz data) and "receive it".
+ socket.ReceivePacket(GeneratePacket(state));
+ break;
+ case 2:
+ socket.Connect();
+ break;
+ case 3:
+ socket.Shutdown();
+ break;
+ case 4:
+ socket.Close();
+ break;
+ case 5: {
+ StreamID streams[] = {StreamID(state.GetByte())};
+ socket.ResetStreams(streams);
+ } break;
+ case 6: {
+ uint8_t flags = state.GetByte();
+ SendOptions options;
+ options.unordered = IsUnordered(flags & 0x01);
+ options.max_retransmissions =
+ (flags & 0x02) != 0 ? absl::make_optional(0) : absl::nullopt;
+ size_t payload_exponent = (flags >> 2) % 16;
+ size_t payload_size = static_cast<size_t>(1) << payload_exponent;
+ socket.Send(DcSctpMessage(StreamID(state.GetByte()), PPID(53),
+ std::vector<uint8_t>(payload_size)),
+ options);
+ break;
+ }
+ case 7: {
+ // Expire an active timeout/timer.
+ uint8_t timeout_idx = state.GetByte();
+ absl::optional<TimeoutID> timeout_id = cb.ExpireTimeout(timeout_idx);
+ if (timeout_id.has_value()) {
+ socket.HandleTimeout(*timeout_id);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+ }
+}
+} // namespace dcsctp_fuzzers
+} // namespace dcsctp
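For context, FuzzSocket is meant to be driven from a fuzzer harness rather than from production code. A minimal libFuzzer-style entry point could look roughly like the sketch below; the harness itself is not part of this change and is illustrative only (it mirrors the setup used in the testbed test later in this patch).

#include <cstddef>
#include <cstdint>

#include "api/array_view.h"
#include "net/dcsctp/fuzzers/dcsctp_fuzzers.h"
#include "net/dcsctp/public/dcsctp_options.h"
#include "net/dcsctp/socket/dcsctp_socket.h"

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
  dcsctp::dcsctp_fuzzers::FuzzerCallbacks callbacks;
  dcsctp::DcSctpOptions options;
  // Skip checksum verification so that fuzz input is not rejected before it
  // reaches the packet parser and socket state machine.
  options.disable_checksum_verification = true;
  dcsctp::DcSctpSocket socket("A", callbacks, /*packet_observer=*/nullptr,
                              options);
  dcsctp::dcsctp_fuzzers::FuzzSocket(
      socket, callbacks, rtc::ArrayView<const uint8_t>(data, size));
  return 0;
}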
diff --git a/net/dcsctp/fuzzers/dcsctp_fuzzers.h b/net/dcsctp/fuzzers/dcsctp_fuzzers.h
new file mode 100644
index 0000000000..f3de0722f4
--- /dev/null
+++ b/net/dcsctp/fuzzers/dcsctp_fuzzers.h
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_FUZZERS_DCSCTP_FUZZERS_H_
+#define NET_DCSCTP_FUZZERS_DCSCTP_FUZZERS_H_
+
+#include <deque>
+#include <memory>
+#include <set>
+#include <vector>
+
+#include "api/array_view.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+
+namespace dcsctp {
+namespace dcsctp_fuzzers {
+
+// A fake timeout used during fuzzing.
+class FuzzerTimeout : public Timeout {
+ public:
+ explicit FuzzerTimeout(std::set<TimeoutID>& active_timeouts)
+ : active_timeouts_(active_timeouts) {}
+
+ void Start(DurationMs duration_ms, TimeoutID timeout_id) override {
+ // Start is only allowed to be called on stopped or expired timeouts.
+ if (timeout_id_.has_value()) {
+ // It has been started before, but it may have expired. At least ensure
+ // that it's not currently running.
+ RTC_DCHECK(active_timeouts_.find(*timeout_id_) == active_timeouts_.end());
+ }
+ timeout_id_ = timeout_id;
+ RTC_DCHECK(active_timeouts_.insert(timeout_id).second);
+ }
+
+ void Stop() override {
+ // Stop is only allowed to be called on active timeouts. Not stopped or
+ // expired.
+ RTC_DCHECK(timeout_id_.has_value());
+ RTC_DCHECK(active_timeouts_.erase(*timeout_id_) == 1);
+ timeout_id_ = absl::nullopt;
+ }
+
+ // A set of all active timeouts, managed by `FuzzerCallbacks`.
+ std::set<TimeoutID>& active_timeouts_;
+ // If present, the timeout is active and its expiry is reported as `timeout_id`.
+ absl::optional<TimeoutID> timeout_id_;
+};
+
+class FuzzerCallbacks : public DcSctpSocketCallbacks {
+ public:
+ static constexpr int kRandomValue = 42;
+ void SendPacket(rtc::ArrayView<const uint8_t> data) override {
+ sent_packets_.emplace_back(std::vector<uint8_t>(data.begin(), data.end()));
+ }
+ std::unique_ptr<Timeout> CreateTimeout() override {
+ return std::make_unique<FuzzerTimeout>(active_timeouts_);
+ }
+ TimeMs TimeMillis() override { return TimeMs(42); }
+ uint32_t GetRandomInt(uint32_t low, uint32_t high) override {
+ return kRandomValue;
+ }
+ void OnMessageReceived(DcSctpMessage message) override {}
+ void OnError(ErrorKind error, absl::string_view message) override {}
+ void OnAborted(ErrorKind error, absl::string_view message) override {}
+ void OnConnected() override {}
+ void OnClosed() override {}
+ void OnConnectionRestarted() override {}
+ void OnStreamsResetFailed(rtc::ArrayView<const StreamID> outgoing_streams,
+ absl::string_view reason) override {}
+ void OnStreamsResetPerformed(
+ rtc::ArrayView<const StreamID> outgoing_streams) override {}
+ void OnIncomingStreamsReset(
+ rtc::ArrayView<const StreamID> incoming_streams) override {}
+
+ std::vector<uint8_t> ConsumeSentPacket() {
+ if (sent_packets_.empty()) {
+ return {};
+ }
+ std::vector<uint8_t> ret = sent_packets_.front();
+ sent_packets_.pop_front();
+ return ret;
+ }
+
+ // Given an index among the active timeouts, will expire that one.
+ absl::optional<TimeoutID> ExpireTimeout(size_t index) {
+ if (index < active_timeouts_.size()) {
+ auto it = active_timeouts_.begin();
+ std::advance(it, index);
+ TimeoutID timeout_id = *it;
+ active_timeouts_.erase(it);
+ return timeout_id;
+ }
+ return absl::nullopt;
+ }
+
+ private:
+ // Needs to be ordered, to allow fuzzers to expire timers.
+ std::set<TimeoutID> active_timeouts_;
+ std::deque<std::vector<uint8_t>> sent_packets_;
+};
+
+// Given some fuzzing `data`, sends packets to the socket as well as calling
+// its API methods.
+void FuzzSocket(DcSctpSocketInterface& socket,
+ FuzzerCallbacks& cb,
+ rtc::ArrayView<const uint8_t> data);
+
+} // namespace dcsctp_fuzzers
+} // namespace dcsctp
+#endif // NET_DCSCTP_FUZZERS_DCSCTP_FUZZERS_H_
diff --git a/net/dcsctp/fuzzers/dcsctp_fuzzers_test.cc b/net/dcsctp/fuzzers/dcsctp_fuzzers_test.cc
new file mode 100644
index 0000000000..c7d2cd7c99
--- /dev/null
+++ b/net/dcsctp/fuzzers/dcsctp_fuzzers_test.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/fuzzers/dcsctp_fuzzers.h"
+
+#include "api/array_view.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/socket/dcsctp_socket.h"
+#include "net/dcsctp/testing/testing_macros.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace dcsctp_fuzzers {
+namespace {
+
+// This is a testbed where fuzzed data that cause issues can be evaluated and
+// crashes reproduced. Use `xxd -i ./crash-abc` to generate `data` below.
+TEST(DcsctpFuzzersTest, PassesTestbed) {
+ uint8_t data[] = {0x07, 0x09, 0x00, 0x01, 0x11, 0xff, 0xff};
+
+ FuzzerCallbacks cb;
+ DcSctpOptions options;
+ options.disable_checksum_verification = true;
+ DcSctpSocket socket("A", cb, nullptr, options);
+
+ FuzzSocket(socket, cb, data);
+}
+
+} // namespace
+} // namespace dcsctp_fuzzers
+} // namespace dcsctp
diff --git a/net/dcsctp/packet/BUILD.gn b/net/dcsctp/packet/BUILD.gn
index 9893547b11..9c08ebc80e 100644
--- a/net/dcsctp/packet/BUILD.gn
+++ b/net/dcsctp/packet/BUILD.gn
@@ -33,7 +33,10 @@ rtc_library("tlv_trait") {
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
sources = [
"tlv_trait.cc",
"tlv_trait.h",
@@ -57,6 +60,7 @@ rtc_library("crc32c") {
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
+ "//third_party/crc32c",
]
sources = [
"crc32c.cc",
@@ -102,7 +106,11 @@ rtc_library("parameter") {
"parameter/supported_extensions_parameter.cc",
"parameter/supported_extensions_parameter.h",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_library("error_cause") {
@@ -150,7 +158,11 @@ rtc_library("error_cause") {
"error_cause/user_initiated_abort_cause.cc",
"error_cause/user_initiated_abort_cause.h",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_library("chunk") {
@@ -207,7 +219,11 @@ rtc_library("chunk") {
"chunk/shutdown_complete_chunk.cc",
"chunk/shutdown_complete_chunk.h",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_library("chunk_validators") {
@@ -264,6 +280,10 @@ if (rtc_include_tests) {
"../../../rtc_base:gunit_helpers",
"../../../rtc_base:rtc_base_approved",
"../../../test:test_support",
+ "../common:internal_types",
+ "../common:math",
+ "../public:types",
+ "../testing:testing_macros",
]
sources = [
"bounded_byte_reader_test.cc",
@@ -301,6 +321,7 @@ if (rtc_include_tests) {
"error_cause/unresolvable_address_cause_test.cc",
"error_cause/user_initiated_abort_cause_test.cc",
"parameter/add_incoming_streams_request_parameter_test.cc",
+ "parameter/add_outgoing_streams_request_parameter_test.cc",
"parameter/forward_tsn_supported_parameter_test.cc",
"parameter/incoming_ssn_reset_request_parameter_test.cc",
"parameter/outgoing_ssn_reset_request_parameter_test.cc",
@@ -312,5 +333,6 @@ if (rtc_include_tests) {
"sctp_packet_test.cc",
"tlv_trait_test.cc",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/net/dcsctp/packet/bounded_byte_reader.h b/net/dcsctp/packet/bounded_byte_reader.h
index b87648886e..603ed6ac33 100644
--- a/net/dcsctp/packet/bounded_byte_reader.h
+++ b/net/dcsctp/packet/bounded_byte_reader.h
@@ -52,7 +52,7 @@ template <int FixedSize>
class BoundedByteReader {
public:
explicit BoundedByteReader(rtc::ArrayView<const uint8_t> data) : data_(data) {
- RTC_DCHECK(data.size() >= FixedSize);
+ RTC_CHECK(data.size() >= FixedSize);
}
template <size_t offset>
@@ -77,7 +77,7 @@ class BoundedByteReader {
template <size_t SubSize>
BoundedByteReader<SubSize> sub_reader(size_t variable_offset) const {
- RTC_DCHECK(FixedSize + variable_offset + SubSize <= data_.size());
+ RTC_CHECK(FixedSize + variable_offset + SubSize <= data_.size());
rtc::ArrayView<const uint8_t> sub_span =
data_.subview(FixedSize + variable_offset, SubSize);
diff --git a/net/dcsctp/packet/bounded_byte_writer.h b/net/dcsctp/packet/bounded_byte_writer.h
index 4e547b0528..467f26800b 100644
--- a/net/dcsctp/packet/bounded_byte_writer.h
+++ b/net/dcsctp/packet/bounded_byte_writer.h
@@ -56,7 +56,7 @@ template <int FixedSize>
class BoundedByteWriter {
public:
explicit BoundedByteWriter(rtc::ArrayView<uint8_t> data) : data_(data) {
- RTC_DCHECK(data.size() >= FixedSize);
+ RTC_CHECK(data.size() >= FixedSize);
}
template <size_t offset>
@@ -81,7 +81,7 @@ class BoundedByteWriter {
template <size_t SubSize>
BoundedByteWriter<SubSize> sub_writer(size_t variable_offset) {
- RTC_DCHECK(FixedSize + variable_offset + SubSize <= data_.size());
+ RTC_CHECK(FixedSize + variable_offset + SubSize <= data_.size());
return BoundedByteWriter<SubSize>(
data_.subview(FixedSize + variable_offset, SubSize));
diff --git a/net/dcsctp/packet/chunk/sack_chunk.cc b/net/dcsctp/packet/chunk/sack_chunk.cc
index a9f17d79dd..d80e430082 100644
--- a/net/dcsctp/packet/chunk/sack_chunk.cc
+++ b/net/dcsctp/packet/chunk/sack_chunk.cc
@@ -88,13 +88,12 @@ absl::optional<SackChunk> SackChunk::Parse(rtc::ArrayView<const uint8_t> data) {
offset += kGapAckBlockSize;
}
- std::vector<TSN> duplicate_tsns;
- duplicate_tsns.reserve(nbr_of_gap_blocks);
+ std::set<TSN> duplicate_tsns;
for (int i = 0; i < nbr_of_dup_tsns; ++i) {
BoundedByteReader<kDupTsnBlockSize> sub_reader =
reader->sub_reader<kDupTsnBlockSize>(offset);
- duplicate_tsns.push_back(TSN(sub_reader.Load32<0>()));
+ duplicate_tsns.insert(TSN(sub_reader.Load32<0>()));
offset += kDupTsnBlockSize;
}
RTC_DCHECK(offset == reader->variable_data_size());
@@ -124,11 +123,11 @@ void SackChunk::SerializeTo(std::vector<uint8_t>& out) const {
offset += kGapAckBlockSize;
}
- for (int i = 0; i < nbr_of_dup_tsns; ++i) {
+ for (TSN tsn : duplicate_tsns_) {
BoundedByteWriter<kDupTsnBlockSize> sub_writer =
writer.sub_writer<kDupTsnBlockSize>(offset);
- sub_writer.Store32<0>(*duplicate_tsns_[i]);
+ sub_writer.Store32<0>(*tsn);
offset += kDupTsnBlockSize;
}
diff --git a/net/dcsctp/packet/chunk/sack_chunk.h b/net/dcsctp/packet/chunk/sack_chunk.h
index 0b464fb359..e6758fa332 100644
--- a/net/dcsctp/packet/chunk/sack_chunk.h
+++ b/net/dcsctp/packet/chunk/sack_chunk.h
@@ -12,6 +12,7 @@
#include <stddef.h>
#include <cstdint>
+#include <set>
#include <string>
#include <utility>
#include <vector>
@@ -48,7 +49,7 @@ class SackChunk : public Chunk, public TLVTrait<SackChunkConfig> {
SackChunk(TSN cumulative_tsn_ack,
uint32_t a_rwnd,
std::vector<GapAckBlock> gap_ack_blocks,
- std::vector<TSN> duplicate_tsns)
+ std::set<TSN> duplicate_tsns)
: cumulative_tsn_ack_(cumulative_tsn_ack),
a_rwnd_(a_rwnd),
gap_ack_blocks_(std::move(gap_ack_blocks)),
@@ -63,7 +64,7 @@ class SackChunk : public Chunk, public TLVTrait<SackChunkConfig> {
rtc::ArrayView<const GapAckBlock> gap_ack_blocks() const {
return gap_ack_blocks_;
}
- rtc::ArrayView<const TSN> duplicate_tsns() const { return duplicate_tsns_; }
+ const std::set<TSN>& duplicate_tsns() const { return duplicate_tsns_; }
private:
static constexpr size_t kGapAckBlockSize = 4;
@@ -72,7 +73,7 @@ class SackChunk : public Chunk, public TLVTrait<SackChunkConfig> {
const TSN cumulative_tsn_ack_;
const uint32_t a_rwnd_;
std::vector<GapAckBlock> gap_ack_blocks_;
- std::vector<TSN> duplicate_tsns_;
+ std::set<TSN> duplicate_tsns_;
};
} // namespace dcsctp
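Since duplicate_tsns is now a std::set, duplicate reports collapse and the TSNs come out ordered. A small illustrative sketch of constructing a SACK with the new signature (the values are arbitrary, not from this change):

#include <set>
#include <utility>
#include <vector>

#include "net/dcsctp/packet/chunk/sack_chunk.h"

namespace dcsctp {
SackChunk MakeExampleSack() {
  // TSN(11) is inserted twice but stored (and later serialized) only once.
  std::set<TSN> duplicate_tsns = {TSN(11), TSN(11), TSN(12)};
  return SackChunk(TSN(10), /*a_rwnd=*/65535,
                   std::vector<SackChunk::GapAckBlock>(),
                   std::move(duplicate_tsns));
}
}  // namespace dcsctp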
diff --git a/net/dcsctp/packet/chunk_validators.cc b/net/dcsctp/packet/chunk_validators.cc
index b3467037c7..48d351827e 100644
--- a/net/dcsctp/packet/chunk_validators.cc
+++ b/net/dcsctp/packet/chunk_validators.cc
@@ -38,9 +38,7 @@ SackChunk ChunkValidators::Clean(SackChunk&& sack) {
// Not more than at most one remaining? Exit early.
if (gap_ack_blocks.size() <= 1) {
return SackChunk(sack.cumulative_tsn_ack(), sack.a_rwnd(),
- std::move(gap_ack_blocks),
- std::vector<TSN>(sack.duplicate_tsns().begin(),
- sack.duplicate_tsns().end()));
+ std::move(gap_ack_blocks), sack.duplicate_tsns());
}
// Sort the intervals by their start value, to aid in the merging below.
@@ -63,8 +61,7 @@ SackChunk ChunkValidators::Clean(SackChunk&& sack) {
}
return SackChunk(sack.cumulative_tsn_ack(), sack.a_rwnd(), std::move(merged),
- std::vector<TSN>(sack.duplicate_tsns().begin(),
- sack.duplicate_tsns().end()));
+ sack.duplicate_tsns());
}
bool ChunkValidators::Validate(const SackChunk& sack) {
diff --git a/net/dcsctp/packet/crc32c.cc b/net/dcsctp/packet/crc32c.cc
index 05f2e0f158..e3f0dc1d19 100644
--- a/net/dcsctp/packet/crc32c.cc
+++ b/net/dcsctp/packet/crc32c.cc
@@ -11,70 +11,14 @@
#include <cstdint>
+#include "third_party/crc32c/src/include/crc32c/crc32c.h"
+
namespace dcsctp {
-namespace {
-constexpr uint32_t kCrc32cLookupTable[256] = {
- 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c,
- 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b,
- 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,
- 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384,
- 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc,
- 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,
- 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512,
- 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa,
- 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,
- 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a,
- 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf,
- 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,
- 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f,
- 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,
- 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,
- 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7,
- 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e,
- 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,
- 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e,
- 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6,
- 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,
- 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c,
- 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4,
- 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,
- 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b,
- 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c,
- 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,
- 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,
- 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975,
- 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,
- 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905,
- 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed,
- 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,
- 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff,
- 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8,
- 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,
- 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78,
- 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee,
- 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,
- 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e,
- 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69,
- 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
- 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,
-};
-} // namespace
uint32_t GenerateCrc32C(rtc::ArrayView<const uint8_t> data) {
- uint32_t crc32c = 0xffffffff;
-
- // This is very inefficient, processing one byte at a time. The end goal is to
- // replace it with e.g. https://github.com/google/crc32c.
- for (size_t i = 0; i < data.size(); i++) {
- crc32c = (crc32c >> 8) ^ kCrc32cLookupTable[(crc32c ^ data[i]) & 0xff];
- }
-
- crc32c = ~crc32c;
-
- // Per RFC4960, `crc32c` now holds the negated polynomial remainder,
- // but the table is reflected, which requires an byte swap depending
- // on the CPUs endianness.
+ uint32_t crc32c = crc32c_value(data.data(), data.size());
+ // Byte swapping for little endian byte order:
uint8_t byte0 = crc32c;
uint8_t byte1 = crc32c >> 8;
uint8_t byte2 = crc32c >> 16;
diff --git a/net/dcsctp/packet/data.h b/net/dcsctp/packet/data.h
index 23a5aa4616..f2d2e74904 100644
--- a/net/dcsctp/packet/data.h
+++ b/net/dcsctp/packet/data.h
@@ -64,7 +64,7 @@ struct Data {
Data& operator=(Data&& other) = default;
// Creates a copy of this `Data` object.
- Data Clone() {
+ Data Clone() const {
return Data(stream_id, ssn, message_id, fsn, ppid, payload, is_beginning,
is_end, is_unordered);
}
diff --git a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc
index c4e2961bdd..b89f86e43e 100644
--- a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc
+++ b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc
@@ -48,14 +48,14 @@ MissingMandatoryParameterCause::Parse(rtc::ArrayView<const uint8_t> data) {
}
uint32_t count = reader->Load32<4>();
- if (reader->variable_data_size() != count * kMissingParameterSize) {
+ if (reader->variable_data_size() / kMissingParameterSize != count) {
RTC_DLOG(LS_WARNING) << "Invalid number of missing parameters";
return absl::nullopt;
}
std::vector<uint16_t> missing_parameter_types;
missing_parameter_types.reserve(count);
- for (size_t i = 0; i < count; ++i) {
+ for (uint32_t i = 0; i < count; ++i) {
BoundedByteReader<kMissingParameterSize> sub_reader =
reader->sub_reader<kMissingParameterSize>(i * kMissingParameterSize);
diff --git a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause_test.cc b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause_test.cc
index 8c0434050d..1c526ff0e2 100644
--- a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause_test.cc
+++ b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause_test.cc
@@ -22,6 +22,7 @@
namespace dcsctp {
namespace {
using ::testing::ElementsAre;
+using ::testing::IsEmpty;
TEST(MissingMandatoryParameterCauseTest, SerializeAndDeserialize) {
uint16_t parameter_types[] = {1, 2, 3};
@@ -37,5 +38,22 @@ TEST(MissingMandatoryParameterCauseTest, SerializeAndDeserialize) {
EXPECT_THAT(deserialized.missing_parameter_types(), ElementsAre(1, 2, 3));
}
+TEST(MissingMandatoryParameterCauseTest, HandlesDeserializeZeroParameters) {
+ uint8_t serialized[] = {0, 2, 0, 8, 0, 0, 0, 0};
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ MissingMandatoryParameterCause deserialized,
+ MissingMandatoryParameterCause::Parse(serialized));
+
+ EXPECT_THAT(deserialized.missing_parameter_types(), IsEmpty());
+}
+
+TEST(MissingMandatoryParameterCauseTest, HandlesOverflowParameterCount) {
+ // 0x80000004 * 2 = 2**32 + 8 -> if overflow, would validate correctly.
+ uint8_t serialized[] = {0, 2, 0, 8, 0x80, 0x00, 0x00, 0x04};
+
+ EXPECT_FALSE(MissingMandatoryParameterCause::Parse(serialized).has_value());
+}
+
} // namespace
} // namespace dcsctp
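The switch to a division makes the length check immune to overflow of `count * kMissingParameterSize` on platforms where size_t is 32 bits. A stand-alone sketch of the idea; the helper name and constants are invented for illustration:

#include <cstddef>
#include <cstdint>

// With an entry size of 2 and count == 0x80000004, count * entry_size wraps to
// 8 in 32-bit arithmetic, so a multiplication-based check would wrongly accept
// an 8-byte payload. Dividing instead cannot overflow.
constexpr bool IsValidEntryCount(size_t payload_size, uint32_t count,
                                 size_t entry_size) {
  return payload_size / entry_size == count;
}

static_assert(!IsValidEntryCount(/*payload_size=*/8, /*count=*/0x80000004u,
                                 /*entry_size=*/2),
              "Overflowing count must be rejected");
static_assert(IsValidEntryCount(/*payload_size=*/8, /*count=*/4,
                                /*entry_size=*/2),
              "Exact fit is accepted");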
diff --git a/net/dcsctp/packet/tlv_trait.h b/net/dcsctp/packet/tlv_trait.h
index a19a50f692..a3c728efd7 100644
--- a/net/dcsctp/packet/tlv_trait.h
+++ b/net/dcsctp/packet/tlv_trait.h
@@ -88,14 +88,15 @@ class TLVTrait {
}
BoundedByteReader<kTlvHeaderSize> tlv_header(data);
- const int type = (Config::kTypeSizeInBytes == 1) ? tlv_header.Load8<0>()
- : tlv_header.Load16<0>();
+ const int type = (Config::kTypeSizeInBytes == 1)
+ ? tlv_header.template Load8<0>()
+ : tlv_header.template Load16<0>();
if (type != Config::kType) {
tlv_trait_impl::ReportInvalidType(type, Config::kType);
return absl::nullopt;
}
- const uint16_t length = tlv_header.Load16<2>();
+ const uint16_t length = tlv_header.template Load16<2>();
if (Config::kVariableLengthAlignment == 0) {
// Don't expect any variable length data at all.
if (length != Config::kHeaderSize || data.size() != Config::kHeaderSize) {
@@ -138,11 +139,11 @@ class TLVTrait {
BoundedByteWriter<kTlvHeaderSize> tlv_header(
rtc::ArrayView<uint8_t>(out.data() + offset, kTlvHeaderSize));
if (Config::kTypeSizeInBytes == 1) {
- tlv_header.Store8<0>(static_cast<uint8_t>(Config::kType));
+ tlv_header.template Store8<0>(static_cast<uint8_t>(Config::kType));
} else {
- tlv_header.Store16<0>(Config::kType);
+ tlv_header.template Store16<0>(Config::kType);
}
- tlv_header.Store16<2>(size);
+ tlv_header.template Store16<2>(size);
return BoundedByteWriter<Config::kHeaderSize>(
rtc::ArrayView<uint8_t>(out.data() + offset, size));
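The added `template` keywords are the standard C++ disambiguation when calling a member function template on an object whose type depends on a template parameter. The stand-alone sketch below (types invented for illustration) shows the rule in isolation:

#include <cstddef>
#include <cstdint>

template <size_t FixedSize>
struct ExampleReader {
  template <size_t offset>
  uint8_t Load8() const { return buf[offset]; }
  uint8_t buf[FixedSize] = {};
};

template <typename Config>
uint8_t FirstByte(const ExampleReader<Config::kHeaderSize>& reader) {
  // `reader` has a dependent type, so without `template` the `<` after Load8
  // would be parsed as a less-than operator instead of a template argument
  // list.
  return reader.template Load8<0>();
}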
diff --git a/net/dcsctp/public/BUILD.gn b/net/dcsctp/public/BUILD.gn
index dc23df673a..4a924cae9b 100644
--- a/net/dcsctp/public/BUILD.gn
+++ b/net/dcsctp/public/BUILD.gn
@@ -37,7 +37,23 @@ rtc_source_set("socket") {
"packet_observer.h",
"timeout.h",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("factory") {
+ deps = [
+ ":socket",
+ ":types",
+ "../socket:dcsctp_socket",
+ ]
+ sources = [
+ "dcsctp_socket_factory.cc",
+ "dcsctp_socket_factory.h",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (rtc_include_tests) {
diff --git a/net/dcsctp/public/dcsctp_options.h b/net/dcsctp/public/dcsctp_options.h
index acc237a0d5..caefcff4f5 100644
--- a/net/dcsctp/public/dcsctp_options.h
+++ b/net/dcsctp/public/dcsctp_options.h
@@ -43,11 +43,33 @@ struct DcSctpOptions {
// port number as destination port.
int remote_port = 5000;
+ // The announced maximum number of incoming streams. Note that this value is
+ // constant and can't currently be increased at run-time as "Add Incoming
+ // Streams Request" in RFC6525 isn't supported.
+ //
+ // The socket implementation doesn't have any per-stream fixed costs, which is
+ // why the default value is set to be the maximum value.
+ uint16_t announced_maximum_incoming_streams = 65535;
+
+ // The announced maximum number of outgoing streams. Note that this value is
+ // constant and can't currently be increased at run-time as "Add Outgoing
+ // Streams Request" in RFC6525 isn't supported.
+ //
+ // The socket implementation doesn't have any per-stream fixed costs, which is
+ // why the default value is set to be the maximum value.
+ uint16_t announced_maximum_outgoing_streams = 65535;
+
// Maximum SCTP packet size. The library will limit the size of generated
// packets to be less than or equal to this number. This does not include any
// overhead of DTLS, TURN, UDP or IP headers.
size_t mtu = kMaxSafeMTUSize;
+ // The largest allowed message payload to be sent. Messages will be rejected
+ // if their payload is larger than this value. Note that this doesn't affect
+ // incoming messages, which may be larger than this value (but smaller than
+ // `max_receiver_window_buffer_size`).
+ size_t max_message_size = 256 * 1024;
+
// Maximum received window buffer size. This should be a bit larger than the
// largest sized message you want to be able to receive. This essentially
// limits the memory usage on the receive side. Note that memory is allocated
@@ -59,7 +81,17 @@ struct DcSctpOptions {
// Maximum send buffer size. It will not be possible to queue more data than
// this before sending it.
- size_t max_send_buffer_size = 2 * 1024 * 1024;
+ size_t max_send_buffer_size = 2'000'000;
+
+ // A threshold that, when the amount of data in the send buffer goes below
+ // this value, will trigger `DcSctpCallbacks::OnTotalBufferedAmountLow`.
+ size_t total_buffered_amount_low_threshold = 1'800'000;
+
+ // Max allowed RTT value. When the RTT is measured and it's found to be larger
+ // than this value, it will be discarded and not used for e.g. any RTO
+ // calculation. The default value is an extreme maximum but can be adapted
+ // to better match the environment.
+ DurationMs rtt_max = DurationMs(8000);
// Initial RTO value.
DurationMs rto_initial = DurationMs(500);
@@ -67,8 +99,9 @@ struct DcSctpOptions {
// Maximum RTO value.
DurationMs rto_max = DurationMs(800);
- // Minimum RTO value.
- DurationMs rto_min = DurationMs(120);
+ // Minimum RTO value. This must be larger than an expected peer delayed ack
+ // timeout.
+ DurationMs rto_min = DurationMs(220);
// T1-init timeout.
DurationMs t1_init_timeout = DurationMs(1000);
@@ -79,15 +112,15 @@ struct DcSctpOptions {
// T2-shutdown timeout.
DurationMs t2_shutdown_timeout = DurationMs(1000);
- // Hearbeat interval (on idle connections only).
- DurationMs heartbeat_interval = DurationMs(30'000);
+ // Heartbeat interval (on idle connections only). Set to zero to disable.
+ DurationMs heartbeat_interval = DurationMs(30000);
// The maximum time when a SACK will be sent from the arrival of an
// unacknowledged packet. Whatever is smallest of RTO/2 and this will be used.
DurationMs delayed_ack_max_timeout = DurationMs(200);
// Do slow start as TCP - double cwnd instead of increasing it by MTU.
- bool slow_start_tcp_style = true;
+ bool slow_start_tcp_style = false;
// The initial congestion window size, in number of MTUs.
// See https://tools.ietf.org/html/rfc4960#section-7.2.1 which defaults at ~3
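Taken together, these are knobs an application sets when creating a socket. A minimal configuration sketch using only fields visible in this change; the values are illustrative, not recommendations:

#include "net/dcsctp/public/dcsctp_options.h"

namespace dcsctp {
DcSctpOptions MakeExampleOptions() {
  DcSctpOptions options;
  options.max_message_size = 256 * 1024;              // Larger sends are rejected.
  options.max_send_buffer_size = 2'000'000;
  options.total_buffered_amount_low_threshold = 1'800'000;
  options.rto_min = DurationMs(220);                  // Keep above peer delayed-ack.
  options.heartbeat_interval = DurationMs(0);         // Zero disables heartbeats.
  return options;
}
}  // namespace dcsctp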
diff --git a/net/dcsctp/public/dcsctp_socket.h b/net/dcsctp/public/dcsctp_socket.h
index e7f21349bd..f07f54e044 100644
--- a/net/dcsctp/public/dcsctp_socket.h
+++ b/net/dcsctp/public/dcsctp_socket.h
@@ -18,12 +18,27 @@
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_options.h"
#include "net/dcsctp/public/packet_observer.h"
#include "net/dcsctp/public/timeout.h"
#include "net/dcsctp/public/types.h"
namespace dcsctp {
+// The socket/association state
+enum class SocketState {
+ // The socket is closed.
+ kClosed,
+ // The socket has initiated a connection, which is not yet established. Note
+ // that for incoming connections and for reconnections when the socket is
+ // already connected, the socket will not transition to this state.
+ kConnecting,
+ // The socket is connected, and the connection is established.
+ kConnected,
+ // The socket is shutting down, and the connection is not yet closed.
+ kShuttingDown,
+};
+
// Send options for sending messages
struct SendOptions {
// If the message should be sent with unordered message delivery.
@@ -59,6 +74,8 @@ enum class ErrorKind {
kProtocolViolation,
// The receive or send buffers have been exhausted.
kResourceExhaustion,
+ // The client has performed an invalid operation.
+ kUnsupportedOperation,
};
inline constexpr absl::string_view ToString(ErrorKind error) {
@@ -79,19 +96,65 @@ inline constexpr absl::string_view ToString(ErrorKind error) {
return "PROTOCOL_VIOLATION";
case ErrorKind::kResourceExhaustion:
return "RESOURCE_EXHAUSTION";
+ case ErrorKind::kUnsupportedOperation:
+ return "UNSUPPORTED_OPERATION";
}
}
-// Return value of SupportsStreamReset.
-enum class StreamResetSupport {
+enum class SendStatus {
+ // The message was enqueued successfully. As sending the message is done
+ // asynchronously, this is no guarantee that the message has been actually
+ // sent.
+ kSuccess,
+ // The message was rejected as the payload was empty (which is not allowed in
+ // SCTP).
+ kErrorMessageEmpty,
+ // The message was rejected as the payload was larger than what has been set
+ // as `DcSctpOptions.max_message_size`.
+ kErrorMessageTooLarge,
+ // The message could not be enqueued as the socket is out of resources. This
+ // mainly indicates that the send queue is full.
+ kErrorResourceExhaustion,
+ // The message could not be sent as the socket is shutting down.
+ kErrorShuttingDown,
+};
+
+inline constexpr absl::string_view ToString(SendStatus error) {
+ switch (error) {
+ case SendStatus::kSuccess:
+ return "SUCCESS";
+ case SendStatus::kErrorMessageEmpty:
+ return "ERROR_MESSAGE_EMPTY";
+ case SendStatus::kErrorMessageTooLarge:
+ return "ERROR_MESSAGE_TOO_LARGE";
+ case SendStatus::kErrorResourceExhaustion:
+ return "ERROR_RESOURCE_EXHAUSTION";
+ case SendStatus::kErrorShuttingDown:
+ return "ERROR_SHUTTING_DOWN";
+ }
+}
+
+// Return value of ResetStreams.
+enum class ResetStreamsStatus {
// If the connection is not yet established, this will be returned.
- kUnknown,
- // Indicates that Stream Reset is supported by the peer.
- kSupported,
- // Indicates that Stream Reset is not supported by the peer.
+ kNotConnected,
+ // Indicates that ResetStreams operation has been successfully initiated.
+ kPerformed,
+ // Indicates that ResetStreams has failed as it's not supported by the peer.
kNotSupported,
};
+inline constexpr absl::string_view ToString(ResetStreamsStatus error) {
+ switch (error) {
+ case ResetStreamsStatus::kNotConnected:
+ return "NOT_CONNECTED";
+ case ResetStreamsStatus::kPerformed:
+ return "PERFORMED";
+ case ResetStreamsStatus::kNotSupported:
+ return "NOT_SUPPORTED";
+ }
+}
+
// Callbacks that the DcSctpSocket will be done synchronously to the owning
// client. It is allowed to call back into the library from callbacks that start
// with "On". It has been explicitly documented when it's not allowed to call
@@ -123,9 +186,9 @@ class DcSctpSocketCallbacks {
virtual TimeMs TimeMillis() = 0;
// Called when the library needs a random number uniformly distributed between
- // `low` (inclusive) and `high` (exclusive). The random number used by the
- // library are not used for cryptographic purposes there are no requirements
- // on a secure random number generator.
+ // `low` (inclusive) and `high` (exclusive). The random numbers used by the
+ // library are not used for cryptographic purposes. There are no requirements
+ // that the random number generator must be secure.
//
// Note that it's NOT ALLOWED to call into this library from within this
// callback.
@@ -134,12 +197,11 @@ class DcSctpSocketCallbacks {
// Triggered when the outgoing message buffer is empty, meaning that there are
// no more queued messages, but there can still be packets in-flight or to be
// retransmitted. (in contrast to SCTP_SENDER_DRY_EVENT).
- // TODO(boivie): This is currently only used in benchmarks to have a steady
- // flow of packets to send
//
// Note that it's NOT ALLOWED to call into this library from within this
// callback.
- virtual void NotifyOutgoingMessageBufferEmpty() = 0;
+ ABSL_DEPRECATED("Use OnTotalBufferedAmountLow instead")
+ virtual void NotifyOutgoingMessageBufferEmpty() {}
// Called when the library has received an SCTP message in full and delivers
// it to the upper layer.
@@ -201,14 +263,16 @@ class DcSctpSocketCallbacks {
virtual void OnIncomingStreamsReset(
rtc::ArrayView<const StreamID> incoming_streams) = 0;
- // If an outgoing message has expired before being completely sent.
- // TODO(boivie) Add some kind of message identifier.
- // TODO(boivie) Add callbacks for OnMessageSent and OnSentMessageAcked
+ // Will be called when the amount of data buffered to be sent falls to or
+ // below the threshold set when calling `SetBufferedAmountLowThreshold`.
//
// It is allowed to call into this library from within this callback.
- virtual void OnSentMessageExpired(StreamID stream_id,
- PPID ppid,
- bool unsent) = 0;
+ virtual void OnBufferedAmountLow(StreamID stream_id) {}
+
+ // Will be called when the total amount of data buffered (in the entire send
+ // buffer, for all streams) falls to or below the threshold specified in
+ // `DcSctpOptions::total_buffered_amount_low_threshold`.
+ virtual void OnTotalBufferedAmountLow() {}
};
// The DcSctpSocket implementation implements the following interface.
@@ -236,6 +300,25 @@ class DcSctpSocketInterface {
// not already closed. No callbacks will be made after Close() has returned.
virtual void Close() = 0;
+ // The socket state.
+ virtual SocketState state() const = 0;
+
+ // The options it was created with.
+ virtual const DcSctpOptions& options() const = 0;
+
+ // Update the options max_message_size.
+ virtual void SetMaxMessageSize(size_t max_message_size) = 0;
+
+ // Sends the message `message` using the provided send options.
+ // Sending a message is an asynchronous operation, and the `OnError` callback
+ // may be invoked to indicate any errors in sending the message.
+ //
+ // The association does not have to be established before calling this method.
+ // If it's called before there is an established association, the message will
+ // be queued.
+ virtual SendStatus Send(DcSctpMessage message,
+ const SendOptions& send_options) = 0;
+
// Resetting streams is an asynchronous operation and the results will
// be notified using `DcSctpSocketCallbacks::OnStreamsResetDone()` on success
// and `DcSctpSocketCallbacks::OnStreamsResetFailed()` on failure. Note that
@@ -251,27 +334,22 @@ class DcSctpSocketInterface {
// Resetting streams can only be done on an established association that
// supports stream resetting. Calling this method on e.g. a closed association
// or streams that don't support resetting will not perform any operation.
- virtual void ResetStreams(
+ virtual ResetStreamsStatus ResetStreams(
rtc::ArrayView<const StreamID> outgoing_streams) = 0;
- // Indicates if the peer supports resetting streams (RFC6525). Please note
- // that the connection must be established for support to be known.
- virtual StreamResetSupport SupportsStreamReset() const = 0;
+ // Returns the number of bytes of data currently queued to be sent on a given
+ // stream.
+ virtual size_t buffered_amount(StreamID stream_id) const = 0;
- // Sends the message `message` using the provided send options.
- // Sending a message is an asynchrous operation, and the `OnError` callback
- // may be invoked to indicate any errors in sending the message.
- //
- // The association does not have to be established before calling this method.
- // If it's called before there is an established association, the message will
- // be queued.
- void Send(DcSctpMessage message, const SendOptions& send_options = {}) {
- SendMessage(std::move(message), send_options);
- }
+ // Returns the number of buffered outgoing bytes that is considered "low" for
+ // a given stream. See `SetBufferedAmountLowThreshold`.
+ virtual size_t buffered_amount_low_threshold(StreamID stream_id) const = 0;
- private:
- virtual void SendMessage(DcSctpMessage message,
- const SendOptions& send_options) = 0;
+ // Used to specify the number of bytes of buffered outgoing data that is
+ // considered "low" for a given stream, which will trigger an
+ // OnBufferedAmountLow event. The default value is zero (0).
+ virtual void SetBufferedAmountLowThreshold(StreamID stream_id,
+ size_t bytes) = 0;
};
} // namespace dcsctp
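A short usage sketch of the reworked send path, exercising only methods introduced or changed in this hunk. The helper is illustrative and assumes `socket` is an already-established socket:

#include <cstdint>
#include <vector>

#include "net/dcsctp/public/dcsctp_socket.h"

namespace dcsctp {
// Returns true if the message was accepted into the send queue.
bool SendWithBackpressure(DcSctpSocketInterface& socket,
                          StreamID stream_id,
                          std::vector<uint8_t> payload) {
  // Request an OnBufferedAmountLow callback once this stream drains below
  // 64 kB of queued data.
  socket.SetBufferedAmountLowThreshold(stream_id, 64 * 1024);

  SendOptions send_options;
  send_options.unordered = IsUnordered(false);
  SendStatus status = socket.Send(
      DcSctpMessage(stream_id, PPID(53), std::move(payload)), send_options);

  // buffered_amount() reports how much of this stream is still queued; a real
  // sender would stop producing data while this exceeds the low threshold and
  // resume when DcSctpSocketCallbacks::OnBufferedAmountLow fires.
  bool should_pause = socket.buffered_amount(stream_id) >
                      socket.buffered_amount_low_threshold(stream_id);
  (void)should_pause;

  return status == SendStatus::kSuccess;
}
}  // namespace dcsctp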
diff --git a/net/dcsctp/public/dcsctp_socket_factory.cc b/net/dcsctp/public/dcsctp_socket_factory.cc
new file mode 100644
index 0000000000..338d143424
--- /dev/null
+++ b/net/dcsctp/public/dcsctp_socket_factory.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "net/dcsctp/public/dcsctp_socket_factory.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/packet_observer.h"
+#include "net/dcsctp/socket/dcsctp_socket.h"
+
+namespace dcsctp {
+std::unique_ptr<DcSctpSocketInterface> DcSctpSocketFactory::Create(
+ absl::string_view log_prefix,
+ DcSctpSocketCallbacks& callbacks,
+ std::unique_ptr<PacketObserver> packet_observer,
+ const DcSctpOptions& options) {
+ return std::make_unique<DcSctpSocket>(log_prefix, callbacks,
+ std::move(packet_observer), options);
+}
+} // namespace dcsctp
diff --git a/net/dcsctp/public/dcsctp_socket_factory.h b/net/dcsctp/public/dcsctp_socket_factory.h
new file mode 100644
index 0000000000..dcc68d9b54
--- /dev/null
+++ b/net/dcsctp/public/dcsctp_socket_factory.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_PUBLIC_DCSCTP_SOCKET_FACTORY_H_
+#define NET_DCSCTP_PUBLIC_DCSCTP_SOCKET_FACTORY_H_
+
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/packet_observer.h"
+
+namespace dcsctp {
+class DcSctpSocketFactory {
+ public:
+ std::unique_ptr<DcSctpSocketInterface> Create(
+ absl::string_view log_prefix,
+ DcSctpSocketCallbacks& callbacks,
+ std::unique_ptr<PacketObserver> packet_observer,
+ const DcSctpOptions& options);
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_PUBLIC_DCSCTP_SOCKET_FACTORY_H_
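A minimal sketch (not part of the patch) of how the new factory is meant to be used. `callbacks` is assumed to be an application-provided DcSctpSocketCallbacks implementation; default DcSctpOptions and a null PacketObserver are arbitrary choices for illustration.

    #include <memory>

    #include "net/dcsctp/public/dcsctp_socket_factory.h"

    std::unique_ptr<dcsctp::DcSctpSocketInterface> MakeSocket(
        dcsctp::DcSctpSocketCallbacks& callbacks) {
      dcsctp::DcSctpSocketFactory factory;
      dcsctp::DcSctpOptions options;  // Defaults; tune for the application.
      // Pass a PacketObserver implementation instead of nullptr to observe
      // packets.
      return factory.Create("client", callbacks,
                            /*packet_observer=*/nullptr, options);
    }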
diff --git a/net/dcsctp/public/types.h b/net/dcsctp/public/types.h
index b87fd4e79a..d516daffe3 100644
--- a/net/dcsctp/public/types.h
+++ b/net/dcsctp/public/types.h
@@ -12,6 +12,7 @@
#define NET_DCSCTP_PUBLIC_TYPES_H_
#include <cstdint>
+#include <limits>
#include "net/dcsctp/public/strong_alias.h"
@@ -85,6 +86,10 @@ class TimeMs : public StrongAlias<class TimeMsTag, int64_t> {
value_ -= *d;
return *this;
}
+
+ static constexpr TimeMs InfiniteFuture() {
+ return TimeMs(std::numeric_limits<int64_t>::max());
+ }
};
constexpr inline TimeMs operator+(TimeMs lhs, DurationMs rhs) {
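A two-line sketch (not part of the patch) of the new sentinel: TimeMs::InfiniteFuture() as a "never expires" value, combined with the operator+ declared just above. The 30-second duration is an arbitrary illustration.

    // Returns when the next timeout fires, or "never" if no timer is armed.
    dcsctp::TimeMs NextExpiry(dcsctp::TimeMs now, bool timer_armed) {
      return timer_armed ? now + dcsctp::DurationMs(30000)
                         : dcsctp::TimeMs::InfiniteFuture();
    }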
diff --git a/net/dcsctp/rx/BUILD.gn b/net/dcsctp/rx/BUILD.gn
index 75312b96da..fb92513158 100644
--- a/net/dcsctp/rx/BUILD.gn
+++ b/net/dcsctp/rx/BUILD.gn
@@ -14,16 +14,32 @@ rtc_library("data_tracker") {
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
+ "../common:sequence_numbers",
+ "../packet:chunk",
+ "../packet:data",
+ "../timer",
]
sources = [
"data_tracker.cc",
"data_tracker.h",
]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_source_set("reassembly_streams") {
- deps = [ "../packet:chunk" ]
+ deps = [
+ "../../../api:array_view",
+ "../common:sequence_numbers",
+ "../packet:chunk",
+ "../packet:data",
+ "../public:types",
+ ]
sources = [ "reassembly_streams.h" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("traditional_reassembly_streams") {
@@ -33,25 +49,46 @@ rtc_library("traditional_reassembly_streams") {
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
+ "../common:sequence_numbers",
+ "../packet:chunk",
+ "../packet:data",
+ "../public:types",
]
sources = [
"traditional_reassembly_streams.cc",
"traditional_reassembly_streams.h",
]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_library("reassembly_queue") {
deps = [
+ ":reassembly_streams",
":traditional_reassembly_streams",
"../../../api:array_view",
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
+ "../common:internal_types",
+ "../common:sequence_numbers",
+ "../common:str_join",
+ "../packet:chunk",
+ "../packet:data",
+ "../packet:parameter",
+ "../public:types",
]
sources = [
"reassembly_queue.cc",
"reassembly_queue.h",
]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
if (rtc_include_tests) {
@@ -61,14 +98,21 @@ if (rtc_include_tests) {
deps = [
":data_tracker",
":reassembly_queue",
+ ":reassembly_streams",
":traditional_reassembly_streams",
"../../../api:array_view",
"../../../rtc_base:checks",
"../../../rtc_base:gunit_helpers",
"../../../rtc_base:rtc_base_approved",
"../../../test:test_support",
+ "../common:sequence_numbers",
+ "../packet:chunk",
+ "../packet:data",
+ "../public:types",
"../testing:data_generator",
+ "../timer",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
sources = [
"data_tracker_test.cc",
"reassembly_queue_test.cc",
diff --git a/net/dcsctp/rx/data_tracker.cc b/net/dcsctp/rx/data_tracker.cc
index 3e03dfece2..5b563a8463 100644
--- a/net/dcsctp/rx/data_tracker.cc
+++ b/net/dcsctp/rx/data_tracker.cc
@@ -9,6 +9,7 @@
*/
#include "net/dcsctp/rx/data_tracker.h"
+#include <algorithm>
#include <cstdint>
#include <iterator>
#include <set>
@@ -16,6 +17,7 @@
#include <utility>
#include <vector>
+#include "absl/algorithm/container.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "net/dcsctp/common/sequence_numbers.h"
@@ -26,6 +28,86 @@
namespace dcsctp {
+constexpr size_t DataTracker::kMaxDuplicateTsnReported;
+constexpr size_t DataTracker::kMaxGapAckBlocksReported;
+
+bool DataTracker::AdditionalTsnBlocks::Add(UnwrappedTSN tsn) {
+  // Find a candidate block to expand: the first block that is greater than or
+  // equal to `tsn`, i.e. a block that either already includes `tsn`, would
+  // include it when expanded, or lies entirely after it.
+ auto it = absl::c_lower_bound(
+ blocks_, tsn, [&](const TsnRange& elem, const UnwrappedTSN& t) {
+ return elem.last.next_value() < t;
+ });
+
+ if (it == blocks_.end()) {
+    // No block is greater than or equal to `tsn`, which means that this TSN
+    // is greater than all existing blocks. It can then be inserted at the
+    // end.
+ blocks_.emplace_back(tsn, tsn);
+ return true;
+ }
+
+ if (tsn >= it->first && tsn <= it->last) {
+ // It's already in this block.
+ return false;
+ }
+
+ if (it->last.next_value() == tsn) {
+ // This block can be expanded to the right, or merged with the next.
+ auto next_it = it + 1;
+ if (next_it != blocks_.end() && tsn.next_value() == next_it->first) {
+      // Expanding it would make it adjacent to the next block - merge those.
+ it->last = next_it->last;
+ blocks_.erase(next_it);
+ return true;
+ }
+
+ // Expand to the right
+ it->last = tsn;
+ return true;
+ }
+
+ if (it->first == tsn.next_value()) {
+    // This block can be expanded to the left. A merge with the previous block
+    // is not possible here: if `tsn` also directly followed the previous
+    // block, std::lower_bound would have returned that (left-most) block, and
+    // the "expand to the right and merge" branch above would have handled it.
+ RTC_DCHECK(it == blocks_.begin() || (it - 1)->last.next_value() != tsn);
+
+ // Expand to the left.
+ it->first = tsn;
+ return true;
+ }
+
+ // Need to create a new block in the middle.
+ blocks_.emplace(it, tsn, tsn);
+ return true;
+}
+
+void DataTracker::AdditionalTsnBlocks::EraseTo(UnwrappedTSN tsn) {
+  // Find the first block that is greater than or equal to `tsn`.
+ auto it = absl::c_lower_bound(
+ blocks_, tsn, [&](const TsnRange& elem, const UnwrappedTSN& t) {
+ return elem.last < t;
+ });
+
+  // The found block is greater than or equal to `tsn` (or ::end, when no
+  // block is greater or equal). All blocks before it can be safely removed.
+  // The TSN might be within the found block, so possibly truncate it.
+ bool tsn_is_within_block = it != blocks_.end() && tsn >= it->first;
+ blocks_.erase(blocks_.begin(), it);
+
+ if (tsn_is_within_block) {
+ blocks_.front().first = tsn.next_value();
+ }
+}
+
+void DataTracker::AdditionalTsnBlocks::PopFront() {
+ RTC_DCHECK(!blocks_.empty());
+ blocks_.erase(blocks_.begin());
+}
+
bool DataTracker::IsTSNValid(TSN tsn) const {
UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.PeekUnwrap(tsn);
@@ -52,21 +134,41 @@ void DataTracker::Observe(TSN tsn,
// Old chunk already seen before?
if (unwrapped_tsn <= last_cumulative_acked_tsn_) {
- // TODO(boivie) Set duplicate TSN, even if it's not used in SCTP yet.
- return;
- }
-
- if (unwrapped_tsn == last_cumulative_acked_tsn_.next_value()) {
- last_cumulative_acked_tsn_ = unwrapped_tsn;
- // The cumulative acked tsn may be moved even further, if a gap was filled.
- while (!additional_tsns_.empty() &&
- *additional_tsns_.begin() ==
- last_cumulative_acked_tsn_.next_value()) {
- last_cumulative_acked_tsn_.Increment();
- additional_tsns_.erase(additional_tsns_.begin());
+ if (duplicate_tsns_.size() < kMaxDuplicateTsnReported) {
+ duplicate_tsns_.insert(unwrapped_tsn.Wrap());
}
+ // https://datatracker.ietf.org/doc/html/rfc4960#section-6.2
+ // "When a packet arrives with duplicate DATA chunk(s) and with no new DATA
+ // chunk(s), the endpoint MUST immediately send a SACK with no delay. If a
+ // packet arrives with duplicate DATA chunk(s) bundled with new DATA chunks,
+ // the endpoint MAY immediately send a SACK."
+ UpdateAckState(AckState::kImmediate, "duplicate data");
} else {
- additional_tsns_.insert(unwrapped_tsn);
+ if (unwrapped_tsn == last_cumulative_acked_tsn_.next_value()) {
+ last_cumulative_acked_tsn_ = unwrapped_tsn;
+ // The cumulative acked tsn may be moved even further, if a gap was
+ // filled.
+ if (!additional_tsn_blocks_.empty() &&
+ additional_tsn_blocks_.front().first ==
+ last_cumulative_acked_tsn_.next_value()) {
+ last_cumulative_acked_tsn_ = additional_tsn_blocks_.front().last;
+ additional_tsn_blocks_.PopFront();
+ }
+ } else {
+ bool inserted = additional_tsn_blocks_.Add(unwrapped_tsn);
+ if (!inserted) {
+ // Already seen before.
+ if (duplicate_tsns_.size() < kMaxDuplicateTsnReported) {
+ duplicate_tsns_.insert(unwrapped_tsn.Wrap());
+ }
+ // https://datatracker.ietf.org/doc/html/rfc4960#section-6.2
+ // "When a packet arrives with duplicate DATA chunk(s) and with no new
+ // DATA chunk(s), the endpoint MUST immediately send a SACK with no
+ // delay. If a packet arrives with duplicate DATA chunk(s) bundled with
+ // new DATA chunks, the endpoint MAY immediately send a SACK."
+ // No need to do this. SACKs are sent immediately on packet loss below.
+ }
+ }
}
// https://tools.ietf.org/html/rfc4960#section-6.7
@@ -75,7 +177,7 @@ void DataTracker::Observe(TSN tsn,
// the received DATA chunk sequence, it SHOULD send a SACK with Gap Ack
// Blocks immediately. The data receiver continues sending a SACK after
// receipt of each SCTP packet that doesn't fill the gap."
- if (!additional_tsns_.empty()) {
+ if (!additional_tsn_blocks_.empty()) {
UpdateAckState(AckState::kImmediate, "packet loss");
}
@@ -139,24 +241,20 @@ void DataTracker::HandleForwardTsn(TSN new_cumulative_ack) {
// `last_cumulative_acked_tsn_`, and if there have been prior "gaps" that are
// now overlapping with the new value, remove them.
last_cumulative_acked_tsn_ = unwrapped_tsn;
- int erased_additional_tsns = std::distance(
- additional_tsns_.begin(), additional_tsns_.upper_bound(unwrapped_tsn));
- additional_tsns_.erase(additional_tsns_.begin(),
- additional_tsns_.upper_bound(unwrapped_tsn));
+ additional_tsn_blocks_.EraseTo(unwrapped_tsn);
// See if the `last_cumulative_acked_tsn_` can be moved even further:
- while (!additional_tsns_.empty() &&
- *additional_tsns_.begin() == last_cumulative_acked_tsn_.next_value()) {
- last_cumulative_acked_tsn_.Increment();
- additional_tsns_.erase(additional_tsns_.begin());
- ++erased_additional_tsns;
+ if (!additional_tsn_blocks_.empty() &&
+ additional_tsn_blocks_.front().first ==
+ last_cumulative_acked_tsn_.next_value()) {
+ last_cumulative_acked_tsn_ = additional_tsn_blocks_.front().last;
+ additional_tsn_blocks_.PopFront();
}
RTC_DLOG(LS_VERBOSE) << log_prefix_ << "FORWARD_TSN, cum_ack_tsn="
<< *prev_last_cum_ack_tsn.Wrap() << "->"
<< *new_cumulative_ack << "->"
- << *last_cumulative_acked_tsn_.Wrap() << ", removed "
- << erased_additional_tsns << " additional TSNs";
+ << *last_cumulative_acked_tsn_.Wrap();
// https://tools.ietf.org/html/rfc3758#section-3.6
// "Any time a FORWARD TSN chunk arrives, for the purposes of sending a
@@ -178,51 +276,26 @@ SackChunk DataTracker::CreateSelectiveAck(size_t a_rwnd) {
// that. So this SACK produced is more like a NR-SACK as explained in
// https://ieeexplore.ieee.org/document/4697037 and which there is an RFC
// draft at https://tools.ietf.org/html/draft-tuexen-tsvwg-sctp-multipath-17.
- std::vector<TSN> duplicate_tsns;
- duplicate_tsns.reserve(duplicates_.size());
- for (UnwrappedTSN tsn : duplicates_) {
- duplicate_tsns.push_back(tsn.Wrap());
- }
- duplicates_.clear();
+ std::set<TSN> duplicate_tsns;
+ duplicate_tsns_.swap(duplicate_tsns);
return SackChunk(last_cumulative_acked_tsn_.Wrap(), a_rwnd,
- CreateGapAckBlocks(), duplicate_tsns);
+ CreateGapAckBlocks(), std::move(duplicate_tsns));
}
std::vector<SackChunk::GapAckBlock> DataTracker::CreateGapAckBlocks() const {
- // This method will calculate the gaps between blocks of contiguous values in
- // `additional_tsns_`, in the same format as the SACK chunk expects it;
- // offsets from the "cumulative ack TSN value".
+ const auto& blocks = additional_tsn_blocks_.blocks();
std::vector<SackChunk::GapAckBlock> gap_ack_blocks;
-
- absl::optional<UnwrappedTSN> first_tsn_in_block = absl::nullopt;
- absl::optional<UnwrappedTSN> last_tsn_in_block = absl::nullopt;
-
- auto flush = [&]() {
- if (first_tsn_in_block.has_value()) {
- auto start_diff = UnwrappedTSN::Difference(*first_tsn_in_block,
- last_cumulative_acked_tsn_);
- auto end_diff = UnwrappedTSN::Difference(*last_tsn_in_block,
- last_cumulative_acked_tsn_);
- gap_ack_blocks.emplace_back(static_cast<uint16_t>(start_diff),
- static_cast<uint16_t>(end_diff));
- first_tsn_in_block = absl::nullopt;
- last_tsn_in_block = absl::nullopt;
- }
- };
- for (UnwrappedTSN tsn : additional_tsns_) {
- if (last_tsn_in_block.has_value() &&
- last_tsn_in_block->next_value() == tsn) {
- // Continuing the same block.
- last_tsn_in_block = tsn;
- } else {
- // New block, or a gap from the old block's last value.
- flush();
- first_tsn_in_block = tsn;
- last_tsn_in_block = tsn;
- }
+ gap_ack_blocks.reserve(std::min(blocks.size(), kMaxGapAckBlocksReported));
+ for (size_t i = 0; i < blocks.size() && i < kMaxGapAckBlocksReported; ++i) {
+ auto start_diff =
+ UnwrappedTSN::Difference(blocks[i].first, last_cumulative_acked_tsn_);
+ auto end_diff =
+ UnwrappedTSN::Difference(blocks[i].last, last_cumulative_acked_tsn_);
+ gap_ack_blocks.emplace_back(static_cast<uint16_t>(start_diff),
+ static_cast<uint16_t>(end_diff));
}
- flush();
+
return gap_ack_blocks;
}
diff --git a/net/dcsctp/rx/data_tracker.h b/net/dcsctp/rx/data_tracker.h
index 6146d2a839..167f5a04e7 100644
--- a/net/dcsctp/rx/data_tracker.h
+++ b/net/dcsctp/rx/data_tracker.h
@@ -16,6 +16,7 @@
#include <cstdint>
#include <set>
#include <string>
+#include <utility>
#include <vector>
#include "absl/strings/string_view.h"
@@ -38,12 +39,17 @@ namespace dcsctp {
// 200ms, whatever is smallest).
class DataTracker {
public:
+ // The maximum number of duplicate TSNs that will be reported in a SACK.
+ static constexpr size_t kMaxDuplicateTsnReported = 20;
+ // The maximum number of gap-ack-blocks that will be reported in a SACK.
+ static constexpr size_t kMaxGapAckBlocksReported = 20;
+
// The maximum number of accepted in-flight DATA chunks. This indicates the
// maximum difference from this buffer's last cumulative ack TSN, and any
// received data. Data received beyond this limit will be dropped, which will
// force the transmitter to send data that actually increases the last
// cumulative acked TSN.
- static constexpr uint32_t kMaxAcceptedOutstandingFragments = 256;
+ static constexpr uint32_t kMaxAcceptedOutstandingFragments = 100000;
explicit DataTracker(absl::string_view log_prefix,
Timer* delayed_ack_timer,
@@ -111,6 +117,49 @@ class DataTracker {
// Send a SACK immediately after handling this packet.
kImmediate,
};
+
+  // Represents ranges of received TSNs that do not directly follow the last
+  // cumulative acked TSN. This information is returned to the sender in the
+  // "gap ack blocks" of the SACK chunk. The blocks are always non-overlapping
+  // and non-adjacent.
+ class AdditionalTsnBlocks {
+ public:
+ // Represents an inclusive range of received TSNs, i.e. [first, last].
+ struct TsnRange {
+ TsnRange(UnwrappedTSN first, UnwrappedTSN last)
+ : first(first), last(last) {}
+ UnwrappedTSN first;
+ UnwrappedTSN last;
+ };
+
+    // Adds a TSN to the set. This will try to expand an existing block and
+    // may merge blocks to ensure that all blocks stay non-adjacent. If no
+    // existing block can be expanded, a new block is created.
+ //
+ // The return value indicates if `tsn` was added. If false is returned, the
+ // `tsn` was already represented in one of the blocks.
+ bool Add(UnwrappedTSN tsn);
+
+    // Erases all TSNs up to and including `tsn`. This will remove all blocks
+    // that lie completely below `tsn` and may truncate a block that contains
+    // `tsn`. In that case, the frontmost block's start TSN will be the TSN
+    // directly following `tsn`.
+ void EraseTo(UnwrappedTSN tsn);
+
+ // Removes the first block. Must not be called on an empty set.
+ void PopFront();
+
+ const std::vector<TsnRange>& blocks() const { return blocks_; }
+
+ bool empty() const { return blocks_.empty(); }
+
+ const TsnRange& front() const { return blocks_.front(); }
+
+ private:
+ // A sorted vector of non-overlapping and non-adjacent blocks.
+ std::vector<TsnRange> blocks_;
+ };
+
std::vector<SackChunk::GapAckBlock> CreateGapAckBlocks() const;
void UpdateAckState(AckState new_state, absl::string_view reason);
static absl::string_view ToString(AckState ack_state);
@@ -125,8 +174,8 @@ class DataTracker {
// All TSNs up until (and including) this value have been seen.
UnwrappedTSN last_cumulative_acked_tsn_;
// Received TSNs that are not directly following `last_cumulative_acked_tsn_`.
- std::set<UnwrappedTSN> additional_tsns_;
- std::set<UnwrappedTSN> duplicates_;
+ AdditionalTsnBlocks additional_tsn_blocks_;
+ std::set<TSN> duplicate_tsns_;
};
} // namespace dcsctp
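An illustrative test-style walkthrough (not part of the patch) of how AdditionalTsnBlocks feeds the gap ack blocks. It reuses the DataTrackerTest fixture, the Observer() helper and kArwnd from data_tracker_test.cc below, where the tracker starts with a cumulative ack of TSN 10.

    TEST_F(DataTrackerTest, EditorialGapAckWalkthrough) {
      // Receiving 12, 13 and 15 leaves two blocks: [12,13] and [15,15].
      // Gap ack blocks are reported as offsets from the cumulative ack (10).
      Observer({12, 13, 15});
      EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
                  ElementsAre(SackChunk::GapAckBlock(2, 3),
                              SackChunk::GapAckBlock(5, 5)));
      // Receiving 14 fills the hole; the two blocks merge into [12,15].
      Observer({14});
      EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
                  ElementsAre(SackChunk::GapAckBlock(2, 5)));
      // Receiving 11 advances the cumulative ack through the whole block.
      Observer({11});
      EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(15));
      EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(), IsEmpty());
    }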
diff --git a/net/dcsctp/rx/data_tracker_test.cc b/net/dcsctp/rx/data_tracker_test.cc
index d714b0ba9e..5c2e56fb2b 100644
--- a/net/dcsctp/rx/data_tracker_test.cc
+++ b/net/dcsctp/rx/data_tracker_test.cc
@@ -25,6 +25,8 @@ namespace dcsctp {
namespace {
using ::testing::ElementsAre;
using ::testing::IsEmpty;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAre;
constexpr size_t kArwnd = 10000;
constexpr TSN kInitialTSN(11);
@@ -224,11 +226,411 @@ TEST_F(DataTrackerTest, WillNotAcceptInvalidTSNs) {
size_t limit = DataTracker::kMaxAcceptedOutstandingFragments;
EXPECT_FALSE(buf_.IsTSNValid(TSN(*last_tsn + limit + 1)));
EXPECT_FALSE(buf_.IsTSNValid(TSN(*last_tsn - (limit + 1))));
- EXPECT_FALSE(buf_.IsTSNValid(TSN(*last_tsn + 65536)));
- EXPECT_FALSE(buf_.IsTSNValid(TSN(*last_tsn - 65536)));
EXPECT_FALSE(buf_.IsTSNValid(TSN(*last_tsn + 0x8000000)));
EXPECT_FALSE(buf_.IsTSNValid(TSN(*last_tsn - 0x8000000)));
}
+TEST_F(DataTrackerTest, ReportSingleDuplicateTsns) {
+ Observer({11, 12, 11});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(12));
+ EXPECT_THAT(sack.gap_ack_blocks(), IsEmpty());
+ EXPECT_THAT(sack.duplicate_tsns(), UnorderedElementsAre(TSN(11)));
+}
+
+TEST_F(DataTrackerTest, ReportMultipleDuplicateTsns) {
+ Observer({11, 12, 13, 14, 12, 13, 12, 13, 15, 16});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(16));
+ EXPECT_THAT(sack.gap_ack_blocks(), IsEmpty());
+ EXPECT_THAT(sack.duplicate_tsns(), UnorderedElementsAre(TSN(12), TSN(13)));
+}
+
+TEST_F(DataTrackerTest, ReportDuplicateTsnsInGapAckBlocks) {
+ Observer({11, /*12,*/ 13, 14, 13, 14, 15, 16});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(11));
+ EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 5)));
+ EXPECT_THAT(sack.duplicate_tsns(), UnorderedElementsAre(TSN(13), TSN(14)));
+}
+
+TEST_F(DataTrackerTest, ClearsDuplicateTsnsAfterCreatingSack) {
+ Observer({11, 12, 13, 14, 12, 13, 12, 13, 15, 16});
+ SackChunk sack1 = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack1.cumulative_tsn_ack(), TSN(16));
+ EXPECT_THAT(sack1.gap_ack_blocks(), IsEmpty());
+ EXPECT_THAT(sack1.duplicate_tsns(), UnorderedElementsAre(TSN(12), TSN(13)));
+
+ Observer({17});
+ SackChunk sack2 = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack2.cumulative_tsn_ack(), TSN(17));
+ EXPECT_THAT(sack2.gap_ack_blocks(), IsEmpty());
+ EXPECT_THAT(sack2.duplicate_tsns(), IsEmpty());
+}
+
+TEST_F(DataTrackerTest, LimitsNumberOfDuplicatesReported) {
+ for (size_t i = 0; i < DataTracker::kMaxDuplicateTsnReported + 10; ++i) {
+ TSN tsn(11 + i);
+ buf_.Observe(tsn, AnyDataChunk::ImmediateAckFlag(false));
+ buf_.Observe(tsn, AnyDataChunk::ImmediateAckFlag(false));
+ }
+
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_THAT(sack.gap_ack_blocks(), IsEmpty());
+ EXPECT_THAT(sack.duplicate_tsns(),
+ SizeIs(DataTracker::kMaxDuplicateTsnReported));
+}
+
+TEST_F(DataTrackerTest, LimitsNumberOfGapAckBlocksReported) {
+ for (size_t i = 0; i < DataTracker::kMaxGapAckBlocksReported + 10; ++i) {
+ TSN tsn(11 + i * 2);
+ buf_.Observe(tsn, AnyDataChunk::ImmediateAckFlag(false));
+ }
+
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(11));
+ EXPECT_THAT(sack.gap_ack_blocks(),
+ SizeIs(DataTracker::kMaxGapAckBlocksReported));
+}
+
+TEST_F(DataTrackerTest, SendsSackForFirstPacketObserved) {
+ Observer({11});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+}
+
+TEST_F(DataTrackerTest, SendsSackEverySecondPacketWhenThereIsNoPacketLoss) {
+ Observer({11});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({12});
+ buf_.ObservePacketEnd();
+ EXPECT_FALSE(buf_.ShouldSendAck());
+ EXPECT_TRUE(timer_->is_running());
+ Observer({13});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({14});
+ buf_.ObservePacketEnd();
+ EXPECT_FALSE(buf_.ShouldSendAck());
+ EXPECT_TRUE(timer_->is_running());
+ Observer({15});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+}
+
+TEST_F(DataTrackerTest, SendsSackEveryPacketOnPacketLoss) {
+ Observer({11});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({13});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({14});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({15});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({16});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ // Fill the hole.
+ Observer({12});
+ buf_.ObservePacketEnd();
+ EXPECT_FALSE(buf_.ShouldSendAck());
+ EXPECT_TRUE(timer_->is_running());
+ // Goes back to every second packet
+ Observer({17});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({18});
+ buf_.ObservePacketEnd();
+ EXPECT_FALSE(buf_.ShouldSendAck());
+ EXPECT_TRUE(timer_->is_running());
+}
+
+TEST_F(DataTrackerTest, SendsSackOnDuplicateDataChunks) {
+ Observer({11});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({11});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ Observer({12});
+ buf_.ObservePacketEnd();
+ EXPECT_FALSE(buf_.ShouldSendAck());
+ EXPECT_TRUE(timer_->is_running());
+ // Goes back to every second packet
+ Observer({13});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+ // Duplicate again
+ Observer({12});
+ buf_.ObservePacketEnd();
+ EXPECT_TRUE(buf_.ShouldSendAck());
+ EXPECT_FALSE(timer_->is_running());
+}
+
+TEST_F(DataTrackerTest, GapAckBlockAddSingleBlock) {
+ Observer({12});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 2)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockAddsAnother) {
+ Observer({12});
+ Observer({14});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 2),
+ SackChunk::GapAckBlock(4, 4)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockAddsDuplicate) {
+ Observer({12});
+ Observer({12});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 2)));
+ EXPECT_THAT(sack.duplicate_tsns(), ElementsAre(TSN(12)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockExpandsToRight) {
+ Observer({12});
+ Observer({13});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 3)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockExpandsToRightWithOther) {
+ Observer({12});
+ Observer({20});
+ Observer({30});
+ Observer({21});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(2, 2), //
+ SackChunk::GapAckBlock(10, 11), //
+ SackChunk::GapAckBlock(20, 20)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockExpandsToLeft) {
+ Observer({13});
+ Observer({12});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(), ElementsAre(SackChunk::GapAckBlock(2, 3)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockExpandsToLeftWithOther) {
+ Observer({12});
+ Observer({21});
+ Observer({30});
+ Observer({20});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(2, 2), //
+ SackChunk::GapAckBlock(10, 11), //
+ SackChunk::GapAckBlock(20, 20)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockExpandsToRightAndMerges) {
+ Observer({12});
+ Observer({20});
+ Observer({22});
+ Observer({30});
+ Observer({21});
+ SackChunk sack = buf_.CreateSelectiveAck(kArwnd);
+ EXPECT_EQ(sack.cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(sack.gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(2, 2), //
+ SackChunk::GapAckBlock(10, 12), //
+ SackChunk::GapAckBlock(20, 20)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockMergesManyBlocksIntoOne) {
+ Observer({22});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 12)));
+ Observer({30});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 12), //
+ SackChunk::GapAckBlock(20, 20)));
+ Observer({24});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 12), //
+ SackChunk::GapAckBlock(14, 14), //
+ SackChunk::GapAckBlock(20, 20)));
+ Observer({28});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 12), //
+ SackChunk::GapAckBlock(14, 14), //
+ SackChunk::GapAckBlock(18, 18), //
+ SackChunk::GapAckBlock(20, 20)));
+ Observer({26});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 12), //
+ SackChunk::GapAckBlock(14, 14), //
+ SackChunk::GapAckBlock(16, 16), //
+ SackChunk::GapAckBlock(18, 18), //
+ SackChunk::GapAckBlock(20, 20)));
+ Observer({29});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 12), //
+ SackChunk::GapAckBlock(14, 14), //
+ SackChunk::GapAckBlock(16, 16), //
+ SackChunk::GapAckBlock(18, 20)));
+ Observer({23});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 14), //
+ SackChunk::GapAckBlock(16, 16), //
+ SackChunk::GapAckBlock(18, 20)));
+ Observer({27});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 14), //
+ SackChunk::GapAckBlock(16, 20)));
+
+ Observer({25});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(12, 20)));
+ Observer({20});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(10, 10), //
+ SackChunk::GapAckBlock(12, 20)));
+ Observer({32});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(10, 10), //
+ SackChunk::GapAckBlock(12, 20), //
+ SackChunk::GapAckBlock(22, 22)));
+ Observer({21});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(10, 20), //
+ SackChunk::GapAckBlock(22, 22)));
+ Observer({31});
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(10, 22)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveBeforeCumAckTsn) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(8));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(10));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(2, 4), //
+ SackChunk::GapAckBlock(10, 12),
+ SackChunk::GapAckBlock(20, 21)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveBeforeFirstBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(11));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(6, 8), //
+ SackChunk::GapAckBlock(16, 17)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveAtBeginningOfFirstBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(12));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(6, 8), //
+ SackChunk::GapAckBlock(16, 17)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveAtMiddleOfFirstBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+ buf_.HandleForwardTsn(TSN(13));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(6, 8), //
+ SackChunk::GapAckBlock(16, 17)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveAtEndOfFirstBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+ buf_.HandleForwardTsn(TSN(14));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(14));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(6, 8), //
+ SackChunk::GapAckBlock(16, 17)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveRightAfterFirstBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(18));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(18));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(2, 4), //
+ SackChunk::GapAckBlock(12, 13)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveRightBeforeSecondBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(19));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(8, 9)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveRightAtStartOfSecondBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(20));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(8, 9)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveRightAtMiddleOfSecondBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(21));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(8, 9)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveRightAtEndOfSecondBlock) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(22));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(22));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(),
+ ElementsAre(SackChunk::GapAckBlock(8, 9)));
+}
+
+TEST_F(DataTrackerTest, GapAckBlockRemoveFarAfterAllBlocks) {
+ Observer({12, 13, 14, 20, 21, 22, 30, 31});
+
+ buf_.HandleForwardTsn(TSN(40));
+ EXPECT_EQ(buf_.CreateSelectiveAck(kArwnd).cumulative_tsn_ack(), TSN(40));
+ EXPECT_THAT(buf_.CreateSelectiveAck(kArwnd).gap_ack_blocks(), IsEmpty());
+}
+
} // namespace
} // namespace dcsctp
diff --git a/net/dcsctp/rx/reassembly_queue.h b/net/dcsctp/rx/reassembly_queue.h
index b752e53acc..25cda70c58 100644
--- a/net/dcsctp/rx/reassembly_queue.h
+++ b/net/dcsctp/rx/reassembly_queue.h
@@ -104,8 +104,8 @@ class ReassemblyQueue {
// data.
size_t queued_bytes() const { return queued_bytes_; }
- // The remaining bytes until the queue is full.
- size_t remaining_bytes() const { return max_size_bytes_ - queued_bytes_; }
+ // The remaining bytes until the queue has reached the watermark limit.
+ size_t remaining_bytes() const { return watermark_bytes_ - queued_bytes_; }
// Indicates if the queue is full. Data should not be added to the queue when
// it's full.
diff --git a/net/dcsctp/socket/BUILD.gn b/net/dcsctp/socket/BUILD.gn
new file mode 100644
index 0000000000..58abd7ac31
--- /dev/null
+++ b/net/dcsctp/socket/BUILD.gn
@@ -0,0 +1,234 @@
+# Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_source_set("context") {
+ sources = [ "context.h" ]
+ deps = [
+ "../common:internal_types",
+ "../packet:sctp_packet",
+ "../public:socket",
+ "../public:types",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_library("heartbeat_handler") {
+ deps = [
+ ":context",
+ "../../../api:array_view",
+ "../../../rtc_base",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../packet:bounded_io",
+ "../packet:chunk",
+ "../packet:parameter",
+ "../packet:sctp_packet",
+ "../public:socket",
+ "../public:types",
+ "../timer",
+ ]
+ sources = [
+ "heartbeat_handler.cc",
+ "heartbeat_handler.h",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("stream_reset_handler") {
+ deps = [
+ ":context",
+ "../../../api:array_view",
+ "../../../rtc_base",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../common:internal_types",
+ "../common:str_join",
+ "../packet:chunk",
+ "../packet:parameter",
+ "../packet:sctp_packet",
+ "../packet:tlv_trait",
+ "../public:socket",
+ "../public:types",
+ "../rx:data_tracker",
+ "../rx:reassembly_queue",
+ "../timer",
+ "../tx:retransmission_queue",
+ ]
+ sources = [
+ "stream_reset_handler.cc",
+ "stream_reset_handler.h",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("transmission_control_block") {
+ deps = [
+ ":context",
+ ":heartbeat_handler",
+ ":stream_reset_handler",
+ "../../../api:array_view",
+ "../../../rtc_base",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../common:sequence_numbers",
+ "../packet:chunk",
+ "../packet:sctp_packet",
+ "../public:socket",
+ "../public:types",
+ "../rx:data_tracker",
+ "../rx:reassembly_queue",
+ "../timer",
+ "../tx:retransmission_error_counter",
+ "../tx:retransmission_queue",
+ "../tx:retransmission_timeout",
+ "../tx:send_queue",
+ ]
+ sources = [
+ "capabilities.h",
+ "transmission_control_block.cc",
+ "transmission_control_block.h",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("dcsctp_socket") {
+ deps = [
+ ":context",
+ ":heartbeat_handler",
+ ":stream_reset_handler",
+ ":transmission_control_block",
+ "../../../api:array_view",
+ "../../../api:refcountedbase",
+ "../../../api:scoped_refptr",
+ "../../../rtc_base",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../common:internal_types",
+ "../packet:bounded_io",
+ "../packet:chunk",
+ "../packet:chunk_validators",
+ "../packet:data",
+ "../packet:error_cause",
+ "../packet:parameter",
+ "../packet:sctp_packet",
+ "../packet:tlv_trait",
+ "../public:socket",
+ "../public:types",
+ "../rx:data_tracker",
+ "../rx:reassembly_queue",
+ "../timer",
+ "../tx:retransmission_error_counter",
+ "../tx:retransmission_queue",
+ "../tx:retransmission_timeout",
+ "../tx:rr_send_queue",
+ "../tx:send_queue",
+ ]
+ sources = [
+ "callback_deferrer.h",
+ "dcsctp_socket.cc",
+ "dcsctp_socket.h",
+ "state_cookie.cc",
+ "state_cookie.h",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+if (rtc_include_tests) {
+ rtc_source_set("mock_callbacks") {
+ testonly = true
+ sources = [ "mock_dcsctp_socket_callbacks.h" ]
+ deps = [
+ "../../../api:array_view",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ "../public:socket",
+ "../public:types",
+ "../timer",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_source_set("mock_context") {
+ testonly = true
+ sources = [ "mock_context.h" ]
+ deps = [
+ ":context",
+ ":mock_callbacks",
+ "../../../test:test_support",
+ "../common:internal_types",
+ "../packet:sctp_packet",
+ "../public:socket",
+ "../public:types",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("dcsctp_socket_unittests") {
+ testonly = true
+
+ deps = [
+ ":dcsctp_socket",
+ ":heartbeat_handler",
+ ":mock_callbacks",
+ ":mock_context",
+ ":stream_reset_handler",
+ "../../../api:array_view",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ "../common:internal_types",
+ "../packet:chunk",
+ "../packet:error_cause",
+ "../packet:parameter",
+ "../packet:sctp_packet",
+ "../packet:tlv_trait",
+ "../public:socket",
+ "../public:types",
+ "../rx:data_tracker",
+ "../rx:reassembly_queue",
+ "../testing:data_generator",
+ "../testing:testing_macros",
+ "../timer",
+ "../tx:mock_send_queue",
+ "../tx:retransmission_queue",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ sources = [
+ "dcsctp_socket_test.cc",
+ "heartbeat_handler_test.cc",
+ "state_cookie_test.cc",
+ "stream_reset_handler_test.cc",
+ ]
+ }
+}
diff --git a/net/dcsctp/socket/callback_deferrer.h b/net/dcsctp/socket/callback_deferrer.h
new file mode 100644
index 0000000000..197cf434af
--- /dev/null
+++ b/net/dcsctp/socket/callback_deferrer.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_CALLBACK_DEFERRER_H_
+#define NET_DCSCTP_SOCKET_CALLBACK_DEFERRER_H_
+
+#include <cstdint>
+#include <functional>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/ref_counted_base.h"
+#include "api/scoped_refptr.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "rtc_base/ref_counted_object.h"
+
+namespace dcsctp {
+
+// Defers callbacks until they can be safely triggered.
+//
+// There are a lot of callbacks from the dcSCTP library to the client,
+// such as when messages are received or streams are closed. When the client
+// receives these callbacks, the client is expected to be able to call into the
+// library - from within the callback. For example, sending a reply message when
+// a certain SCTP message has been received, or to reconnect when the connection
+// was closed for any reason. This means that the dcSCTP library must always be
+// in a consistent and stable state when these callbacks are delivered, and to
+// ensure that's the case, callbacks are not immediately delivered from where
+// they originate, but instead queued (deferred) by this class. At the end of
+// any public API method that may result in callbacks, they are triggered and
+// then delivered.
+//
+// There are a number of exceptions, which are clearly annotated in the API.
+class CallbackDeferrer : public DcSctpSocketCallbacks {
+ public:
+ explicit CallbackDeferrer(DcSctpSocketCallbacks& underlying)
+ : underlying_(underlying) {}
+
+ void TriggerDeferred() {
+    // Need to swap here. The client may call into the library from within a
+    // callback, which could add new callbacks to this instance, and the
+    // vector must not be modified while it is being iterated over.
+ std::vector<std::function<void(DcSctpSocketCallbacks & cb)>> deferred;
+ deferred.swap(deferred_);
+
+ for (auto& cb : deferred) {
+ cb(underlying_);
+ }
+ }
+
+ void SendPacket(rtc::ArrayView<const uint8_t> data) override {
+ // Will not be deferred - call directly.
+ underlying_.SendPacket(data);
+ }
+
+ std::unique_ptr<Timeout> CreateTimeout() override {
+ // Will not be deferred - call directly.
+ return underlying_.CreateTimeout();
+ }
+
+ TimeMs TimeMillis() override {
+ // Will not be deferred - call directly.
+ return underlying_.TimeMillis();
+ }
+
+ uint32_t GetRandomInt(uint32_t low, uint32_t high) override {
+ // Will not be deferred - call directly.
+ return underlying_.GetRandomInt(low, high);
+ }
+
+ void OnMessageReceived(DcSctpMessage message) override {
+ deferred_.emplace_back(
+ [deliverer = MessageDeliverer(std::move(message))](
+ DcSctpSocketCallbacks& cb) mutable { deliverer.Deliver(cb); });
+ }
+
+ void OnError(ErrorKind error, absl::string_view message) override {
+ deferred_.emplace_back(
+ [error, message = std::string(message)](DcSctpSocketCallbacks& cb) {
+ cb.OnError(error, message);
+ });
+ }
+
+ void OnAborted(ErrorKind error, absl::string_view message) override {
+ deferred_.emplace_back(
+ [error, message = std::string(message)](DcSctpSocketCallbacks& cb) {
+ cb.OnAborted(error, message);
+ });
+ }
+
+ void OnConnected() override {
+ deferred_.emplace_back([](DcSctpSocketCallbacks& cb) { cb.OnConnected(); });
+ }
+
+ void OnClosed() override {
+ deferred_.emplace_back([](DcSctpSocketCallbacks& cb) { cb.OnClosed(); });
+ }
+
+ void OnConnectionRestarted() override {
+ deferred_.emplace_back(
+ [](DcSctpSocketCallbacks& cb) { cb.OnConnectionRestarted(); });
+ }
+
+ void OnStreamsResetFailed(rtc::ArrayView<const StreamID> outgoing_streams,
+ absl::string_view reason) override {
+ deferred_.emplace_back(
+ [streams = std::vector<StreamID>(outgoing_streams.begin(),
+ outgoing_streams.end()),
+ reason = std::string(reason)](DcSctpSocketCallbacks& cb) {
+ cb.OnStreamsResetFailed(streams, reason);
+ });
+ }
+
+ void OnStreamsResetPerformed(
+ rtc::ArrayView<const StreamID> outgoing_streams) override {
+ deferred_.emplace_back(
+ [streams = std::vector<StreamID>(outgoing_streams.begin(),
+ outgoing_streams.end())](
+ DcSctpSocketCallbacks& cb) {
+ cb.OnStreamsResetPerformed(streams);
+ });
+ }
+
+ void OnIncomingStreamsReset(
+ rtc::ArrayView<const StreamID> incoming_streams) override {
+ deferred_.emplace_back(
+ [streams = std::vector<StreamID>(incoming_streams.begin(),
+ incoming_streams.end())](
+ DcSctpSocketCallbacks& cb) { cb.OnIncomingStreamsReset(streams); });
+ }
+
+ void OnBufferedAmountLow(StreamID stream_id) override {
+ deferred_.emplace_back([stream_id](DcSctpSocketCallbacks& cb) {
+ cb.OnBufferedAmountLow(stream_id);
+ });
+ }
+
+ void OnTotalBufferedAmountLow() override {
+ deferred_.emplace_back(
+ [](DcSctpSocketCallbacks& cb) { cb.OnTotalBufferedAmountLow(); });
+ }
+
+ private:
+ // A wrapper around the move-only DcSctpMessage, to let it be captured in a
+ // lambda.
+ class MessageDeliverer {
+ public:
+ explicit MessageDeliverer(DcSctpMessage&& message)
+ : state_(rtc::make_ref_counted<State>(std::move(message))) {}
+
+ void Deliver(DcSctpSocketCallbacks& c) {
+ // Really ensure that it's only called once.
+ RTC_DCHECK(!state_->has_delivered);
+ state_->has_delivered = true;
+ c.OnMessageReceived(std::move(state_->message));
+ }
+
+ private:
+ struct State : public rtc::RefCountInterface {
+ explicit State(DcSctpMessage&& m)
+ : has_delivered(false), message(std::move(m)) {}
+ bool has_delivered;
+ DcSctpMessage message;
+ };
+ rtc::scoped_refptr<State> state_;
+ };
+
+ DcSctpSocketCallbacks& underlying_;
+ std::vector<std::function<void(DcSctpSocketCallbacks& cb)>> deferred_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_CALLBACK_DEFERRER_H_
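A sketch (not part of the patch) of the call pattern this class enables; the real pattern is visible in DcSctpSocket::Connect() further down, which ends with RTC_DCHECK(IsConsistent()) followed by callbacks_.TriggerDeferred(). ExamplePublicApiMethod() is a placeholder name.

    // Placeholder for the body of any public socket API method.
    void ExamplePublicApiMethod(dcsctp::CallbackDeferrer& callbacks) {
      // 1) Mutate internal state. Callbacks raised while doing so, such as
      //    OnConnected(), are only queued by the deferrer.
      callbacks.OnConnected();
      // 2) With the socket back in a stable state, flush the queue. The client
      //    may now safely re-enter the library from inside its callbacks.
      callbacks.TriggerDeferred();
    }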
diff --git a/net/dcsctp/socket/capabilities.h b/net/dcsctp/socket/capabilities.h
new file mode 100644
index 0000000000..c6d3692b2d
--- /dev/null
+++ b/net/dcsctp/socket/capabilities.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_CAPABILITIES_H_
+#define NET_DCSCTP_SOCKET_CAPABILITIES_H_
+
+namespace dcsctp {
+// Indicates what the association supports; a feature is only set here when
+// both parties support it, so it can actually be used.
+struct Capabilities {
+ // RFC3758 Partial Reliability Extension
+ bool partial_reliability = false;
+ // RFC8260 Stream Schedulers and User Message Interleaving
+ bool message_interleaving = false;
+ // RFC6525 Stream Reconfiguration
+ bool reconfig = false;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_CAPABILITIES_H_
diff --git a/net/dcsctp/socket/context.h b/net/dcsctp/socket/context.h
new file mode 100644
index 0000000000..eca5b9e4fb
--- /dev/null
+++ b/net/dcsctp/socket/context.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_CONTEXT_H_
+#define NET_DCSCTP_SOCKET_CONTEXT_H_
+
+#include <cstdint>
+
+#include "absl/strings/string_view.h"
+#include "net/dcsctp/common/internal_types.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/types.h"
+
+namespace dcsctp {
+
+// A set of helper methods used by handlers to e.g. send packets.
+//
+// Implemented by the TransmissionControlBlock.
+class Context {
+ public:
+ virtual ~Context() = default;
+
+ // Indicates if a connection has been established.
+ virtual bool is_connection_established() const = 0;
+
+ // Returns this side's initial TSN value.
+ virtual TSN my_initial_tsn() const = 0;
+
+ // Returns the peer's initial TSN value.
+ virtual TSN peer_initial_tsn() const = 0;
+
+ // Returns the socket callbacks.
+ virtual DcSctpSocketCallbacks& callbacks() const = 0;
+
+ // Observes a measured RTT value, in milliseconds.
+ virtual void ObserveRTT(DurationMs rtt_ms) = 0;
+
+ // Returns the current Retransmission Timeout (rto) value, in milliseconds.
+ virtual DurationMs current_rto() const = 0;
+
+  // Increments the transmission error counter, given a human-readable reason.
+ virtual bool IncrementTxErrorCounter(absl::string_view reason) = 0;
+
+ // Clears the transmission error counter.
+ virtual void ClearTxErrorCounter() = 0;
+
+ // Returns true if there have been too many retransmission errors.
+ virtual bool HasTooManyTxErrors() const = 0;
+
+ // Returns a PacketBuilder, filled in with the correct verification tag.
+ virtual SctpPacket::Builder PacketBuilder() const = 0;
+
+ // Builds the packet from `builder` and sends it.
+ virtual void Send(SctpPacket::Builder& builder) = 0;
+};
+
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_CONTEXT_H_
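A sketch (not part of the patch) of how a handler typically replies through this interface. It assumes SctpPacket::Builder::Add() accepts the chunk to send, as done in DcSctpSocket::SendInit() below; ChunkT stands in for any concrete chunk type.

    template <typename ChunkT>
    void ReplyWith(dcsctp::Context& ctx, const ChunkT& chunk) {
      // PacketBuilder() comes pre-filled with the association's verification
      // tag; the handler only adds its chunk and hands the packet back.
      dcsctp::SctpPacket::Builder builder = ctx.PacketBuilder();
      builder.Add(chunk);
      ctx.Send(builder);
    }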
diff --git a/net/dcsctp/socket/dcsctp_socket.cc b/net/dcsctp/socket/dcsctp_socket.cc
new file mode 100644
index 0000000000..71bc98c70d
--- /dev/null
+++ b/net/dcsctp/socket/dcsctp_socket.cc
@@ -0,0 +1,1550 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/dcsctp_socket.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <limits>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/packet/chunk/abort_chunk.h"
+#include "net/dcsctp/packet/chunk/chunk.h"
+#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h"
+#include "net/dcsctp/packet/chunk/data_chunk.h"
+#include "net/dcsctp/packet/chunk/data_common.h"
+#include "net/dcsctp/packet/chunk/error_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_common.h"
+#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h"
+#include "net/dcsctp/packet/chunk/idata_chunk.h"
+#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/init_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/init_chunk.h"
+#include "net/dcsctp/packet/chunk/reconfig_chunk.h"
+#include "net/dcsctp/packet/chunk/sack_chunk.h"
+#include "net/dcsctp/packet/chunk/shutdown_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/shutdown_chunk.h"
+#include "net/dcsctp/packet/chunk/shutdown_complete_chunk.h"
+#include "net/dcsctp/packet/chunk_validators.h"
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h"
+#include "net/dcsctp/packet/error_cause/error_cause.h"
+#include "net/dcsctp/packet/error_cause/no_user_data_cause.h"
+#include "net/dcsctp/packet/error_cause/out_of_resource_error_cause.h"
+#include "net/dcsctp/packet/error_cause/protocol_violation_cause.h"
+#include "net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h"
+#include "net/dcsctp/packet/error_cause/user_initiated_abort_cause.h"
+#include "net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h"
+#include "net/dcsctp/packet/parameter/parameter.h"
+#include "net/dcsctp/packet/parameter/state_cookie_parameter.h"
+#include "net/dcsctp/packet/parameter/supported_extensions_parameter.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/packet/tlv_trait.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/packet_observer.h"
+#include "net/dcsctp/rx/data_tracker.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/callback_deferrer.h"
+#include "net/dcsctp/socket/capabilities.h"
+#include "net/dcsctp/socket/heartbeat_handler.h"
+#include "net/dcsctp/socket/state_cookie.h"
+#include "net/dcsctp/socket/stream_reset_handler.h"
+#include "net/dcsctp/socket/transmission_control_block.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/retransmission_queue.h"
+#include "net/dcsctp/tx/send_queue.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/strings/string_format.h"
+
+namespace dcsctp {
+namespace {
+
+// https://tools.ietf.org/html/rfc4960#section-5.1
+constexpr uint32_t kMinVerificationTag = 1;
+constexpr uint32_t kMaxVerificationTag = std::numeric_limits<uint32_t>::max();
+
+// https://tools.ietf.org/html/rfc4960#section-3.3.2
+constexpr uint32_t kMinInitialTsn = 0;
+constexpr uint32_t kMaxInitialTsn = std::numeric_limits<uint32_t>::max();
+
+Capabilities GetCapabilities(const DcSctpOptions& options,
+ const Parameters& parameters) {
+ Capabilities capabilities;
+ absl::optional<SupportedExtensionsParameter> supported_extensions =
+ parameters.get<SupportedExtensionsParameter>();
+
+ if (options.enable_partial_reliability) {
+ capabilities.partial_reliability =
+ parameters.get<ForwardTsnSupportedParameter>().has_value();
+ if (supported_extensions.has_value()) {
+ capabilities.partial_reliability |=
+ supported_extensions->supports(ForwardTsnChunk::kType);
+ }
+ }
+
+ if (options.enable_message_interleaving && supported_extensions.has_value()) {
+ capabilities.message_interleaving =
+ supported_extensions->supports(IDataChunk::kType) &&
+ supported_extensions->supports(IForwardTsnChunk::kType);
+ }
+ if (supported_extensions.has_value() &&
+ supported_extensions->supports(ReConfigChunk::kType)) {
+ capabilities.reconfig = true;
+ }
+ return capabilities;
+}
+
+void AddCapabilityParameters(const DcSctpOptions& options,
+ Parameters::Builder& builder) {
+ std::vector<uint8_t> chunk_types = {ReConfigChunk::kType};
+
+ if (options.enable_partial_reliability) {
+ builder.Add(ForwardTsnSupportedParameter());
+ chunk_types.push_back(ForwardTsnChunk::kType);
+ }
+ if (options.enable_message_interleaving) {
+ chunk_types.push_back(IDataChunk::kType);
+ chunk_types.push_back(IForwardTsnChunk::kType);
+ }
+ builder.Add(SupportedExtensionsParameter(std::move(chunk_types)));
+}
+
+TieTag MakeTieTag(DcSctpSocketCallbacks& cb) {
+ uint32_t tie_tag_upper =
+ cb.GetRandomInt(0, std::numeric_limits<uint32_t>::max());
+ uint32_t tie_tag_lower =
+ cb.GetRandomInt(1, std::numeric_limits<uint32_t>::max());
+ return TieTag(static_cast<uint64_t>(tie_tag_upper) << 32 |
+ static_cast<uint64_t>(tie_tag_lower));
+}
+
+} // namespace
+
+DcSctpSocket::DcSctpSocket(absl::string_view log_prefix,
+ DcSctpSocketCallbacks& callbacks,
+ std::unique_ptr<PacketObserver> packet_observer,
+ const DcSctpOptions& options)
+ : log_prefix_(std::string(log_prefix) + ": "),
+ packet_observer_(std::move(packet_observer)),
+ options_(options),
+ callbacks_(callbacks),
+ timer_manager_([this]() { return callbacks_.CreateTimeout(); }),
+ t1_init_(timer_manager_.CreateTimer(
+ "t1-init",
+ [this]() { return OnInitTimerExpiry(); },
+ TimerOptions(options.t1_init_timeout,
+ TimerBackoffAlgorithm::kExponential,
+ options.max_init_retransmits))),
+ t1_cookie_(timer_manager_.CreateTimer(
+ "t1-cookie",
+ [this]() { return OnCookieTimerExpiry(); },
+ TimerOptions(options.t1_cookie_timeout,
+ TimerBackoffAlgorithm::kExponential,
+ options.max_init_retransmits))),
+ t2_shutdown_(timer_manager_.CreateTimer(
+ "t2-shutdown",
+ [this]() { return OnShutdownTimerExpiry(); },
+ TimerOptions(options.t2_shutdown_timeout,
+ TimerBackoffAlgorithm::kExponential,
+ options.max_retransmissions))),
+ send_queue_(
+ log_prefix_,
+ options_.max_send_buffer_size,
+ [this](StreamID stream_id) {
+ callbacks_.OnBufferedAmountLow(stream_id);
+ },
+ options_.total_buffered_amount_low_threshold,
+ [this]() { callbacks_.OnTotalBufferedAmountLow(); }) {}
+
+std::string DcSctpSocket::log_prefix() const {
+ return log_prefix_ + "[" + std::string(ToString(state_)) + "] ";
+}
+
+bool DcSctpSocket::IsConsistent() const {
+ switch (state_) {
+ case State::kClosed:
+ return (tcb_ == nullptr && !t1_init_->is_running() &&
+ !t1_cookie_->is_running() && !t2_shutdown_->is_running());
+ case State::kCookieWait:
+ return (tcb_ == nullptr && t1_init_->is_running() &&
+ !t1_cookie_->is_running() && !t2_shutdown_->is_running());
+ case State::kCookieEchoed:
+ return (tcb_ != nullptr && !t1_init_->is_running() &&
+ t1_cookie_->is_running() && !t2_shutdown_->is_running() &&
+ tcb_->has_cookie_echo_chunk());
+ case State::kEstablished:
+ return (tcb_ != nullptr && !t1_init_->is_running() &&
+ !t1_cookie_->is_running() && !t2_shutdown_->is_running());
+ case State::kShutdownPending:
+ return (tcb_ != nullptr && !t1_init_->is_running() &&
+ !t1_cookie_->is_running() && !t2_shutdown_->is_running());
+ case State::kShutdownSent:
+ return (tcb_ != nullptr && !t1_init_->is_running() &&
+ !t1_cookie_->is_running() && t2_shutdown_->is_running());
+ case State::kShutdownReceived:
+ return (tcb_ != nullptr && !t1_init_->is_running() &&
+ !t1_cookie_->is_running() && !t2_shutdown_->is_running());
+ case State::kShutdownAckSent:
+ return (tcb_ != nullptr && !t1_init_->is_running() &&
+ !t1_cookie_->is_running() && t2_shutdown_->is_running());
+ }
+}
+
+constexpr absl::string_view DcSctpSocket::ToString(DcSctpSocket::State state) {
+ switch (state) {
+ case DcSctpSocket::State::kClosed:
+ return "CLOSED";
+ case DcSctpSocket::State::kCookieWait:
+ return "COOKIE_WAIT";
+ case DcSctpSocket::State::kCookieEchoed:
+ return "COOKIE_ECHOED";
+ case DcSctpSocket::State::kEstablished:
+ return "ESTABLISHED";
+ case DcSctpSocket::State::kShutdownPending:
+ return "SHUTDOWN_PENDING";
+ case DcSctpSocket::State::kShutdownSent:
+ return "SHUTDOWN_SENT";
+ case DcSctpSocket::State::kShutdownReceived:
+ return "SHUTDOWN_RECEIVED";
+ case DcSctpSocket::State::kShutdownAckSent:
+ return "SHUTDOWN_ACK_SENT";
+ }
+}
+
+void DcSctpSocket::SetState(State state, absl::string_view reason) {
+ if (state_ != state) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Socket state changed from "
+ << ToString(state_) << " to " << ToString(state)
+ << " due to " << reason;
+ state_ = state;
+ }
+}
+
+void DcSctpSocket::SendInit() {
+ Parameters::Builder params_builder;
+ AddCapabilityParameters(options_, params_builder);
+ InitChunk init(/*initiate_tag=*/connect_params_.verification_tag,
+ /*a_rwnd=*/options_.max_receiver_window_buffer_size,
+ options_.announced_maximum_outgoing_streams,
+ options_.announced_maximum_incoming_streams,
+ connect_params_.initial_tsn, params_builder.Build());
+ SctpPacket::Builder b(VerificationTag(0), options_);
+ b.Add(init);
+ SendPacket(b);
+}
+
+void DcSctpSocket::MakeConnectionParameters() {
+ VerificationTag new_verification_tag(
+ callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag));
+ TSN initial_tsn(callbacks_.GetRandomInt(kMinInitialTsn, kMaxInitialTsn));
+ connect_params_.initial_tsn = initial_tsn;
+ connect_params_.verification_tag = new_verification_tag;
+}
+
+void DcSctpSocket::Connect() {
+ if (state_ == State::kClosed) {
+ MakeConnectionParameters();
+ RTC_DLOG(LS_INFO)
+ << log_prefix()
+ << rtc::StringFormat(
+ "Connecting. my_verification_tag=%08x, my_initial_tsn=%u",
+ *connect_params_.verification_tag, *connect_params_.initial_tsn);
+ SendInit();
+ t1_init_->Start();
+ SetState(State::kCookieWait, "Connect called");
+ } else {
+ RTC_DLOG(LS_WARNING) << log_prefix()
+ << "Called Connect on a socket that is not closed";
+ }
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+}
+
+void DcSctpSocket::Shutdown() {
+ if (tcb_ != nullptr) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "Upon receipt of the SHUTDOWN primitive from its upper layer, the
+ // endpoint enters the SHUTDOWN-PENDING state and remains there until all
+ // outstanding data has been acknowledged by its peer."
+
+ // TODO(webrtc:12739): Remove this check, as it just hides the problem that
+ // the socket can transition from ShutdownSent to ShutdownPending, or
+    // ShutdownAckSent to ShutdownPending, which is illegal.
+ if (state_ != State::kShutdownSent && state_ != State::kShutdownAckSent) {
+ SetState(State::kShutdownPending, "Shutdown called");
+ t1_init_->Stop();
+ t1_cookie_->Stop();
+ MaybeSendShutdownOrAck();
+ }
+ } else {
+    // Connection closed before even starting to connect, or during the
+    // initial connection phase. There is no outstanding data, so the socket
+    // can just be closed (stopping any connection timers), as this is the
+    // client's intention when calling Shutdown.
+ InternalClose(ErrorKind::kNoError, "");
+ }
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+}
+
+void DcSctpSocket::Close() {
+ if (state_ != State::kClosed) {
+ if (tcb_ != nullptr) {
+ SctpPacket::Builder b = tcb_->PacketBuilder();
+ b.Add(AbortChunk(/*filled_in_verification_tag=*/true,
+ Parameters::Builder()
+ .Add(UserInitiatedAbortCause("Close called"))
+ .Build()));
+ SendPacket(b);
+ }
+ InternalClose(ErrorKind::kNoError, "");
+ } else {
+ RTC_DLOG(LS_INFO) << log_prefix() << "Called Close on a closed socket";
+ }
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+}
+
+void DcSctpSocket::CloseConnectionBecauseOfTooManyTransmissionErrors() {
+ SendPacket(tcb_->PacketBuilder().Add(AbortChunk(
+ true, Parameters::Builder()
+ .Add(UserInitiatedAbortCause("Too many retransmissions"))
+ .Build())));
+ InternalClose(ErrorKind::kTooManyRetries, "Too many retransmissions");
+}
+
+void DcSctpSocket::InternalClose(ErrorKind error, absl::string_view message) {
+ if (state_ != State::kClosed) {
+ t1_init_->Stop();
+ t1_cookie_->Stop();
+ t2_shutdown_->Stop();
+ tcb_ = nullptr;
+
+ if (error == ErrorKind::kNoError) {
+ callbacks_.OnClosed();
+ } else {
+ callbacks_.OnAborted(error, message);
+ }
+ SetState(State::kClosed, message);
+ }
+  // The purpose of this method is to abort/close the socket and leave it in a
+  // consistent state, ensuring that e.g. all timers really are stopped.
+ RTC_DCHECK(IsConsistent());
+}
+
+SendStatus DcSctpSocket::Send(DcSctpMessage message,
+ const SendOptions& send_options) {
+ if (message.payload().empty()) {
+ callbacks_.OnError(ErrorKind::kProtocolViolation,
+ "Unable to send empty message");
+ return SendStatus::kErrorMessageEmpty;
+ }
+ if (message.payload().size() > options_.max_message_size) {
+ callbacks_.OnError(ErrorKind::kProtocolViolation,
+ "Unable to send too large message");
+ return SendStatus::kErrorMessageTooLarge;
+ }
+ if (state_ == State::kShutdownPending || state_ == State::kShutdownSent ||
+ state_ == State::kShutdownReceived || state_ == State::kShutdownAckSent) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "An endpoint should reject any new data request from its upper layer
+ // if it is in the SHUTDOWN-PENDING, SHUTDOWN-SENT, SHUTDOWN-RECEIVED, or
+ // SHUTDOWN-ACK-SENT state."
+ callbacks_.OnError(ErrorKind::kWrongSequence,
+ "Unable to send message as the socket is shutting down");
+ return SendStatus::kErrorShuttingDown;
+ }
+ if (send_queue_.IsFull()) {
+ callbacks_.OnError(ErrorKind::kResourceExhaustion,
+ "Unable to send message as the send queue is full");
+ return SendStatus::kErrorResourceExhaustion;
+ }
+
+ TimeMs now = callbacks_.TimeMillis();
+ send_queue_.Add(now, std::move(message), send_options);
+ if (tcb_ != nullptr) {
+ tcb_->SendBufferedPackets(now);
+ }
+
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+ return SendStatus::kSuccess;
+}
+
+ResetStreamsStatus DcSctpSocket::ResetStreams(
+ rtc::ArrayView<const StreamID> outgoing_streams) {
+ if (tcb_ == nullptr) {
+ callbacks_.OnError(ErrorKind::kWrongSequence,
+ "Can't reset streams as the socket is not connected");
+ return ResetStreamsStatus::kNotConnected;
+ }
+ if (!tcb_->capabilities().reconfig) {
+ callbacks_.OnError(ErrorKind::kUnsupportedOperation,
+ "Can't reset streams as the peer doesn't support it");
+ return ResetStreamsStatus::kNotSupported;
+ }
+
+ tcb_->stream_reset_handler().ResetStreams(outgoing_streams);
+ absl::optional<ReConfigChunk> reconfig =
+ tcb_->stream_reset_handler().MakeStreamResetRequest();
+ if (reconfig.has_value()) {
+ SctpPacket::Builder builder = tcb_->PacketBuilder();
+ builder.Add(*reconfig);
+ SendPacket(builder);
+ }
+
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+ return ResetStreamsStatus::kPerformed;
+}
+
+SocketState DcSctpSocket::state() const {
+ switch (state_) {
+ case State::kClosed:
+ return SocketState::kClosed;
+ case State::kCookieWait:
+ ABSL_FALLTHROUGH_INTENDED;
+ case State::kCookieEchoed:
+ return SocketState::kConnecting;
+ case State::kEstablished:
+ return SocketState::kConnected;
+ case State::kShutdownPending:
+ ABSL_FALLTHROUGH_INTENDED;
+ case State::kShutdownSent:
+ ABSL_FALLTHROUGH_INTENDED;
+ case State::kShutdownReceived:
+ ABSL_FALLTHROUGH_INTENDED;
+ case State::kShutdownAckSent:
+ return SocketState::kShuttingDown;
+ }
+}
+
+void DcSctpSocket::SetMaxMessageSize(size_t max_message_size) {
+ options_.max_message_size = max_message_size;
+}
+
+size_t DcSctpSocket::buffered_amount(StreamID stream_id) const {
+ return send_queue_.buffered_amount(stream_id);
+}
+
+size_t DcSctpSocket::buffered_amount_low_threshold(StreamID stream_id) const {
+ return send_queue_.buffered_amount_low_threshold(stream_id);
+}
+
+void DcSctpSocket::SetBufferedAmountLowThreshold(StreamID stream_id,
+ size_t bytes) {
+ send_queue_.SetBufferedAmountLowThreshold(stream_id, bytes);
+}
+
+void DcSctpSocket::MaybeSendShutdownOnPacketReceived(const SctpPacket& packet) {
+ if (state_ == State::kShutdownSent) {
+ bool has_data_chunk =
+ std::find_if(packet.descriptors().begin(), packet.descriptors().end(),
+ [](const SctpPacket::ChunkDescriptor& descriptor) {
+ return descriptor.type == DataChunk::kType;
+ }) != packet.descriptors().end();
+ if (has_data_chunk) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "While in the SHUTDOWN-SENT state, the SHUTDOWN sender MUST immediately
+ // respond to each received packet containing one or more DATA chunks with
+      // a SHUTDOWN chunk and restart the T2-shutdown timer."
+ SendShutdown();
+ t2_shutdown_->set_duration(tcb_->current_rto());
+ t2_shutdown_->Start();
+ }
+ }
+}
+
+bool DcSctpSocket::ValidatePacket(const SctpPacket& packet) {
+ const CommonHeader& header = packet.common_header();
+ VerificationTag my_verification_tag =
+ tcb_ != nullptr ? tcb_->my_verification_tag() : VerificationTag(0);
+
+ if (header.verification_tag == VerificationTag(0)) {
+ if (packet.descriptors().size() == 1 &&
+ packet.descriptors()[0].type == InitChunk::kType) {
+ // https://tools.ietf.org/html/rfc4960#section-8.5.1
+ // "When an endpoint receives an SCTP packet with the Verification Tag
+ // set to 0, it should verify that the packet contains only an INIT chunk.
+      // Otherwise, the receiver MUST silently discard the packet."
+ return true;
+ }
+ callbacks_.OnError(
+ ErrorKind::kParseFailed,
+ "Only a single INIT chunk can be present in packets sent on "
+ "verification_tag = 0");
+ return false;
+ }
+
+ if (packet.descriptors().size() == 1 &&
+ packet.descriptors()[0].type == AbortChunk::kType) {
+ // https://tools.ietf.org/html/rfc4960#section-8.5.1
+ // "The receiver of an ABORT MUST accept the packet if the Verification
+ // Tag field of the packet matches its own tag and the T bit is not set OR
+ // if it is set to its peer's tag and the T bit is set in the Chunk Flags.
+ // Otherwise, the receiver MUST silently discard the packet and take no
+ // further action."
+ bool t_bit = (packet.descriptors()[0].flags & 0x01) != 0;
+ if (t_bit && tcb_ == nullptr) {
+      // Can't verify the tag - assume it's okay.
+ return true;
+ }
+ if ((!t_bit && header.verification_tag == my_verification_tag) ||
+ (t_bit && header.verification_tag == tcb_->peer_verification_tag())) {
+ return true;
+ }
+ callbacks_.OnError(ErrorKind::kParseFailed,
+ "ABORT chunk verification tag was wrong");
+ return false;
+ }
+
+ if (packet.descriptors()[0].type == InitAckChunk::kType) {
+ if (header.verification_tag == connect_params_.verification_tag) {
+ return true;
+ }
+ callbacks_.OnError(
+ ErrorKind::kParseFailed,
+ rtc::StringFormat(
+ "Packet has invalid verification tag: %08x, expected %08x",
+ *header.verification_tag, *connect_params_.verification_tag));
+ return false;
+ }
+
+ if (packet.descriptors()[0].type == CookieEchoChunk::kType) {
+ // Handled in chunk handler (due to RFC 4960, section 5.2.4).
+ return true;
+ }
+
+ if (packet.descriptors().size() == 1 &&
+ packet.descriptors()[0].type == ShutdownCompleteChunk::kType) {
+ // https://tools.ietf.org/html/rfc4960#section-8.5.1
+ // "The receiver of a SHUTDOWN COMPLETE shall accept the packet if the
+ // Verification Tag field of the packet matches its own tag and the T bit is
+ // not set OR if it is set to its peer's tag and the T bit is set in the
+ // Chunk Flags. Otherwise, the receiver MUST silently discard the packet
+ // and take no further action."
+ bool t_bit = (packet.descriptors()[0].flags & 0x01) != 0;
+ if (t_bit && tcb_ == nullptr) {
+      // Can't verify the tag - assume it's okay.
+ return true;
+ }
+ if ((!t_bit && header.verification_tag == my_verification_tag) ||
+ (t_bit && header.verification_tag == tcb_->peer_verification_tag())) {
+ return true;
+ }
+ callbacks_.OnError(ErrorKind::kParseFailed,
+ "SHUTDOWN_COMPLETE chunk verification tag was wrong");
+ return false;
+ }
+
+ // https://tools.ietf.org/html/rfc4960#section-8.5
+ // "When receiving an SCTP packet, the endpoint MUST ensure that the value
+ // in the Verification Tag field of the received SCTP packet matches its own
+ // tag. If the received Verification Tag value does not match the receiver's
+ // own tag value, the receiver shall silently discard the packet and shall not
+ // process it any further..."
+ if (header.verification_tag == my_verification_tag) {
+ return true;
+ }
+
+ callbacks_.OnError(
+ ErrorKind::kParseFailed,
+ rtc::StringFormat(
+ "Packet has invalid verification tag: %08x, expected %08x",
+ *header.verification_tag, *my_verification_tag));
+ return false;
+}
+
+void DcSctpSocket::HandleTimeout(TimeoutID timeout_id) {
+ timer_manager_.HandleTimeout(timeout_id);
+
+ if (tcb_ != nullptr && tcb_->HasTooManyTxErrors()) {
+ // Tearing down the TCB has to be done outside the handlers.
+ CloseConnectionBecauseOfTooManyTransmissionErrors();
+ }
+
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+}
+
+void DcSctpSocket::ReceivePacket(rtc::ArrayView<const uint8_t> data) {
+ if (packet_observer_ != nullptr) {
+ packet_observer_->OnReceivedPacket(callbacks_.TimeMillis(), data);
+ }
+
+ absl::optional<SctpPacket> packet =
+ SctpPacket::Parse(data, options_.disable_checksum_verification);
+ if (!packet.has_value()) {
+ // https://tools.ietf.org/html/rfc4960#section-6.8
+ // "The default procedure for handling invalid SCTP packets is to
+ // silently discard them."
+ callbacks_.OnError(ErrorKind::kParseFailed,
+ "Failed to parse received SCTP packet");
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+ return;
+ }
+
+ if (RTC_DLOG_IS_ON) {
+ for (const auto& descriptor : packet->descriptors()) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received "
+ << DebugConvertChunkToString(descriptor.data);
+ }
+ }
+
+ if (!ValidatePacket(*packet)) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Packet failed verification tag check - dropping";
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+ return;
+ }
+
+ MaybeSendShutdownOnPacketReceived(*packet);
+
+ for (const auto& descriptor : packet->descriptors()) {
+ if (!Dispatch(packet->common_header(), descriptor)) {
+ break;
+ }
+ }
+
+ if (tcb_ != nullptr) {
+ tcb_->data_tracker().ObservePacketEnd();
+ tcb_->MaybeSendSack();
+ }
+
+ RTC_DCHECK(IsConsistent());
+ callbacks_.TriggerDeferred();
+}
+
+void DcSctpSocket::DebugPrintOutgoing(rtc::ArrayView<const uint8_t> payload) {
+ auto packet = SctpPacket::Parse(payload);
+ RTC_DCHECK(packet.has_value());
+
+ for (const auto& desc : packet->descriptors()) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Sent "
+ << DebugConvertChunkToString(desc.data);
+ }
+}
+
+bool DcSctpSocket::Dispatch(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ switch (descriptor.type) {
+ case DataChunk::kType:
+ HandleData(header, descriptor);
+ break;
+ case InitChunk::kType:
+ HandleInit(header, descriptor);
+ break;
+ case InitAckChunk::kType:
+ HandleInitAck(header, descriptor);
+ break;
+ case SackChunk::kType:
+ HandleSack(header, descriptor);
+ break;
+ case HeartbeatRequestChunk::kType:
+ HandleHeartbeatRequest(header, descriptor);
+ break;
+ case HeartbeatAckChunk::kType:
+ HandleHeartbeatAck(header, descriptor);
+ break;
+ case AbortChunk::kType:
+ HandleAbort(header, descriptor);
+ break;
+ case ErrorChunk::kType:
+ HandleError(header, descriptor);
+ break;
+ case CookieEchoChunk::kType:
+ HandleCookieEcho(header, descriptor);
+ break;
+ case CookieAckChunk::kType:
+ HandleCookieAck(header, descriptor);
+ break;
+ case ShutdownChunk::kType:
+ HandleShutdown(header, descriptor);
+ break;
+ case ShutdownAckChunk::kType:
+ HandleShutdownAck(header, descriptor);
+ break;
+ case ShutdownCompleteChunk::kType:
+ HandleShutdownComplete(header, descriptor);
+ break;
+ case ReConfigChunk::kType:
+ HandleReconfig(header, descriptor);
+ break;
+ case ForwardTsnChunk::kType:
+ HandleForwardTsn(header, descriptor);
+ break;
+ case IDataChunk::kType:
+ HandleIData(header, descriptor);
+ break;
+ case IForwardTsnChunk::kType:
+      HandleIForwardTsn(header, descriptor);
+ break;
+ default:
+ return HandleUnrecognizedChunk(descriptor);
+ }
+ return true;
+}
+
+bool DcSctpSocket::HandleUnrecognizedChunk(
+ const SctpPacket::ChunkDescriptor& descriptor) {
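+  // https://tools.ietf.org/html/rfc4960#section-3.2
+  // The two highest bits of an unrecognized chunk type dictate its handling:
+  // if 0x40 is set, the chunk is reported in an ERROR chunk; if 0x80 is set,
+  // the remaining chunks in the packet are still processed.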
+ bool report_as_error = (descriptor.type & 0x40) != 0;
+ bool continue_processing = (descriptor.type & 0x80) != 0;
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received unknown chunk: "
+ << static_cast<int>(descriptor.type);
+ if (report_as_error) {
+ rtc::StringBuilder sb;
+ sb << "Received unknown chunk of type: "
+ << static_cast<int>(descriptor.type) << " with report-error bit set";
+ callbacks_.OnError(ErrorKind::kParseFailed, sb.str());
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix()
+ << "Unknown chunk, with type indicating it should be reported.";
+
+ // https://tools.ietf.org/html/rfc4960#section-3.2
+ // "... report in an ERROR chunk using the 'Unrecognized Chunk Type'
+ // cause."
+ if (tcb_ != nullptr) {
+ // Need TCB - this chunk must be sent with a correct verification tag.
+ SendPacket(tcb_->PacketBuilder().Add(
+ ErrorChunk(Parameters::Builder()
+ .Add(UnrecognizedChunkTypeCause(std::vector<uint8_t>(
+ descriptor.data.begin(), descriptor.data.end())))
+ .Build())));
+ }
+ }
+ if (!continue_processing) {
+ // https://tools.ietf.org/html/rfc4960#section-3.2
+ // "Stop processing this SCTP packet and discard it, do not process any
+ // further chunks within it."
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Unknown chunk, with type indicating not to "
+ "process any further chunks";
+ }
+
+ return continue_processing;
+}
+
+absl::optional<DurationMs> DcSctpSocket::OnInitTimerExpiry() {
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t1_init_->name()
+ << " has expired: " << t1_init_->expiration_count()
+ << "/" << t1_init_->options().max_restarts;
+ RTC_DCHECK(state_ == State::kCookieWait);
+
+ if (t1_init_->is_running()) {
+ SendInit();
+ } else {
+ InternalClose(ErrorKind::kTooManyRetries, "No INIT_ACK received");
+ }
+ RTC_DCHECK(IsConsistent());
+ return absl::nullopt;
+}
+
+absl::optional<DurationMs> DcSctpSocket::OnCookieTimerExpiry() {
+ // https://tools.ietf.org/html/rfc4960#section-4
+ // "If the T1-cookie timer expires, the endpoint MUST retransmit COOKIE
+ // ECHO and restart the T1-cookie timer without changing state. This MUST
+ // be repeated up to 'Max.Init.Retransmits' times. After that, the endpoint
+ // MUST abort the initialization process and report the error to the SCTP
+ // user."
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t1_cookie_->name()
+ << " has expired: " << t1_cookie_->expiration_count()
+ << "/" << t1_cookie_->options().max_restarts;
+
+ RTC_DCHECK(state_ == State::kCookieEchoed);
+
+ if (t1_cookie_->is_running()) {
+ tcb_->SendBufferedPackets(callbacks_.TimeMillis());
+ } else {
+ InternalClose(ErrorKind::kTooManyRetries, "No COOKIE_ACK received");
+ }
+
+ RTC_DCHECK(IsConsistent());
+ return absl::nullopt;
+}
+
+absl::optional<DurationMs> DcSctpSocket::OnShutdownTimerExpiry() {
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t2_shutdown_->name()
+ << " has expired: " << t2_shutdown_->expiration_count()
+ << "/" << t2_shutdown_->options().max_restarts;
+
+ if (!t2_shutdown_->is_running()) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "An endpoint should limit the number of retransmissions of the SHUTDOWN
+ // chunk to the protocol parameter 'Association.Max.Retrans'. If this
+ // threshold is exceeded, the endpoint should destroy the TCB..."
+
+ SendPacket(tcb_->PacketBuilder().Add(
+ AbortChunk(true, Parameters::Builder()
+ .Add(UserInitiatedAbortCause(
+ "Too many retransmissions of SHUTDOWN"))
+ .Build())));
+
+ InternalClose(ErrorKind::kTooManyRetries, "No SHUTDOWN_ACK received");
+ RTC_DCHECK(IsConsistent());
+ return absl::nullopt;
+ }
+
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "If the timer expires, the endpoint must resend the SHUTDOWN with the
+ // updated last sequential TSN received from its peer."
+ SendShutdown();
+ RTC_DCHECK(IsConsistent());
+ return tcb_->current_rto();
+}
+
+void DcSctpSocket::SendPacket(SctpPacket::Builder& builder) {
+ if (builder.empty()) {
+ return;
+ }
+
+ std::vector<uint8_t> payload = builder.Build();
+
+ if (RTC_DLOG_IS_ON) {
+ DebugPrintOutgoing(payload);
+ }
+
+ // The heartbeat interval timer is restarted for every sent packet, to
+ // fire when the outgoing channel is inactive.
+ if (tcb_ != nullptr) {
+ tcb_->heartbeat_handler().RestartTimer();
+ }
+
+ if (packet_observer_ != nullptr) {
+ packet_observer_->OnSentPacket(callbacks_.TimeMillis(), payload);
+ }
+ callbacks_.SendPacket(payload);
+}
+
+bool DcSctpSocket::ValidateHasTCB() {
+ if (tcb_ != nullptr) {
+ return true;
+ }
+
+ callbacks_.OnError(
+ ErrorKind::kNotConnected,
+ "Received unexpected commands on socket that is not connected");
+ return false;
+}
+
+void DcSctpSocket::ReportFailedToParseChunk(int chunk_type) {
+ rtc::StringBuilder sb;
+ sb << "Failed to parse chunk of type: " << chunk_type;
+ callbacks_.OnError(ErrorKind::kParseFailed, sb.str());
+}
+
+void DcSctpSocket::HandleData(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<DataChunk> chunk = DataChunk::Parse(descriptor.data);
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ HandleDataCommon(*chunk);
+ }
+}
+
+void DcSctpSocket::HandleIData(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<IDataChunk> chunk = IDataChunk::Parse(descriptor.data);
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ HandleDataCommon(*chunk);
+ }
+}
+
+void DcSctpSocket::HandleDataCommon(AnyDataChunk& chunk) {
+ TSN tsn = chunk.tsn();
+ AnyDataChunk::ImmediateAckFlag immediate_ack = chunk.options().immediate_ack;
+ Data data = std::move(chunk).extract();
+
+ if (data.payload.empty()) {
+ // Empty DATA chunks are illegal.
+ SendPacket(tcb_->PacketBuilder().Add(
+ ErrorChunk(Parameters::Builder().Add(NoUserDataCause(tsn)).Build())));
+ callbacks_.OnError(ErrorKind::kProtocolViolation,
+ "Received DATA chunk with no user data");
+ return;
+ }
+
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Handle DATA, queue_size="
+ << tcb_->reassembly_queue().queued_bytes()
+ << ", water_mark="
+ << tcb_->reassembly_queue().watermark_bytes()
+ << ", full=" << tcb_->reassembly_queue().is_full()
+ << ", above="
+ << tcb_->reassembly_queue().is_above_watermark();
+
+ if (tcb_->reassembly_queue().is_full()) {
+ // If the reassembly queue is full, there is nothing that can be done. The
+ // specification only allows dropping gap-ack-blocks, and that's not
+ // likely to help as the socket has been trying to fill gaps since the
+ // watermark was reached.
+ SendPacket(tcb_->PacketBuilder().Add(AbortChunk(
+ true, Parameters::Builder().Add(OutOfResourceErrorCause()).Build())));
+ InternalClose(ErrorKind::kResourceExhaustion,
+ "Reassembly Queue is exhausted");
+ return;
+ }
+
+ if (tcb_->reassembly_queue().is_above_watermark()) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Is above high watermark";
+ // If the reassembly queue is above its high watermark, only accept data
+ // chunks that increase its cumulative ack tsn in an attempt to fill gaps
+ // to deliver messages.
+ if (!tcb_->data_tracker().will_increase_cum_ack_tsn(tsn)) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Rejected data because of exceeding watermark";
+ tcb_->data_tracker().ForceImmediateSack();
+ return;
+ }
+ }
+
+ if (!tcb_->data_tracker().IsTSNValid(tsn)) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Rejected data because of failing TSN validity";
+ return;
+ }
+
+ tcb_->data_tracker().Observe(tsn, immediate_ack);
+ tcb_->reassembly_queue().MaybeResetStreamsDeferred(
+ tcb_->data_tracker().last_cumulative_acked_tsn());
+ tcb_->reassembly_queue().Add(tsn, std::move(data));
+ DeliverReassembledMessages();
+}
+
+void DcSctpSocket::HandleInit(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<InitChunk> chunk = InitChunk::Parse(descriptor.data);
+ if (!ValidateParseSuccess(chunk)) {
+ return;
+ }
+
+ if (chunk->initiate_tag() == VerificationTag(0) ||
+ chunk->nbr_outbound_streams() == 0 || chunk->nbr_inbound_streams() == 0) {
+ // https://tools.ietf.org/html/rfc4960#section-3.3.2
+ // "If the value of the Initiate Tag in a received INIT chunk is found
+ // to be 0, the receiver MUST treat it as an error and close the
+ // association by transmitting an ABORT."
+
+ // "A receiver of an INIT with the OS value set to 0 SHOULD abort the
+ // association."
+
+ // "A receiver of an INIT with the MIS value of 0 SHOULD abort the
+ // association."
+
+ SendPacket(SctpPacket::Builder(VerificationTag(0), options_)
+ .Add(AbortChunk(
+ /*filled_in_verification_tag=*/false,
+ Parameters::Builder()
+ .Add(ProtocolViolationCause("INIT malformed"))
+ .Build())));
+ InternalClose(ErrorKind::kProtocolViolation, "Received invalid INIT");
+ return;
+ }
+
+ if (state_ == State::kShutdownAckSent) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "If an endpoint is in the SHUTDOWN-ACK-SENT state and receives an
+ // INIT chunk (e.g., if the SHUTDOWN COMPLETE was lost) with source and
+ // destination transport addresses (either in the IP addresses or in the
+ // INIT chunk) that belong to this association, it should discard the INIT
+ // chunk and retransmit the SHUTDOWN ACK chunk."
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received Init indicating lost ShutdownComplete";
+ SendShutdownAck();
+ return;
+ }
+
+ TieTag tie_tag(0);
+ if (state_ == State::kClosed) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received Init in closed state (normal)";
+
+ MakeConnectionParameters();
+ } else if (state_ == State::kCookieWait || state_ == State::kCookieEchoed) {
+ // https://tools.ietf.org/html/rfc4960#section-5.2.1
+ // "This usually indicates an initialization collision, i.e., each
+ // endpoint is attempting, at about the same time, to establish an
+ // association with the other endpoint. Upon receipt of an INIT in the
+ // COOKIE-WAIT state, an endpoint MUST respond with an INIT ACK using the
+ // same parameters it sent in its original INIT chunk (including its
+ // Initiate Tag, unchanged). When responding, the endpoint MUST send the
+ // INIT ACK back to the same address that the original INIT (sent by this
+ // endpoint) was sent."
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received Init indicating simultaneous connections";
+ } else {
+ RTC_DCHECK(tcb_ != nullptr);
+ // https://tools.ietf.org/html/rfc4960#section-5.2.2
+ // "The outbound SCTP packet containing this INIT ACK MUST carry a
+ // Verification Tag value equal to the Initiate Tag found in the
+ // unexpected INIT. And the INIT ACK MUST contain a new Initiate Tag
+ // (randomly generated; see Section 5.3.1). Other parameters for the
+ // endpoint SHOULD be copied from the existing parameters of the
+ // association (e.g., number of outbound streams) into the INIT ACK and
+ // cookie."
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received Init indicating restarted connection";
+ // Create a new verification tag - different from the previous one.
+ for (int tries = 0; tries < 10; ++tries) {
+ connect_params_.verification_tag = VerificationTag(
+ callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag));
+ if (connect_params_.verification_tag != tcb_->my_verification_tag()) {
+ break;
+ }
+ }
+
+    // Make the initial TSN take a large jump, so that there is no overlap
+    // between the old and the new association.
+ connect_params_.initial_tsn =
+ TSN(*tcb_->retransmission_queue().next_tsn() + 1000000);
+ tie_tag = tcb_->tie_tag();
+ }
+
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix()
+ << rtc::StringFormat(
+ "Proceeding with connection. my_verification_tag=%08x, "
+ "my_initial_tsn=%u, peer_verification_tag=%08x, "
+ "peer_initial_tsn=%u",
+ *connect_params_.verification_tag, *connect_params_.initial_tsn,
+ *chunk->initiate_tag(), *chunk->initial_tsn());
+
+ Capabilities capabilities = GetCapabilities(options_, chunk->parameters());
+
+ SctpPacket::Builder b(chunk->initiate_tag(), options_);
+ Parameters::Builder params_builder =
+ Parameters::Builder().Add(StateCookieParameter(
+ StateCookie(chunk->initiate_tag(), chunk->initial_tsn(),
+ chunk->a_rwnd(), tie_tag, capabilities)
+ .Serialize()));
+ AddCapabilityParameters(options_, params_builder);
+
+ InitAckChunk init_ack(/*initiate_tag=*/connect_params_.verification_tag,
+ options_.max_receiver_window_buffer_size,
+ options_.announced_maximum_outgoing_streams,
+ options_.announced_maximum_incoming_streams,
+ connect_params_.initial_tsn, params_builder.Build());
+ b.Add(init_ack);
+ SendPacket(b);
+}
+
+void DcSctpSocket::HandleInitAck(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<InitAckChunk> chunk = InitAckChunk::Parse(descriptor.data);
+ if (!ValidateParseSuccess(chunk)) {
+ return;
+ }
+
+ if (state_ != State::kCookieWait) {
+ // https://tools.ietf.org/html/rfc4960#section-5.2.3
+ // "If an INIT ACK is received by an endpoint in any state other than
+ // the COOKIE-WAIT state, the endpoint should discard the INIT ACK chunk."
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received INIT_ACK in unexpected state";
+ return;
+ }
+
+ auto cookie = chunk->parameters().get<StateCookieParameter>();
+ if (!cookie.has_value()) {
+ SendPacket(SctpPacket::Builder(connect_params_.verification_tag, options_)
+ .Add(AbortChunk(
+ /*filled_in_verification_tag=*/false,
+ Parameters::Builder()
+ .Add(ProtocolViolationCause("INIT-ACK malformed"))
+ .Build())));
+ InternalClose(ErrorKind::kProtocolViolation,
+ "InitAck chunk doesn't contain a cookie");
+ return;
+ }
+ Capabilities capabilities = GetCapabilities(options_, chunk->parameters());
+ t1_init_->Stop();
+
+ tcb_ = std::make_unique<TransmissionControlBlock>(
+ timer_manager_, log_prefix_, options_, capabilities, callbacks_,
+ send_queue_, connect_params_.verification_tag,
+ connect_params_.initial_tsn, chunk->initiate_tag(), chunk->initial_tsn(),
+ chunk->a_rwnd(), MakeTieTag(callbacks_),
+ [this]() { return state_ == State::kEstablished; },
+ [this](SctpPacket::Builder& builder) { return SendPacket(builder); });
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Created peer TCB: " << tcb_->ToString();
+
+ SetState(State::kCookieEchoed, "INIT_ACK received");
+
+ // The connection isn't fully established just yet.
+ tcb_->SetCookieEchoChunk(CookieEchoChunk(cookie->data()));
+ tcb_->SendBufferedPackets(callbacks_.TimeMillis());
+ t1_cookie_->Start();
+}
+
+void DcSctpSocket::HandleCookieEcho(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<CookieEchoChunk> chunk =
+ CookieEchoChunk::Parse(descriptor.data);
+ if (!ValidateParseSuccess(chunk)) {
+ return;
+ }
+
+ absl::optional<StateCookie> cookie =
+ StateCookie::Deserialize(chunk->cookie());
+ if (!cookie.has_value()) {
+ callbacks_.OnError(ErrorKind::kParseFailed, "Failed to parse state cookie");
+ return;
+ }
+
+ if (tcb_ != nullptr) {
+ if (!HandleCookieEchoWithTCB(header, *cookie)) {
+ return;
+ }
+ } else {
+ if (header.verification_tag != connect_params_.verification_tag) {
+ callbacks_.OnError(
+ ErrorKind::kParseFailed,
+ rtc::StringFormat(
+ "Received CookieEcho with invalid verification tag: %08x, "
+ "expected %08x",
+ *header.verification_tag, *connect_params_.verification_tag));
+ return;
+ }
+ }
+
+ // The init timer can be running on simultaneous connections.
+ t1_init_->Stop();
+ t1_cookie_->Stop();
+ if (state_ != State::kEstablished) {
+ if (tcb_ != nullptr) {
+ tcb_->ClearCookieEchoChunk();
+ }
+ SetState(State::kEstablished, "COOKIE_ECHO received");
+ callbacks_.OnConnected();
+ }
+
+ if (tcb_ == nullptr) {
+ tcb_ = std::make_unique<TransmissionControlBlock>(
+ timer_manager_, log_prefix_, options_, cookie->capabilities(),
+ callbacks_, send_queue_, connect_params_.verification_tag,
+ connect_params_.initial_tsn, cookie->initiate_tag(),
+ cookie->initial_tsn(), cookie->a_rwnd(), MakeTieTag(callbacks_),
+ [this]() { return state_ == State::kEstablished; },
+ [this](SctpPacket::Builder& builder) { return SendPacket(builder); });
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Created peer TCB: " << tcb_->ToString();
+ }
+
+ SctpPacket::Builder b = tcb_->PacketBuilder();
+ b.Add(CookieAckChunk());
+
+ // https://tools.ietf.org/html/rfc4960#section-5.1
+ // "A COOKIE ACK chunk may be bundled with any pending DATA chunks (and/or
+ // SACK chunks), but the COOKIE ACK chunk MUST be the first chunk in the
+ // packet."
+ tcb_->SendBufferedPackets(b, callbacks_.TimeMillis());
+}
+
+bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header,
+ const StateCookie& cookie) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Handling CookieEchoChunk with TCB. local_tag="
+ << *tcb_->my_verification_tag()
+ << ", peer_tag=" << *header.verification_tag
+ << ", tcb_tag=" << *tcb_->peer_verification_tag()
+ << ", cookie_tag=" << *cookie.initiate_tag()
+ << ", local_tie_tag=" << *tcb_->tie_tag()
+ << ", peer_tie_tag=" << *cookie.tie_tag();
+ // https://tools.ietf.org/html/rfc4960#section-5.2.4
+ // "Handle a COOKIE ECHO when a TCB Exists"
+ if (header.verification_tag != tcb_->my_verification_tag() &&
+ tcb_->peer_verification_tag() != cookie.initiate_tag() &&
+ cookie.tie_tag() == tcb_->tie_tag()) {
+ // "A) In this case, the peer may have restarted."
+ if (state_ == State::kShutdownAckSent) {
+ // "If the endpoint is in the SHUTDOWN-ACK-SENT state and recognizes
+ // that the peer has restarted ... it MUST NOT set up a new association
+ // but instead resend the SHUTDOWN ACK and send an ERROR chunk with a
+ // "Cookie Received While Shutting Down" error cause to its peer."
+ SctpPacket::Builder b(cookie.initiate_tag(), options_);
+ b.Add(ShutdownAckChunk());
+ b.Add(ErrorChunk(Parameters::Builder()
+ .Add(CookieReceivedWhileShuttingDownCause())
+ .Build()));
+ SendPacket(b);
+ callbacks_.OnError(ErrorKind::kWrongSequence,
+ "Received COOKIE-ECHO while shutting down");
+ return false;
+ }
+
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received COOKIE-ECHO indicating a restarted peer";
+
+ // If a message was partly sent, and the peer restarted, resend it in
+ // full by resetting the send queue.
+ send_queue_.Reset();
+ tcb_ = nullptr;
+ callbacks_.OnConnectionRestarted();
+ } else if (header.verification_tag == tcb_->my_verification_tag() &&
+ tcb_->peer_verification_tag() != cookie.initiate_tag()) {
+ // TODO(boivie): Handle the peer_tag == 0?
+ // "B) In this case, both sides may be attempting to start an
+ // association at about the same time, but the peer endpoint started its
+ // INIT after responding to the local endpoint's INIT."
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix()
+ << "Received COOKIE-ECHO indicating simultaneous connections";
+ tcb_ = nullptr;
+ } else if (header.verification_tag != tcb_->my_verification_tag() &&
+ tcb_->peer_verification_tag() == cookie.initiate_tag() &&
+ cookie.tie_tag() == TieTag(0)) {
+ // "C) In this case, the local endpoint's cookie has arrived late.
+ // Before it arrived, the local endpoint sent an INIT and received an
+ // INIT ACK and finally sent a COOKIE ECHO with the peer's same tag but
+ // a new tag of its own. The cookie should be silently discarded. The
+ // endpoint SHOULD NOT change states and should leave any timers
+ // running."
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix()
+ << "Received COOKIE-ECHO indicating a late COOKIE-ECHO. Discarding";
+ return false;
+ } else if (header.verification_tag == tcb_->my_verification_tag() &&
+ tcb_->peer_verification_tag() == cookie.initiate_tag()) {
+ // "D) When both local and remote tags match, the endpoint should enter
+ // the ESTABLISHED state, if it is in the COOKIE-ECHOED state. It
+ // should stop any cookie timer that may be running and send a COOKIE
+ // ACK."
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix()
+ << "Received duplicate COOKIE-ECHO, probably because of peer not "
+ "receiving COOKIE-ACK and retransmitting COOKIE-ECHO. Continuing.";
+ }
+ return true;
+}
+
+void DcSctpSocket::HandleCookieAck(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<CookieAckChunk> chunk = CookieAckChunk::Parse(descriptor.data);
+ if (!ValidateParseSuccess(chunk)) {
+ return;
+ }
+
+ if (state_ != State::kCookieEchoed) {
+ // https://tools.ietf.org/html/rfc4960#section-5.2.5
+ // "At any state other than COOKIE-ECHOED, an endpoint should silently
+ // discard a received COOKIE ACK chunk."
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received COOKIE_ACK not in COOKIE_ECHOED state";
+ return;
+ }
+
+ // RFC 4960, Errata ID: 4400
+ t1_cookie_->Stop();
+ tcb_->ClearCookieEchoChunk();
+ SetState(State::kEstablished, "COOKIE_ACK received");
+ tcb_->SendBufferedPackets(callbacks_.TimeMillis());
+ callbacks_.OnConnected();
+}
+
+void DcSctpSocket::DeliverReassembledMessages() {
+ if (tcb_->reassembly_queue().HasMessages()) {
+ for (auto& message : tcb_->reassembly_queue().FlushMessages()) {
+ callbacks_.OnMessageReceived(std::move(message));
+ }
+ }
+}
+
+void DcSctpSocket::HandleSack(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<SackChunk> chunk = SackChunk::Parse(descriptor.data);
+
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ TimeMs now = callbacks_.TimeMillis();
+ SackChunk sack = ChunkValidators::Clean(*std::move(chunk));
+
+ if (tcb_->retransmission_queue().HandleSack(now, sack)) {
+ MaybeSendShutdownOrAck();
+ // Receiving an ACK will decrease outstanding bytes (maybe now below
+ // cwnd?) or indicate packet loss that may result in sending FORWARD-TSN.
+ tcb_->SendBufferedPackets(now);
+ } else {
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Dropping out-of-order SACK with TSN "
+ << *sack.cumulative_tsn_ack();
+ }
+ }
+}
+
+void DcSctpSocket::HandleHeartbeatRequest(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<HeartbeatRequestChunk> chunk =
+ HeartbeatRequestChunk::Parse(descriptor.data);
+
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ tcb_->heartbeat_handler().HandleHeartbeatRequest(*std::move(chunk));
+ }
+}
+
+void DcSctpSocket::HandleHeartbeatAck(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<HeartbeatAckChunk> chunk =
+ HeartbeatAckChunk::Parse(descriptor.data);
+
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ tcb_->heartbeat_handler().HandleHeartbeatAck(*std::move(chunk));
+ }
+}
+
+void DcSctpSocket::HandleAbort(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<AbortChunk> chunk = AbortChunk::Parse(descriptor.data);
+ if (ValidateParseSuccess(chunk)) {
+ std::string error_string = ErrorCausesToString(chunk->error_causes());
+ if (tcb_ == nullptr) {
+ // https://tools.ietf.org/html/rfc4960#section-3.3.7
+ // "If an endpoint receives an ABORT with a format error or no TCB is
+ // found, it MUST silently discard it."
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received ABORT (" << error_string
+ << ") on a connection with no TCB. Ignoring";
+ return;
+ }
+
+ RTC_DLOG(LS_WARNING) << log_prefix() << "Received ABORT (" << error_string
+ << ") - closing connection.";
+ InternalClose(ErrorKind::kPeerReported, error_string);
+ }
+}
+
+void DcSctpSocket::HandleError(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<ErrorChunk> chunk = ErrorChunk::Parse(descriptor.data);
+ if (ValidateParseSuccess(chunk)) {
+ std::string error_string = ErrorCausesToString(chunk->error_causes());
+ if (tcb_ == nullptr) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received ERROR (" << error_string
+ << ") on a connection with no TCB. Ignoring";
+ return;
+ }
+
+ RTC_DLOG(LS_WARNING) << log_prefix() << "Received ERROR: " << error_string;
+ callbacks_.OnError(ErrorKind::kPeerReported,
+ "Peer reported error: " + error_string);
+ }
+}
+
+void DcSctpSocket::HandleReconfig(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<ReConfigChunk> chunk = ReConfigChunk::Parse(descriptor.data);
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ tcb_->stream_reset_handler().HandleReConfig(*std::move(chunk));
+ }
+}
+
+void DcSctpSocket::HandleShutdown(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ if (!ValidateParseSuccess(ShutdownChunk::Parse(descriptor.data))) {
+ return;
+ }
+
+ if (state_ == State::kClosed) {
+ return;
+ } else if (state_ == State::kCookieWait || state_ == State::kCookieEchoed) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "If a SHUTDOWN is received in the COOKIE-WAIT or COOKIE ECHOED state,
+ // the SHUTDOWN chunk SHOULD be silently discarded."
+ } else if (state_ == State::kShutdownSent) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "If an endpoint is in the SHUTDOWN-SENT state and receives a
+ // SHUTDOWN chunk from its peer, the endpoint shall respond immediately
+ // with a SHUTDOWN ACK to its peer, and move into the SHUTDOWN-ACK-SENT
+ // state restarting its T2-shutdown timer."
+ SendShutdownAck();
+ SetState(State::kShutdownAckSent, "SHUTDOWN received");
+ } else if (state_ == State::kShutdownAckSent) {
+ // TODO(webrtc:12739): This condition should be removed and handled by the
+ // next (state_ != State::kShutdownReceived).
+ return;
+ } else if (state_ != State::kShutdownReceived) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix()
+ << "Received SHUTDOWN - shutting down the socket";
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "Upon reception of the SHUTDOWN, the peer endpoint shall enter the
+ // SHUTDOWN-RECEIVED state, stop accepting new data from its SCTP user,
+ // and verify, by checking the Cumulative TSN Ack field of the chunk, that
+ // all its outstanding DATA chunks have been received by the SHUTDOWN
+ // sender."
+ SetState(State::kShutdownReceived, "SHUTDOWN received");
+ MaybeSendShutdownOrAck();
+ }
+}
+
+void DcSctpSocket::HandleShutdownAck(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ if (!ValidateParseSuccess(ShutdownAckChunk::Parse(descriptor.data))) {
+ return;
+ }
+
+ if (state_ == State::kShutdownSent || state_ == State::kShutdownAckSent) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "Upon the receipt of the SHUTDOWN ACK, the SHUTDOWN sender shall stop
+ // the T2-shutdown timer, send a SHUTDOWN COMPLETE chunk to its peer, and
+ // remove all record of the association."
+
+ // "If an endpoint is in the SHUTDOWN-ACK-SENT state and receives a
+ // SHUTDOWN ACK, it shall stop the T2-shutdown timer, send a SHUTDOWN
+ // COMPLETE chunk to its peer, and remove all record of the association."
+
+ SctpPacket::Builder b = tcb_->PacketBuilder();
+ b.Add(ShutdownCompleteChunk(/*tag_reflected=*/false));
+ SendPacket(b);
+ InternalClose(ErrorKind::kNoError, "");
+ } else {
+ // https://tools.ietf.org/html/rfc4960#section-8.5.1
+ // "If the receiver is in COOKIE-ECHOED or COOKIE-WAIT state
+ // the procedures in Section 8.4 SHOULD be followed; in other words, it
+ // should be treated as an Out Of The Blue packet."
+
+ // https://tools.ietf.org/html/rfc4960#section-8.4
+ // "If the packet contains a SHUTDOWN ACK chunk, the receiver
+ // should respond to the sender of the OOTB packet with a SHUTDOWN
+ // COMPLETE. When sending the SHUTDOWN COMPLETE, the receiver of the OOTB
+ // packet must fill in the Verification Tag field of the outbound packet
+ // with the Verification Tag received in the SHUTDOWN ACK and set the T
+ // bit in the Chunk Flags to indicate that the Verification Tag is
+ // reflected."
+
+ SctpPacket::Builder b(header.verification_tag, options_);
+ b.Add(ShutdownCompleteChunk(/*tag_reflected=*/true));
+ SendPacket(b);
+ }
+}
+
+void DcSctpSocket::HandleShutdownComplete(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ if (!ValidateParseSuccess(ShutdownCompleteChunk::Parse(descriptor.data))) {
+ return;
+ }
+
+ if (state_ == State::kShutdownAckSent) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "Upon reception of the SHUTDOWN COMPLETE chunk, the endpoint will
+ // verify that it is in the SHUTDOWN-ACK-SENT state; if it is not, the
+ // chunk should be discarded. If the endpoint is in the SHUTDOWN-ACK-SENT
+ // state, the endpoint should stop the T2-shutdown timer and remove all
+ // knowledge of the association (and thus the association enters the
+ // CLOSED state)."
+ InternalClose(ErrorKind::kNoError, "");
+ }
+}
+
+void DcSctpSocket::HandleForwardTsn(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<ForwardTsnChunk> chunk =
+ ForwardTsnChunk::Parse(descriptor.data);
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ HandleForwardTsnCommon(*chunk);
+ }
+}
+
+void DcSctpSocket::HandleIForwardTsn(
+ const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor) {
+ absl::optional<IForwardTsnChunk> chunk =
+ IForwardTsnChunk::Parse(descriptor.data);
+ if (ValidateParseSuccess(chunk) && ValidateHasTCB()) {
+ HandleForwardTsnCommon(*chunk);
+ }
+}
+
+void DcSctpSocket::HandleForwardTsnCommon(const AnyForwardTsnChunk& chunk) {
+ if (!tcb_->capabilities().partial_reliability) {
+ SctpPacket::Builder b = tcb_->PacketBuilder();
+ b.Add(AbortChunk(/*filled_in_verification_tag=*/true,
+ Parameters::Builder()
+ .Add(ProtocolViolationCause(
+ "I-FORWARD-TSN received, but not indicated "
+ "during connection establishment"))
+ .Build()));
+ SendPacket(b);
+
+ callbacks_.OnError(ErrorKind::kProtocolViolation,
+ "Received a FORWARD_TSN without announced peer support");
+ return;
+ }
+ tcb_->data_tracker().HandleForwardTsn(chunk.new_cumulative_tsn());
+ tcb_->reassembly_queue().Handle(chunk);
+ // A forward TSN - for ordered streams - may allow messages to be
+ // delivered.
+ DeliverReassembledMessages();
+
+ // Processing a FORWARD_TSN might result in sending a SACK.
+ tcb_->MaybeSendSack();
+}
+
+void DcSctpSocket::MaybeSendShutdownOrAck() {
+ if (tcb_->retransmission_queue().outstanding_bytes() != 0) {
+ return;
+ }
+
+ if (state_ == State::kShutdownPending) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "Once all its outstanding data has been acknowledged, the endpoint
+ // shall send a SHUTDOWN chunk to its peer including in the Cumulative TSN
+ // Ack field the last sequential TSN it has received from the peer. It
+ // shall then start the T2-shutdown timer and enter the SHUTDOWN-SENT
+    // state."
+
+ SendShutdown();
+ t2_shutdown_->set_duration(tcb_->current_rto());
+ t2_shutdown_->Start();
+ SetState(State::kShutdownSent, "No more outstanding data");
+ } else if (state_ == State::kShutdownReceived) {
+ // https://tools.ietf.org/html/rfc4960#section-9.2
+ // "If the receiver of the SHUTDOWN has no more outstanding DATA
+ // chunks, the SHUTDOWN receiver MUST send a SHUTDOWN ACK and start a
+ // T2-shutdown timer of its own, entering the SHUTDOWN-ACK-SENT state. If
+ // the timer expires, the endpoint must resend the SHUTDOWN ACK."
+
+ SendShutdownAck();
+ SetState(State::kShutdownAckSent, "No more outstanding data");
+ }
+}
+
+void DcSctpSocket::SendShutdown() {
+ SctpPacket::Builder b = tcb_->PacketBuilder();
+ b.Add(ShutdownChunk(tcb_->data_tracker().last_cumulative_acked_tsn()));
+ SendPacket(b);
+}
+
+void DcSctpSocket::SendShutdownAck() {
+ SendPacket(tcb_->PacketBuilder().Add(ShutdownAckChunk()));
+ t2_shutdown_->set_duration(tcb_->current_rto());
+ t2_shutdown_->Start();
+}
+
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/dcsctp_socket.h b/net/dcsctp/socket/dcsctp_socket.h
new file mode 100644
index 0000000000..32e89b50d1
--- /dev/null
+++ b/net/dcsctp/socket/dcsctp_socket.h
@@ -0,0 +1,272 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_DCSCTP_SOCKET_H_
+#define NET_DCSCTP_SOCKET_DCSCTP_SOCKET_H_
+
+#include <cstdint>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "net/dcsctp/packet/chunk/abort_chunk.h"
+#include "net/dcsctp/packet/chunk/chunk.h"
+#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h"
+#include "net/dcsctp/packet/chunk/data_chunk.h"
+#include "net/dcsctp/packet/chunk/data_common.h"
+#include "net/dcsctp/packet/chunk/error_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_common.h"
+#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h"
+#include "net/dcsctp/packet/chunk/idata_chunk.h"
+#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/init_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/init_chunk.h"
+#include "net/dcsctp/packet/chunk/reconfig_chunk.h"
+#include "net/dcsctp/packet/chunk/sack_chunk.h"
+#include "net/dcsctp/packet/chunk/shutdown_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/shutdown_chunk.h"
+#include "net/dcsctp/packet/chunk/shutdown_complete_chunk.h"
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/packet_observer.h"
+#include "net/dcsctp/rx/data_tracker.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/callback_deferrer.h"
+#include "net/dcsctp/socket/state_cookie.h"
+#include "net/dcsctp/socket/transmission_control_block.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/retransmission_error_counter.h"
+#include "net/dcsctp/tx/retransmission_queue.h"
+#include "net/dcsctp/tx/retransmission_timeout.h"
+#include "net/dcsctp/tx/rr_send_queue.h"
+
+namespace dcsctp {
+
+// DcSctpSocket represents a single SCTP socket, to be used over DTLS.
+//
+// Every dcSCTP socket is completely isolated from any other socket.
+//
+// This class manages all packet and chunk dispatching and mainly handles the
+// connection sequences (connect, close, shutdown, etc) as well as managing
+// the Transmission Control Block (tcb).
+//
+// This class is thread-compatible.
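+//
+// Illustrative usage sketch (not part of the API): `MyCallbacks` stands in
+// for an application-provided implementation of DcSctpSocketCallbacks, and
+// `packet_data`, `timeout_id` and `payload` are placeholders. As the class is
+// only thread-compatible, all calls must be serialized by the caller.
+//
+//   MyCallbacks callbacks;
+//   DcSctpSocket socket("socket", callbacks, /*packet_observer=*/nullptr,
+//                       DcSctpOptions());
+//   socket.Connect();
+//   // Feed packets received from the transport (e.g. DTLS):
+//   socket.ReceivePacket(packet_data);
+//   // Expire timeouts created through the callbacks:
+//   socket.HandleTimeout(timeout_id);
+//   // Once connected, send messages:
+//   socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), SendOptions());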
+class DcSctpSocket : public DcSctpSocketInterface {
+ public:
+ // Instantiates a DcSctpSocket, which interacts with the world through the
+ // `callbacks` interface and is configured using `options`.
+ //
+ // For debugging, `log_prefix` will prefix all debug logs, and a
+ // `packet_observer` can be attached to e.g. dump sent and received packets.
+ DcSctpSocket(absl::string_view log_prefix,
+ DcSctpSocketCallbacks& callbacks,
+ std::unique_ptr<PacketObserver> packet_observer,
+ const DcSctpOptions& options);
+
+ DcSctpSocket(const DcSctpSocket&) = delete;
+ DcSctpSocket& operator=(const DcSctpSocket&) = delete;
+
+ // Implementation of `DcSctpSocketInterface`.
+ void ReceivePacket(rtc::ArrayView<const uint8_t> data) override;
+ void HandleTimeout(TimeoutID timeout_id) override;
+ void Connect() override;
+ void Shutdown() override;
+ void Close() override;
+ SendStatus Send(DcSctpMessage message,
+ const SendOptions& send_options) override;
+ ResetStreamsStatus ResetStreams(
+ rtc::ArrayView<const StreamID> outgoing_streams) override;
+ SocketState state() const override;
+ const DcSctpOptions& options() const override { return options_; }
+ void SetMaxMessageSize(size_t max_message_size) override;
+ size_t buffered_amount(StreamID stream_id) const override;
+ size_t buffered_amount_low_threshold(StreamID stream_id) const override;
+ void SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) override;
+ // Returns this socket's verification tag, or zero if not yet connected.
+ VerificationTag verification_tag() const {
+ return tcb_ != nullptr ? tcb_->my_verification_tag() : VerificationTag(0);
+ }
+
+ private:
+ // Parameter proposals valid during the connect phase.
+ struct ConnectParameters {
+ TSN initial_tsn = TSN(0);
+ VerificationTag verification_tag = VerificationTag(0);
+ };
+
+ // Detailed state (separate from SocketState, which is the public state).
+ enum class State {
+ kClosed,
+ kCookieWait,
+ // TCB valid in these:
+ kCookieEchoed,
+ kEstablished,
+ kShutdownPending,
+ kShutdownSent,
+ kShutdownReceived,
+ kShutdownAckSent,
+ };
+
+ // Returns the log prefix used for debug logging.
+ std::string log_prefix() const;
+
+ bool IsConsistent() const;
+ static constexpr absl::string_view ToString(DcSctpSocket::State state);
+
+ // Changes the socket state, given a `reason` (for debugging/logging).
+ void SetState(State state, absl::string_view reason);
+ // Fills in `connect_params` with random verification tag and initial TSN.
+ void MakeConnectionParameters();
+ // Closes the association. Note that the TCB will not be valid past this call.
+ void InternalClose(ErrorKind error, absl::string_view message);
+ // Closes the association, because of too many retransmission errors.
+ void CloseConnectionBecauseOfTooManyTransmissionErrors();
+ // Timer expiration handlers
+ absl::optional<DurationMs> OnInitTimerExpiry();
+ absl::optional<DurationMs> OnCookieTimerExpiry();
+ absl::optional<DurationMs> OnShutdownTimerExpiry();
+ // Builds the packet from `builder` and sends it (through callbacks).
+ void SendPacket(SctpPacket::Builder& builder);
+ // Sends SHUTDOWN or SHUTDOWN-ACK if the socket is shutting down and if all
+ // outstanding data has been acknowledged.
+ void MaybeSendShutdownOrAck();
+  // If the socket is shutting down, responds to any incoming DATA with a
+  // SHUTDOWN chunk.
+ void MaybeSendShutdownOnPacketReceived(const SctpPacket& packet);
+  // Sends an INIT chunk.
+ void SendInit();
+ // Sends a SHUTDOWN chunk.
+ void SendShutdown();
+ // Sends a SHUTDOWN-ACK chunk.
+ void SendShutdownAck();
+ // Validates the SCTP packet, as a whole - not the validity of individual
+ // chunks within it, as that's done in the different chunk handlers.
+ bool ValidatePacket(const SctpPacket& packet);
+  // Parses `payload`, which is a serialized packet that is about to be sent,
+  // and prints all chunks.
+ void DebugPrintOutgoing(rtc::ArrayView<const uint8_t> payload);
+ // Called whenever there may be reassembled messages, and delivers those.
+ void DeliverReassembledMessages();
+ // Returns true if there is a TCB, and false otherwise (and reports an error).
+ bool ValidateHasTCB();
+
+ // Returns true if the parsing of a chunk of type `T` succeeded. If it didn't,
+ // it reports an error and returns false.
+ template <class T>
+ bool ValidateParseSuccess(const absl::optional<T>& c) {
+ if (c.has_value()) {
+ return true;
+ }
+
+ ReportFailedToParseChunk(T::kType);
+ return false;
+ }
+
+ // Reports failing to have parsed a chunk with the provided `chunk_type`.
+ void ReportFailedToParseChunk(int chunk_type);
+ // Called when unknown chunks are received. May report an error.
+ bool HandleUnrecognizedChunk(const SctpPacket::ChunkDescriptor& descriptor);
+
+ // Will dispatch more specific chunk handlers.
+ bool Dispatch(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming DATA chunks.
+ void HandleData(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming I-DATA chunks.
+ void HandleIData(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Common handler for DATA and I-DATA chunks.
+ void HandleDataCommon(AnyDataChunk& chunk);
+ // Handles incoming INIT chunks.
+ void HandleInit(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming INIT-ACK chunks.
+ void HandleInitAck(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming SACK chunks.
+ void HandleSack(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming HEARTBEAT chunks.
+ void HandleHeartbeatRequest(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming HEARTBEAT-ACK chunks.
+ void HandleHeartbeatAck(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming ABORT chunks.
+ void HandleAbort(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming ERROR chunks.
+ void HandleError(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming COOKIE-ECHO chunks.
+ void HandleCookieEcho(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles receiving COOKIE-ECHO when there already is a TCB. The return value
+ // indicates if the processing should continue.
+ bool HandleCookieEchoWithTCB(const CommonHeader& header,
+ const StateCookie& cookie);
+ // Handles incoming COOKIE-ACK chunks.
+ void HandleCookieAck(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming SHUTDOWN chunks.
+ void HandleShutdown(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming SHUTDOWN-ACK chunks.
+ void HandleShutdownAck(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming FORWARD-TSN chunks.
+ void HandleForwardTsn(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming I-FORWARD-TSN chunks.
+ void HandleIForwardTsn(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+ // Handles incoming RE-CONFIG chunks.
+ void HandleReconfig(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+  // Common handler for FORWARD-TSN and I-FORWARD-TSN.
+ void HandleForwardTsnCommon(const AnyForwardTsnChunk& chunk);
+  // Handles incoming SHUTDOWN-COMPLETE chunks.
+ void HandleShutdownComplete(const CommonHeader& header,
+ const SctpPacket::ChunkDescriptor& descriptor);
+
+ const std::string log_prefix_;
+ const std::unique_ptr<PacketObserver> packet_observer_;
+ DcSctpOptions options_;
+
+ // Enqueues callbacks and dispatches them just before returning to the caller.
+ CallbackDeferrer callbacks_;
+
+ TimerManager timer_manager_;
+ const std::unique_ptr<Timer> t1_init_;
+ const std::unique_ptr<Timer> t1_cookie_;
+ const std::unique_ptr<Timer> t2_shutdown_;
+
+ // The actual SendQueue implementation. As data can be sent on a socket before
+ // the connection is established, this component is not in the TCB.
+ RRSendQueue send_queue_;
+
+  // Contains the verification tag and initial TSN from when the INIT has been
+  // sent until the connection is established (there is no TCB at this point).
+ ConnectParameters connect_params_;
+ // The socket state.
+ State state_ = State::kClosed;
+ // If the connection is established, contains a transmission control block.
+ std::unique_ptr<TransmissionControlBlock> tcb_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_DCSCTP_SOCKET_H_
diff --git a/net/dcsctp/socket/dcsctp_socket_test.cc b/net/dcsctp/socket/dcsctp_socket_test.cc
new file mode 100644
index 0000000000..e5db12cd5a
--- /dev/null
+++ b/net/dcsctp/socket/dcsctp_socket_test.cc
@@ -0,0 +1,1394 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/dcsctp_socket.h"
+
+#include <cstdint>
+#include <deque>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/packet/chunk/chunk.h"
+#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h"
+#include "net/dcsctp/packet/chunk/data_chunk.h"
+#include "net/dcsctp/packet/chunk/data_common.h"
+#include "net/dcsctp/packet/chunk/error_chunk.h"
+#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h"
+#include "net/dcsctp/packet/chunk/idata_chunk.h"
+#include "net/dcsctp/packet/chunk/init_chunk.h"
+#include "net/dcsctp/packet/chunk/sack_chunk.h"
+#include "net/dcsctp/packet/chunk/shutdown_chunk.h"
+#include "net/dcsctp/packet/error_cause/error_cause.h"
+#include "net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h"
+#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h"
+#include "net/dcsctp/packet/parameter/parameter.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/packet/tlv_trait.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/mock_dcsctp_socket_callbacks.h"
+#include "net/dcsctp/testing/testing_macros.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+using ::testing::_;
+using ::testing::AllOf;
+using ::testing::ElementsAre;
+using ::testing::HasSubstr;
+using ::testing::IsEmpty;
+using ::testing::SizeIs;
+
+constexpr SendOptions kSendOptions;
+constexpr size_t kLargeMessageSize = DcSctpOptions::kMaxSafeMTUSize * 20;
+static constexpr size_t kSmallMessageSize = 10;
+
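+// The MATCHER macros below parse the raw bytes of a sent packet and inspect
+// its first chunk, so that tests can make assertions on what was actually put
+// on the wire.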
+MATCHER_P(HasDataChunkWithSsn, ssn, "") {
+ absl::optional<SctpPacket> packet = SctpPacket::Parse(arg);
+ if (!packet.has_value()) {
+ *result_listener << "data didn't parse as an SctpPacket";
+ return false;
+ }
+
+ if (packet->descriptors()[0].type != DataChunk::kType) {
+ *result_listener << "the first chunk in the packet is not a data chunk";
+ return false;
+ }
+
+ absl::optional<DataChunk> dc =
+ DataChunk::Parse(packet->descriptors()[0].data);
+ if (!dc.has_value()) {
+ *result_listener << "The first chunk didn't parse as a data chunk";
+ return false;
+ }
+
+ if (dc->ssn() != ssn) {
+ *result_listener << "the ssn is " << *dc->ssn();
+ return false;
+ }
+
+ return true;
+}
+
+MATCHER_P(HasDataChunkWithMid, mid, "") {
+ absl::optional<SctpPacket> packet = SctpPacket::Parse(arg);
+ if (!packet.has_value()) {
+ *result_listener << "data didn't parse as an SctpPacket";
+ return false;
+ }
+
+ if (packet->descriptors()[0].type != IDataChunk::kType) {
+ *result_listener << "the first chunk in the packet is not an i-data chunk";
+ return false;
+ }
+
+ absl::optional<IDataChunk> dc =
+ IDataChunk::Parse(packet->descriptors()[0].data);
+ if (!dc.has_value()) {
+ *result_listener << "The first chunk didn't parse as an i-data chunk";
+ return false;
+ }
+
+ if (dc->message_id() != mid) {
+ *result_listener << "the mid is " << *dc->message_id();
+ return false;
+ }
+
+ return true;
+}
+
+MATCHER_P(HasSackWithCumAckTsn, tsn, "") {
+ absl::optional<SctpPacket> packet = SctpPacket::Parse(arg);
+ if (!packet.has_value()) {
+ *result_listener << "data didn't parse as an SctpPacket";
+ return false;
+ }
+
+ if (packet->descriptors()[0].type != SackChunk::kType) {
+    *result_listener << "the first chunk in the packet is not a SACK chunk";
+ return false;
+ }
+
+ absl::optional<SackChunk> sc =
+ SackChunk::Parse(packet->descriptors()[0].data);
+ if (!sc.has_value()) {
+    *result_listener << "The first chunk didn't parse as a SACK chunk";
+ return false;
+ }
+
+ if (sc->cumulative_tsn_ack() != tsn) {
+ *result_listener << "the cum_ack_tsn is " << *sc->cumulative_tsn_ack();
+ return false;
+ }
+
+ return true;
+}
+
+MATCHER(HasSackWithNoGapAckBlocks, "") {
+ absl::optional<SctpPacket> packet = SctpPacket::Parse(arg);
+ if (!packet.has_value()) {
+ *result_listener << "data didn't parse as an SctpPacket";
+ return false;
+ }
+
+ if (packet->descriptors()[0].type != SackChunk::kType) {
+    *result_listener << "the first chunk in the packet is not a SACK chunk";
+ return false;
+ }
+
+ absl::optional<SackChunk> sc =
+ SackChunk::Parse(packet->descriptors()[0].data);
+ if (!sc.has_value()) {
+    *result_listener << "The first chunk didn't parse as a SACK chunk";
+ return false;
+ }
+
+ if (!sc->gap_ack_blocks().empty()) {
+ *result_listener << "there are gap ack blocks";
+ return false;
+ }
+
+ return true;
+}
+
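+// Returns `tsn` advanced by `delta` steps.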
+TSN AddTo(TSN tsn, int delta) {
+ return TSN(*tsn + delta);
+}
+
+DcSctpOptions MakeOptionsForTest(bool enable_message_interleaving) {
+ DcSctpOptions options;
+ // To make the interval more predictable in tests.
+ options.heartbeat_interval_include_rtt = false;
+ options.enable_message_interleaving = enable_message_interleaving;
+ return options;
+}
+
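+// Test fixture with two sockets, A (`sock_a_`) and Z (`sock_z_`), whose
+// callbacks capture sent packets in memory; the tests explicitly move packets
+// between the sockets and drive time and timers forward.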
+class DcSctpSocketTest : public testing::Test {
+ protected:
+ explicit DcSctpSocketTest(bool enable_message_interleaving = false)
+ : options_(MakeOptionsForTest(enable_message_interleaving)),
+ cb_a_("A"),
+ cb_z_("Z"),
+ sock_a_("A", cb_a_, nullptr, options_),
+ sock_z_("Z", cb_z_, nullptr, options_) {}
+
+ void AdvanceTime(DurationMs duration) {
+ cb_a_.AdvanceTime(duration);
+ cb_z_.AdvanceTime(duration);
+ }
+
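+  // Passes packets between A and Z, in both directions, until neither socket
+  // has anything left to send.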
+ static void ExchangeMessages(DcSctpSocket& sock_a,
+ MockDcSctpSocketCallbacks& cb_a,
+ DcSctpSocket& sock_z,
+ MockDcSctpSocketCallbacks& cb_z) {
+ bool delivered_packet = false;
+ do {
+ delivered_packet = false;
+ std::vector<uint8_t> packet_from_a = cb_a.ConsumeSentPacket();
+ if (!packet_from_a.empty()) {
+ delivered_packet = true;
+ sock_z.ReceivePacket(std::move(packet_from_a));
+ }
+ std::vector<uint8_t> packet_from_z = cb_z.ConsumeSentPacket();
+ if (!packet_from_z.empty()) {
+ delivered_packet = true;
+ sock_a.ReceivePacket(std::move(packet_from_z));
+ }
+ } while (delivered_packet);
+ }
+
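+  // Lets `socket` handle every timeout that has expired according to `cb`.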
+ void RunTimers(MockDcSctpSocketCallbacks& cb, DcSctpSocket& socket) {
+ for (;;) {
+ absl::optional<TimeoutID> timeout_id = cb.GetNextExpiredTimeout();
+ if (!timeout_id.has_value()) {
+ break;
+ }
+ socket.HandleTimeout(*timeout_id);
+ }
+ }
+
+ void RunTimers() {
+ RunTimers(cb_a_, sock_a_);
+ RunTimers(cb_z_, sock_z_);
+ }
+
+  // Calls Connect() on `sock_a_` and makes the connection established.
+ void ConnectSockets() {
+ EXPECT_CALL(cb_a_, OnConnected).Times(1);
+ EXPECT_CALL(cb_z_, OnConnected).Times(1);
+
+ sock_a_.Connect();
+    // Handshake: Z reads INIT, A reads INIT_ACK, Z reads COOKIE_ECHO and
+    // A reads COOKIE_ACK.
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+ }
+
+ const DcSctpOptions options_;
+ testing::NiceMock<MockDcSctpSocketCallbacks> cb_a_;
+ testing::NiceMock<MockDcSctpSocketCallbacks> cb_z_;
+ DcSctpSocket sock_a_;
+ DcSctpSocket sock_z_;
+};
+
+TEST_F(DcSctpSocketTest, EstablishConnection) {
+ EXPECT_CALL(cb_a_, OnConnected).Times(1);
+ EXPECT_CALL(cb_z_, OnConnected).Times(1);
+ EXPECT_CALL(cb_a_, OnConnectionRestarted).Times(0);
+ EXPECT_CALL(cb_z_, OnConnectionRestarted).Times(0);
+
+ sock_a_.Connect();
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads INIT_ACK, produces COOKIE_ECHO
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+ // Z reads COOKIE_ECHO, produces COOKIE_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads COOKIE_ACK.
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+}
+
+TEST_F(DcSctpSocketTest, EstablishConnectionWithSetupCollision) {
+ EXPECT_CALL(cb_a_, OnConnected).Times(1);
+ EXPECT_CALL(cb_z_, OnConnected).Times(1);
+ EXPECT_CALL(cb_a_, OnConnectionRestarted).Times(0);
+ EXPECT_CALL(cb_z_, OnConnectionRestarted).Times(0);
+ sock_a_.Connect();
+ sock_z_.Connect();
+
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+}
+
+TEST_F(DcSctpSocketTest, ShuttingDownWhileEstablishingConnection) {
+ EXPECT_CALL(cb_a_, OnConnected).Times(0);
+ EXPECT_CALL(cb_z_, OnConnected).Times(1);
+ sock_a_.Connect();
+
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads INIT_ACK, produces COOKIE_ECHO
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+ // Z reads COOKIE_ECHO, produces COOKIE_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // Drop COOKIE_ACK, just to more easily verify shutdown protocol.
+ cb_z_.ConsumeSentPacket();
+
+  // Socket A has received INIT_ACK, so it has a TCB, but it remains in the
+  // connecting state until it receives the COOKIE_ACK (which was dropped).
+  // Socket Z has received COOKIE_ECHO and is therefore connected. Socket A
+  // still has timers running at this point.
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnecting);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+
+ // Socket A is now shut down, which should make it stop those timers.
+ sock_a_.Shutdown();
+
+ EXPECT_CALL(cb_a_, OnClosed).Times(1);
+ EXPECT_CALL(cb_z_, OnClosed).Times(1);
+
+ // Z reads SHUTDOWN, produces SHUTDOWN_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads SHUTDOWN_ACK, produces SHUTDOWN_COMPLETE
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+ // Z reads SHUTDOWN_COMPLETE.
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ EXPECT_TRUE(cb_a_.ConsumeSentPacket().empty());
+ EXPECT_TRUE(cb_z_.ConsumeSentPacket().empty());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kClosed);
+ EXPECT_EQ(sock_z_.state(), SocketState::kClosed);
+}
+
+TEST_F(DcSctpSocketTest, EstablishSimultaneousConnection) {
+ EXPECT_CALL(cb_a_, OnConnected).Times(1);
+ EXPECT_CALL(cb_z_, OnConnected).Times(1);
+ EXPECT_CALL(cb_a_, OnConnectionRestarted).Times(0);
+ EXPECT_CALL(cb_z_, OnConnectionRestarted).Times(0);
+ sock_a_.Connect();
+
+ // INIT isn't received by Z, as it wasn't ready yet.
+ cb_a_.ConsumeSentPacket();
+
+ sock_z_.Connect();
+
+ // A reads INIT, produces INIT_ACK
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ // Z reads INIT_ACK, sends COOKIE_ECHO
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ // A reads COOKIE_ECHO - establishes connection.
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+
+ // Proceed with the remaining packets.
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+}
+
+TEST_F(DcSctpSocketTest, EstablishConnectionLostCookieAck) {
+ EXPECT_CALL(cb_a_, OnConnected).Times(1);
+ EXPECT_CALL(cb_z_, OnConnected).Times(1);
+ EXPECT_CALL(cb_a_, OnConnectionRestarted).Times(0);
+ EXPECT_CALL(cb_z_, OnConnectionRestarted).Times(0);
+
+ sock_a_.Connect();
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads INIT_ACK, produces COOKIE_ECHO
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+ // Z reads COOKIE_ECHO, produces COOKIE_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // COOKIE_ACK is lost.
+ cb_z_.ConsumeSentPacket();
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnecting);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+
+ // This will make A re-send the COOKIE_ECHO
+ AdvanceTime(DurationMs(options_.t1_cookie_timeout));
+ RunTimers();
+
+ // Z reads COOKIE_ECHO, produces COOKIE_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads COOKIE_ACK.
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+}
+
+TEST_F(DcSctpSocketTest, ResendInitAndEstablishConnection) {
+ sock_a_.Connect();
+ // INIT is never received by Z.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(init_packet.descriptors()[0].type, InitChunk::kType);
+
+ AdvanceTime(options_.t1_init_timeout);
+ RunTimers();
+
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads INIT_ACK, produces COOKIE_ECHO
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+ // Z reads COOKIE_ECHO, produces COOKIE_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads COOKIE_ACK.
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+}
+
+TEST_F(DcSctpSocketTest, ResendingInitTooManyTimesAborts) {
+ sock_a_.Connect();
+
+ // INIT is never received by Z.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(init_packet.descriptors()[0].type, InitChunk::kType);
+
+ for (int i = 0; i < options_.max_init_retransmits; ++i) {
+ AdvanceTime(options_.t1_init_timeout * (1 << i));
+ RunTimers();
+
+ // INIT is resent
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket resent_init_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(resent_init_packet.descriptors()[0].type, InitChunk::kType);
+ }
+
+ // Another timeout, after the max init retransmits.
+ AdvanceTime(options_.t1_init_timeout * (1 << options_.max_init_retransmits));
+ EXPECT_CALL(cb_a_, OnAborted).Times(1);
+ RunTimers();
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kClosed);
+}
+
+TEST_F(DcSctpSocketTest, ResendCookieEchoAndEstablishConnection) {
+ sock_a_.Connect();
+
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads INIT_ACK, produces COOKIE_ECHO
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ // COOKIE_ECHO is never received by Z.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(init_packet.descriptors()[0].type, CookieEchoChunk::kType);
+
+ AdvanceTime(options_.t1_init_timeout);
+ RunTimers();
+
+ // Z reads COOKIE_ECHO, produces COOKIE_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads COOKIE_ACK.
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+}
+
+TEST_F(DcSctpSocketTest, ResendingCookieEchoTooManyTimesAborts) {
+ sock_a_.Connect();
+
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads INIT_ACK, produces COOKIE_ECHO
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ // COOKIE_ECHO is never received by Z.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(init_packet.descriptors()[0].type, CookieEchoChunk::kType);
+
+ for (int i = 0; i < options_.max_init_retransmits; ++i) {
+ AdvanceTime(options_.t1_cookie_timeout * (1 << i));
+ RunTimers();
+
+ // COOKIE_ECHO is resent
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket resent_init_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(resent_init_packet.descriptors()[0].type, CookieEchoChunk::kType);
+ }
+
+ // Another timeout, after the max init retransmits.
+ AdvanceTime(options_.t1_cookie_timeout *
+ (1 << options_.max_init_retransmits));
+ EXPECT_CALL(cb_a_, OnAborted).Times(1);
+ RunTimers();
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kClosed);
+}
+
+TEST_F(DcSctpSocketTest, DoesntSendMorePacketsUntilCookieAckHasBeenReceived) {
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kLargeMessageSize)),
+ kSendOptions);
+ sock_a_.Connect();
+
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads INIT_ACK, produces COOKIE_ECHO
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ // COOKIE_ECHO is never received by Z.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket cookie_echo_packet1,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_THAT(cookie_echo_packet1.descriptors(), SizeIs(2));
+ EXPECT_EQ(cookie_echo_packet1.descriptors()[0].type, CookieEchoChunk::kType);
+ EXPECT_EQ(cookie_echo_packet1.descriptors()[1].type, DataChunk::kType);
+
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), IsEmpty());
+
+ // There are DATA chunks in the sent packet (that was lost), which means that
+  // the T3-RTX timer is running, but since the socket is in the kCookieEchoed
+  // state, it is T1-COOKIE that drives retransmissions, so when T3-RTX expires,
+ // nothing should be retransmitted.
+ ASSERT_TRUE(options_.rto_initial < options_.t1_cookie_timeout);
+ AdvanceTime(options_.rto_initial);
+ RunTimers();
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), IsEmpty());
+
+ // When T1-COOKIE expires, both the COOKIE-ECHO and DATA should be present.
+ AdvanceTime(options_.t1_cookie_timeout - options_.rto_initial);
+ RunTimers();
+
+  // And this COOKIE-ECHO and DATA are also lost - never received by Z.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket cookie_echo_packet2,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_THAT(cookie_echo_packet2.descriptors(), SizeIs(2));
+ EXPECT_EQ(cookie_echo_packet2.descriptors()[0].type, CookieEchoChunk::kType);
+ EXPECT_EQ(cookie_echo_packet2.descriptors()[1].type, DataChunk::kType);
+
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), IsEmpty());
+
+ // COOKIE_ECHO has exponential backoff.
+ AdvanceTime(options_.t1_cookie_timeout * 2);
+ RunTimers();
+
+ // Z reads COOKIE_ECHO, produces COOKIE_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads COOKIE_ACK.
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+ EXPECT_THAT(cb_z_.ConsumeReceivedMessage()->payload(),
+ SizeIs(kLargeMessageSize));
+}
+
+TEST_F(DcSctpSocketTest, ShutdownConnection) {
+ ConnectSockets();
+
+ RTC_LOG(LS_INFO) << "Shutting down";
+
+ sock_a_.Shutdown();
+ // Z reads SHUTDOWN, produces SHUTDOWN_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // A reads SHUTDOWN_ACK, produces SHUTDOWN_COMPLETE
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+ // Z reads SHUTDOWN_COMPLETE.
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kClosed);
+ EXPECT_EQ(sock_z_.state(), SocketState::kClosed);
+}
+
+TEST_F(DcSctpSocketTest, ShutdownTimerExpiresTooManyTimeClosesConnection) {
+ ConnectSockets();
+
+ sock_a_.Shutdown();
+ // Drop first SHUTDOWN packet.
+ cb_a_.ConsumeSentPacket();
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kShuttingDown);
+
+ for (int i = 0; i < options_.max_retransmissions; ++i) {
+ AdvanceTime(DurationMs(options_.rto_initial * (1 << i)));
+ RunTimers();
+
+ // Dropping every shutdown chunk.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(packet.descriptors()[0].type, ShutdownChunk::kType);
+ EXPECT_TRUE(cb_a_.ConsumeSentPacket().empty());
+ }
+  // The last expiry makes it abort the connection.
+ AdvanceTime(options_.rto_initial * (1 << options_.max_retransmissions));
+ EXPECT_CALL(cb_a_, OnAborted).Times(1);
+ RunTimers();
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kClosed);
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(packet.descriptors()[0].type, AbortChunk::kType);
+ EXPECT_TRUE(cb_a_.ConsumeSentPacket().empty());
+}
+
+TEST_F(DcSctpSocketTest, EstablishConnectionWhileSendingData) {
+ sock_a_.Connect();
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), kSendOptions);
+
+ // Z reads INIT, produces INIT_ACK
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+  // A reads INIT_ACK, produces COOKIE_ECHO
+  sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+  // Z reads COOKIE_ECHO, produces COOKIE_ACK
+  sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+  // A reads COOKIE_ACK.
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ EXPECT_EQ(sock_a_.state(), SocketState::kConnected);
+ EXPECT_EQ(sock_z_.state(), SocketState::kConnected);
+
+ absl::optional<DcSctpMessage> msg = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg.has_value());
+ EXPECT_EQ(msg->stream_id(), StreamID(1));
+}
+
+TEST_F(DcSctpSocketTest, SendMessageAfterEstablished) {
+ ConnectSockets();
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), kSendOptions);
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ absl::optional<DcSctpMessage> msg = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg.has_value());
+ EXPECT_EQ(msg->stream_id(), StreamID(1));
+}
+
+TEST_F(DcSctpSocketTest, TimeoutResendsPacket) {
+ ConnectSockets();
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), kSendOptions);
+ cb_a_.ConsumeSentPacket();
+
+ RTC_LOG(LS_INFO) << "Advancing time";
+ AdvanceTime(options_.rto_initial);
+ RunTimers();
+
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ absl::optional<DcSctpMessage> msg = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg.has_value());
+ EXPECT_EQ(msg->stream_id(), StreamID(1));
+}
+
+TEST_F(DcSctpSocketTest, SendALotOfBytesMissedSecondPacket) {
+ ConnectSockets();
+
+ std::vector<uint8_t> payload(kLargeMessageSize);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), kSendOptions);
+
+ // First DATA
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // Second DATA (lost)
+ cb_a_.ConsumeSentPacket();
+
+ // Retransmit and handle the rest
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ absl::optional<DcSctpMessage> msg = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg.has_value());
+ EXPECT_EQ(msg->stream_id(), StreamID(1));
+ EXPECT_THAT(msg->payload(), testing::ElementsAreArray(payload));
+}
+
+TEST_F(DcSctpSocketTest, SendingHeartbeatAnswersWithAck) {
+ ConnectSockets();
+
+ // Inject a HEARTBEAT chunk
+ SctpPacket::Builder b(sock_a_.verification_tag(), DcSctpOptions());
+ uint8_t info[] = {1, 2, 3, 4};
+ Parameters::Builder params_builder;
+ params_builder.Add(HeartbeatInfoParameter(info));
+ b.Add(HeartbeatRequestChunk(params_builder.Build()));
+ sock_a_.ReceivePacket(b.Build());
+
+ // HEARTBEAT_ACK is sent as a reply. Capture it.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket ack_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ ASSERT_THAT(ack_packet.descriptors(), SizeIs(1));
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ HeartbeatAckChunk ack,
+ HeartbeatAckChunk::Parse(ack_packet.descriptors()[0].data));
+ ASSERT_HAS_VALUE_AND_ASSIGN(HeartbeatInfoParameter info_param, ack.info());
+ EXPECT_THAT(info_param.info(), ElementsAre(1, 2, 3, 4));
+}
+
+TEST_F(DcSctpSocketTest, ExpectHeartbeatToBeSent) {
+ ConnectSockets();
+
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), IsEmpty());
+
+ AdvanceTime(options_.heartbeat_interval);
+ RunTimers();
+
+ std::vector<uint8_t> hb_packet_raw = cb_a_.ConsumeSentPacket();
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket hb_packet,
+ SctpPacket::Parse(hb_packet_raw));
+ ASSERT_THAT(hb_packet.descriptors(), SizeIs(1));
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ HeartbeatRequestChunk hb,
+ HeartbeatRequestChunk::Parse(hb_packet.descriptors()[0].data));
+ ASSERT_HAS_VALUE_AND_ASSIGN(HeartbeatInfoParameter info_param, hb.info());
+
+ // The info is a single 64-bit number.
+ EXPECT_THAT(hb.info()->info(), SizeIs(8));
+
+ // Feed it to Sock-z and expect a HEARTBEAT_ACK that will be propagated back.
+ sock_z_.ReceivePacket(hb_packet_raw);
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+}
+
+TEST_F(DcSctpSocketTest, CloseConnectionAfterTooManyLostHeartbeats) {
+ ConnectSockets();
+
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), testing::IsEmpty());
+ // Force-close socket Z so that it doesn't interfere from now on.
+ sock_z_.Close();
+
+ DurationMs time_to_next_hearbeat = options_.heartbeat_interval;
+
+ for (int i = 0; i < options_.max_retransmissions; ++i) {
+ RTC_LOG(LS_INFO) << "Letting HEARTBEAT interval timer expire - sending...";
+ AdvanceTime(time_to_next_hearbeat);
+ RunTimers();
+
+ // Dropping every heartbeat.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket hb_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(hb_packet.descriptors()[0].type, HeartbeatRequestChunk::kType);
+
+ RTC_LOG(LS_INFO) << "Letting the heartbeat expire.";
+ AdvanceTime(DurationMs(1000));
+ RunTimers();
+
+ time_to_next_hearbeat = options_.heartbeat_interval - DurationMs(1000);
+ }
+
+ RTC_LOG(LS_INFO) << "Letting HEARTBEAT interval timer expire - sending...";
+ AdvanceTime(time_to_next_hearbeat);
+ RunTimers();
+
+ // Last heartbeat
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), Not(IsEmpty()));
+
+ EXPECT_CALL(cb_a_, OnAborted).Times(1);
+ // Should suffice as exceeding RTO
+ AdvanceTime(DurationMs(1000));
+ RunTimers();
+}
+
+TEST_F(DcSctpSocketTest, RecoversAfterASuccessfulAck) {
+ ConnectSockets();
+
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), testing::IsEmpty());
+ // Force-close socket Z so that it doesn't interfere from now on.
+ sock_z_.Close();
+
+ DurationMs time_to_next_hearbeat = options_.heartbeat_interval;
+
+ for (int i = 0; i < options_.max_retransmissions; ++i) {
+ AdvanceTime(time_to_next_hearbeat);
+ RunTimers();
+
+ // Dropping every heartbeat.
+ cb_a_.ConsumeSentPacket();
+
+ RTC_LOG(LS_INFO) << "Letting the heartbeat expire.";
+ AdvanceTime(DurationMs(1000));
+ RunTimers();
+
+ time_to_next_hearbeat = options_.heartbeat_interval - DurationMs(1000);
+ }
+
+ RTC_LOG(LS_INFO) << "Getting the last heartbeat - and acking it";
+ AdvanceTime(time_to_next_hearbeat);
+ RunTimers();
+
+ std::vector<uint8_t> hb_packet_raw = cb_a_.ConsumeSentPacket();
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket hb_packet,
+ SctpPacket::Parse(hb_packet_raw));
+ ASSERT_THAT(hb_packet.descriptors(), SizeIs(1));
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ HeartbeatRequestChunk hb,
+ HeartbeatRequestChunk::Parse(hb_packet.descriptors()[0].data));
+
+ SctpPacket::Builder b(sock_a_.verification_tag(), options_);
+ b.Add(HeartbeatAckChunk(std::move(hb).extract_parameters()));
+ sock_a_.ReceivePacket(b.Build());
+
+ // Should suffice as exceeding RTO - which will not fire.
+ EXPECT_CALL(cb_a_, OnAborted).Times(0);
+ AdvanceTime(DurationMs(1000));
+ RunTimers();
+ EXPECT_THAT(cb_a_.ConsumeSentPacket(), IsEmpty());
+
+ // Verify that we get new heartbeats again.
+ RTC_LOG(LS_INFO) << "Expecting a new heartbeat";
+ AdvanceTime(time_to_next_hearbeat);
+ RunTimers();
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket another_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ EXPECT_EQ(another_packet.descriptors()[0].type, HeartbeatRequestChunk::kType);
+}
+
+TEST_F(DcSctpSocketTest, ResetStream) {
+ ConnectSockets();
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), {});
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ absl::optional<DcSctpMessage> msg = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg.has_value());
+ EXPECT_EQ(msg->stream_id(), StreamID(1));
+
+ // Handle SACK
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ // Reset the outgoing stream. This will directly send a RE-CONFIG.
+ sock_a_.ResetStreams(std::vector<StreamID>({StreamID(1)}));
+
+ // Receiving the packet will trigger a callback, indicating that A has
+ // reset its stream. It will also send a RE-CONFIG with a response.
+ EXPECT_CALL(cb_z_, OnIncomingStreamsReset).Times(1);
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ // Receiving a response will trigger a callback. Streams are now reset.
+ EXPECT_CALL(cb_a_, OnStreamsResetPerformed).Times(1);
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+}
+
+TEST_F(DcSctpSocketTest, ResetStreamWillMakeChunksStartAtZeroSsn) {
+ ConnectSockets();
+
+ std::vector<uint8_t> payload(options_.mtu - 100);
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {});
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {});
+
+ auto packet1 = cb_a_.ConsumeSentPacket();
+ EXPECT_THAT(packet1, HasDataChunkWithSsn(SSN(0)));
+ sock_z_.ReceivePacket(packet1);
+
+ auto packet2 = cb_a_.ConsumeSentPacket();
+ EXPECT_THAT(packet2, HasDataChunkWithSsn(SSN(1)));
+ sock_z_.ReceivePacket(packet2);
+
+ // Handle SACK
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ absl::optional<DcSctpMessage> msg1 = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg1.has_value());
+ EXPECT_EQ(msg1->stream_id(), StreamID(1));
+
+ absl::optional<DcSctpMessage> msg2 = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg2.has_value());
+ EXPECT_EQ(msg2->stream_id(), StreamID(1));
+
+ // Reset the outgoing stream. This will directly send a RE-CONFIG.
+ sock_a_.ResetStreams(std::vector<StreamID>({StreamID(1)}));
+ // RE-CONFIG, req
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // RE-CONFIG, resp
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {});
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {});
+
+ auto packet3 = cb_a_.ConsumeSentPacket();
+ EXPECT_THAT(packet3, HasDataChunkWithSsn(SSN(0)));
+ sock_z_.ReceivePacket(packet3);
+
+ auto packet4 = cb_a_.ConsumeSentPacket();
+ EXPECT_THAT(packet4, HasDataChunkWithSsn(SSN(1)));
+ sock_z_.ReceivePacket(packet4);
+
+ // Handle SACK
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+}
+
+TEST_F(DcSctpSocketTest, OnePeerReconnects) {
+ ConnectSockets();
+
+ EXPECT_CALL(cb_a_, OnConnectionRestarted).Times(1);
+ // Let's be evil here - reconnect while a fragmented packet was about to be
+ // sent. The receiving side should get it in full.
+ std::vector<uint8_t> payload(kLargeMessageSize);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), kSendOptions);
+
+ // First DATA
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ // Create a new association, z2 - and don't use z anymore.
+ testing::NiceMock<MockDcSctpSocketCallbacks> cb_z2("Z2");
+ DcSctpSocket sock_z2("Z2", cb_z2, nullptr, options_);
+
+ sock_z2.Connect();
+
+ // Retransmit and handle the rest. As there will be some chunks in-flight that
+ // have the wrong verification tag, those will yield errors.
+ ExchangeMessages(sock_a_, cb_a_, sock_z2, cb_z2);
+
+ absl::optional<DcSctpMessage> msg = cb_z2.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg.has_value());
+ EXPECT_EQ(msg->stream_id(), StreamID(1));
+ EXPECT_THAT(msg->payload(), testing::ElementsAreArray(payload));
+}
+
+TEST_F(DcSctpSocketTest, SendMessageWithLimitedRtx) {
+ ConnectSockets();
+
+ SendOptions send_options;
+ send_options.max_retransmissions = 0;
+ std::vector<uint8_t> payload(options_.mtu - 100);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(51), payload), send_options);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(52), payload), send_options);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), send_options);
+
+ // First DATA
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+ // Second DATA (lost)
+ cb_a_.ConsumeSentPacket();
+ // Third DATA
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ // Handle SACK
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ // Now the missing data chunk will be marked as nacked, but it might still be
+ // in-flight and the reported gap could be due to out-of-order delivery. So
+ // the RetransmissionQueue will not mark it as "to be retransmitted" until
+ // after the t3-rtx timer has expired.
+ AdvanceTime(options_.rto_initial);
+ RunTimers();
+
+ // The chunk will be marked as retransmitted, and then as abandoned, which
+ // will trigger a FORWARD-TSN to be sent.
+
+ // FORWARD-TSN (third)
+ sock_z_.ReceivePacket(cb_a_.ConsumeSentPacket());
+
+ // The receiver might have moved into delayed ack mode.
+ AdvanceTime(options_.rto_initial);
+ RunTimers();
+
+ // Handle SACK
+ sock_a_.ReceivePacket(cb_z_.ConsumeSentPacket());
+
+ absl::optional<DcSctpMessage> msg1 = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg1.has_value());
+ EXPECT_EQ(msg1->ppid(), PPID(51));
+
+ absl::optional<DcSctpMessage> msg2 = cb_z_.ConsumeReceivedMessage();
+ ASSERT_TRUE(msg2.has_value());
+ EXPECT_EQ(msg2->ppid(), PPID(53));
+
+ absl::optional<DcSctpMessage> msg3 = cb_z_.ConsumeReceivedMessage();
+ EXPECT_FALSE(msg3.has_value());
+}
+
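+// A made-up chunk type, used to exercise the handling of unrecognized chunks.
+// The two most significant bits of the type 0x49 are 0b01, which (per RFC 4960
+// section 3.2) tells the receiver to stop processing the packet and to report
+// the chunk in an ERROR with an "Unrecognized Chunk Type" cause, which is what
+// the test below expects.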
+struct FakeChunkConfig : ChunkConfig {
+ static constexpr int kType = 0x49;
+ static constexpr size_t kHeaderSize = 4;
+ static constexpr int kVariableLengthAlignment = 0;
+};
+
+class FakeChunk : public Chunk, public TLVTrait<FakeChunkConfig> {
+ public:
+ FakeChunk() {}
+
+ FakeChunk(FakeChunk&& other) = default;
+ FakeChunk& operator=(FakeChunk&& other) = default;
+
+ void SerializeTo(std::vector<uint8_t>& out) const override {
+ AllocateTLV(out);
+ }
+ std::string ToString() const override { return "FAKE"; }
+};
+
+TEST_F(DcSctpSocketTest, ReceivingUnknownChunkRespondsWithError) {
+ ConnectSockets();
+
+ // Inject a FAKE chunk
+ SctpPacket::Builder b(sock_a_.verification_tag(), DcSctpOptions());
+ b.Add(FakeChunk());
+ sock_a_.ReceivePacket(b.Build());
+
+ // ERROR is sent as a reply. Capture it.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket reply_packet,
+ SctpPacket::Parse(cb_a_.ConsumeSentPacket()));
+ ASSERT_THAT(reply_packet.descriptors(), SizeIs(1));
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ ErrorChunk error, ErrorChunk::Parse(reply_packet.descriptors()[0].data));
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ UnrecognizedChunkTypeCause cause,
+ error.error_causes().get<UnrecognizedChunkTypeCause>());
+ EXPECT_THAT(cause.unrecognized_chunk(), ElementsAre(0x49, 0x00, 0x00, 0x04));
+}
+
+TEST_F(DcSctpSocketTest, ReceivingErrorChunkReportsAsCallback) {
+ ConnectSockets();
+
+ // Inject a ERROR chunk
+ SctpPacket::Builder b(sock_a_.verification_tag(), DcSctpOptions());
+ b.Add(
+ ErrorChunk(Parameters::Builder()
+ .Add(UnrecognizedChunkTypeCause({0x49, 0x00, 0x00, 0x04}))
+ .Build()));
+
+ EXPECT_CALL(cb_a_, OnError(ErrorKind::kPeerReported,
+ HasSubstr("Unrecognized Chunk Type")));
+ sock_a_.ReceivePacket(b.Build());
+}
+
+TEST_F(DcSctpSocketTest, PassingHighWatermarkWillOnlyAcceptCumAckTsn) {
+ // Create a new association, z2 - and don't use z anymore.
+ testing::NiceMock<MockDcSctpSocketCallbacks> cb_z2("Z2");
+ DcSctpOptions options = options_;
+ options.max_receiver_window_buffer_size = 100;
+ DcSctpSocket sock_z2("Z2", cb_z2, nullptr, options);
+
+ EXPECT_CALL(cb_z2, OnClosed).Times(0);
+ EXPECT_CALL(cb_z2, OnAborted).Times(0);
+
+ sock_a_.Connect();
+ std::vector<uint8_t> init_data = cb_a_.ConsumeSentPacket();
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_packet,
+ SctpPacket::Parse(init_data));
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ InitChunk init_chunk,
+ InitChunk::Parse(init_packet.descriptors()[0].data));
+ sock_z2.ReceivePacket(init_data);
+ sock_a_.ReceivePacket(cb_z2.ConsumeSentPacket());
+ sock_z2.ReceivePacket(cb_a_.ConsumeSentPacket());
+ sock_a_.ReceivePacket(cb_z2.ConsumeSentPacket());
+
+ // Fill up Z2 to the high watermark limit.
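+  // (The receive window was set to 100 bytes above, and kHighWatermarkLimit is
+  // a fraction of that window, so this payload should just exceed the
+  // watermark.)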
+ TSN tsn = init_chunk.initial_tsn();
+ AnyDataChunk::Options opts;
+ opts.is_beginning = Data::IsBeginning(true);
+ sock_z2.ReceivePacket(
+ SctpPacket::Builder(sock_z2.verification_tag(), options)
+ .Add(DataChunk(tsn, StreamID(1), SSN(0), PPID(53),
+ std::vector<uint8_t>(
+ 100 * ReassemblyQueue::kHighWatermarkLimit + 1),
+ opts))
+ .Build());
+
+ // First DATA will always trigger a SACK. It's not interesting.
+ EXPECT_THAT(cb_z2.ConsumeSentPacket(),
+ AllOf(HasSackWithCumAckTsn(tsn), HasSackWithNoGapAckBlocks()));
+
+ // This DATA should be accepted - it's advancing cum ack tsn.
+ sock_z2.ReceivePacket(SctpPacket::Builder(sock_z2.verification_tag(), options)
+ .Add(DataChunk(AddTo(tsn, 1), StreamID(1), SSN(0),
+ PPID(53), std::vector<uint8_t>(1),
+ /*options=*/{}))
+ .Build());
+
+ // The receiver might have moved into delayed ack mode.
+ cb_z2.AdvanceTime(options.rto_initial);
+ RunTimers(cb_z2, sock_z2);
+
+ EXPECT_THAT(
+ cb_z2.ConsumeSentPacket(),
+ AllOf(HasSackWithCumAckTsn(AddTo(tsn, 1)), HasSackWithNoGapAckBlocks()));
+
+ // This DATA will not be accepted - it's not advancing cum ack tsn.
+ sock_z2.ReceivePacket(SctpPacket::Builder(sock_z2.verification_tag(), options)
+ .Add(DataChunk(AddTo(tsn, 3), StreamID(1), SSN(0),
+ PPID(53), std::vector<uint8_t>(1),
+ /*options=*/{}))
+ .Build());
+
+ // Sack will be sent in IMMEDIATE mode when this is happening.
+ EXPECT_THAT(
+ cb_z2.ConsumeSentPacket(),
+ AllOf(HasSackWithCumAckTsn(AddTo(tsn, 1)), HasSackWithNoGapAckBlocks()));
+
+ // This DATA will not be accepted either.
+ sock_z2.ReceivePacket(SctpPacket::Builder(sock_z2.verification_tag(), options)
+ .Add(DataChunk(AddTo(tsn, 4), StreamID(1), SSN(0),
+ PPID(53), std::vector<uint8_t>(1),
+ /*options=*/{}))
+ .Build());
+
+ // Sack will be sent in IMMEDIATE mode when this is happening.
+ EXPECT_THAT(
+ cb_z2.ConsumeSentPacket(),
+ AllOf(HasSackWithCumAckTsn(AddTo(tsn, 1)), HasSackWithNoGapAckBlocks()));
+
+ // This DATA should be accepted, and it fills the reassembly queue.
+ sock_z2.ReceivePacket(
+ SctpPacket::Builder(sock_z2.verification_tag(), options)
+ .Add(DataChunk(AddTo(tsn, 2), StreamID(1), SSN(0), PPID(53),
+ std::vector<uint8_t>(kSmallMessageSize),
+ /*options=*/{}))
+ .Build());
+
+ // The receiver might have moved into delayed ack mode.
+ cb_z2.AdvanceTime(options.rto_initial);
+ RunTimers(cb_z2, sock_z2);
+
+ EXPECT_THAT(
+ cb_z2.ConsumeSentPacket(),
+ AllOf(HasSackWithCumAckTsn(AddTo(tsn, 2)), HasSackWithNoGapAckBlocks()));
+
+ EXPECT_CALL(cb_z2, OnAborted(ErrorKind::kResourceExhaustion, _));
+ EXPECT_CALL(cb_z2, OnClosed).Times(0);
+
+ // This DATA will make the connection close. It's too full now.
+ sock_z2.ReceivePacket(
+ SctpPacket::Builder(sock_z2.verification_tag(), options)
+ .Add(DataChunk(AddTo(tsn, 3), StreamID(1), SSN(0), PPID(53),
+ std::vector<uint8_t>(kSmallMessageSize),
+ /*options=*/{}))
+ .Build());
+}
+
+TEST_F(DcSctpSocketTest, SetMaxMessageSize) {
+ sock_a_.SetMaxMessageSize(42u);
+ EXPECT_EQ(sock_a_.options().max_message_size, 42u);
+}
+
+TEST_F(DcSctpSocketTest, SendsMessagesWithLowLifetime) {
+ ConnectSockets();
+
+ // Mock that the time always goes forward.
+ TimeMs now(0);
+ EXPECT_CALL(cb_a_, TimeMillis).WillRepeatedly([&]() {
+ now += DurationMs(3);
+ return now;
+ });
+ EXPECT_CALL(cb_z_, TimeMillis).WillRepeatedly([&]() {
+ now += DurationMs(3);
+ return now;
+ });
+
+ // Queue a few small messages with low lifetime, both ordered and unordered,
+ // and validate that all are delivered.
+ static constexpr int kIterations = 100;
+ for (int i = 0; i < kIterations; ++i) {
+ SendOptions send_options;
+ send_options.unordered = IsUnordered((i % 2) == 0);
+ send_options.lifetime = DurationMs(i % 3); // 0, 1, 2 ms
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), send_options);
+ }
+
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ for (int i = 0; i < kIterations; ++i) {
+ EXPECT_TRUE(cb_z_.ConsumeReceivedMessage().has_value());
+ }
+
+ EXPECT_FALSE(cb_z_.ConsumeReceivedMessage().has_value());
+
+ // Validate that the sockets really make the time move forward.
+ EXPECT_GE(*now, kIterations * 2);
+}
+
+TEST_F(DcSctpSocketTest, DiscardsMessagesWithLowLifetimeIfMustBuffer) {
+ ConnectSockets();
+
+ SendOptions lifetime_0;
+ lifetime_0.unordered = IsUnordered(true);
+ lifetime_0.lifetime = DurationMs(0);
+
+ SendOptions lifetime_1;
+ lifetime_1.unordered = IsUnordered(true);
+ lifetime_1.lifetime = DurationMs(1);
+
+ // Mock that the time always goes forward.
+ TimeMs now(0);
+ EXPECT_CALL(cb_a_, TimeMillis).WillRepeatedly([&]() {
+ now += DurationMs(3);
+ return now;
+ });
+ EXPECT_CALL(cb_z_, TimeMillis).WillRepeatedly([&]() {
+ now += DurationMs(3);
+ return now;
+ });
+
+ // Fill up the send buffer with a large message.
+ std::vector<uint8_t> payload(kLargeMessageSize);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), payload), kSendOptions);
+
+ // And queue a few small messages with lifetime=0 or 1 ms - can't be sent.
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2, 3}), lifetime_0);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {4, 5, 6}), lifetime_1);
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53), {7, 8, 9}), lifetime_0);
+
+ // Handle all that was sent until congestion window got full.
+ for (;;) {
+ std::vector<uint8_t> packet_from_a = cb_a_.ConsumeSentPacket();
+ if (packet_from_a.empty()) {
+ break;
+ }
+ sock_z_.ReceivePacket(std::move(packet_from_a));
+ }
+
+ // Shouldn't be enough to send that large message.
+ EXPECT_FALSE(cb_z_.ConsumeReceivedMessage().has_value());
+
+ // Exchange the rest of the messages, with the time ever increasing.
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ // The large message should be delivered. It was sent reliably.
+ ASSERT_HAS_VALUE_AND_ASSIGN(DcSctpMessage m1, cb_z_.ConsumeReceivedMessage());
+ EXPECT_EQ(m1.stream_id(), StreamID(1));
+ EXPECT_THAT(m1.payload(), SizeIs(kLargeMessageSize));
+
+ // But none of the smaller messages.
+ EXPECT_FALSE(cb_z_.ConsumeReceivedMessage().has_value());
+}
+
+TEST_F(DcSctpSocketTest, HasReasonableBufferedAmountValues) {
+ ConnectSockets();
+
+ EXPECT_EQ(sock_a_.buffered_amount(StreamID(1)), 0u);
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kSmallMessageSize)),
+ kSendOptions);
+ // Sending a small message will directly send it as a single packet, so
+ // nothing is left in the queue.
+ EXPECT_EQ(sock_a_.buffered_amount(StreamID(1)), 0u);
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kLargeMessageSize)),
+ kSendOptions);
+
+ // Sending a message will directly start sending a few packets, so the
+ // buffered amount is not the full message size.
+ EXPECT_GT(sock_a_.buffered_amount(StreamID(1)), 0u);
+ EXPECT_LT(sock_a_.buffered_amount(StreamID(1)), kLargeMessageSize);
+}
+
+TEST_F(DcSctpSocketTest, HasDefaultOnBufferedAmountLowValueZero) {
+ EXPECT_EQ(sock_a_.buffered_amount_low_threshold(StreamID(1)), 0u);
+}
+
+TEST_F(DcSctpSocketTest, TriggersOnBufferedAmountLowWithDefaultValueZero) {
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow).Times(0);
+ ConnectSockets();
+
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow(StreamID(1)));
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kSmallMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+}
+
+TEST_F(DcSctpSocketTest, DoesntTriggerOnBufferedAmountLowIfBelowThreshold) {
+ static constexpr size_t kMessageSize = 1000;
+ static constexpr size_t kBufferedAmountLowThreshold = kMessageSize * 10;
+
+ sock_a_.SetBufferedAmountLowThreshold(StreamID(1),
+ kBufferedAmountLowThreshold);
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow).Times(0);
+ ConnectSockets();
+
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow(StreamID(1))).Times(0);
+ sock_a_.Send(
+ DcSctpMessage(StreamID(1), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ sock_a_.Send(
+ DcSctpMessage(StreamID(1), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+}
+
+TEST_F(DcSctpSocketTest, TriggersOnBufferedAmountMultipleTimes) {
+ static constexpr size_t kMessageSize = 1000;
+ static constexpr size_t kBufferedAmountLowThreshold = kMessageSize / 2;
+
+ sock_a_.SetBufferedAmountLowThreshold(StreamID(1),
+ kBufferedAmountLowThreshold);
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow).Times(0);
+ ConnectSockets();
+
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow(StreamID(1))).Times(3);
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow(StreamID(2))).Times(2);
+ sock_a_.Send(
+ DcSctpMessage(StreamID(1), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ sock_a_.Send(
+ DcSctpMessage(StreamID(2), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ sock_a_.Send(
+ DcSctpMessage(StreamID(1), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ sock_a_.Send(
+ DcSctpMessage(StreamID(2), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+
+ sock_a_.Send(
+ DcSctpMessage(StreamID(1), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+}
+
+TEST_F(DcSctpSocketTest, TriggersOnBufferedAmountLowOnlyWhenCrossingThreshold) {
+ static constexpr size_t kMessageSize = 1000;
+ static constexpr size_t kBufferedAmountLowThreshold = kMessageSize * 1.5;
+
+ sock_a_.SetBufferedAmountLowThreshold(StreamID(1),
+ kBufferedAmountLowThreshold);
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow).Times(0);
+ ConnectSockets();
+
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow).Times(0);
+
+ // Add a few messages to fill up the congestion window. When that is full,
+ // messages will start to be fully buffered.
+ while (sock_a_.buffered_amount(StreamID(1)) == 0) {
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ }
+ size_t initial_buffered = sock_a_.buffered_amount(StreamID(1));
+ ASSERT_GE(initial_buffered, 0u);
+ ASSERT_LT(initial_buffered, kMessageSize);
+
+ // Up to kMessageSize (which is below the threshold)
+ sock_a_.Send(
+ DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kMessageSize - initial_buffered)),
+ kSendOptions);
+ EXPECT_EQ(sock_a_.buffered_amount(StreamID(1)), kMessageSize);
+
+ // Up to 2*kMessageSize (which is above the threshold)
+ sock_a_.Send(
+ DcSctpMessage(StreamID(1), PPID(53), std::vector<uint8_t>(kMessageSize)),
+ kSendOptions);
+ EXPECT_EQ(sock_a_.buffered_amount(StreamID(1)), 2 * kMessageSize);
+
+ // Start ACKing packets, which will empty the send queue, and trigger the
+ // callback.
+ EXPECT_CALL(cb_a_, OnBufferedAmountLow(StreamID(1))).Times(1);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+}
+
+TEST_F(DcSctpSocketTest, DoesntTriggerOnTotalBufferAmountLowWhenBelow) {
+ ConnectSockets();
+
+ EXPECT_CALL(cb_a_, OnTotalBufferedAmountLow).Times(0);
+
+ sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kLargeMessageSize)),
+ kSendOptions);
+
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+}
+
+TEST_F(DcSctpSocketTest, TriggersOnTotalBufferAmountLowWhenCrossingThreshold) {
+ ConnectSockets();
+
+ EXPECT_CALL(cb_a_, OnTotalBufferedAmountLow).Times(0);
+
+ // Fill up the send queue completely.
+ for (;;) {
+ if (sock_a_.Send(DcSctpMessage(StreamID(1), PPID(53),
+ std::vector<uint8_t>(kLargeMessageSize)),
+ kSendOptions) == SendStatus::kErrorResourceExhaustion) {
+ break;
+ }
+ }
+
+ EXPECT_CALL(cb_a_, OnTotalBufferedAmountLow).Times(1);
+ ExchangeMessages(sock_a_, cb_a_, sock_z_, cb_z_);
+}
+
+} // namespace
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/heartbeat_handler.cc b/net/dcsctp/socket/heartbeat_handler.cc
new file mode 100644
index 0000000000..78616d1033
--- /dev/null
+++ b/net/dcsctp/socket/heartbeat_handler.cc
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/heartbeat_handler.h"
+
+#include <stddef.h>
+
+#include <cstdint>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/packet/bounded_byte_reader.h"
+#include "net/dcsctp/packet/bounded_byte_writer.h"
+#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h"
+#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h"
+#include "net/dcsctp/packet/parameter/parameter.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/socket/context.h"
+#include "net/dcsctp/timer/timer.h"
+#include "rtc_base/logging.h"
+
+namespace dcsctp {
+
+// This is stored (in serialized form) as HeartbeatInfoParameter sent in
+// HeartbeatRequestChunk and received back in HeartbeatAckChunk. It should be
+// well understood that this data may be modified by the peer, so it can't
+// be trusted.
+//
+// It currently only stores a timestamp, in millisecond precision, to allow for
+// RTT measurements. If the peer were to manipulate it, the only consequence
+// would be incorrect RTT measurements, which isn't an issue.
+class HeartbeatInfo {
+ public:
+ static constexpr size_t kBufferSize = sizeof(uint64_t);
+ static_assert(kBufferSize == 8, "Unexpected buffer size");
+
+ explicit HeartbeatInfo(TimeMs created_at) : created_at_(created_at) {}
+
+ std::vector<uint8_t> Serialize() {
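+    // The 64-bit timestamp is stored as two 32-bit words, most significant
+    // word first.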
+ uint32_t high_bits = static_cast<uint32_t>(*created_at_ >> 32);
+ uint32_t low_bits = static_cast<uint32_t>(*created_at_);
+
+ std::vector<uint8_t> data(kBufferSize);
+ BoundedByteWriter<kBufferSize> writer(data);
+ writer.Store32<0>(high_bits);
+ writer.Store32<4>(low_bits);
+ return data;
+ }
+
+ static absl::optional<HeartbeatInfo> Deserialize(
+ rtc::ArrayView<const uint8_t> data) {
+ if (data.size() != kBufferSize) {
+ RTC_LOG(LS_WARNING) << "Invalid heartbeat info: " << data.size()
+ << " bytes";
+ return absl::nullopt;
+ }
+
+ BoundedByteReader<kBufferSize> reader(data);
+ uint32_t high_bits = reader.Load32<0>();
+ uint32_t low_bits = reader.Load32<4>();
+
+ uint64_t created_at = static_cast<uint64_t>(high_bits) << 32 | low_bits;
+ return HeartbeatInfo(TimeMs(created_at));
+ }
+
+ TimeMs created_at() const { return created_at_; }
+
+ private:
+ const TimeMs created_at_;
+};
+
+HeartbeatHandler::HeartbeatHandler(absl::string_view log_prefix,
+ const DcSctpOptions& options,
+ Context* context,
+ TimerManager* timer_manager)
+ : log_prefix_(std::string(log_prefix) + "heartbeat: "),
+ ctx_(context),
+ timer_manager_(timer_manager),
+ interval_duration_(options.heartbeat_interval),
+ interval_duration_should_include_rtt_(
+ options.heartbeat_interval_include_rtt),
+ interval_timer_(timer_manager_->CreateTimer(
+ "heartbeat-interval",
+ [this]() { return OnIntervalTimerExpiry(); },
+ TimerOptions(interval_duration_, TimerBackoffAlgorithm::kFixed))),
+ timeout_timer_(timer_manager_->CreateTimer(
+ "heartbeat-timeout",
+ [this]() { return OnTimeoutTimerExpiry(); },
+ TimerOptions(options.rto_initial,
+ TimerBackoffAlgorithm::kExponential,
+ /*max_restarts=*/0))) {
+ // The interval timer must always be running as long as the association is up.
+ RestartTimer();
+}
+
+void HeartbeatHandler::RestartTimer() {
+ if (interval_duration_ == DurationMs(0)) {
+ // Heartbeating has been disabled.
+ return;
+ }
+
+ if (interval_duration_should_include_rtt_) {
+    // The RTT should be used, but it's not easily accessible. The RTO will
+    // suffice.
+ interval_timer_->set_duration(interval_duration_ + ctx_->current_rto());
+ } else {
+ interval_timer_->set_duration(interval_duration_);
+ }
+
+ interval_timer_->Start();
+}
+
+void HeartbeatHandler::HandleHeartbeatRequest(HeartbeatRequestChunk chunk) {
+ // https://tools.ietf.org/html/rfc4960#section-8.3
+ // "The receiver of the HEARTBEAT should immediately respond with a
+ // HEARTBEAT ACK that contains the Heartbeat Information TLV, together with
+ // any other received TLVs, copied unchanged from the received HEARTBEAT
+ // chunk."
+ ctx_->Send(ctx_->PacketBuilder().Add(
+ HeartbeatAckChunk(std::move(chunk).extract_parameters())));
+}
+
+void HeartbeatHandler::HandleHeartbeatAck(HeartbeatAckChunk chunk) {
+ timeout_timer_->Stop();
+ absl::optional<HeartbeatInfoParameter> info_param = chunk.info();
+ if (!info_param.has_value()) {
+ ctx_->callbacks().OnError(
+ ErrorKind::kParseFailed,
+ "Failed to parse HEARTBEAT-ACK; No Heartbeat Info parameter");
+ return;
+ }
+ absl::optional<HeartbeatInfo> info =
+ HeartbeatInfo::Deserialize(info_param->info());
+ if (!info.has_value()) {
+ ctx_->callbacks().OnError(ErrorKind::kParseFailed,
+ "Failed to parse HEARTBEAT-ACK; Failed to "
+ "deserialized Heartbeat info parameter");
+ return;
+ }
+
+ DurationMs duration(*ctx_->callbacks().TimeMillis() - *info->created_at());
+
+ ctx_->ObserveRTT(duration);
+
+ // https://tools.ietf.org/html/rfc4960#section-8.1
+ // "The counter shall be reset each time ... a HEARTBEAT ACK is received from
+ // the peer endpoint."
+ ctx_->ClearTxErrorCounter();
+}
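+
+// The RTT above is simply "now" minus the echoed creation time. With
+// hypothetical values, a request created at TimeMs(1000) and acked at
+// TimeMs(1313) gives an observed RTT of DurationMs(313).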
+
+absl::optional<DurationMs> HeartbeatHandler::OnIntervalTimerExpiry() {
+ if (ctx_->is_connection_established()) {
+ HeartbeatInfo info(ctx_->callbacks().TimeMillis());
+ timeout_timer_->set_duration(ctx_->current_rto());
+ timeout_timer_->Start();
+ RTC_DLOG(LS_INFO) << log_prefix_ << "Sending HEARTBEAT with timeout "
+ << *timeout_timer_->duration();
+
+ Parameters parameters = Parameters::Builder()
+ .Add(HeartbeatInfoParameter(info.Serialize()))
+ .Build();
+
+ ctx_->Send(ctx_->PacketBuilder().Add(
+ HeartbeatRequestChunk(std::move(parameters))));
+ } else {
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix_
+ << "Will not send HEARTBEAT when connection not established";
+ }
+ return absl::nullopt;
+}
+
+absl::optional<DurationMs> HeartbeatHandler::OnTimeoutTimerExpiry() {
+ // Note that the timeout timer is not restarted. It will be started again when
+ // the interval timer expires.
+ RTC_DCHECK(!timeout_timer_->is_running());
+ ctx_->IncrementTxErrorCounter("HEARTBEAT timeout");
+ return absl::nullopt;
+}
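+
+// Timer interplay, summarized: the interval timer fires on an idle association
+// and sends a HEARTBEAT while arming the timeout timer (max_restarts=0, so it
+// fires at most once per heartbeat). A HEARTBEAT-ACK stops the timeout timer,
+// feeds the RTT estimate and clears the tx error counter; if no ack arrives,
+// the timeout expiry increments the tx error counter instead.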
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/heartbeat_handler.h b/net/dcsctp/socket/heartbeat_handler.h
new file mode 100644
index 0000000000..14c3109534
--- /dev/null
+++ b/net/dcsctp/socket/heartbeat_handler.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_HEARTBEAT_HANDLER_H_
+#define NET_DCSCTP_SOCKET_HEARTBEAT_HANDLER_H_
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/socket/context.h"
+#include "net/dcsctp/timer/timer.h"
+
+namespace dcsctp {
+
+// HeartbeatHandler handles all logic around sending heartbeats and receiving
+// the responses, as well as receiving incoming heartbeat requests.
+//
+// Heartbeats are sent on idle connections to ensure that the connection is
+// still healthy and to measure the RTT. If a number of heartbeats time out,
+// the connection will eventually be closed.
+class HeartbeatHandler {
+ public:
+ HeartbeatHandler(absl::string_view log_prefix,
+ const DcSctpOptions& options,
+ Context* context,
+ TimerManager* timer_manager);
+
+ // Called when the heartbeat interval timer should be restarted. This is
+ // generally done every time data is sent, which makes the timer expire when
+ // the connection is idle.
+ void RestartTimer();
+
+ // Called on received HeartbeatRequestChunk chunks.
+ void HandleHeartbeatRequest(HeartbeatRequestChunk chunk);
+
+ // Called on received HeartbeatAckChunk chunks.
+ void HandleHeartbeatAck(HeartbeatAckChunk chunk);
+
+ private:
+ absl::optional<DurationMs> OnIntervalTimerExpiry();
+ absl::optional<DurationMs> OnTimeoutTimerExpiry();
+
+ const std::string log_prefix_;
+ Context* ctx_;
+ TimerManager* timer_manager_;
+ // How long a connection must be idle before a heartbeat is sent.
+ const DurationMs interval_duration_;
+ // Adding RTT to the duration will add some jitter, which is good in
+ // production, but less good in unit tests, which is why it can be disabled.
+ const bool interval_duration_should_include_rtt_;
+ const std::unique_ptr<Timer> interval_timer_;
+ const std::unique_ptr<Timer> timeout_timer_;
+};
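+
+// Rough usage sketch (the owning socket is hypothetical here): construct the
+// handler with the socket's Context and TimerManager, call RestartTimer()
+// whenever data is sent, and route received HEARTBEAT and HEARTBEAT-ACK chunks
+// to HandleHeartbeatRequest() and HandleHeartbeatAck() respectively.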
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_HEARTBEAT_HANDLER_H_
diff --git a/net/dcsctp/socket/heartbeat_handler_test.cc b/net/dcsctp/socket/heartbeat_handler_test.cc
new file mode 100644
index 0000000000..2c5df9fd92
--- /dev/null
+++ b/net/dcsctp/socket/heartbeat_handler_test.cc
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/heartbeat_handler.h"
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h"
+#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h"
+#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/socket/mock_context.h"
+#include "net/dcsctp/testing/testing_macros.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+using ::testing::ElementsAre;
+using ::testing::IsEmpty;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::SizeIs;
+
+constexpr DurationMs kHeartbeatInterval = DurationMs(30'000);
+
+DcSctpOptions MakeOptions(DurationMs heartbeat_interval) {
+ DcSctpOptions options;
+ options.heartbeat_interval_include_rtt = false;
+ options.heartbeat_interval = heartbeat_interval;
+ return options;
+}
+
+class HeartbeatHandlerTestBase : public testing::Test {
+ protected:
+ explicit HeartbeatHandlerTestBase(DurationMs heartbeat_interval)
+ : options_(MakeOptions(heartbeat_interval)),
+ context_(&callbacks_),
+ timer_manager_([this]() { return callbacks_.CreateTimeout(); }),
+ handler_("log: ", options_, &context_, &timer_manager_) {}
+
+ void AdvanceTime(DurationMs duration) {
+ callbacks_.AdvanceTime(duration);
+ for (;;) {
+ absl::optional<TimeoutID> timeout_id = callbacks_.GetNextExpiredTimeout();
+ if (!timeout_id.has_value()) {
+ break;
+ }
+ timer_manager_.HandleTimeout(*timeout_id);
+ }
+ }
+
+ const DcSctpOptions options_;
+ NiceMock<MockDcSctpSocketCallbacks> callbacks_;
+ NiceMock<MockContext> context_;
+ TimerManager timer_manager_;
+ HeartbeatHandler handler_;
+};
+
+class HeartbeatHandlerTest : public HeartbeatHandlerTestBase {
+ protected:
+ HeartbeatHandlerTest() : HeartbeatHandlerTestBase(kHeartbeatInterval) {}
+};
+
+class DisabledHeartbeatHandlerTest : public HeartbeatHandlerTestBase {
+ protected:
+ DisabledHeartbeatHandlerTest() : HeartbeatHandlerTestBase(DurationMs(0)) {}
+};
+
+TEST_F(HeartbeatHandlerTest, HasRunningHeartbeatIntervalTimer) {
+ AdvanceTime(options_.heartbeat_interval);
+
+ // Validate that a heartbeat request was sent.
+ std::vector<uint8_t> payload = callbacks_.ConsumeSentPacket();
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, SctpPacket::Parse(payload));
+ ASSERT_THAT(packet.descriptors(), SizeIs(1));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ HeartbeatRequestChunk request,
+ HeartbeatRequestChunk::Parse(packet.descriptors()[0].data));
+
+ EXPECT_TRUE(request.info().has_value());
+}
+
+TEST_F(HeartbeatHandlerTest, RepliesToHeartbeatRequests) {
+ uint8_t info_data[] = {1, 2, 3, 4, 5};
+ HeartbeatRequestChunk request(
+ Parameters::Builder().Add(HeartbeatInfoParameter(info_data)).Build());
+
+ handler_.HandleHeartbeatRequest(std::move(request));
+
+ std::vector<uint8_t> payload = callbacks_.ConsumeSentPacket();
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, SctpPacket::Parse(payload));
+ ASSERT_THAT(packet.descriptors(), SizeIs(1));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ HeartbeatAckChunk response,
+ HeartbeatAckChunk::Parse(packet.descriptors()[0].data));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ HeartbeatInfoParameter param,
+ response.parameters().get<HeartbeatInfoParameter>());
+
+ EXPECT_THAT(param.info(), ElementsAre(1, 2, 3, 4, 5));
+}
+
+TEST_F(HeartbeatHandlerTest, SendsHeartbeatRequestsOnIdleChannel) {
+ AdvanceTime(options_.heartbeat_interval);
+
+ // Grab the request, and make a response.
+ std::vector<uint8_t> payload = callbacks_.ConsumeSentPacket();
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, SctpPacket::Parse(payload));
+ ASSERT_THAT(packet.descriptors(), SizeIs(1));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ HeartbeatRequestChunk req,
+ HeartbeatRequestChunk::Parse(packet.descriptors()[0].data));
+
+ HeartbeatAckChunk ack(std::move(req).extract_parameters());
+
+ // Respond a while later. This RTT will be measured by the handler.
+ constexpr DurationMs rtt(313);
+
+ EXPECT_CALL(context_, ObserveRTT(rtt)).Times(1);
+
+ callbacks_.AdvanceTime(rtt);
+ handler_.HandleHeartbeatAck(std::move(ack));
+}
+
+TEST_F(HeartbeatHandlerTest, IncreasesErrorIfNotAckedInTime) {
+ DurationMs rto(105);
+ EXPECT_CALL(context_, current_rto).WillOnce(Return(rto));
+ AdvanceTime(options_.heartbeat_interval);
+
+ // Validate that a request was sent.
+ EXPECT_THAT(callbacks_.ConsumeSentPacket(), Not(IsEmpty()));
+
+ EXPECT_CALL(context_, IncrementTxErrorCounter).Times(1);
+ AdvanceTime(rto);
+}
+
+TEST_F(DisabledHeartbeatHandlerTest, IsReallyDisabled) {
+ AdvanceTime(options_.heartbeat_interval);
+
+ // Validate that a request was NOT sent.
+ EXPECT_THAT(callbacks_.ConsumeSentPacket(), IsEmpty());
+}
+
+} // namespace
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/mock_context.h b/net/dcsctp/socket/mock_context.h
new file mode 100644
index 0000000000..d86b99a20d
--- /dev/null
+++ b/net/dcsctp/socket/mock_context.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_MOCK_CONTEXT_H_
+#define NET_DCSCTP_SOCKET_MOCK_CONTEXT_H_
+
+#include <cstdint>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/socket/context.h"
+#include "net/dcsctp/socket/mock_dcsctp_socket_callbacks.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+
+class MockContext : public Context {
+ public:
+ static constexpr TSN MyInitialTsn() { return TSN(990); }
+ static constexpr TSN PeerInitialTsn() { return TSN(10); }
+ static constexpr VerificationTag PeerVerificationTag() {
+ return VerificationTag(0x01234567);
+ }
+
+ explicit MockContext(MockDcSctpSocketCallbacks* callbacks)
+ : callbacks_(*callbacks) {
+ ON_CALL(*this, is_connection_established)
+ .WillByDefault(testing::Return(true));
+ ON_CALL(*this, my_initial_tsn)
+ .WillByDefault(testing::Return(MyInitialTsn()));
+ ON_CALL(*this, peer_initial_tsn)
+ .WillByDefault(testing::Return(PeerInitialTsn()));
+ ON_CALL(*this, callbacks).WillByDefault(testing::ReturnRef(callbacks_));
+ ON_CALL(*this, current_rto).WillByDefault(testing::Return(DurationMs(123)));
+ ON_CALL(*this, Send).WillByDefault([this](SctpPacket::Builder& builder) {
+ callbacks_.SendPacket(builder.Build());
+ });
+ }
+
+ MOCK_METHOD(bool, is_connection_established, (), (const, override));
+ MOCK_METHOD(TSN, my_initial_tsn, (), (const, override));
+ MOCK_METHOD(TSN, peer_initial_tsn, (), (const, override));
+ MOCK_METHOD(DcSctpSocketCallbacks&, callbacks, (), (const, override));
+
+ MOCK_METHOD(void, ObserveRTT, (DurationMs rtt_ms), (override));
+ MOCK_METHOD(DurationMs, current_rto, (), (const, override));
+ MOCK_METHOD(bool,
+ IncrementTxErrorCounter,
+ (absl::string_view reason),
+ (override));
+ MOCK_METHOD(void, ClearTxErrorCounter, (), (override));
+ MOCK_METHOD(bool, HasTooManyTxErrors, (), (const, override));
+ SctpPacket::Builder PacketBuilder() const override {
+ return SctpPacket::Builder(PeerVerificationTag(), options_);
+ }
+ MOCK_METHOD(void, Send, (SctpPacket::Builder & builder), (override));
+
+ DcSctpOptions options_;
+ MockDcSctpSocketCallbacks& callbacks_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_MOCK_CONTEXT_H_
diff --git a/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h b/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h
new file mode 100644
index 0000000000..bcf1bde5b8
--- /dev/null
+++ b/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h
@@ -0,0 +1,161 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_MOCK_DCSCTP_SOCKET_CALLBACKS_H_
+#define NET_DCSCTP_SOCKET_MOCK_DCSCTP_SOCKET_CALLBACKS_H_
+
+#include <cstdint>
+#include <deque>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/timeout.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/timer/fake_timeout.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/random.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+
+namespace internal {
+// It can be argued whether a mocked random number generator should be
+// deterministic or whether it should behave like a "real" random number
+// generator. In this implementation, different instantiations of
+// `MockDcSctpSocketCallbacks` will have their `GetRandomInt` return different
+// sequences, but each instantiation will always generate the same sequence of
+// random numbers. This makes it easier to compare logs from tests, while still
+// letting e.g. two different sockets (used in the same test) get different
+// random numbers, so that they don't start e.g. on the same sequence number.
+// While that isn't an issue in the protocol, it just makes debugging harder as
+// the two sockets would look exactly the same.
+//
+// In a real implementation of `DcSctpSocketCallbacks` the random number
+// generator backing `GetRandomInt` should be seeded externally and correctly.
+inline int GetUniqueSeed() {
+ static int seed = 0;
+ return ++seed;
+}
+} // namespace internal
+
+class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks {
+ public:
+ explicit MockDcSctpSocketCallbacks(absl::string_view name = "")
+ : log_prefix_(name.empty() ? "" : std::string(name) + ": "),
+ random_(internal::GetUniqueSeed()),
+ timeout_manager_([this]() { return now_; }) {
+ ON_CALL(*this, SendPacket)
+ .WillByDefault([this](rtc::ArrayView<const uint8_t> data) {
+ sent_packets_.emplace_back(
+ std::vector<uint8_t>(data.begin(), data.end()));
+ });
+ ON_CALL(*this, OnMessageReceived)
+ .WillByDefault([this](DcSctpMessage message) {
+ received_messages_.emplace_back(std::move(message));
+ });
+
+ ON_CALL(*this, OnError)
+ .WillByDefault([this](ErrorKind error, absl::string_view message) {
+ RTC_LOG(LS_WARNING)
+ << log_prefix_ << "Socket error: " << ToString(error) << "; "
+ << message;
+ });
+ ON_CALL(*this, OnAborted)
+ .WillByDefault([this](ErrorKind error, absl::string_view message) {
+ RTC_LOG(LS_WARNING)
+ << log_prefix_ << "Socket abort: " << ToString(error) << "; "
+ << message;
+ });
+ ON_CALL(*this, TimeMillis).WillByDefault([this]() { return now_; });
+ }
+ MOCK_METHOD(void,
+ SendPacket,
+ (rtc::ArrayView<const uint8_t> data),
+ (override));
+
+ std::unique_ptr<Timeout> CreateTimeout() override {
+ return timeout_manager_.CreateTimeout();
+ }
+
+ MOCK_METHOD(TimeMs, TimeMillis, (), (override));
+ uint32_t GetRandomInt(uint32_t low, uint32_t high) override {
+ return random_.Rand(low, high);
+ }
+
+ MOCK_METHOD(void, OnMessageReceived, (DcSctpMessage message), (override));
+ MOCK_METHOD(void,
+ OnError,
+ (ErrorKind error, absl::string_view message),
+ (override));
+ MOCK_METHOD(void,
+ OnAborted,
+ (ErrorKind error, absl::string_view message),
+ (override));
+ MOCK_METHOD(void, OnConnected, (), (override));
+ MOCK_METHOD(void, OnClosed, (), (override));
+ MOCK_METHOD(void, OnConnectionRestarted, (), (override));
+ MOCK_METHOD(void,
+ OnStreamsResetFailed,
+ (rtc::ArrayView<const StreamID> outgoing_streams,
+ absl::string_view reason),
+ (override));
+ MOCK_METHOD(void,
+ OnStreamsResetPerformed,
+ (rtc::ArrayView<const StreamID> outgoing_streams),
+ (override));
+ MOCK_METHOD(void,
+ OnIncomingStreamsReset,
+ (rtc::ArrayView<const StreamID> incoming_streams),
+ (override));
+ MOCK_METHOD(void, OnBufferedAmountLow, (StreamID stream_id), (override));
+ MOCK_METHOD(void, OnTotalBufferedAmountLow, (), (override));
+
+ bool HasPacket() const { return !sent_packets_.empty(); }
+
+ std::vector<uint8_t> ConsumeSentPacket() {
+ if (sent_packets_.empty()) {
+ return {};
+ }
+ std::vector<uint8_t> ret = std::move(sent_packets_.front());
+ sent_packets_.pop_front();
+ return ret;
+ }
+ absl::optional<DcSctpMessage> ConsumeReceivedMessage() {
+ if (received_messages_.empty()) {
+ return absl::nullopt;
+ }
+ DcSctpMessage ret = std::move(received_messages_.front());
+ received_messages_.pop_front();
+ return ret;
+ }
+
+ void AdvanceTime(DurationMs duration_ms) { now_ = now_ + duration_ms; }
+ void SetTime(TimeMs now) { now_ = now; }
+
+ absl::optional<TimeoutID> GetNextExpiredTimeout() {
+ return timeout_manager_.GetNextExpiredTimeout();
+ }
+
+ private:
+ const std::string log_prefix_;
+ TimeMs now_ = TimeMs(0);
+ webrtc::Random random_;
+ FakeTimeoutManager timeout_manager_;
+ std::deque<std::vector<uint8_t>> sent_packets_;
+ std::deque<DcSctpMessage> received_messages_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_MOCK_DCSCTP_SOCKET_CALLBACKS_H_
diff --git a/net/dcsctp/socket/state_cookie.cc b/net/dcsctp/socket/state_cookie.cc
new file mode 100644
index 0000000000..7d04cbb0d7
--- /dev/null
+++ b/net/dcsctp/socket/state_cookie.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/state_cookie.h"
+
+#include <cstdint>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/packet/bounded_byte_reader.h"
+#include "net/dcsctp/packet/bounded_byte_writer.h"
+#include "net/dcsctp/socket/capabilities.h"
+#include "rtc_base/logging.h"
+
+namespace dcsctp {
+
+// Magic values, which the state cookie is prefixed with.
+constexpr uint32_t kMagic1 = 1684230979;
+constexpr uint32_t kMagic2 = 1414541360;
+constexpr size_t StateCookie::kCookieSize;
+
+std::vector<uint8_t> StateCookie::Serialize() {
+ std::vector<uint8_t> cookie;
+ cookie.resize(kCookieSize);
+ BoundedByteWriter<kCookieSize> buffer(cookie);
+ buffer.Store32<0>(kMagic1);
+ buffer.Store32<4>(kMagic2);
+ buffer.Store32<8>(*initiate_tag_);
+ buffer.Store32<12>(*initial_tsn_);
+ buffer.Store32<16>(a_rwnd_);
+ buffer.Store32<20>(static_cast<uint32_t>(*tie_tag_ >> 32));
+ buffer.Store32<24>(static_cast<uint32_t>(*tie_tag_));
+ buffer.Store8<28>(capabilities_.partial_reliability);
+ buffer.Store8<29>(capabilities_.message_interleaving);
+ buffer.Store8<30>(capabilities_.reconfig);
+ return cookie;
+}
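+
+// Cookie layout produced above (byte offsets within the 31-byte cookie):
+//   [0..3]  magic1        [4..7]   magic2
+//   [8..11] initiate_tag  [12..15] initial_tsn   [16..19] a_rwnd
+//   [20..27] tie_tag (upper 32 bits first)
+//   [28] partial_reliability  [29] message_interleaving  [30] reconfig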
+
+absl::optional<StateCookie> StateCookie::Deserialize(
+ rtc::ArrayView<const uint8_t> cookie) {
+ if (cookie.size() != kCookieSize) {
+ RTC_DLOG(LS_WARNING) << "Invalid state cookie: " << cookie.size()
+ << " bytes";
+ return absl::nullopt;
+ }
+
+ BoundedByteReader<kCookieSize> buffer(cookie);
+ uint32_t magic1 = buffer.Load32<0>();
+ uint32_t magic2 = buffer.Load32<4>();
+ if (magic1 != kMagic1 || magic2 != kMagic2) {
+ RTC_DLOG(LS_WARNING) << "Invalid state cookie; wrong magic";
+ return absl::nullopt;
+ }
+
+ VerificationTag verification_tag(buffer.Load32<8>());
+ TSN initial_tsn(buffer.Load32<12>());
+ uint32_t a_rwnd = buffer.Load32<16>();
+ uint32_t tie_tag_upper = buffer.Load32<20>();
+ uint32_t tie_tag_lower = buffer.Load32<24>();
+ TieTag tie_tag(static_cast<uint64_t>(tie_tag_upper) << 32 |
+ static_cast<uint64_t>(tie_tag_lower));
+ Capabilities capabilities;
+ capabilities.partial_reliability = buffer.Load8<28>() != 0;
+ capabilities.message_interleaving = buffer.Load8<29>() != 0;
+ capabilities.reconfig = buffer.Load8<30>() != 0;
+
+ return StateCookie(verification_tag, initial_tsn, a_rwnd, tie_tag,
+ capabilities);
+}
+
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/state_cookie.h b/net/dcsctp/socket/state_cookie.h
new file mode 100644
index 0000000000..df4b801397
--- /dev/null
+++ b/net/dcsctp/socket/state_cookie.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_STATE_COOKIE_H_
+#define NET_DCSCTP_SOCKET_STATE_COOKIE_H_
+
+#include <cstdint>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/internal_types.h"
+#include "net/dcsctp/socket/capabilities.h"
+
+namespace dcsctp {
+
+// This is serialized as a state cookie and put in INIT_ACK. The client then
+// responds with this in COOKIE_ECHO.
+//
+// NOTE: Expect that the client will modify it to try to exploit the library.
+// Do not trust anything in it; no pointers or anything like that.
+class StateCookie {
+ public:
+ static constexpr size_t kCookieSize = 31;
+
+ StateCookie(VerificationTag initiate_tag,
+ TSN initial_tsn,
+ uint32_t a_rwnd,
+ TieTag tie_tag,
+ Capabilities capabilities)
+ : initiate_tag_(initiate_tag),
+ initial_tsn_(initial_tsn),
+ a_rwnd_(a_rwnd),
+ tie_tag_(tie_tag),
+ capabilities_(capabilities) {}
+
+ // Returns a serialized version of this cookie.
+ std::vector<uint8_t> Serialize();
+
+ // Deserializes the cookie, and returns absl::nullopt if that failed.
+ static absl::optional<StateCookie> Deserialize(
+ rtc::ArrayView<const uint8_t> cookie);
+
+ VerificationTag initiate_tag() const { return initiate_tag_; }
+ TSN initial_tsn() const { return initial_tsn_; }
+ uint32_t a_rwnd() const { return a_rwnd_; }
+ TieTag tie_tag() const { return tie_tag_; }
+ const Capabilities& capabilities() const { return capabilities_; }
+
+ private:
+ const VerificationTag initiate_tag_;
+ const TSN initial_tsn_;
+ const uint32_t a_rwnd_;
+ const TieTag tie_tag_;
+ const Capabilities capabilities_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_STATE_COOKIE_H_
diff --git a/net/dcsctp/socket/state_cookie_test.cc b/net/dcsctp/socket/state_cookie_test.cc
new file mode 100644
index 0000000000..eab41a7a56
--- /dev/null
+++ b/net/dcsctp/socket/state_cookie_test.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/state_cookie.h"
+
+#include "net/dcsctp/testing/testing_macros.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+using ::testing::SizeIs;
+
+TEST(StateCookieTest, SerializeAndDeserialize) {
+ Capabilities capabilities = {/*partial_reliability=*/true,
+ /*message_interleaving=*/false,
+ /*reconfig=*/true};
+ StateCookie cookie(VerificationTag(123), TSN(456),
+ /*a_rwnd=*/789, TieTag(101112), capabilities);
+ std::vector<uint8_t> serialized = cookie.Serialize();
+ EXPECT_THAT(serialized, SizeIs(StateCookie::kCookieSize));
+ ASSERT_HAS_VALUE_AND_ASSIGN(StateCookie deserialized,
+ StateCookie::Deserialize(serialized));
+ EXPECT_EQ(deserialized.initiate_tag(), VerificationTag(123));
+ EXPECT_EQ(deserialized.initial_tsn(), TSN(456));
+ EXPECT_EQ(deserialized.a_rwnd(), 789u);
+ EXPECT_EQ(deserialized.tie_tag(), TieTag(101112));
+ EXPECT_TRUE(deserialized.capabilities().partial_reliability);
+ EXPECT_FALSE(deserialized.capabilities().message_interleaving);
+ EXPECT_TRUE(deserialized.capabilities().reconfig);
+}
+
+} // namespace
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/stream_reset_handler.cc b/net/dcsctp/socket/stream_reset_handler.cc
new file mode 100644
index 0000000000..a1f57e6b2b
--- /dev/null
+++ b/net/dcsctp/socket/stream_reset_handler.cc
@@ -0,0 +1,347 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/stream_reset_handler.h"
+
+#include <cstdint>
+#include <memory>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/internal_types.h"
+#include "net/dcsctp/common/str_join.h"
+#include "net/dcsctp/packet/chunk/reconfig_chunk.h"
+#include "net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h"
+#include "net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h"
+#include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h"
+#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h"
+#include "net/dcsctp/packet/parameter/parameter.h"
+#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h"
+#include "net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/packet/tlv_trait.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/rx/data_tracker.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/context.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/retransmission_queue.h"
+#include "rtc_base/logging.h"
+
+namespace dcsctp {
+namespace {
+using ResponseResult = ReconfigurationResponseParameter::Result;
+
+bool DescriptorsAre(const std::vector<ParameterDescriptor>& c,
+ uint16_t e1,
+ uint16_t e2) {
+ return (c[0].type == e1 && c[1].type == e2) ||
+ (c[0].type == e2 && c[1].type == e1);
+}
+
+} // namespace
+
+bool StreamResetHandler::Validate(const ReConfigChunk& chunk) {
+ const Parameters& parameters = chunk.parameters();
+
+ // https://tools.ietf.org/html/rfc6525#section-3.1
+ // "Note that each RE-CONFIG chunk holds at least one parameter
+ // and at most two parameters. Only the following combinations are allowed:"
+ std::vector<ParameterDescriptor> descriptors = parameters.descriptors();
+ if (descriptors.size() == 1) {
+ if ((descriptors[0].type == OutgoingSSNResetRequestParameter::kType) ||
+ (descriptors[0].type == IncomingSSNResetRequestParameter::kType) ||
+ (descriptors[0].type == SSNTSNResetRequestParameter::kType) ||
+ (descriptors[0].type == AddOutgoingStreamsRequestParameter::kType) ||
+ (descriptors[0].type == AddIncomingStreamsRequestParameter::kType) ||
+ (descriptors[0].type == ReconfigurationResponseParameter::kType)) {
+ return true;
+ }
+ } else if (descriptors.size() == 2) {
+ if (DescriptorsAre(descriptors, OutgoingSSNResetRequestParameter::kType,
+ IncomingSSNResetRequestParameter::kType) ||
+ DescriptorsAre(descriptors, AddOutgoingStreamsRequestParameter::kType,
+ AddIncomingStreamsRequestParameter::kType) ||
+ DescriptorsAre(descriptors, ReconfigurationResponseParameter::kType,
+ OutgoingSSNResetRequestParameter::kType) ||
+ DescriptorsAre(descriptors, ReconfigurationResponseParameter::kType,
+ ReconfigurationResponseParameter::kType)) {
+ return true;
+ }
+ }
+
+ RTC_LOG(LS_WARNING) << "Invalid set of RE-CONFIG parameters";
+ return false;
+}
+
+absl::optional<std::vector<ReconfigurationResponseParameter>>
+StreamResetHandler::Process(const ReConfigChunk& chunk) {
+ if (!Validate(chunk)) {
+ return absl::nullopt;
+ }
+
+ std::vector<ReconfigurationResponseParameter> responses;
+
+ for (const ParameterDescriptor& desc : chunk.parameters().descriptors()) {
+ switch (desc.type) {
+ case OutgoingSSNResetRequestParameter::kType:
+ HandleResetOutgoing(desc, responses);
+ break;
+
+ case IncomingSSNResetRequestParameter::kType:
+ HandleResetIncoming(desc, responses);
+ break;
+
+ case ReconfigurationResponseParameter::kType:
+ HandleResponse(desc);
+ break;
+ }
+ }
+
+ return responses;
+}
+
+void StreamResetHandler::HandleReConfig(ReConfigChunk chunk) {
+ absl::optional<std::vector<ReconfigurationResponseParameter>> responses =
+ Process(chunk);
+
+ if (!responses.has_value()) {
+ ctx_->callbacks().OnError(ErrorKind::kParseFailed,
+ "Failed to parse RE-CONFIG command");
+ return;
+ }
+
+ if (!responses->empty()) {
+ SctpPacket::Builder b = ctx_->PacketBuilder();
+ Parameters::Builder params_builder;
+ for (const auto& response : *responses) {
+ params_builder.Add(response);
+ }
+ b.Add(ReConfigChunk(params_builder.Build()));
+ ctx_->Send(b);
+ }
+}
+
+bool StreamResetHandler::ValidateReqSeqNbr(
+ ReconfigRequestSN req_seq_nbr,
+ std::vector<ReconfigurationResponseParameter>& responses) {
+ if (req_seq_nbr == last_processed_req_seq_nbr_) {
+ // This has already been performed previously.
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "req=" << *req_seq_nbr
+ << " already processed";
+ responses.push_back(ReconfigurationResponseParameter(
+ req_seq_nbr, ResponseResult::kSuccessNothingToDo));
+ return false;
+ }
+
+ if (req_seq_nbr != ReconfigRequestSN(*last_processed_req_seq_nbr_ + 1)) {
+ // Too old, too new, from wrong association etc.
+ // This is expected to happen when handing over an RTCPeerConnection from one
+ // server to another. The client will notice this and may decide to close
+ // old data channels, which may be sent to the wrong (or both) servers
+ // during a handover.
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "req=" << *req_seq_nbr
+ << " bad seq_nbr";
+ responses.push_back(ReconfigurationResponseParameter(
+ req_seq_nbr, ResponseResult::kErrorBadSequenceNumber));
+ return false;
+ }
+
+ return true;
+}
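+
+// Worked example with hypothetical numbers: if last_processed_req_seq_nbr_ is
+// 10, an incoming request with req_seq_nbr 10 is a duplicate and yields
+// kSuccessNothingToDo, 11 is accepted (the function returns true), and
+// anything else (9, 12, ...) yields kErrorBadSequenceNumber.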
+
+void StreamResetHandler::HandleResetOutgoing(
+ const ParameterDescriptor& descriptor,
+ std::vector<ReconfigurationResponseParameter>& responses) {
+ absl::optional<OutgoingSSNResetRequestParameter> req =
+ OutgoingSSNResetRequestParameter::Parse(descriptor.data);
+ if (!req.has_value()) {
+ ctx_->callbacks().OnError(ErrorKind::kParseFailed,
+ "Failed to parse Outgoing Reset command");
+ return;
+ }
+
+ if (ValidateReqSeqNbr(req->request_sequence_number(), responses)) {
+ ResponseResult result;
+
+ RTC_DLOG(LS_VERBOSE) << log_prefix_
+ << "Reset outgoing streams with req_seq_nbr="
+ << *req->request_sequence_number();
+
+ result = reassembly_queue_->ResetStreams(
+ *req, data_tracker_->last_cumulative_acked_tsn());
+ if (result == ResponseResult::kSuccessPerformed) {
+ last_processed_req_seq_nbr_ = req->request_sequence_number();
+ ctx_->callbacks().OnIncomingStreamsReset(req->stream_ids());
+ }
+ responses.push_back(ReconfigurationResponseParameter(
+ req->request_sequence_number(), result));
+ }
+}
+
+void StreamResetHandler::HandleResetIncoming(
+ const ParameterDescriptor& descriptor,
+ std::vector<ReconfigurationResponseParameter>& responses) {
+ absl::optional<IncomingSSNResetRequestParameter> req =
+ IncomingSSNResetRequestParameter::Parse(descriptor.data);
+ if (!req.has_value()) {
+ ctx_->callbacks().OnError(ErrorKind::kParseFailed,
+ "Failed to parse Incoming Reset command");
+ return;
+ }
+ if (ValidateReqSeqNbr(req->request_sequence_number(), responses)) {
+ responses.push_back(ReconfigurationResponseParameter(
+ req->request_sequence_number(), ResponseResult::kSuccessNothingToDo));
+ last_processed_req_seq_nbr_ = req->request_sequence_number();
+ }
+}
+
+void StreamResetHandler::HandleResponse(const ParameterDescriptor& descriptor) {
+ absl::optional<ReconfigurationResponseParameter> resp =
+ ReconfigurationResponseParameter::Parse(descriptor.data);
+ if (!resp.has_value()) {
+ ctx_->callbacks().OnError(
+ ErrorKind::kParseFailed,
+ "Failed to parse Reconfiguration Response command");
+ return;
+ }
+
+ if (current_request_.has_value() && current_request_->has_been_sent() &&
+ resp->response_sequence_number() == current_request_->req_seq_nbr()) {
+ reconfig_timer_->Stop();
+
+ switch (resp->result()) {
+ case ResponseResult::kSuccessNothingToDo:
+ case ResponseResult::kSuccessPerformed:
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix_ << "Reset stream success, req_seq_nbr="
+ << *current_request_->req_seq_nbr() << ", streams="
+ << StrJoin(current_request_->streams(), ",",
+ [](rtc::StringBuilder& sb, StreamID stream_id) {
+ sb << *stream_id;
+ });
+ ctx_->callbacks().OnStreamsResetPerformed(current_request_->streams());
+ current_request_ = absl::nullopt;
+ retransmission_queue_->CommitResetStreams();
+ break;
+ case ResponseResult::kInProgress:
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix_ << "Reset stream still pending, req_seq_nbr="
+ << *current_request_->req_seq_nbr() << ", streams="
+ << StrJoin(current_request_->streams(), ",",
+ [](rtc::StringBuilder& sb, StreamID stream_id) {
+ sb << *stream_id;
+ });
+ // Force this request to be sent again, but with new req_seq_nbr.
+ current_request_->PrepareRetransmission();
+ reconfig_timer_->set_duration(ctx_->current_rto());
+ reconfig_timer_->Start();
+ break;
+ case ResponseResult::kErrorRequestAlreadyInProgress:
+ case ResponseResult::kDenied:
+ case ResponseResult::kErrorWrongSSN:
+ case ResponseResult::kErrorBadSequenceNumber:
+ RTC_DLOG(LS_WARNING)
+ << log_prefix_ << "Reset stream error=" << ToString(resp->result())
+ << ", req_seq_nbr=" << *current_request_->req_seq_nbr()
+ << ", streams="
+ << StrJoin(current_request_->streams(), ",",
+ [](rtc::StringBuilder& sb, StreamID stream_id) {
+ sb << *stream_id;
+ });
+ ctx_->callbacks().OnStreamsResetFailed(current_request_->streams(),
+ ToString(resp->result()));
+ current_request_ = absl::nullopt;
+ retransmission_queue_->RollbackResetStreams();
+ break;
+ }
+ }
+}
+
+absl::optional<ReConfigChunk> StreamResetHandler::MakeStreamResetRequest() {
+ // Only send stream resets if there are streams to reset, and no current
+ // ongoing request (there can only be one at a time), and if the stream
+ // can be reset.
+ if (streams_to_reset_.empty() || current_request_.has_value() ||
+ !retransmission_queue_->CanResetStreams()) {
+ return absl::nullopt;
+ }
+
+ std::vector<StreamID> streams_to_reset(streams_to_reset_.begin(),
+ streams_to_reset_.end());
+ current_request_.emplace(TSN(*retransmission_queue_->next_tsn() - 1),
+ std::move(streams_to_reset));
+ streams_to_reset_.clear();
+ reconfig_timer_->set_duration(ctx_->current_rto());
+ reconfig_timer_->Start();
+ return MakeReconfigChunk();
+}
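+
+// Note on the request built above: the "sender's last assigned TSN" is
+// next_tsn() - 1, i.e. the highest TSN assigned to outgoing data so far.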
+
+ReConfigChunk StreamResetHandler::MakeReconfigChunk() {
+ // The req_seq_nbr will be empty if the request has never been sent before,
+ // or if it was sent but the receiver responded "in progress", in which case
+ // the req_seq_nbr is cleared so that the request is re-sent with a new
+ // number. But if the request is re-sent due to a timeout (the reconfig timer
+ // expiring), the same req_seq_nbr will be used.
+ RTC_DCHECK(current_request_.has_value());
+
+ if (!current_request_->has_been_sent()) {
+ current_request_->PrepareToSend(next_outgoing_req_seq_nbr_);
+ next_outgoing_req_seq_nbr_ =
+ ReconfigRequestSN(*next_outgoing_req_seq_nbr_ + 1);
+ }
+
+ Parameters::Builder params_builder =
+ Parameters::Builder().Add(OutgoingSSNResetRequestParameter(
+ current_request_->req_seq_nbr(), current_request_->req_seq_nbr(),
+ current_request_->sender_last_assigned_tsn(),
+ current_request_->streams()));
+
+ return ReConfigChunk(params_builder.Build());
+}
+
+void StreamResetHandler::ResetStreams(
+ rtc::ArrayView<const StreamID> outgoing_streams) {
+ // Enqueue streams to be reset - as this may be called multiple times
+ // while a request is already in progress (and there can only be one).
+ for (StreamID stream_id : outgoing_streams) {
+ streams_to_reset_.insert(stream_id);
+ }
+ if (current_request_.has_value()) {
+ // Already an ongoing request - will need to wait for it to finish as
+ // there can only be one in-flight ReConfig chunk with requests at any
+ // time.
+ } else {
+ retransmission_queue_->PrepareResetStreams(std::vector<StreamID>(
+ streams_to_reset_.begin(), streams_to_reset_.end()));
+ }
+}
+
+absl::optional<DurationMs> StreamResetHandler::OnReconfigTimerExpiry() {
+ if (current_request_->has_been_sent()) {
+ // There is an outstanding request, which timed out while waiting for a
+ // response.
+ if (!ctx_->IncrementTxErrorCounter("RECONFIG timeout")) {
+ // Timed out. The connection will close after processing the timers.
+ return absl::nullopt;
+ }
+ } else {
+ // There is no outstanding request, but there is a prepared one. This means
+ // that the receiver has previously responded "in progress", which resulted
+ // in retrying the request (but with a new req_seq_nbr) after a while.
+ }
+
+ ctx_->Send(ctx_->PacketBuilder().Add(MakeReconfigChunk()));
+ return ctx_->current_rto();
+}
+
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/stream_reset_handler.h b/net/dcsctp/socket/stream_reset_handler.h
new file mode 100644
index 0000000000..dc0ee5e8cc
--- /dev/null
+++ b/net/dcsctp/socket/stream_reset_handler.h
@@ -0,0 +1,222 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_STREAM_RESET_HANDLER_H_
+#define NET_DCSCTP_SOCKET_STREAM_RESET_HANDLER_H_
+
+#include <cstdint>
+#include <memory>
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/internal_types.h"
+#include "net/dcsctp/packet/chunk/reconfig_chunk.h"
+#include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h"
+#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h"
+#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/rx/data_tracker.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/context.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/retransmission_queue.h"
+
+namespace dcsctp {
+
+// StreamResetHandler handles sending outgoing stream reset requests (to close
+// an SCTP stream, which translates to closing a data channel).
+//
+// It also handles incoming "outgoing stream reset requests", when the peer
+// wants to close its data channel.
+//
+// Resetting streams is an asynchronous operation where the client will request
+// that a stream be reset, but it might not be performed exactly at this point.
+// First, the sender might need to discard all messages that have been enqueued
+// for this stream, or it may choose to wait until all have been sent. At the
+// very least, it must wait for the currently sending fragmented message to be
+// fully sent, because a stream can't be reset while the receiver has only seen
+// half a message. In the stream reset request, the "sender's last assigned
+// TSN" is provided; the receiver must have received all messages up to and
+// including this TSN before it can reset the stream. Since
+// fragments can get lost or sent out-of-order, the receiver of a request may
+// not have received all the data just yet, and then it will respond to the
+// sender: "In progress". In other words, try again. The sender will then need
+// to start a timer and try the very same request again (but with a new sequence
+// number) until the receiver successfully performs the operation.
+//
+// All this can take some time, and may be driven by timers, so the client will
+// ultimately be notified using callbacks.
+//
+// In this implementation, when a stream is reset, the queued but not-yet-sent
+// messages will be discarded, but that may change in the future. RFC8831 allows
+// both behaviors.
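+//
+// A typical exchange, sketched with simplified sequence numbers:
+//   A -> B: RE-CONFIG { OutgoingSSNResetRequestParameter(req_sn=X, ...) }
+//   B -> A: RE-CONFIG { ReconfigurationResponseParameter(X, In Progress) }
+//   (A's reconfig timer expires; the request is retried as req_sn=X+1)
+//   B -> A: RE-CONFIG { ReconfigurationResponseParameter(X+1, Success) }
+// after which A commits the reset and calls OnStreamsResetPerformed().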
+class StreamResetHandler {
+ public:
+ StreamResetHandler(absl::string_view log_prefix,
+ Context* context,
+ TimerManager* timer_manager,
+ DataTracker* data_tracker,
+ ReassemblyQueue* reassembly_queue,
+ RetransmissionQueue* retransmission_queue)
+ : log_prefix_(std::string(log_prefix) + "reset: "),
+ ctx_(context),
+ data_tracker_(data_tracker),
+ reassembly_queue_(reassembly_queue),
+ retransmission_queue_(retransmission_queue),
+ reconfig_timer_(timer_manager->CreateTimer(
+ "re-config",
+ [this]() { return OnReconfigTimerExpiry(); },
+ TimerOptions(DurationMs(0)))),
+ next_outgoing_req_seq_nbr_(ReconfigRequestSN(*ctx_->my_initial_tsn())),
+ last_processed_req_seq_nbr_(
+ ReconfigRequestSN(*ctx_->peer_initial_tsn() - 1)) {}
+
+ // Initiates reset of the provided streams. While there can only be one
+ // ongoing stream reset request at any time, this method can be called at any
+ // time and also multiple times. It will enqueue requests that can't be
+ // directly fulfilled, and will asynchronously process them when any ongoing
+ // request has completed.
+ void ResetStreams(rtc::ArrayView<const StreamID> outgoing_streams);
+
+ // Creates a Reset Streams request that must be sent if returned. Will start
+ // the reconfig timer. Will return absl::nullopt if there is no need to
+ // create a request (no streams to reset) or if there already is an ongoing
+ // stream reset request that hasn't completed yet.
+ absl::optional<ReConfigChunk> MakeStreamResetRequest();
+
+ // Called when handling an incoming RE-CONFIG chunk.
+ void HandleReConfig(ReConfigChunk chunk);
+
+ private:
+ // Represents a stream reset request operation. There can only be one ongoing at
+ // any time, and a sent request may either succeed, fail or result in the
+ // receiver signaling that it can't process it right now, and then it will be
+ // retried.
+ class CurrentRequest {
+ public:
+ CurrentRequest(TSN sender_last_assigned_tsn, std::vector<StreamID> streams)
+ : req_seq_nbr_(absl::nullopt),
+ sender_last_assigned_tsn_(sender_last_assigned_tsn),
+ streams_(std::move(streams)) {}
+
+ // Returns the current request sequence number, if this request has been
+ // sent (check `has_been_sent` first). Will return 0 if the request is just
+ // prepared (or scheduled for retransmission) but not yet sent.
+ ReconfigRequestSN req_seq_nbr() const {
+ return req_seq_nbr_.value_or(ReconfigRequestSN(0));
+ }
+
+ // The sender's last assigned TSN, from the retransmission queue. The
+ // receiver uses this to know when all data up to this TSN has been
+ // received, and thus when it is safe to reset the stream.
+ TSN sender_last_assigned_tsn() const { return sender_last_assigned_tsn_; }
+
+ // The streams that are to be reset.
+ const std::vector<StreamID>& streams() const { return streams_; }
+
+ // Whether this request has been sent yet. If not, then it's either because
+ // it has only been prepared and not yet sent, or because the receiver couldn't
+ // apply the request, and then the exact same request will be retried, but
+ // with a new sequence number.
+ bool has_been_sent() const { return req_seq_nbr_.has_value(); }
+
+ // If the receiver can't apply the request yet (and answered "In Progress"),
+ // this will be called to prepare the request to be retransmitted at a later
+ // time.
+ void PrepareRetransmission() { req_seq_nbr_ = absl::nullopt; }
+
+ // If the request hasn't been sent yet, this assigns it a request number.
+ void PrepareToSend(ReconfigRequestSN new_req_seq_nbr) {
+ req_seq_nbr_ = new_req_seq_nbr;
+ }
+
+ private:
+ // If this is set, this request has been sent. If it's not set, the request
+ // has been prepared, but has not yet been sent. This is typically used when
+ // the peer responded "in progress" and the same request (but with a different
+ // request number) must be sent again.
+ absl::optional<ReconfigRequestSN> req_seq_nbr_;
+ // The sender's (that's us) last assigned TSN, from the retransmission
+ // queue.
+ TSN sender_last_assigned_tsn_;
+ // The streams that are to be reset in this request.
+ const std::vector<StreamID> streams_;
+ };
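+
+ // Typical CurrentRequest lifecycle: it is created without a sequence number,
+ // PrepareToSend() assigns one just before the RE-CONFIG chunk is built, and
+ // if the peer answers "in progress" PrepareRetransmission() clears the number
+ // so the next attempt gets a fresh one; a plain timeout re-sends the request
+ // with the same number.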
+
+ // Called to validate an incoming RE-CONFIG chunk.
+ bool Validate(const ReConfigChunk& chunk);
+
+ // Processes a stream reconfiguration chunk and may either return
+ // absl::nullopt (on protocol errors), or a list of responses - either 0, 1
+ // or 2.
+ absl::optional<std::vector<ReconfigurationResponseParameter>> Process(
+ const ReConfigChunk& chunk);
+
+ // Creates the actual RE-CONFIG chunk. A request (which sets `current_request_`)
+ // must have been created beforehand.
+ ReConfigChunk MakeReconfigChunk();
+
+ // Called to validate `req_seq_nbr`, verifying that it's the next in sequence.
+ // If validation fails, this returns false and also adds an appropriate
+ // response to `responses`.
+ bool ValidateReqSeqNbr(
+ ReconfigRequestSN req_seq_nbr,
+ std::vector<ReconfigurationResponseParameter>& responses);
+
+ // Called when this socket receives an outgoing stream reset request. It might
+ // either be performed straight away, or have to be deferred, and the result
+ // of that will be put in `responses`.
+ void HandleResetOutgoing(
+ const ParameterDescriptor& descriptor,
+ std::vector<ReconfigurationResponseParameter>& responses);
+
+ // Called when this socket receives an incoming stream reset request. This
+ // isn't really supported, but a successful response is put in `responses`.
+ void HandleResetIncoming(
+ const ParameterDescriptor& descriptor,
+ std::vector<ReconfigurationResponseParameter>& responses);
+
+ // Called when receiving a response to an outgoing stream reset request. It
+ // will either commit the stream resetting, if the operation was successful,
+ // or will schedule a retry if it was deferred. And if it failed, the
+ // operation will be rolled back.
+ void HandleResponse(const ParameterDescriptor& descriptor);
+
+ // Expiration handler for the Reconfig timer.
+ absl::optional<DurationMs> OnReconfigTimerExpiry();
+
+ const std::string log_prefix_;
+ Context* ctx_;
+ DataTracker* data_tracker_;
+ ReassemblyQueue* reassembly_queue_;
+ RetransmissionQueue* retransmission_queue_;
+ const std::unique_ptr<Timer> reconfig_timer_;
+
+ // Outgoing streams that have been requested to be reset, but haven't yet
+ // been included in an outgoing request.
+ std::unordered_set<StreamID, StreamID::Hasher> streams_to_reset_;
+
+ // The next sequence number for outgoing stream requests.
+ ReconfigRequestSN next_outgoing_req_seq_nbr_;
+
+ // The current stream reset request operation.
+ absl::optional<CurrentRequest> current_request_;
+
+ // For incoming requests - last processed request sequence number.
+ ReconfigRequestSN last_processed_req_seq_nbr_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_STREAM_RESET_HANDLER_H_
diff --git a/net/dcsctp/socket/stream_reset_handler_test.cc b/net/dcsctp/socket/stream_reset_handler_test.cc
new file mode 100644
index 0000000000..a8e96fbf20
--- /dev/null
+++ b/net/dcsctp/socket/stream_reset_handler_test.cc
@@ -0,0 +1,550 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/stream_reset_handler.h"
+
+#include <array>
+#include <cstdint>
+#include <memory>
+#include <type_traits>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/internal_types.h"
+#include "net/dcsctp/packet/chunk/reconfig_chunk.h"
+#include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h"
+#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h"
+#include "net/dcsctp/packet/parameter/parameter.h"
+#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/rx/data_tracker.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/mock_context.h"
+#include "net/dcsctp/socket/mock_dcsctp_socket_callbacks.h"
+#include "net/dcsctp/testing/data_generator.h"
+#include "net/dcsctp/testing/testing_macros.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/mock_send_queue.h"
+#include "net/dcsctp/tx/retransmission_queue.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+using ::testing::_;
+using ::testing::IsEmpty;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAre;
+using ResponseResult = ReconfigurationResponseParameter::Result;
+
+constexpr TSN kMyInitialTsn = MockContext::MyInitialTsn();
+constexpr ReconfigRequestSN kMyInitialReqSn = ReconfigRequestSN(*kMyInitialTsn);
+constexpr TSN kPeerInitialTsn = MockContext::PeerInitialTsn();
+constexpr ReconfigRequestSN kPeerInitialReqSn =
+ ReconfigRequestSN(*kPeerInitialTsn);
+constexpr uint32_t kArwnd = 131072;
+constexpr DurationMs kRto = DurationMs(250);
+
+constexpr std::array<uint8_t, 4> kShortPayload = {1, 2, 3, 4};
+
+MATCHER_P3(SctpMessageIs, stream_id, ppid, expected_payload, "") {
+ if (arg.stream_id() != stream_id) {
+ *result_listener << "the stream_id is " << *arg.stream_id();
+ return false;
+ }
+
+ if (arg.ppid() != ppid) {
+ *result_listener << "the ppid is " << *arg.ppid();
+ return false;
+ }
+
+ if (std::vector<uint8_t>(arg.payload().begin(), arg.payload().end()) !=
+ std::vector<uint8_t>(expected_payload.begin(), expected_payload.end())) {
+ *result_listener << "the payload is wrong";
+ return false;
+ }
+ return true;
+}
+
+TSN AddTo(TSN tsn, int delta) {
+ return TSN(*tsn + delta);
+}
+
+ReconfigRequestSN AddTo(ReconfigRequestSN req_sn, int delta) {
+ return ReconfigRequestSN(*req_sn + delta);
+}
+
+class StreamResetHandlerTest : public testing::Test {
+ protected:
+ StreamResetHandlerTest()
+ : ctx_(&callbacks_),
+ timer_manager_([this]() { return callbacks_.CreateTimeout(); }),
+ delayed_ack_timer_(timer_manager_.CreateTimer(
+ "test/delayed_ack",
+ []() { return absl::nullopt; },
+ TimerOptions(DurationMs(0)))),
+ t3_rtx_timer_(timer_manager_.CreateTimer(
+ "test/t3_rtx",
+ []() { return absl::nullopt; },
+ TimerOptions(DurationMs(0)))),
+ buf_("log: ", delayed_ack_timer_.get(), kPeerInitialTsn),
+ reasm_("log: ", kPeerInitialTsn, kArwnd),
+ retransmission_queue_(
+ "",
+ kMyInitialTsn,
+ kArwnd,
+ producer_,
+ [](DurationMs rtt_ms) {},
+ []() {},
+ *t3_rtx_timer_,
+ /*options=*/{}),
+ handler_("log: ",
+ &ctx_,
+ &timer_manager_,
+ &buf_,
+ &reasm_,
+ &retransmission_queue_) {
+ EXPECT_CALL(ctx_, current_rto).WillRepeatedly(Return(kRto));
+ }
+
+ void AdvanceTime(DurationMs duration) {
+ callbacks_.AdvanceTime(duration);
+ for (;;) {
+ absl::optional<TimeoutID> timeout_id = callbacks_.GetNextExpiredTimeout();
+ if (!timeout_id.has_value()) {
+ break;
+ }
+ timer_manager_.HandleTimeout(*timeout_id);
+ }
+ }
+
+ // Handles the passed in RE-CONFIG `chunk` and returns the responses
+ // that are sent in the response RE-CONFIG.
+ std::vector<ReconfigurationResponseParameter> HandleAndCatchResponse(
+ ReConfigChunk chunk) {
+ handler_.HandleReConfig(std::move(chunk));
+
+ std::vector<uint8_t> payload = callbacks_.ConsumeSentPacket();
+ if (payload.empty()) {
+ EXPECT_TRUE(false);
+ return {};
+ }
+
+ std::vector<ReconfigurationResponseParameter> responses;
+ absl::optional<SctpPacket> p = SctpPacket::Parse(payload);
+ if (!p.has_value()) {
+ EXPECT_TRUE(false);
+ return {};
+ }
+ if (p->descriptors().size() != 1) {
+ EXPECT_TRUE(false);
+ return {};
+ }
+ absl::optional<ReConfigChunk> response_chunk =
+ ReConfigChunk::Parse(p->descriptors()[0].data);
+ if (!response_chunk.has_value()) {
+ EXPECT_TRUE(false);
+ return {};
+ }
+ for (const auto& desc : response_chunk->parameters().descriptors()) {
+ if (desc.type == ReconfigurationResponseParameter::kType) {
+ absl::optional<ReconfigurationResponseParameter> response =
+ ReconfigurationResponseParameter::Parse(desc.data);
+ if (!response.has_value()) {
+ EXPECT_TRUE(false);
+ return {};
+ }
+ responses.emplace_back(*std::move(response));
+ }
+ }
+ return responses;
+ }
+
+ DataGenerator gen_;
+ NiceMock<MockDcSctpSocketCallbacks> callbacks_;
+ NiceMock<MockContext> ctx_;
+ NiceMock<MockSendQueue> producer_;
+ TimerManager timer_manager_;
+ std::unique_ptr<Timer> delayed_ack_timer_;
+ std::unique_ptr<Timer> t3_rtx_timer_;
+ DataTracker buf_;
+ ReassemblyQueue reasm_;
+ RetransmissionQueue retransmission_queue_;
+ StreamResetHandler handler_;
+};
+
+TEST_F(StreamResetHandlerTest, ChunkWithNoParametersReturnsError) {
+ EXPECT_CALL(callbacks_, SendPacket).Times(0);
+ EXPECT_CALL(callbacks_, OnError).Times(1);
+ handler_.HandleReConfig(ReConfigChunk(Parameters()));
+}
+
+TEST_F(StreamResetHandlerTest, ChunkWithInvalidParametersReturnsError) {
+ Parameters::Builder builder;
+ // Two OutgoingSSNResetRequestParameter in a RE-CONFIG is not valid.
+ builder.Add(OutgoingSSNResetRequestParameter(ReconfigRequestSN(1),
+ ReconfigRequestSN(10),
+ kPeerInitialTsn, {StreamID(1)}));
+ builder.Add(OutgoingSSNResetRequestParameter(ReconfigRequestSN(2),
+ ReconfigRequestSN(10),
+ kPeerInitialTsn, {StreamID(2)}));
+
+ EXPECT_CALL(callbacks_, SendPacket).Times(0);
+ EXPECT_CALL(callbacks_, OnError).Times(1);
+ handler_.HandleReConfig(ReConfigChunk(builder.Build()));
+}
+
+TEST_F(StreamResetHandlerTest, FailToDeliverWithoutResettingStream) {
+ reasm_.Add(kPeerInitialTsn, gen_.Ordered({1, 2, 3, 4}, "BE"));
+ reasm_.Add(AddTo(kPeerInitialTsn, 1), gen_.Ordered({1, 2, 3, 4}, "BE"));
+
+ buf_.Observe(kPeerInitialTsn);
+ buf_.Observe(AddTo(kPeerInitialTsn, 1));
+ EXPECT_THAT(reasm_.FlushMessages(),
+ UnorderedElementsAre(
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload),
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload)));
+
+ gen_.ResetStream();
+ reasm_.Add(AddTo(kPeerInitialTsn, 2), gen_.Ordered({1, 2, 3, 4}, "BE"));
+ EXPECT_THAT(reasm_.FlushMessages(), IsEmpty());
+}
+
+TEST_F(StreamResetHandlerTest, ResetStreamsNotDeferred) {
+ reasm_.Add(kPeerInitialTsn, gen_.Ordered({1, 2, 3, 4}, "BE"));
+ reasm_.Add(AddTo(kPeerInitialTsn, 1), gen_.Ordered({1, 2, 3, 4}, "BE"));
+
+ buf_.Observe(kPeerInitialTsn);
+ buf_.Observe(AddTo(kPeerInitialTsn, 1));
+ EXPECT_THAT(reasm_.FlushMessages(),
+ UnorderedElementsAre(
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload),
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload)));
+
+ Parameters::Builder builder;
+ builder.Add(OutgoingSSNResetRequestParameter(
+ kPeerInitialReqSn, ReconfigRequestSN(3), AddTo(kPeerInitialTsn, 1),
+ {StreamID(1)}));
+
+ std::vector<ReconfigurationResponseParameter> responses =
+ HandleAndCatchResponse(ReConfigChunk(builder.Build()));
+ EXPECT_THAT(responses, SizeIs(1));
+ EXPECT_EQ(responses[0].result(), ResponseResult::kSuccessPerformed);
+
+ gen_.ResetStream();
+ reasm_.Add(AddTo(kPeerInitialTsn, 2), gen_.Ordered({1, 2, 3, 4}, "BE"));
+ EXPECT_THAT(reasm_.FlushMessages(),
+ UnorderedElementsAre(
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload)));
+}
+
+TEST_F(StreamResetHandlerTest, ResetStreamsDeferred) {
+ DataGeneratorOptions opts;
+ opts.message_id = MID(0);
+ reasm_.Add(kPeerInitialTsn, gen_.Ordered({1, 2, 3, 4}, "BE", opts));
+
+ opts.message_id = MID(1);
+ reasm_.Add(AddTo(kPeerInitialTsn, 1), gen_.Ordered({1, 2, 3, 4}, "BE", opts));
+
+ buf_.Observe(kPeerInitialTsn);
+ buf_.Observe(AddTo(kPeerInitialTsn, 1));
+ EXPECT_THAT(reasm_.FlushMessages(),
+ UnorderedElementsAre(
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload),
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload)));
+
+ Parameters::Builder builder;
+ builder.Add(OutgoingSSNResetRequestParameter(
+ kPeerInitialReqSn, ReconfigRequestSN(3), AddTo(kPeerInitialTsn, 3),
+ {StreamID(1)}));
+
+ std::vector<ReconfigurationResponseParameter> responses =
+ HandleAndCatchResponse(ReConfigChunk(builder.Build()));
+ EXPECT_THAT(responses, SizeIs(1));
+ EXPECT_EQ(responses[0].result(), ResponseResult::kInProgress);
+
+ opts.message_id = MID(1);
+ opts.ppid = PPID(5);
+ reasm_.Add(AddTo(kPeerInitialTsn, 5), gen_.Ordered({1, 2, 3, 4}, "BE", opts));
+ reasm_.MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 1));
+
+ opts.message_id = MID(0);
+ opts.ppid = PPID(4);
+ reasm_.Add(AddTo(kPeerInitialTsn, 4), gen_.Ordered({1, 2, 3, 4}, "BE", opts));
+ reasm_.MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 1));
+
+ opts.message_id = MID(3);
+ opts.ppid = PPID(3);
+ reasm_.Add(AddTo(kPeerInitialTsn, 3), gen_.Ordered({1, 2, 3, 4}, "BE", opts));
+ reasm_.MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 1));
+
+ opts.message_id = MID(2);
+ opts.ppid = PPID(2);
+ reasm_.Add(AddTo(kPeerInitialTsn, 2), gen_.Ordered({1, 2, 3, 4}, "BE", opts));
+ reasm_.MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 5));
+
+ EXPECT_THAT(
+ reasm_.FlushMessages(),
+ UnorderedElementsAre(SctpMessageIs(StreamID(1), PPID(2), kShortPayload),
+ SctpMessageIs(StreamID(1), PPID(3), kShortPayload),
+ SctpMessageIs(StreamID(1), PPID(4), kShortPayload),
+ SctpMessageIs(StreamID(1), PPID(5), kShortPayload)));
+}
+
+TEST_F(StreamResetHandlerTest, SendOutgoingRequestDirectly) {
+ EXPECT_CALL(producer_, PrepareResetStreams).Times(1);
+ handler_.ResetStreams(std::vector<StreamID>({StreamID(42)}));
+
+ EXPECT_CALL(producer_, CanResetStreams()).WillOnce(Return(true));
+ absl::optional<ReConfigChunk> reconfig = handler_.MakeStreamResetRequest();
+ ASSERT_TRUE(reconfig.has_value());
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req,
+ reconfig->parameters().get<OutgoingSSNResetRequestParameter>());
+
+ EXPECT_EQ(req.request_sequence_number(), kMyInitialReqSn);
+ EXPECT_EQ(req.sender_last_assigned_tsn(),
+ TSN(*retransmission_queue_.next_tsn() - 1));
+ EXPECT_THAT(req.stream_ids(), UnorderedElementsAre(StreamID(42)));
+}
+
+TEST_F(StreamResetHandlerTest, ResetMultipleStreamsInOneRequest) {
+ EXPECT_CALL(producer_, PrepareResetStreams).Times(3);
+ handler_.ResetStreams(std::vector<StreamID>({StreamID(42)}));
+ handler_.ResetStreams(
+ std::vector<StreamID>({StreamID(43), StreamID(44), StreamID(41)}));
+ handler_.ResetStreams(std::vector<StreamID>({StreamID(42), StreamID(40)}));
+
+ EXPECT_CALL(producer_, CanResetStreams()).WillOnce(Return(true));
+ absl::optional<ReConfigChunk> reconfig = handler_.MakeStreamResetRequest();
+ ASSERT_TRUE(reconfig.has_value());
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req,
+ reconfig->parameters().get<OutgoingSSNResetRequestParameter>());
+
+ EXPECT_EQ(req.request_sequence_number(), kMyInitialReqSn);
+ EXPECT_EQ(req.sender_last_assigned_tsn(),
+ TSN(*retransmission_queue_.next_tsn() - 1));
+ EXPECT_THAT(req.stream_ids(),
+ UnorderedElementsAre(StreamID(40), StreamID(41), StreamID(42),
+ StreamID(43), StreamID(44)));
+}
+
+TEST_F(StreamResetHandlerTest, SendOutgoingRequestDeferred) {
+ EXPECT_CALL(producer_, PrepareResetStreams).Times(1);
+ handler_.ResetStreams(std::vector<StreamID>({StreamID(42)}));
+
+ EXPECT_CALL(producer_, CanResetStreams())
+ .WillOnce(Return(false))
+ .WillOnce(Return(false))
+ .WillOnce(Return(true));
+
+ EXPECT_FALSE(handler_.MakeStreamResetRequest().has_value());
+ EXPECT_FALSE(handler_.MakeStreamResetRequest().has_value());
+ EXPECT_TRUE(handler_.MakeStreamResetRequest().has_value());
+}
+
+TEST_F(StreamResetHandlerTest, SendOutgoingResettingOnPositiveResponse) {
+ EXPECT_CALL(producer_, PrepareResetStreams).Times(1);
+ handler_.ResetStreams(std::vector<StreamID>({StreamID(42)}));
+
+ EXPECT_CALL(producer_, CanResetStreams()).WillOnce(Return(true));
+
+ absl::optional<ReConfigChunk> reconfig = handler_.MakeStreamResetRequest();
+ ASSERT_TRUE(reconfig.has_value());
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req,
+ reconfig->parameters().get<OutgoingSSNResetRequestParameter>());
+
+ Parameters::Builder builder;
+ builder.Add(ReconfigurationResponseParameter(
+ req.request_sequence_number(), ResponseResult::kSuccessPerformed));
+ ReConfigChunk response_reconfig(builder.Build());
+
+ EXPECT_CALL(producer_, CommitResetStreams()).Times(1);
+ EXPECT_CALL(producer_, RollbackResetStreams()).Times(0);
+
+ // Processing a response shouldn't result in sending anything.
+ EXPECT_CALL(callbacks_, OnError).Times(0);
+ EXPECT_CALL(callbacks_, SendPacket).Times(0);
+ handler_.HandleReConfig(std::move(response_reconfig));
+}
+
+TEST_F(StreamResetHandlerTest, SendOutgoingResetRollbackOnError) {
+ EXPECT_CALL(producer_, PrepareResetStreams).Times(1);
+ handler_.ResetStreams(std::vector<StreamID>({StreamID(42)}));
+
+ EXPECT_CALL(producer_, CanResetStreams()).WillOnce(Return(true));
+
+ absl::optional<ReConfigChunk> reconfig = handler_.MakeStreamResetRequest();
+ ASSERT_TRUE(reconfig.has_value());
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req,
+ reconfig->parameters().get<OutgoingSSNResetRequestParameter>());
+
+ Parameters::Builder builder;
+ builder.Add(ReconfigurationResponseParameter(
+ req.request_sequence_number(), ResponseResult::kErrorBadSequenceNumber));
+ ReConfigChunk response_reconfig(builder.Build());
+
+ EXPECT_CALL(producer_, CommitResetStreams()).Times(0);
+ EXPECT_CALL(producer_, RollbackResetStreams()).Times(1);
+
+ // Only requests should result in sending responses.
+ EXPECT_CALL(callbacks_, OnError).Times(0);
+ EXPECT_CALL(callbacks_, SendPacket).Times(0);
+ handler_.HandleReConfig(std::move(response_reconfig));
+}
+
+TEST_F(StreamResetHandlerTest, SendOutgoingResetRetransmitOnInProgress) {
+ static constexpr StreamID kStreamToReset = StreamID(42);
+
+ EXPECT_CALL(producer_, PrepareResetStreams).Times(1);
+ handler_.ResetStreams(std::vector<StreamID>({kStreamToReset}));
+
+ EXPECT_CALL(producer_, CanResetStreams()).WillOnce(Return(true));
+
+ absl::optional<ReConfigChunk> reconfig1 = handler_.MakeStreamResetRequest();
+ ASSERT_TRUE(reconfig1.has_value());
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req1,
+ reconfig1->parameters().get<OutgoingSSNResetRequestParameter>());
+
+ // Simulate that the peer responded "In Progress".
+ Parameters::Builder builder;
+ builder.Add(ReconfigurationResponseParameter(req1.request_sequence_number(),
+ ResponseResult::kInProgress));
+ ReConfigChunk response_reconfig(builder.Build());
+
+ EXPECT_CALL(producer_, CommitResetStreams()).Times(0);
+ EXPECT_CALL(producer_, RollbackResetStreams()).Times(0);
+
+ // Processing a response shouldn't result in sending anything.
+ EXPECT_CALL(callbacks_, OnError).Times(0);
+ EXPECT_CALL(callbacks_, SendPacket).Times(0);
+ handler_.HandleReConfig(std::move(response_reconfig));
+
+  // Let some time pass so that the reconfig timer expires and the same
+  // request is retried.
+ EXPECT_CALL(callbacks_, SendPacket).Times(1);
+ AdvanceTime(kRto);
+
+ std::vector<uint8_t> payload = callbacks_.ConsumeSentPacket();
+ ASSERT_FALSE(payload.empty());
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, SctpPacket::Parse(payload));
+ ASSERT_THAT(packet.descriptors(), SizeIs(1));
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ ReConfigChunk reconfig2,
+ ReConfigChunk::Parse(packet.descriptors()[0].data));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req2,
+ reconfig2.parameters().get<OutgoingSSNResetRequestParameter>());
+
+ EXPECT_EQ(req2.request_sequence_number(),
+ AddTo(req1.request_sequence_number(), 1));
+ EXPECT_THAT(req2.stream_ids(), UnorderedElementsAre(kStreamToReset));
+}
+
+TEST_F(StreamResetHandlerTest, ResetWhileRequestIsSentWillQueue) {
+ EXPECT_CALL(producer_, PrepareResetStreams).Times(1);
+ handler_.ResetStreams(std::vector<StreamID>({StreamID(42)}));
+
+ EXPECT_CALL(producer_, CanResetStreams()).WillOnce(Return(true));
+ absl::optional<ReConfigChunk> reconfig1 = handler_.MakeStreamResetRequest();
+ ASSERT_TRUE(reconfig1.has_value());
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req1,
+ reconfig1->parameters().get<OutgoingSSNResetRequestParameter>());
+ EXPECT_EQ(req1.request_sequence_number(), kMyInitialReqSn);
+ EXPECT_EQ(req1.sender_last_assigned_tsn(),
+ AddTo(retransmission_queue_.next_tsn(), -1));
+ EXPECT_THAT(req1.stream_ids(), UnorderedElementsAre(StreamID(42)));
+
+ // Streams reset while the request is in-flight will be queued.
+ StreamID stream_ids[] = {StreamID(41), StreamID(43)};
+ handler_.ResetStreams(stream_ids);
+ EXPECT_EQ(handler_.MakeStreamResetRequest(), absl::nullopt);
+
+ Parameters::Builder builder;
+ builder.Add(ReconfigurationResponseParameter(
+ req1.request_sequence_number(), ResponseResult::kSuccessPerformed));
+ ReConfigChunk response_reconfig(builder.Build());
+
+ EXPECT_CALL(producer_, CommitResetStreams()).Times(1);
+ EXPECT_CALL(producer_, RollbackResetStreams()).Times(0);
+
+ // Processing a response shouldn't result in sending anything.
+ EXPECT_CALL(callbacks_, OnError).Times(0);
+ EXPECT_CALL(callbacks_, SendPacket).Times(0);
+ handler_.HandleReConfig(std::move(response_reconfig));
+
+ // Response has been processed. A new request can be sent.
+ EXPECT_CALL(producer_, CanResetStreams()).WillOnce(Return(true));
+ absl::optional<ReConfigChunk> reconfig2 = handler_.MakeStreamResetRequest();
+ ASSERT_TRUE(reconfig2.has_value());
+ ASSERT_HAS_VALUE_AND_ASSIGN(
+ OutgoingSSNResetRequestParameter req2,
+ reconfig2->parameters().get<OutgoingSSNResetRequestParameter>());
+ EXPECT_EQ(req2.request_sequence_number(), AddTo(kMyInitialReqSn, 1));
+ EXPECT_EQ(req2.sender_last_assigned_tsn(),
+ TSN(*retransmission_queue_.next_tsn() - 1));
+ EXPECT_THAT(req2.stream_ids(),
+ UnorderedElementsAre(StreamID(41), StreamID(43)));
+}
+
+TEST_F(StreamResetHandlerTest, SendIncomingResetJustReturnsNothingPerformed) {
+ Parameters::Builder builder;
+ builder.Add(
+ IncomingSSNResetRequestParameter(kPeerInitialReqSn, {StreamID(1)}));
+
+ std::vector<ReconfigurationResponseParameter> responses =
+ HandleAndCatchResponse(ReConfigChunk(builder.Build()));
+ ASSERT_THAT(responses, SizeIs(1));
+ EXPECT_THAT(responses[0].response_sequence_number(), kPeerInitialReqSn);
+ EXPECT_THAT(responses[0].result(), ResponseResult::kSuccessNothingToDo);
+}
+
+TEST_F(StreamResetHandlerTest, SendSameRequestTwiceReturnsNothingToDo) {
+ reasm_.Add(kPeerInitialTsn, gen_.Ordered({1, 2, 3, 4}, "BE"));
+ reasm_.Add(AddTo(kPeerInitialTsn, 1), gen_.Ordered({1, 2, 3, 4}, "BE"));
+
+ buf_.Observe(kPeerInitialTsn);
+ buf_.Observe(AddTo(kPeerInitialTsn, 1));
+ EXPECT_THAT(reasm_.FlushMessages(),
+ UnorderedElementsAre(
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload),
+ SctpMessageIs(StreamID(1), PPID(53), kShortPayload)));
+
+ Parameters::Builder builder1;
+ builder1.Add(OutgoingSSNResetRequestParameter(
+ kPeerInitialReqSn, ReconfigRequestSN(3), AddTo(kPeerInitialTsn, 1),
+ {StreamID(1)}));
+
+ std::vector<ReconfigurationResponseParameter> responses1 =
+ HandleAndCatchResponse(ReConfigChunk(builder1.Build()));
+ EXPECT_THAT(responses1, SizeIs(1));
+ EXPECT_EQ(responses1[0].result(), ResponseResult::kSuccessPerformed);
+
+ Parameters::Builder builder2;
+ builder2.Add(OutgoingSSNResetRequestParameter(
+ kPeerInitialReqSn, ReconfigRequestSN(3), AddTo(kPeerInitialTsn, 1),
+ {StreamID(1)}));
+
+ std::vector<ReconfigurationResponseParameter> responses2 =
+ HandleAndCatchResponse(ReConfigChunk(builder2.Build()));
+ EXPECT_THAT(responses2, SizeIs(1));
+ EXPECT_EQ(responses2[0].result(), ResponseResult::kSuccessNothingToDo);
+}
+} // namespace
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/transmission_control_block.cc b/net/dcsctp/socket/transmission_control_block.cc
new file mode 100644
index 0000000000..4fde40cee9
--- /dev/null
+++ b/net/dcsctp/socket/transmission_control_block.cc
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/socket/transmission_control_block.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "net/dcsctp/packet/chunk/data_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/idata_chunk.h"
+#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/reconfig_chunk.h"
+#include "net/dcsctp/packet/chunk/sack_chunk.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/rx/data_tracker.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/capabilities.h"
+#include "net/dcsctp/socket/stream_reset_handler.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/retransmission_queue.h"
+#include "net/dcsctp/tx/retransmission_timeout.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace dcsctp {
+
+void TransmissionControlBlock::ObserveRTT(DurationMs rtt) {
+ DurationMs prev_rto = rto_.rto();
+ rto_.ObserveRTT(rtt);
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "new rtt=" << *rtt
+ << ", srtt=" << *rto_.srtt() << ", rto=" << *rto_.rto()
+ << " (" << *prev_rto << ")";
+ t3_rtx_->set_duration(rto_.rto());
+
+ DurationMs delayed_ack_tmo =
+ std::min(rto_.rto() * 0.5, options_.delayed_ack_max_timeout);
+ delayed_ack_timer_->set_duration(delayed_ack_tmo);
+}
+
+absl::optional<DurationMs> TransmissionControlBlock::OnRtxTimerExpiry() {
+ TimeMs now = callbacks_.TimeMillis();
+ RTC_DLOG(LS_INFO) << log_prefix_ << "Timer " << t3_rtx_->name()
+ << " has expired";
+ if (cookie_echo_chunk_.has_value()) {
+ // In the COOKIE_ECHO state, let the T1-COOKIE timer trigger
+ // retransmissions, to avoid having two timers doing that.
+ RTC_DLOG(LS_VERBOSE) << "Not retransmitting as T1-cookie is active.";
+ } else {
+ if (IncrementTxErrorCounter("t3-rtx expired")) {
+ retransmission_queue_.HandleT3RtxTimerExpiry();
+ SendBufferedPackets(now);
+ }
+ }
+ return absl::nullopt;
+}
+
+absl::optional<DurationMs> TransmissionControlBlock::OnDelayedAckTimerExpiry() {
+ data_tracker_.HandleDelayedAckTimerExpiry();
+ MaybeSendSack();
+ return absl::nullopt;
+}
+
+void TransmissionControlBlock::MaybeSendSack() {
+ if (data_tracker_.ShouldSendAck(/*also_if_delayed=*/false)) {
+ SctpPacket::Builder builder = PacketBuilder();
+ builder.Add(
+ data_tracker_.CreateSelectiveAck(reassembly_queue_.remaining_bytes()));
+ Send(builder);
+ }
+}
+
+void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder,
+ TimeMs now) {
+ for (int packet_idx = 0;; ++packet_idx) {
+ // Only add control chunks to the first packet that is sent, if sending
+ // multiple packets in one go (as allowed by the congestion window).
+ if (packet_idx == 0) {
+ if (cookie_echo_chunk_.has_value()) {
+ // https://tools.ietf.org/html/rfc4960#section-5.1
+ // "The COOKIE ECHO chunk can be bundled with any pending outbound DATA
+ // chunks, but it MUST be the first chunk in the packet..."
+ RTC_DCHECK(builder.empty());
+ builder.Add(*cookie_echo_chunk_);
+ }
+
+ // https://tools.ietf.org/html/rfc4960#section-6
+ // "Before an endpoint transmits a DATA chunk, if any received DATA
+ // chunks have not been acknowledged (e.g., due to delayed ack), the
+ // sender should create a SACK and bundle it with the outbound DATA chunk,
+ // as long as the size of the final SCTP packet does not exceed the
+ // current MTU."
+ if (data_tracker_.ShouldSendAck(/*also_if_delayed=*/true)) {
+ builder.Add(data_tracker_.CreateSelectiveAck(
+ reassembly_queue_.remaining_bytes()));
+ }
+ if (retransmission_queue_.ShouldSendForwardTsn(now)) {
+ if (capabilities_.message_interleaving) {
+ builder.Add(retransmission_queue_.CreateIForwardTsn());
+ } else {
+ builder.Add(retransmission_queue_.CreateForwardTsn());
+ }
+ }
+ absl::optional<ReConfigChunk> reconfig =
+ stream_reset_handler_.MakeStreamResetRequest();
+ if (reconfig.has_value()) {
+ builder.Add(*reconfig);
+ }
+ }
+
+ auto chunks =
+ retransmission_queue_.GetChunksToSend(now, builder.bytes_remaining());
+ for (auto& elem : chunks) {
+ TSN tsn = elem.first;
+ Data data = std::move(elem.second);
+ if (capabilities_.message_interleaving) {
+ builder.Add(IDataChunk(tsn, std::move(data), false));
+ } else {
+ builder.Add(DataChunk(tsn, std::move(data), false));
+ }
+ }
+ if (builder.empty()) {
+ break;
+ }
+ Send(builder);
+
+ if (cookie_echo_chunk_.has_value()) {
+ // https://tools.ietf.org/html/rfc4960#section-5.1
+ // "... until the COOKIE ACK is returned the sender MUST NOT send any
+ // other packets to the peer."
+ break;
+ }
+ }
+}
+
+std::string TransmissionControlBlock::ToString() const {
+ rtc::StringBuilder sb;
+
+ sb.AppendFormat(
+ "verification_tag=%08x, last_cumulative_ack=%u, capabilities=",
+ *peer_verification_tag_, *data_tracker_.last_cumulative_acked_tsn());
+
+ if (capabilities_.partial_reliability) {
+ sb << "PR,";
+ }
+ if (capabilities_.message_interleaving) {
+ sb << "IL,";
+ }
+ if (capabilities_.reconfig) {
+ sb << "Reconfig,";
+ }
+
+ return sb.Release();
+}
+
+} // namespace dcsctp
diff --git a/net/dcsctp/socket/transmission_control_block.h b/net/dcsctp/socket/transmission_control_block.h
new file mode 100644
index 0000000000..172f7c0c08
--- /dev/null
+++ b/net/dcsctp/socket/transmission_control_block.h
@@ -0,0 +1,220 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_SOCKET_TRANSMISSION_CONTROL_BLOCK_H_
+#define NET_DCSCTP_SOCKET_TRANSMISSION_CONTROL_BLOCK_H_
+
+#include <cstdint>
+#include <functional>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "net/dcsctp/common/sequence_numbers.h"
+#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/rx/data_tracker.h"
+#include "net/dcsctp/rx/reassembly_queue.h"
+#include "net/dcsctp/socket/capabilities.h"
+#include "net/dcsctp/socket/context.h"
+#include "net/dcsctp/socket/heartbeat_handler.h"
+#include "net/dcsctp/socket/stream_reset_handler.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/retransmission_error_counter.h"
+#include "net/dcsctp/tx/retransmission_queue.h"
+#include "net/dcsctp/tx/retransmission_timeout.h"
+#include "net/dcsctp/tx/send_queue.h"
+
+namespace dcsctp {
+
+// The TransmissionControlBlock (TCB) represents an open connection to a peer,
+// and holds all the resources for it. If the connection is e.g. shut down,
+// closed or restarted, this object will be deleted and/or replaced.
+class TransmissionControlBlock : public Context {
+ public:
+ TransmissionControlBlock(TimerManager& timer_manager,
+ absl::string_view log_prefix,
+ const DcSctpOptions& options,
+ const Capabilities& capabilities,
+ DcSctpSocketCallbacks& callbacks,
+ SendQueue& send_queue,
+ VerificationTag my_verification_tag,
+ TSN my_initial_tsn,
+ VerificationTag peer_verification_tag,
+ TSN peer_initial_tsn,
+ size_t a_rwnd,
+ TieTag tie_tag,
+ std::function<bool()> is_connection_established,
+ std::function<void(SctpPacket::Builder&)> send_fn)
+ : log_prefix_(log_prefix),
+ options_(options),
+ timer_manager_(timer_manager),
+ capabilities_(capabilities),
+ callbacks_(callbacks),
+ t3_rtx_(timer_manager_.CreateTimer(
+ "t3-rtx",
+ [this]() { return OnRtxTimerExpiry(); },
+ TimerOptions(options.rto_initial))),
+ delayed_ack_timer_(timer_manager_.CreateTimer(
+ "delayed-ack",
+ [this]() { return OnDelayedAckTimerExpiry(); },
+ TimerOptions(options.delayed_ack_max_timeout,
+ TimerBackoffAlgorithm::kExponential,
+ /*max_restarts=*/0))),
+ my_verification_tag_(my_verification_tag),
+ my_initial_tsn_(my_initial_tsn),
+ peer_verification_tag_(peer_verification_tag),
+ peer_initial_tsn_(peer_initial_tsn),
+ tie_tag_(tie_tag),
+ is_connection_established_(std::move(is_connection_established)),
+ send_fn_(std::move(send_fn)),
+ rto_(options),
+ tx_error_counter_(log_prefix, options),
+ data_tracker_(log_prefix, delayed_ack_timer_.get(), peer_initial_tsn),
+ reassembly_queue_(log_prefix,
+ peer_initial_tsn,
+ options.max_receiver_window_buffer_size),
+ retransmission_queue_(
+ log_prefix,
+ my_initial_tsn,
+ a_rwnd,
+ send_queue,
+ [this](DurationMs rtt) { return ObserveRTT(rtt); },
+ [this]() { tx_error_counter_.Clear(); },
+ *t3_rtx_,
+ options,
+ capabilities.partial_reliability,
+ capabilities.message_interleaving),
+ stream_reset_handler_(log_prefix,
+ this,
+ &timer_manager,
+ &data_tracker_,
+ &reassembly_queue_,
+ &retransmission_queue_),
+ heartbeat_handler_(log_prefix, options, this, &timer_manager_) {}
+
+ // Implementation of `Context`.
+ bool is_connection_established() const override {
+ return is_connection_established_();
+ }
+ TSN my_initial_tsn() const override { return my_initial_tsn_; }
+ TSN peer_initial_tsn() const override { return peer_initial_tsn_; }
+ DcSctpSocketCallbacks& callbacks() const override { return callbacks_; }
+ void ObserveRTT(DurationMs rtt) override;
+ DurationMs current_rto() const override { return rto_.rto(); }
+ bool IncrementTxErrorCounter(absl::string_view reason) override {
+ return tx_error_counter_.Increment(reason);
+ }
+ void ClearTxErrorCounter() override { tx_error_counter_.Clear(); }
+ SctpPacket::Builder PacketBuilder() const override {
+ return SctpPacket::Builder(peer_verification_tag_, options_);
+ }
+ bool HasTooManyTxErrors() const override {
+ return tx_error_counter_.IsExhausted();
+ }
+ void Send(SctpPacket::Builder& builder) override { send_fn_(builder); }
+
+ // Other accessors
+ DataTracker& data_tracker() { return data_tracker_; }
+ ReassemblyQueue& reassembly_queue() { return reassembly_queue_; }
+ RetransmissionQueue& retransmission_queue() { return retransmission_queue_; }
+ StreamResetHandler& stream_reset_handler() { return stream_reset_handler_; }
+ HeartbeatHandler& heartbeat_handler() { return heartbeat_handler_; }
+
+ // Returns this socket's verification tag, set in all packet headers.
+ VerificationTag my_verification_tag() const { return my_verification_tag_; }
+ // Returns the peer's verification tag, which should be in received packets.
+ VerificationTag peer_verification_tag() const {
+ return peer_verification_tag_;
+ }
+ // All negotiated supported capabilities.
+ const Capabilities& capabilities() const { return capabilities_; }
+ // A 64-bit tie-tag, used to e.g. detect reconnections.
+ TieTag tie_tag() const { return tie_tag_; }
+
+ // Sends a SACK, if there is a need to.
+ void MaybeSendSack();
+
+  // Will be set while the socket is in the kCookieEchoed state. In that state,
+  // only a single packet may be outstanding, and it must contain the COOKIE
+  // ECHO chunk as the first chunk in that packet, until the COOKIE ACK has
+  // been received, which will make the socket call `ClearCookieEchoChunk`.
+ void SetCookieEchoChunk(CookieEchoChunk chunk) {
+ cookie_echo_chunk_ = std::move(chunk);
+ }
+
+ // Called when the COOKIE ACK chunk has been received, to allow further
+ // packets to be sent.
+ void ClearCookieEchoChunk() { cookie_echo_chunk_ = absl::nullopt; }
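+
+  // Rough call sequence (sketch): the socket calls `SetCookieEchoChunk(...)`
+  // when it sends the COOKIE ECHO, `SendBufferedPackets(...)` then bundles
+  // that chunk first in a single outgoing packet, and `ClearCookieEchoChunk()`
+  // is called once the COOKIE ACK arrives.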
+
+ bool has_cookie_echo_chunk() const { return cookie_echo_chunk_.has_value(); }
+
+  // Fills `builder` (which may already be filled with control chunks) with
+  // other control and data chunks, and sends as many packets as the congestion
+  // control algorithm allows.
+ void SendBufferedPackets(SctpPacket::Builder& builder, TimeMs now);
+
+ // As above, but without passing in a builder. If `cookie_echo_chunk_` is
+ // present, then only one packet will be sent, with this chunk as the first
+ // chunk.
+ void SendBufferedPackets(TimeMs now) {
+ SctpPacket::Builder builder(peer_verification_tag_, options_);
+ SendBufferedPackets(builder, now);
+ }
+
+ // Returns a textual representation of this object, for logging.
+ std::string ToString() const;
+
+ private:
+ // Will be called when the retransmission timer (t3-rtx) expires.
+ absl::optional<DurationMs> OnRtxTimerExpiry();
+ // Will be called when the delayed ack timer expires.
+ absl::optional<DurationMs> OnDelayedAckTimerExpiry();
+
+ const std::string log_prefix_;
+ const DcSctpOptions options_;
+ TimerManager& timer_manager_;
+ // Negotiated capabilities that both peers support.
+ const Capabilities capabilities_;
+ DcSctpSocketCallbacks& callbacks_;
+ // The data retransmission timer, called t3-rtx in SCTP.
+ const std::unique_ptr<Timer> t3_rtx_;
+ // Delayed ack timer, which triggers when acks should be sent (when delayed).
+ const std::unique_ptr<Timer> delayed_ack_timer_;
+ const VerificationTag my_verification_tag_;
+ const TSN my_initial_tsn_;
+ const VerificationTag peer_verification_tag_;
+ const TSN peer_initial_tsn_;
+ // Nonce, used to detect reconnections.
+ const TieTag tie_tag_;
+ const std::function<bool()> is_connection_established_;
+ const std::function<void(SctpPacket::Builder&)> send_fn_;
+
+ RetransmissionTimeout rto_;
+ RetransmissionErrorCounter tx_error_counter_;
+ DataTracker data_tracker_;
+ ReassemblyQueue reassembly_queue_;
+ RetransmissionQueue retransmission_queue_;
+ StreamResetHandler stream_reset_handler_;
+ HeartbeatHandler heartbeat_handler_;
+
+  // Only valid when the socket state == State::kCookieEchoed. In this state,
+  // the socket must wait for the COOKIE ACK before sending any further packets
+  // (other than the COOKIE ECHO itself). So if `cookie_echo_chunk_` is present,
+  // `SendBufferedPackets` will only send a single packet, with this chunk as
+  // the first chunk in the packet.
+ absl::optional<CookieEchoChunk> cookie_echo_chunk_ = absl::nullopt;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_SOCKET_TRANSMISSION_CONTROL_BLOCK_H_
diff --git a/net/dcsctp/testing/BUILD.gn b/net/dcsctp/testing/BUILD.gn
index fa20500704..5367ef8c6f 100644
--- a/net/dcsctp/testing/BUILD.gn
+++ b/net/dcsctp/testing/BUILD.gn
@@ -20,9 +20,16 @@ rtc_library("data_generator") {
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
+ "../common:internal_types",
+ "../packet:data",
+ "../public:types",
]
sources = [
"data_generator.cc",
"data_generator.h",
]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
diff --git a/net/dcsctp/timer/BUILD.gn b/net/dcsctp/timer/BUILD.gn
index d92aca8f5a..a0ba5b030e 100644
--- a/net/dcsctp/timer/BUILD.gn
+++ b/net/dcsctp/timer/BUILD.gn
@@ -14,6 +14,8 @@ rtc_library("timer") {
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
+ "../public:socket",
+ "../public:strong_alias",
"../public:types",
]
sources = [
@@ -21,6 +23,29 @@ rtc_library("timer") {
"timer.cc",
"timer.h",
]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("task_queue_timeout") {
+ deps = [
+ "../../../api:array_view",
+ "../../../api/task_queue:task_queue",
+ "../../../rtc_base",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../rtc_base/task_utils:pending_task_safety_flag",
+ "../../../rtc_base/task_utils:to_queued_task",
+ "../public:socket",
+ "../public:strong_alias",
+ "../public:types",
+ ]
+ sources = [
+ "task_queue_timeout.cc",
+ "task_queue_timeout.h",
+ ]
}
if (rtc_include_tests) {
@@ -29,13 +54,20 @@ if (rtc_include_tests) {
defines = []
deps = [
+ ":task_queue_timeout",
":timer",
"../../../api:array_view",
"../../../rtc_base:checks",
"../../../rtc_base:gunit_helpers",
"../../../rtc_base:rtc_base_approved",
"../../../test:test_support",
+ "../../../test/time_controller:time_controller",
+ "../public:socket",
+ ]
+ sources = [
+ "task_queue_timeout_test.cc",
+ "timer_test.cc",
]
- sources = [ "timer_test.cc" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/net/dcsctp/timer/fake_timeout.h b/net/dcsctp/timer/fake_timeout.h
index 265b34edfa..927e6b2808 100644
--- a/net/dcsctp/timer/fake_timeout.h
+++ b/net/dcsctp/timer/fake_timeout.h
@@ -18,7 +18,9 @@
#include <utility>
#include <vector>
+#include "absl/types/optional.h"
#include "net/dcsctp/public/timeout.h"
+#include "rtc_base/checks.h"
namespace dcsctp {
@@ -32,14 +34,18 @@ class FakeTimeout : public Timeout {
~FakeTimeout() override { on_delete_(this); }
void Start(DurationMs duration_ms, TimeoutID timeout_id) override {
+ RTC_DCHECK(expiry_ == TimeMs::InfiniteFuture());
timeout_id_ = timeout_id;
expiry_ = get_time_() + duration_ms;
}
- void Stop() override { expiry_ = InfiniteFuture(); }
+ void Stop() override {
+ RTC_DCHECK(expiry_ != TimeMs::InfiniteFuture());
+ expiry_ = TimeMs::InfiniteFuture();
+ }
bool EvaluateHasExpired(TimeMs now) {
if (now >= expiry_) {
- expiry_ = InfiniteFuture();
+ expiry_ = TimeMs::InfiniteFuture();
return true;
}
return false;
@@ -48,15 +54,11 @@ class FakeTimeout : public Timeout {
TimeoutID timeout_id() const { return timeout_id_; }
private:
- static constexpr TimeMs InfiniteFuture() {
- return TimeMs(std::numeric_limits<TimeMs::UnderlyingType>::max());
- }
-
const std::function<TimeMs()> get_time_;
const std::function<void(FakeTimeout*)> on_delete_;
TimeoutID timeout_id_ = TimeoutID(0);
- TimeMs expiry_ = InfiniteFuture();
+ TimeMs expiry_ = TimeMs::InfiniteFuture();
};
class FakeTimeoutManager {
@@ -73,15 +75,20 @@ class FakeTimeoutManager {
return timer;
}
- std::vector<TimeoutID> RunTimers() {
+  // NOTE: This can't return a vector of expired timeouts, because after
+  // calling EvaluateHasExpired the caller must call socket->HandleTimeout
+  // directly; the owning Timer still believes it's running and needs to be
+  // updated to set Timer::is_running_ to false before the Timer or Timeout is
+  // operated on again.
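+  //
+  // Intended usage is a loop (sketch, mirroring timer_test.cc):
+  //
+  //   while (absl::optional<TimeoutID> timeout_id =
+  //              fake_manager.GetNextExpiredTimeout()) {
+  //     timer_manager.HandleTimeout(*timeout_id);
+  //   }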
+ absl::optional<TimeoutID> GetNextExpiredTimeout() {
TimeMs now = get_time_();
std::vector<TimeoutID> expired_timers;
for (auto& timer : timers_) {
if (timer->EvaluateHasExpired(now)) {
- expired_timers.push_back(timer->timeout_id());
+ return timer->timeout_id();
}
}
- return expired_timers;
+ return absl::nullopt;
}
private:
diff --git a/net/dcsctp/timer/task_queue_timeout.cc b/net/dcsctp/timer/task_queue_timeout.cc
new file mode 100644
index 0000000000..6d3054eeb8
--- /dev/null
+++ b/net/dcsctp/timer/task_queue_timeout.cc
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/timer/task_queue_timeout.h"
+
+#include "rtc_base/logging.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+
+namespace dcsctp {
+
+TaskQueueTimeoutFactory::TaskQueueTimeout::TaskQueueTimeout(
+ TaskQueueTimeoutFactory& parent)
+ : parent_(parent),
+ pending_task_safety_flag_(webrtc::PendingTaskSafetyFlag::Create()) {}
+
+TaskQueueTimeoutFactory::TaskQueueTimeout::~TaskQueueTimeout() {
+ RTC_DCHECK_RUN_ON(&parent_.thread_checker_);
+ pending_task_safety_flag_->SetNotAlive();
+}
+
+void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms,
+ TimeoutID timeout_id) {
+ RTC_DCHECK_RUN_ON(&parent_.thread_checker_);
+ RTC_DCHECK(timeout_expiration_ == TimeMs::InfiniteFuture());
+ timeout_expiration_ = parent_.get_time_() + duration_ms;
+ timeout_id_ = timeout_id;
+
+ if (timeout_expiration_ >= posted_task_expiration_) {
+    // There is already a running task, and it's scheduled to expire no later
+    // than the new expiration time. Don't do anything; the
+    // `timeout_expiration_` has already been updated, and if the delayed task
+    // _does_ expire and the timer hasn't been stopped, that will be noticed in
+    // the timeout handler and the task will be re-scheduled. Most timers are
+    // stopped before they expire.
+ return;
+ }
+
+ if (posted_task_expiration_ != TimeMs::InfiniteFuture()) {
+ RTC_DLOG(LS_VERBOSE) << "New timeout duration is less than scheduled - "
+ "ghosting old delayed task.";
+ // There is already a scheduled delayed task, but its expiration time is
+ // further away than the new expiration, so it can't be used. It will be
+ // "killed" by replacing the safety flag. This is not expected to happen
+    // especially often; mainly when a timer did exponential backoff and
+ // later recovered.
+ pending_task_safety_flag_->SetNotAlive();
+ pending_task_safety_flag_ = webrtc::PendingTaskSafetyFlag::Create();
+ }
+
+ posted_task_expiration_ = timeout_expiration_;
+ parent_.task_queue_.PostDelayedTask(
+ webrtc::ToQueuedTask(
+ pending_task_safety_flag_,
+ [timeout_id, this]() {
+            RTC_DLOG(LS_VERBOSE) << "Timeout expired: " << timeout_id.value();
+ RTC_DCHECK_RUN_ON(&parent_.thread_checker_);
+ RTC_DCHECK(posted_task_expiration_ != TimeMs::InfiniteFuture());
+ posted_task_expiration_ = TimeMs::InfiniteFuture();
+
+ if (timeout_expiration_ == TimeMs::InfiniteFuture()) {
+ // The timeout was stopped before it expired. Very common.
+ } else {
+ // Note that the timeout might have been restarted, which updated
+ // `timeout_expiration_` but left the scheduled task running. So
+ // if it's not quite time to trigger the timeout yet, schedule a
+ // new delayed task with what's remaining and retry at that point
+ // in time.
+ DurationMs remaining = timeout_expiration_ - parent_.get_time_();
+ timeout_expiration_ = TimeMs::InfiniteFuture();
+ if (*remaining > 0) {
+ Start(remaining, timeout_id_);
+ } else {
+ // It has actually triggered.
+ RTC_DLOG(LS_VERBOSE)
+                    << "Timeout triggered: " << timeout_id.value();
+ parent_.on_expired_(timeout_id_);
+ }
+ }
+ }),
+ duration_ms.value());
+}
+
+void TaskQueueTimeoutFactory::TaskQueueTimeout::Stop() {
+ // As the TaskQueue doesn't support deleting a posted task, just mark the
+ // timeout as not running.
+ RTC_DCHECK_RUN_ON(&parent_.thread_checker_);
+ timeout_expiration_ = TimeMs::InfiniteFuture();
+}
+
+} // namespace dcsctp
diff --git a/net/dcsctp/timer/task_queue_timeout.h b/net/dcsctp/timer/task_queue_timeout.h
new file mode 100644
index 0000000000..e8d12df592
--- /dev/null
+++ b/net/dcsctp/timer/task_queue_timeout.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_TIMER_TASK_QUEUE_TIMEOUT_H_
+#define NET_DCSCTP_TIMER_TASK_QUEUE_TIMEOUT_H_
+
+#include <memory>
+#include <utility>
+
+#include "api/task_queue/task_queue_base.h"
+#include "net/dcsctp/public/timeout.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+
+namespace dcsctp {
+
+// The TaskQueueTimeoutFactory creates `Timeout` instances, which schedule
+// themselves to be triggered on the provided `task_queue`, which may be a
+// thread, an actual TaskQueue or anything else that supports posting a
+// delayed task.
+//
+// Note that each `DcSctpSocket` must have its own `TaskQueueTimeoutFactory`,
+// as `TimeoutID`s are not unique among sockets.
+//
+// This class must outlive any Timeout it has created. Note that the
+// `DcSctpSocket` will ensure that all Timeouts are deleted when the socket is
+// destructed, which means that this class must outlive the `DcSctpSocket`.
+//
+// This class, and the timeouts it creates, are not thread safe.
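+//
+// A minimal usage sketch (illustrative only; the exact wiring of the clock and
+// socket is up to the embedder):
+//
+//   TaskQueueTimeoutFactory factory(
+//       task_queue, [&]() { return TimeMs(rtc::TimeMillis()); },
+//       [&](TimeoutID timeout_id) { socket.HandleTimeout(timeout_id); });
+//   std::unique_ptr<Timeout> timeout = factory.CreateTimeout();
+//   timeout->Start(DurationMs(1000), TimeoutID(42));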
+class TaskQueueTimeoutFactory {
+ public:
+ // The `get_time` function must return the current time, relative to any
+ // epoch. Whenever a timeout expires, the `on_expired` callback will be
+  // triggered, and the client should then provide the `timeout_id` to
+ // `DcSctpSocketInterface::HandleTimeout`.
+ TaskQueueTimeoutFactory(webrtc::TaskQueueBase& task_queue,
+ std::function<TimeMs()> get_time,
+ std::function<void(TimeoutID timeout_id)> on_expired)
+ : task_queue_(task_queue),
+ get_time_(std::move(get_time)),
+ on_expired_(std::move(on_expired)) {}
+
+ // Creates an implementation of `Timeout`.
+ std::unique_ptr<Timeout> CreateTimeout() {
+ return std::make_unique<TaskQueueTimeout>(*this);
+ }
+
+ private:
+ class TaskQueueTimeout : public Timeout {
+ public:
+ explicit TaskQueueTimeout(TaskQueueTimeoutFactory& parent);
+ ~TaskQueueTimeout();
+
+ void Start(DurationMs duration_ms, TimeoutID timeout_id) override;
+ void Stop() override;
+
+ private:
+ TaskQueueTimeoutFactory& parent_;
+    // A safety flag to ensure that tasks posted to the task queue don't
+    // reference this object after it has gone out of scope. Note that this
+    // safety flag will be re-created if the scheduled-but-not-yet-expired task
+    // is not to be run. This happens when there is a posted delayed task with
+    // an expiration time _further away_ than what is now the expected
+    // expiration time. In this scenario, a new delayed task has to be posted
+    // with a shorter duration and the old task has to be forgotten.
+ rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> pending_task_safety_flag_;
+ // The time when the posted delayed task is set to expire. Will be set to
+ // the infinite future if there is no such task running.
+ TimeMs posted_task_expiration_ = TimeMs::InfiniteFuture();
+ // The time when the timeout expires. It will be set to the infinite future
+ // if the timeout is not running/not started.
+ TimeMs timeout_expiration_ = TimeMs::InfiniteFuture();
+ // The current timeout ID that will be reported when expired.
+ TimeoutID timeout_id_ = TimeoutID(0);
+ };
+
+ RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_;
+ webrtc::TaskQueueBase& task_queue_;
+ const std::function<TimeMs()> get_time_;
+ const std::function<void(TimeoutID)> on_expired_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_TIMER_TASK_QUEUE_TIMEOUT_H_
diff --git a/net/dcsctp/timer/task_queue_timeout_test.cc b/net/dcsctp/timer/task_queue_timeout_test.cc
new file mode 100644
index 0000000000..9d3846953b
--- /dev/null
+++ b/net/dcsctp/timer/task_queue_timeout_test.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/timer/task_queue_timeout.h"
+
+#include <memory>
+
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace dcsctp {
+namespace {
+using ::testing::MockFunction;
+
+class TaskQueueTimeoutTest : public testing::Test {
+ protected:
+ TaskQueueTimeoutTest()
+ : time_controller_(webrtc::Timestamp::Millis(1234)),
+ task_queue_(time_controller_.GetMainThread()),
+ factory_(
+ *task_queue_,
+ [this]() {
+ return TimeMs(time_controller_.GetClock()->CurrentTime().ms());
+ },
+ on_expired_.AsStdFunction()) {}
+
+ void AdvanceTime(DurationMs duration) {
+ time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(*duration));
+ }
+
+ MockFunction<void(TimeoutID)> on_expired_;
+ webrtc::GlobalSimulatedTimeController time_controller_;
+
+ rtc::Thread* task_queue_;
+ TaskQueueTimeoutFactory factory_;
+};
+
+TEST_F(TaskQueueTimeoutTest, StartPostsDelayedTask) {
+ std::unique_ptr<Timeout> timeout = factory_.CreateTimeout();
+ timeout->Start(DurationMs(1000), TimeoutID(1));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTime(DurationMs(999));
+
+ EXPECT_CALL(on_expired_, Call(TimeoutID(1)));
+ AdvanceTime(DurationMs(1));
+}
+
+TEST_F(TaskQueueTimeoutTest, StopBeforeExpiringDoesntTrigger) {
+ std::unique_ptr<Timeout> timeout = factory_.CreateTimeout();
+ timeout->Start(DurationMs(1000), TimeoutID(1));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTime(DurationMs(999));
+
+ timeout->Stop();
+
+ AdvanceTime(DurationMs(1));
+ AdvanceTime(DurationMs(1000));
+}
+
+TEST_F(TaskQueueTimeoutTest, RestartProlongingTimeoutDuration) {
+ std::unique_ptr<Timeout> timeout = factory_.CreateTimeout();
+ timeout->Start(DurationMs(1000), TimeoutID(1));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTime(DurationMs(500));
+
+ timeout->Restart(DurationMs(1000), TimeoutID(2));
+
+ AdvanceTime(DurationMs(999));
+
+ EXPECT_CALL(on_expired_, Call(TimeoutID(2)));
+ AdvanceTime(DurationMs(1));
+}
+
+TEST_F(TaskQueueTimeoutTest, RestartWithShorterDurationExpiresWhenExpected) {
+ std::unique_ptr<Timeout> timeout = factory_.CreateTimeout();
+ timeout->Start(DurationMs(1000), TimeoutID(1));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTime(DurationMs(500));
+
+ timeout->Restart(DurationMs(200), TimeoutID(2));
+
+ AdvanceTime(DurationMs(199));
+
+ EXPECT_CALL(on_expired_, Call(TimeoutID(2)));
+ AdvanceTime(DurationMs(1));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTime(DurationMs(1000));
+}
+
+TEST_F(TaskQueueTimeoutTest, KilledBeforeExpired) {
+ std::unique_ptr<Timeout> timeout = factory_.CreateTimeout();
+ timeout->Start(DurationMs(1000), TimeoutID(1));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTime(DurationMs(500));
+
+ timeout = nullptr;
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTime(DurationMs(1000));
+}
+} // namespace
+} // namespace dcsctp
diff --git a/net/dcsctp/timer/timer.cc b/net/dcsctp/timer/timer.cc
index 2376e7aecb..593d639fa7 100644
--- a/net/dcsctp/timer/timer.cc
+++ b/net/dcsctp/timer/timer.cc
@@ -9,7 +9,9 @@
*/
#include "net/dcsctp/timer/timer.h"
+#include <algorithm>
#include <cstdint>
+#include <limits>
#include <memory>
#include <unordered_map>
#include <utility>
@@ -17,11 +19,12 @@
#include "absl/memory/memory.h"
#include "absl/strings/string_view.h"
#include "net/dcsctp/public/timeout.h"
+#include "rtc_base/checks.h"
namespace dcsctp {
namespace {
-TimeoutID MakeTimeoutId(uint32_t timer_id, uint32_t generation) {
- return TimeoutID(static_cast<uint64_t>(timer_id) << 32 | generation);
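+// Packs a timer ID and a timer generation into a single TimeoutID; e.g.
+// (sketch) MakeTimeoutId(TimerID(1), TimerGeneration(2)) ==
+// TimeoutID(0x100000002).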
+TimeoutID MakeTimeoutId(TimerID timer_id, TimerGeneration generation) {
+ return TimeoutID(static_cast<uint64_t>(*timer_id) << 32 | *generation);
}
DurationMs GetBackoffDuration(TimerBackoffAlgorithm algorithm,
@@ -30,13 +33,23 @@ DurationMs GetBackoffDuration(TimerBackoffAlgorithm algorithm,
switch (algorithm) {
case TimerBackoffAlgorithm::kFixed:
return base_duration;
- case TimerBackoffAlgorithm::kExponential:
- return DurationMs(*base_duration * (1 << expiration_count));
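+    // E.g. (sketch): with a 1000 ms base duration, expiration counts 1, 2, 3
+    // yield 2000, 4000 and 8000 ms, capped at kMaxTimerDuration.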
+ case TimerBackoffAlgorithm::kExponential: {
+ int32_t duration_ms = *base_duration;
+
+ while (expiration_count > 0 && duration_ms < *Timer::kMaxTimerDuration) {
+ duration_ms *= 2;
+ --expiration_count;
+ }
+
+ return DurationMs(std::min(duration_ms, *Timer::kMaxTimerDuration));
+ }
}
}
} // namespace
-Timer::Timer(uint32_t id,
+constexpr DurationMs Timer::kMaxTimerDuration;
+
+Timer::Timer(TimerID id,
absl::string_view name,
OnExpired on_expired,
UnregisterHandler unregister_handler,
@@ -59,11 +72,13 @@ void Timer::Start() {
expiration_count_ = 0;
if (!is_running()) {
is_running_ = true;
- timeout_->Start(duration_, MakeTimeoutId(id_, ++generation_));
+ generation_ = TimerGeneration(*generation_ + 1);
+ timeout_->Start(duration_, MakeTimeoutId(id_, generation_));
} else {
// Timer was running - stop and restart it, to make it expire in `duration_`
// from now.
- timeout_->Restart(duration_, MakeTimeoutId(id_, ++generation_));
+ generation_ = TimerGeneration(*generation_ + 1);
+ timeout_->Restart(duration_, MakeTimeoutId(id_, generation_));
}
}
@@ -75,31 +90,41 @@ void Timer::Stop() {
}
}
-void Timer::Trigger(uint32_t generation) {
+void Timer::Trigger(TimerGeneration generation) {
if (is_running_ && generation == generation_) {
++expiration_count_;
- if (options_.max_restarts >= 0 &&
- expiration_count_ > options_.max_restarts) {
- is_running_ = false;
+ is_running_ = false;
+ if (options_.max_restarts < 0 ||
+ expiration_count_ <= options_.max_restarts) {
+ // The timer should still be running after this triggers. Start a new
+ // timer. Note that it might be very quickly restarted again, if the
+ // `on_expired_` callback returns a new duration.
+ is_running_ = true;
+ DurationMs duration = GetBackoffDuration(options_.backoff_algorithm,
+ duration_, expiration_count_);
+ generation_ = TimerGeneration(*generation_ + 1);
+ timeout_->Start(duration, MakeTimeoutId(id_, generation_));
}
absl::optional<DurationMs> new_duration = on_expired_();
- if (new_duration.has_value()) {
+ if (new_duration.has_value() && new_duration != duration_) {
duration_ = new_duration.value();
- }
+ if (is_running_) {
+ // Restart it with new duration.
+ timeout_->Stop();
- if (is_running_) {
- // Restart it with new duration.
- DurationMs duration = GetBackoffDuration(options_.backoff_algorithm,
- duration_, expiration_count_);
- timeout_->Start(duration, MakeTimeoutId(id_, ++generation_));
+ DurationMs duration = GetBackoffDuration(options_.backoff_algorithm,
+ duration_, expiration_count_);
+ generation_ = TimerGeneration(*generation_ + 1);
+ timeout_->Start(duration, MakeTimeoutId(id_, generation_));
+ }
}
}
}
void TimerManager::HandleTimeout(TimeoutID timeout_id) {
- uint32_t timer_id = *timeout_id >> 32;
- uint32_t generation = *timeout_id;
+ TimerID timer_id(*timeout_id >> 32);
+ TimerGeneration generation(*timeout_id);
auto it = timers_.find(timer_id);
if (it != timers_.end()) {
it->second->Trigger(generation);
@@ -109,7 +134,12 @@ void TimerManager::HandleTimeout(TimeoutID timeout_id) {
std::unique_ptr<Timer> TimerManager::CreateTimer(absl::string_view name,
Timer::OnExpired on_expired,
const TimerOptions& options) {
- uint32_t id = ++next_id_;
+ next_id_ = TimerID(*next_id_ + 1);
+ TimerID id = next_id_;
+  // This would overflow after 4 billion timers have been created, which in
+  // SCTP would be after some 800 million reconnections on a single socket.
+  // Ensure this never happens.
+ RTC_CHECK_NE(*id, std::numeric_limits<uint32_t>::max());
auto timer = absl::WrapUnique(new Timer(
id, name, std::move(on_expired), [this, id]() { timers_.erase(id); },
create_timeout_(), options));
diff --git a/net/dcsctp/timer/timer.h b/net/dcsctp/timer/timer.h
index 6b68c98374..bf923ea4ca 100644
--- a/net/dcsctp/timer/timer.h
+++ b/net/dcsctp/timer/timer.h
@@ -12,6 +12,7 @@
#include <stdint.h>
+#include <algorithm>
#include <functional>
#include <memory>
#include <string>
@@ -20,10 +21,14 @@
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
+#include "net/dcsctp/public/strong_alias.h"
#include "net/dcsctp/public/timeout.h"
namespace dcsctp {
+using TimerID = StrongAlias<class TimerIDTag, uint32_t>;
+using TimerGeneration = StrongAlias<class TimerGenerationTag, uint32_t>;
+
enum class TimerBackoffAlgorithm {
// The base duration will be used for any restart.
kFixed,
@@ -68,6 +73,9 @@ struct TimerOptions {
// backoff algorithm).
class Timer {
public:
+ // The maximum timer duration - one day.
+ static constexpr DurationMs kMaxTimerDuration = DurationMs(24 * 3600 * 1000);
+
// When expired, the timer handler can optionally return a new duration which
// will be set as `duration` and used as base duration when the timer is
// restarted and as input to the backoff algorithm.
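  //
  // E.g. (sketch): returning `absl::make_optional(DurationMs(5000))` from the
  // handler makes 5000 ms the new base duration, while returning
  // `absl::nullopt` keeps the current duration.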
@@ -89,7 +97,9 @@ class Timer {
// Sets the base duration. The actual timer duration may be larger depending
// on the backoff algorithm.
- void set_duration(DurationMs duration) { duration_ = duration; }
+ void set_duration(DurationMs duration) {
+ duration_ = std::min(duration, kMaxTimerDuration);
+ }
// Retrieves the base duration. The actual timer duration may be larger
// depending on the backoff algorithm.
@@ -110,7 +120,7 @@ class Timer {
private:
friend class TimerManager;
using UnregisterHandler = std::function<void()>;
- Timer(uint32_t id,
+ Timer(TimerID id,
absl::string_view name,
OnExpired on_expired,
UnregisterHandler unregister,
@@ -122,9 +132,9 @@ class Timer {
// duration as decided by the backoff algorithm, unless the
// `TimerOptions::max_restarts` has been reached and then it will be stopped
// and `is_running()` will return false.
- void Trigger(uint32_t generation);
+ void Trigger(TimerGeneration generation);
- const uint32_t id_;
+ const TimerID id_;
const std::string name_;
const TimerOptions options_;
const OnExpired on_expired_;
@@ -133,8 +143,16 @@ class Timer {
DurationMs duration_;
- // Increased on each start, and is matched on Trigger, to avoid races.
- uint32_t generation_ = 0;
+  // Increased on each start and matched in Trigger, to avoid races; that is, a
+  // timeout may be evaluated/expired on a different thread while this thread
+  // has already stopped that timer. Note that the entire socket is not
+  // thread-safe, so `TimerManager::HandleTimeout` is never executed
+  // concurrently with any timer starting/stopping.
+ //
+ // This will wrap around after 4 billion timer restarts, and if it wraps
+ // around, it would just trigger _this_ timer in advance (but it's hard to
+ // restart it 4 billion times within its duration).
+ TimerGeneration generation_ = TimerGeneration(0);
bool is_running_ = false;
  // Incremented each time the timer expires; reset when stopped or restarted.
int expiration_count_ = 0;
@@ -158,8 +176,8 @@ class TimerManager {
private:
const std::function<std::unique_ptr<Timeout>()> create_timeout_;
- std::unordered_map<int, Timer*> timers_;
- uint32_t next_id_ = 0;
+ std::unordered_map<TimerID, Timer*, TimerID::Hasher> timers_;
+ TimerID next_id_ = TimerID(0);
};
} // namespace dcsctp
diff --git a/net/dcsctp/timer/timer_test.cc b/net/dcsctp/timer/timer_test.cc
index 9533234895..a403bb6b4b 100644
--- a/net/dcsctp/timer/timer_test.cc
+++ b/net/dcsctp/timer/timer_test.cc
@@ -32,8 +32,13 @@ class TimerTest : public testing::Test {
void AdvanceTimeAndRunTimers(DurationMs duration) {
now_ = now_ + duration;
- for (TimeoutID timeout_id : timeout_manager_.RunTimers()) {
- manager_.HandleTimeout(timeout_id);
+ for (;;) {
+ absl::optional<TimeoutID> timeout_id =
+ timeout_manager_.GetNextExpiredTimeout();
+ if (!timeout_id.has_value()) {
+ break;
+ }
+ manager_.HandleTimeout(*timeout_id);
}
}
@@ -310,5 +315,76 @@ TEST_F(TimerTest, ReturningNewDurationWhenExpired) {
AdvanceTimeAndRunTimers(DurationMs(1000));
}
+TEST_F(TimerTest, TimersHaveMaximumDuration) {
+ std::unique_ptr<Timer> t1 = manager_.CreateTimer(
+ "t1", on_expired_.AsStdFunction(),
+ TimerOptions(DurationMs(1000), TimerBackoffAlgorithm::kExponential));
+
+ t1->set_duration(DurationMs(2 * *Timer::kMaxTimerDuration));
+ EXPECT_EQ(t1->duration(), Timer::kMaxTimerDuration);
+}
+
+TEST_F(TimerTest, TimersHaveMaximumBackoffDuration) {
+ std::unique_ptr<Timer> t1 = manager_.CreateTimer(
+ "t1", on_expired_.AsStdFunction(),
+ TimerOptions(DurationMs(1000), TimerBackoffAlgorithm::kExponential));
+
+ t1->Start();
+
+ int max_exponent = static_cast<int>(log2(*Timer::kMaxTimerDuration / 1000));
+ for (int i = 0; i < max_exponent; ++i) {
+ EXPECT_CALL(on_expired_, Call).Times(1);
+ AdvanceTimeAndRunTimers(DurationMs(1000 * (1 << i)));
+ }
+
+ // Reached the maximum duration.
+ EXPECT_CALL(on_expired_, Call).Times(1);
+ AdvanceTimeAndRunTimers(Timer::kMaxTimerDuration);
+
+ EXPECT_CALL(on_expired_, Call).Times(1);
+ AdvanceTimeAndRunTimers(Timer::kMaxTimerDuration);
+
+ EXPECT_CALL(on_expired_, Call).Times(1);
+ AdvanceTimeAndRunTimers(Timer::kMaxTimerDuration);
+
+ EXPECT_CALL(on_expired_, Call).Times(1);
+ AdvanceTimeAndRunTimers(Timer::kMaxTimerDuration);
+}
+
+TEST_F(TimerTest, TimerCanBeStartedFromWithinExpirationHandler) {
+ std::unique_ptr<Timer> t1 = manager_.CreateTimer(
+ "t1", on_expired_.AsStdFunction(),
+ TimerOptions(DurationMs(1000), TimerBackoffAlgorithm::kFixed));
+
+ t1->Start();
+
+ // Start a timer, but don't return any new duration in callback.
+ EXPECT_CALL(on_expired_, Call).WillOnce([&]() {
+ EXPECT_TRUE(t1->is_running());
+ t1->set_duration(DurationMs(5000));
+ t1->Start();
+ return absl::nullopt;
+ });
+ AdvanceTimeAndRunTimers(DurationMs(1000));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTimeAndRunTimers(DurationMs(4999));
+
+  // Start a timer, and return a new duration in the callback.
+ EXPECT_CALL(on_expired_, Call).WillOnce([&]() {
+ EXPECT_TRUE(t1->is_running());
+ t1->set_duration(DurationMs(5000));
+ t1->Start();
+ return absl::make_optional(DurationMs(8000));
+ });
+ AdvanceTimeAndRunTimers(DurationMs(1));
+
+ EXPECT_CALL(on_expired_, Call).Times(0);
+ AdvanceTimeAndRunTimers(DurationMs(7999));
+
+ EXPECT_CALL(on_expired_, Call).Times(1);
+ AdvanceTimeAndRunTimers(DurationMs(1));
+}
+
} // namespace
} // namespace dcsctp
diff --git a/net/dcsctp/tx/BUILD.gn b/net/dcsctp/tx/BUILD.gn
new file mode 100644
index 0000000000..2f0b27afc6
--- /dev/null
+++ b/net/dcsctp/tx/BUILD.gn
@@ -0,0 +1,141 @@
+# Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_source_set("send_queue") {
+ deps = [
+ "../../../api:array_view",
+ "../common:internal_types",
+ "../packet:chunk",
+ "../packet:data",
+ "../public:types",
+ ]
+ sources = [ "send_queue.h" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("rr_send_queue") {
+ deps = [
+ ":send_queue",
+ "../../../api:array_view",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../common:pair_hash",
+ "../packet:data",
+ "../public:socket",
+ "../public:types",
+ ]
+ sources = [
+ "rr_send_queue.cc",
+ "rr_send_queue.h",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("retransmission_error_counter") {
+ deps = [
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../public:types",
+ ]
+ sources = [
+ "retransmission_error_counter.cc",
+ "retransmission_error_counter.h",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_library("retransmission_timeout") {
+ deps = [
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../public:types",
+ ]
+ sources = [
+ "retransmission_timeout.cc",
+ "retransmission_timeout.h",
+ ]
+}
+
+rtc_library("retransmission_queue") {
+ deps = [
+ ":retransmission_timeout",
+ ":send_queue",
+ "../../../api:array_view",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_approved",
+ "../common:math",
+ "../common:pair_hash",
+ "../common:sequence_numbers",
+ "../common:str_join",
+ "../packet:chunk",
+ "../packet:data",
+ "../public:types",
+ "../timer",
+ ]
+ sources = [
+ "retransmission_queue.cc",
+ "retransmission_queue.h",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+if (rtc_include_tests) {
+ rtc_source_set("mock_send_queue") {
+ testonly = true
+ deps = [
+ ":send_queue",
+ "../../../api:array_view",
+ "../../../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ sources = [ "mock_send_queue.h" ]
+ }
+
+ rtc_library("dcsctp_tx_unittests") {
+ testonly = true
+
+ deps = [
+ ":mock_send_queue",
+ ":retransmission_error_counter",
+ ":retransmission_queue",
+ ":retransmission_timeout",
+ ":rr_send_queue",
+ ":send_queue",
+ "../../../api:array_view",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ "../packet:chunk",
+ "../packet:data",
+ "../public:socket",
+ "../public:types",
+ "../testing:data_generator",
+ "../testing:testing_macros",
+ "../timer",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ sources = [
+ "retransmission_error_counter_test.cc",
+ "retransmission_queue_test.cc",
+ "retransmission_timeout_test.cc",
+ "rr_send_queue_test.cc",
+ ]
+ }
+}
diff --git a/net/dcsctp/tx/mock_send_queue.h b/net/dcsctp/tx/mock_send_queue.h
new file mode 100644
index 0000000000..0cf64583ae
--- /dev/null
+++ b/net/dcsctp/tx/mock_send_queue.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_
+#define NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_
+
+#include <cstdint>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/tx/send_queue.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+
+class MockSendQueue : public SendQueue {
+ public:
+ MockSendQueue() {
+ ON_CALL(*this, Produce).WillByDefault([](TimeMs now, size_t max_size) {
+ return absl::nullopt;
+ });
+ }
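+
+ // In tests, expectations are typically layered on top of this default. A
+ // minimal sketch (fixture and return values are hypothetical):
+ //
+ //   testing::NiceMock<MockSendQueue> send_queue;
+ //   EXPECT_CALL(send_queue, Discard).WillOnce(testing::Return(true));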
+
+ MOCK_METHOD(absl::optional<SendQueue::DataToSend>,
+ Produce,
+ (TimeMs now, size_t max_size),
+ (override));
+ MOCK_METHOD(bool,
+ Discard,
+ (IsUnordered unordered, StreamID stream_id, MID message_id),
+ (override));
+ MOCK_METHOD(void,
+ PrepareResetStreams,
+ (rtc::ArrayView<const StreamID> streams),
+ (override));
+ MOCK_METHOD(bool, CanResetStreams, (), (const, override));
+ MOCK_METHOD(void, CommitResetStreams, (), (override));
+ MOCK_METHOD(void, RollbackResetStreams, (), (override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(size_t, buffered_amount, (StreamID stream_id), (const, override));
+ MOCK_METHOD(size_t, total_buffered_amount, (), (const, override));
+ MOCK_METHOD(size_t,
+ buffered_amount_low_threshold,
+ (StreamID stream_id),
+ (const, override));
+ MOCK_METHOD(void,
+ SetBufferedAmountLowThreshold,
+ (StreamID stream_id, size_t bytes),
+ (override));
+};
+
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_
diff --git a/net/dcsctp/tx/retransmission_error_counter.cc b/net/dcsctp/tx/retransmission_error_counter.cc
new file mode 100644
index 0000000000..111b6efe96
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_error_counter.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/retransmission_error_counter.h"
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/logging.h"
+
+namespace dcsctp {
+bool RetransmissionErrorCounter::Increment(absl::string_view reason) {
+ ++counter_;
+ if (counter_ > limit_) {
+ RTC_DLOG(LS_INFO) << log_prefix_ << reason
+ << ", too many retransmissions, counter=" << counter_;
+ return false;
+ }
+
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << reason << ", new counter=" << counter_
+ << ", max=" << limit_;
+ return true;
+}
+
+void RetransmissionErrorCounter::Clear() {
+ if (counter_ > 0) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix_
+ << "recovered from counter=" << counter_;
+ counter_ = 0;
+ }
+}
+
+} // namespace dcsctp
diff --git a/net/dcsctp/tx/retransmission_error_counter.h b/net/dcsctp/tx/retransmission_error_counter.h
new file mode 100644
index 0000000000..bb8d1f754d
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_error_counter.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_TX_RETRANSMISSION_ERROR_COUNTER_H_
+#define NET_DCSCTP_TX_RETRANSMISSION_ERROR_COUNTER_H_
+
+#include <functional>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+
+namespace dcsctp {
+
+// The RetransmissionErrorCounter is a simple counter with a limit, and when
+// the limit is exceeded, the counter is exhausted and the connection will
+// be closed. It's incremented on retransmission errors, such as the T3-RTX
+// timer expiring, but also missing heartbeats and stream reset requests.
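+//
+// A minimal usage sketch (the log prefix and reason strings are hypothetical):
+//
+//   DcSctpOptions options;
+//   RetransmissionErrorCounter counter("conn: ", options);
+//   if (!counter.Increment("t3-rtx expired")) {
+//     // Too many retransmission errors; close the association.
+//   }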
+class RetransmissionErrorCounter {
+ public:
+ RetransmissionErrorCounter(absl::string_view log_prefix,
+ const DcSctpOptions& options)
+ : log_prefix_(std::string(log_prefix) + "rtx-errors: "),
+ limit_(options.max_retransmissions) {}
+
+ // Increments the retransmission error counter. Returns `false` if the
+ // maximum error count has been exceeded.
+ bool Increment(absl::string_view reason);
+ bool IsExhausted() const { return counter_ > limit_; }
+
+ // Clears the retransmission errors.
+ void Clear();
+
+ // Returns the current counter value.
+ int value() const { return counter_; }
+
+ private:
+ const std::string log_prefix_;
+ const int limit_;
+ int counter_ = 0;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_TX_RETRANSMISSION_ERROR_COUNTER_H_
diff --git a/net/dcsctp/tx/retransmission_error_counter_test.cc b/net/dcsctp/tx/retransmission_error_counter_test.cc
new file mode 100644
index 0000000000..61ee82926d
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_error_counter_test.cc
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/retransmission_error_counter.h"
+
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+
+TEST(RetransmissionErrorCounterTest, HasInitialValue) {
+ DcSctpOptions options;
+ RetransmissionErrorCounter counter("log: ", options);
+ EXPECT_EQ(counter.value(), 0);
+}
+
+TEST(RetransmissionErrorCounterTest, ReturnsFalseAtMaximumValue) {
+ DcSctpOptions options;
+ options.max_retransmissions = 5;
+ RetransmissionErrorCounter counter("log: ", options);
+ EXPECT_TRUE(counter.Increment("test")); // 1
+ EXPECT_TRUE(counter.Increment("test")); // 2
+ EXPECT_TRUE(counter.Increment("test")); // 3
+ EXPECT_TRUE(counter.Increment("test")); // 4
+ EXPECT_TRUE(counter.Increment("test")); // 5
+ EXPECT_FALSE(counter.Increment("test")); // Too many retransmissions
+}
+
+TEST(RetransmissionErrorCounterTest, CanHandleZeroRetransmission) {
+ DcSctpOptions options;
+ options.max_retransmissions = 0;
+ RetransmissionErrorCounter counter("log: ", options);
+ EXPECT_FALSE(counter.Increment("test")); // One is too many.
+}
+
+TEST(RetransmissionErrorCounterTest, IsExhaustedAtMaximum) {
+ DcSctpOptions options;
+ options.max_retransmissions = 3;
+ RetransmissionErrorCounter counter("log: ", options);
+ EXPECT_TRUE(counter.Increment("test")); // 1
+ EXPECT_FALSE(counter.IsExhausted());
+ EXPECT_TRUE(counter.Increment("test")); // 2
+ EXPECT_FALSE(counter.IsExhausted());
+ EXPECT_TRUE(counter.Increment("test")); // 3
+ EXPECT_FALSE(counter.IsExhausted());
+ EXPECT_FALSE(counter.Increment("test")); // Too many retransmissions
+ EXPECT_TRUE(counter.IsExhausted());
+ EXPECT_FALSE(counter.Increment("test")); // One after too many
+ EXPECT_TRUE(counter.IsExhausted());
+}
+
+TEST(RetransmissionErrorCounterTest, ClearingCounter) {
+ DcSctpOptions options;
+ options.max_retransmissions = 3;
+ RetransmissionErrorCounter counter("log: ", options);
+ EXPECT_TRUE(counter.Increment("test")); // 1
+ EXPECT_TRUE(counter.Increment("test")); // 2
+ counter.Clear();
+ EXPECT_TRUE(counter.Increment("test")); // 1
+ EXPECT_TRUE(counter.Increment("test")); // 2
+ EXPECT_TRUE(counter.Increment("test")); // 3
+ EXPECT_FALSE(counter.IsExhausted());
+ EXPECT_FALSE(counter.Increment("test")); // Too many retransmissions
+ EXPECT_TRUE(counter.IsExhausted());
+}
+
+} // namespace
+} // namespace dcsctp
diff --git a/net/dcsctp/tx/retransmission_queue.cc b/net/dcsctp/tx/retransmission_queue.cc
new file mode 100644
index 0000000000..ef2f0e3172
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_queue.cc
@@ -0,0 +1,889 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/retransmission_queue.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <functional>
+#include <iterator>
+#include <map>
+#include <set>
+#include <string>
+#include <unordered_map>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/math.h"
+#include "net/dcsctp/common/pair_hash.h"
+#include "net/dcsctp/common/sequence_numbers.h"
+#include "net/dcsctp/common/str_join.h"
+#include "net/dcsctp/packet/chunk/data_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_common.h"
+#include "net/dcsctp/packet/chunk/idata_chunk.h"
+#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/sack_chunk.h"
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/send_queue.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace dcsctp {
+namespace {
+
+// The number of times a packet must be NACKed before it's retransmitted.
+// See https://tools.ietf.org/html/rfc4960#section-7.2.4
+constexpr size_t kNumberOfNacksForRetransmission = 3;
+} // namespace
+
+RetransmissionQueue::RetransmissionQueue(
+ absl::string_view log_prefix,
+ TSN initial_tsn,
+ size_t a_rwnd,
+ SendQueue& send_queue,
+ std::function<void(DurationMs rtt)> on_new_rtt,
+ std::function<void()> on_clear_retransmission_counter,
+ Timer& t3_rtx,
+ const DcSctpOptions& options,
+ bool supports_partial_reliability,
+ bool use_message_interleaving)
+ : options_(options),
+ partial_reliability_(supports_partial_reliability),
+ log_prefix_(std::string(log_prefix) + "tx: "),
+ data_chunk_header_size_(use_message_interleaving
+ ? IDataChunk::kHeaderSize
+ : DataChunk::kHeaderSize),
+ on_new_rtt_(std::move(on_new_rtt)),
+ on_clear_retransmission_counter_(
+ std::move(on_clear_retransmission_counter)),
+ t3_rtx_(t3_rtx),
+ cwnd_(options_.cwnd_mtus_initial * options_.mtu),
+ rwnd_(a_rwnd),
+ // https://tools.ietf.org/html/rfc4960#section-7.2.1
+ // "The initial value of ssthresh MAY be arbitrarily high (for
+ // example, implementations MAY use the size of the receiver advertised
+ // window).""
+ ssthresh_(rwnd_),
+ next_tsn_(tsn_unwrapper_.Unwrap(initial_tsn)),
+ last_cumulative_tsn_ack_(tsn_unwrapper_.Unwrap(TSN(*initial_tsn - 1))),
+ send_queue_(send_queue) {}
+
+bool RetransmissionQueue::IsConsistent() const {
+ size_t actual_outstanding_bytes = 0;
+
+ std::set<UnwrappedTSN> actual_to_be_retransmitted;
+ for (const auto& elem : outstanding_data_) {
+ if (elem.second.is_outstanding()) {
+ actual_outstanding_bytes += GetSerializedChunkSize(elem.second.data());
+ }
+
+ if (elem.second.should_be_retransmitted()) {
+ actual_to_be_retransmitted.insert(elem.first);
+ }
+ }
+
+ return actual_outstanding_bytes == outstanding_bytes_ &&
+ actual_to_be_retransmitted == to_be_retransmitted_;
+}
+
+// Returns the serialized size of a chunk carrying `data`, including padding.
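+// For example, with the plain DATA chunk header (16 bytes, assuming
+// DataChunk::kHeaderSize == 16) and a 5-byte payload, this returns
+// RoundUpTo4(21) == 24.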
+size_t RetransmissionQueue::GetSerializedChunkSize(const Data& data) const {
+ return RoundUpTo4(data_chunk_header_size_ + data.size());
+}
+
+void RetransmissionQueue::RemoveAcked(UnwrappedTSN cumulative_tsn_ack,
+ AckInfo& ack_info) {
+ auto first_unacked = outstanding_data_.upper_bound(cumulative_tsn_ack);
+
+ for (auto it = outstanding_data_.begin(); it != first_unacked; ++it) {
+ ack_info.bytes_acked_by_cumulative_tsn_ack += it->second.data().size();
+ ack_info.acked_tsns.push_back(it->first.Wrap());
+ if (it->second.is_outstanding()) {
+ outstanding_bytes_ -= GetSerializedChunkSize(it->second.data());
+ } else if (it->second.should_be_retransmitted()) {
+ to_be_retransmitted_.erase(it->first);
+ }
+ }
+
+ outstanding_data_.erase(outstanding_data_.begin(), first_unacked);
+}
+
+void RetransmissionQueue::AckGapBlocks(
+ UnwrappedTSN cumulative_tsn_ack,
+ rtc::ArrayView<const SackChunk::GapAckBlock> gap_ack_blocks,
+ AckInfo& ack_info) {
+ // Mark all chunks covered by the gap ack blocks as ACKED. They can't be
+ // removed yet, as per the RFC: "SCTP considers the information carried in
+ // the Gap Ack Blocks in the SACK chunk as advisory." Note that when NR-SACK
+ // is supported, this can be handled differently.
+
+ for (auto& block : gap_ack_blocks) {
+ auto start = outstanding_data_.lower_bound(
+ UnwrappedTSN::AddTo(cumulative_tsn_ack, block.start));
+ auto end = outstanding_data_.upper_bound(
+ UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end));
+ for (auto iter = start; iter != end; ++iter) {
+ if (!iter->second.is_acked()) {
+ ack_info.bytes_acked_by_new_gap_ack_blocks +=
+ iter->second.data().size();
+ if (iter->second.is_outstanding()) {
+ outstanding_bytes_ -= GetSerializedChunkSize(iter->second.data());
+ }
+ if (iter->second.should_be_retransmitted()) {
+ to_be_retransmitted_.erase(iter->first);
+ }
+ iter->second.Ack();
+ ack_info.highest_tsn_acked =
+ std::max(ack_info.highest_tsn_acked, iter->first);
+ ack_info.acked_tsns.push_back(iter->first.Wrap());
+ }
+ }
+ }
+}
+
+void RetransmissionQueue::NackBetweenAckBlocks(
+ UnwrappedTSN cumulative_tsn_ack,
+ rtc::ArrayView<const SackChunk::GapAckBlock> gap_ack_blocks,
+ AckInfo& ack_info) {
+ // Mark everything between the blocks as NACKED/TO_BE_RETRANSMITTED.
+ // https://tools.ietf.org/html/rfc4960#section-7.2.4
+ // "Mark the DATA chunk(s) with three miss indications for retransmission."
+ // "For each incoming SACK, miss indications are incremented only for
+ // missing TSNs prior to the highest TSN newly acknowledged in the SACK."
+ //
+ // In other words, in-flight packets that lie between gap ack blocks are only
+ // nacked when the received data stream is advancing and new packets have
+ // been seen since the last SACK. Otherwise, SCTP relies on the T3-RTX timer
+ // to resend packets.
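+ //
+ // For example (hypothetical numbers), with cumulative_tsn_ack=10 and a
+ // single gap ack block covering TSNs 12..13, TSN 11 is the only candidate
+ // to be nacked, and only if it is not above `max_tsn_to_nack` below.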
+ UnwrappedTSN max_tsn_to_nack = ack_info.highest_tsn_acked;
+ if (is_in_fast_recovery() && cumulative_tsn_ack > last_cumulative_tsn_ack_) {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.4
+ // "If an endpoint is in Fast Recovery and a SACK arrives that advances
+ // the Cumulative TSN Ack Point, the miss indications are incremented for
+ // all TSNs reported missing in the SACK."
+ max_tsn_to_nack = UnwrappedTSN::AddTo(
+ cumulative_tsn_ack,
+ gap_ack_blocks.empty() ? 0 : gap_ack_blocks.rbegin()->end);
+ }
+
+ UnwrappedTSN prev_block_last_acked = cumulative_tsn_ack;
+ for (auto& block : gap_ack_blocks) {
+ UnwrappedTSN cur_block_first_acked =
+ UnwrappedTSN::AddTo(cumulative_tsn_ack, block.start);
+ for (auto iter = outstanding_data_.upper_bound(prev_block_last_acked);
+ iter != outstanding_data_.lower_bound(cur_block_first_acked); ++iter) {
+ if (iter->first <= max_tsn_to_nack) {
+ if (iter->second.is_outstanding()) {
+ outstanding_bytes_ -= GetSerializedChunkSize(iter->second.data());
+ }
+
+ if (iter->second.Nack()) {
+ ack_info.has_packet_loss = true;
+ to_be_retransmitted_.insert(iter->first);
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << *iter->first.Wrap()
+ << " marked for retransmission";
+ }
+ }
+ }
+ prev_block_last_acked = UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end);
+ }
+
+ // Note that packets above the highest gap ack block (or above the cumulative
+ // ack TSN if there are no gap ack blocks) are not nacked, as only packets up
+ // until highest_tsn_acked (see above) should be considered when nacking.
+}
+
+void RetransmissionQueue::MaybeExitFastRecovery(
+ UnwrappedTSN cumulative_tsn_ack) {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.4
+ // "When a SACK acknowledges all TSNs up to and including this [fast
+ // recovery] exit point, Fast Recovery is exited."
+ if (fast_recovery_exit_tsn_.has_value() &&
+ cumulative_tsn_ack >= *fast_recovery_exit_tsn_) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix_
+ << "exit_point=" << *fast_recovery_exit_tsn_->Wrap()
+ << " reached - exiting fast recovery";
+ fast_recovery_exit_tsn_ = absl::nullopt;
+ }
+}
+
+void RetransmissionQueue::HandleIncreasedCumulativeTsnAck(
+ size_t outstanding_bytes,
+ size_t total_bytes_acked) {
+ // Allow some margin when classifying the window as fully utilized, since
+ // e.g. packets smaller than kMinimumFragmentedPayload are not sent, and
+ // there is per-chunk overhead.
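+ // For example (hypothetical numbers), with cwnd=4000, mtu=1280 and
+ // outstanding_bytes=3000, the window counts as fully utilized since
+ // 3000 + 1280 >= 4000.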
+ bool is_fully_utilized = outstanding_bytes + options_.mtu >= cwnd_;
+ size_t old_cwnd = cwnd_;
+ if (phase() == CongestionAlgorithmPhase::kSlowStart) {
+ if (is_fully_utilized && !is_in_fast_recovery()) {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.1
+ // "Only when these three conditions are met can the cwnd be
+ // increased; otherwise, the cwnd MUST not be increased. If these
+ // conditions are met, then cwnd MUST be increased by, at most, the
+ // lesser of 1) the total size of the previously outstanding DATA
+ // chunk(s) acknowledged, and 2) the destination's path MTU."
+ if (options_.slow_start_tcp_style) {
+ cwnd_ += std::min(total_bytes_acked, cwnd_);
+ } else {
+ cwnd_ += std::min(total_bytes_acked, options_.mtu);
+ }
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "SS increase cwnd=" << cwnd_
+ << " (" << old_cwnd << ")";
+ }
+ } else if (phase() == CongestionAlgorithmPhase::kCongestionAvoidance) {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.2
+ // "Whenever cwnd is greater than ssthresh, upon each SACK arrival
+ // that advances the Cumulative TSN Ack Point, increase
+ // partial_bytes_acked by the total number of bytes of all new chunks
+ // acknowledged in that SACK including chunks acknowledged by the new
+ // Cumulative TSN Ack and by Gap Ack Blocks."
+ size_t old_pba = partial_bytes_acked_;
+ partial_bytes_acked_ += total_bytes_acked;
+
+ if (partial_bytes_acked_ >= cwnd_ && is_fully_utilized) {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.2
+ // "When partial_bytes_acked is equal to or greater than cwnd and
+ // before the arrival of the SACK the sender had cwnd or more bytes of
+ // data outstanding (i.e., before arrival of the SACK, flightsize was
+ // greater than or equal to cwnd), increase cwnd by MTU, and reset
+ // partial_bytes_acked to (partial_bytes_acked - cwnd)."
+ cwnd_ += options_.mtu;
+ partial_bytes_acked_ -= cwnd_;
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "CA increase cwnd=" << cwnd_
+ << " (" << old_cwnd << ") ssthresh=" << ssthresh_
+ << ", pba=" << partial_bytes_acked_ << " ("
+ << old_pba << ")";
+ } else {
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "CA unchanged cwnd=" << cwnd_
+ << " (" << old_cwnd << ") ssthresh=" << ssthresh_
+ << ", pba=" << partial_bytes_acked_ << " ("
+ << old_pba << ")";
+ }
+ }
+}
+
+void RetransmissionQueue::HandlePacketLoss(UnwrappedTSN highest_tsn_acked) {
+ if (!is_in_fast_recovery()) {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.4
+ // "If not in Fast Recovery, adjust the ssthresh and cwnd of the
+ // destination address(es) to which the missing DATA chunks were last
+ // sent, according to the formula described in Section 7.2.3."
+ size_t old_cwnd = cwnd_;
+ size_t old_pba = partial_bytes_acked_;
+ ssthresh_ = std::max(cwnd_ / 2, options_.cwnd_mtus_min * options_.mtu);
+ cwnd_ = ssthresh_;
+ partial_bytes_acked_ = 0;
+
+ RTC_DLOG(LS_VERBOSE) << log_prefix_
+ << "packet loss detected (not fast recovery). cwnd="
+ << cwnd_ << " (" << old_cwnd
+ << "), ssthresh=" << ssthresh_
+ << ", pba=" << partial_bytes_acked_ << " (" << old_pba
+ << ")";
+
+ // https://tools.ietf.org/html/rfc4960#section-7.2.4
+ // "If not in Fast Recovery, enter Fast Recovery and mark the highest
+ // outstanding TSN as the Fast Recovery exit point."
+ fast_recovery_exit_tsn_ = outstanding_data_.empty()
+ ? last_cumulative_tsn_ack_
+ : outstanding_data_.rbegin()->first;
+ RTC_DLOG(LS_VERBOSE) << log_prefix_
+ << "fast recovery initiated with exit_point="
+ << *fast_recovery_exit_tsn_->Wrap();
+ } else {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.4
+ // "While in Fast Recovery, the ssthresh and cwnd SHOULD NOT change for
+ // any destinations due to a subsequent Fast Recovery event (i.e., one
+ // SHOULD NOT reduce the cwnd further due to a subsequent Fast Retransmit)."
+ RTC_DLOG(LS_VERBOSE) << log_prefix_
+ << "packet loss detected (fast recovery). No changes.";
+ }
+}
+
+void RetransmissionQueue::UpdateReceiverWindow(uint32_t a_rwnd) {
+ rwnd_ = outstanding_bytes_ >= a_rwnd ? 0 : a_rwnd - outstanding_bytes_;
+}
+
+void RetransmissionQueue::StartT3RtxTimerIfOutstandingData() {
+ // Note: Can't use `outstanding_bytes()` as that one doesn't count chunks to
+ // be retransmitted.
+ if (outstanding_data_.empty()) {
+ // https://tools.ietf.org/html/rfc4960#section-6.3.2
+ // "Whenever all outstanding data sent to an address have been
+ // acknowledged, turn off the T3-rtx timer of that address.
+ // Note: Already stopped in `StopT3RtxTimerOnIncreasedCumulativeTsnAck`."
+ } else {
+ // https://tools.ietf.org/html/rfc4960#section-6.3.2
+ // "Whenever a SACK is received that acknowledges the DATA chunk
+ // with the earliest outstanding TSN for that address, restart the T3-rtx
+ // timer for that address with its current RTO (if there is still
+ // outstanding data on that address)."
+ // "Whenever a SACK is received missing a TSN that was previously
+ // acknowledged via a Gap Ack Block, start the T3-rtx for the destination
+ // address to which the DATA chunk was originally transmitted if it is not
+ // already running."
+ if (!t3_rtx_.is_running()) {
+ t3_rtx_.Start();
+ }
+ }
+}
+
+bool RetransmissionQueue::IsSackValid(const SackChunk& sack) const {
+ // https://tools.ietf.org/html/rfc4960#section-6.2.1
+ // "If Cumulative TSN Ack is less than the Cumulative TSN Ack Point,
+ // then drop the SACK. Since Cumulative TSN Ack is monotonically increasing,
+ // a SACK whose Cumulative TSN Ack is less than the Cumulative TSN Ack Point
+ // indicates an out-of-order SACK."
+ //
+ // Note: Important not to drop SACKs with identical TSN to that previously
+ // received, as the gap ack blocks or dup tsn fields may have changed.
+ UnwrappedTSN cumulative_tsn_ack =
+ tsn_unwrapper_.PeekUnwrap(sack.cumulative_tsn_ack());
+ if (cumulative_tsn_ack < last_cumulative_tsn_ack_) {
+ // https://tools.ietf.org/html/rfc4960#section-6.2.1
+ // "If Cumulative TSN Ack is less than the Cumulative TSN Ack Point,
+ // then drop the SACK. Since Cumulative TSN Ack is monotonically
+ // increasing, a SACK whose Cumulative TSN Ack is less than the Cumulative
+ // TSN Ack Point indicates an out-of-order SACK."
+ return false;
+ } else if (outstanding_data_.empty() &&
+ cumulative_tsn_ack > last_cumulative_tsn_ack_) {
+ // No in-flight data and cum-tsn-ack above what was last ACKed - not valid.
+ return false;
+ } else if (!outstanding_data_.empty() &&
+ cumulative_tsn_ack > outstanding_data_.rbegin()->first) {
+ // There is in-flight data, but the cum-tsn-ack is beyond that - not valid.
+ return false;
+ }
+ return true;
+}
+
+bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) {
+ if (!IsSackValid(sack)) {
+ return false;
+ }
+
+ size_t old_outstanding_bytes = outstanding_bytes_;
+ size_t old_rwnd = rwnd_;
+ UnwrappedTSN cumulative_tsn_ack =
+ tsn_unwrapper_.Unwrap(sack.cumulative_tsn_ack());
+
+ if (sack.gap_ack_blocks().empty()) {
+ UpdateRTT(now, cumulative_tsn_ack);
+ }
+
+ AckInfo ack_info(cumulative_tsn_ack);
+ // Erase all items up to cumulative_tsn_ack.
+ RemoveAcked(cumulative_tsn_ack, ack_info);
+
+ // ACK packets reported in the gap ack blocks.
+ AckGapBlocks(cumulative_tsn_ack, sack.gap_ack_blocks(), ack_info);
+
+ // NACK chunks that weren't acked, possibly marking them for retransmission.
+ NackBetweenAckBlocks(cumulative_tsn_ack, sack.gap_ack_blocks(), ack_info);
+
+ // Update of outstanding_data_ is now done. Congestion control remains.
+ UpdateReceiverWindow(sack.a_rwnd());
+
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Received SACK. Acked TSN: "
+ << StrJoin(ack_info.acked_tsns, ",",
+ [](rtc::StringBuilder& sb, TSN tsn) {
+ sb << *tsn;
+ })
+ << ", cum_tsn_ack=" << *cumulative_tsn_ack.Wrap() << " ("
+ << *last_cumulative_tsn_ack_.Wrap()
+ << "), outstanding_bytes=" << outstanding_bytes_ << " ("
+ << old_outstanding_bytes << "), rwnd=" << rwnd_ << " ("
+ << old_rwnd << ")";
+
+ MaybeExitFastRecovery(cumulative_tsn_ack);
+
+ if (cumulative_tsn_ack > last_cumulative_tsn_ack_) {
+ // https://tools.ietf.org/html/rfc4960#section-6.3.2
+ // "Whenever a SACK is received that acknowledges the DATA chunk
+ // with the earliest outstanding TSN for that address, restart the T3-rtx
+ // timer for that address with its current RTO (if there is still
+ // outstanding data on that address)."
+ // Note: It may be started again further down.
+ t3_rtx_.Stop();
+
+ HandleIncreasedCumulativeTsnAck(
+ old_outstanding_bytes, ack_info.bytes_acked_by_cumulative_tsn_ack +
+ ack_info.bytes_acked_by_new_gap_ack_blocks);
+ }
+
+ if (ack_info.has_packet_loss) {
+ is_in_fast_retransmit_ = true;
+ HandlePacketLoss(ack_info.highest_tsn_acked);
+ }
+
+ // https://tools.ietf.org/html/rfc4960#section-8.2
+ // "When an outstanding TSN is acknowledged [...] the endpoint shall clear
+ // the error counter ..."
+ if (ack_info.bytes_acked_by_cumulative_tsn_ack > 0 ||
+ ack_info.bytes_acked_by_new_gap_ack_blocks > 0) {
+ on_clear_retransmission_counter_();
+ }
+
+ last_cumulative_tsn_ack_ = cumulative_tsn_ack;
+ StartT3RtxTimerIfOutstandingData();
+ RTC_DCHECK(IsConsistent());
+ return true;
+}
+
+void RetransmissionQueue::UpdateRTT(TimeMs now,
+ UnwrappedTSN cumulative_tsn_ack) {
+ // RTT updating is flawed in SCTP, as explained in e.g. Pedersen J, Griwodz C,
+ // Halvorsen P (2006) Considerations of SCTP retransmission delays for thin
+ // streams.
+ // Due to delayed acknowledgement, the SACK may be sent much later which
+ // increases the calculated RTT.
+ // TODO(boivie): Consider occasionally sending DATA chunks with I-bit set and
+ // use only those packets for measurement.
+
+ auto it = outstanding_data_.find(cumulative_tsn_ack);
+ if (it != outstanding_data_.end()) {
+ if (!it->second.has_been_retransmitted()) {
+ // https://tools.ietf.org/html/rfc4960#section-6.3.1
+ // "Karn's algorithm: RTT measurements MUST NOT be made using
+ // packets that were retransmitted (and thus for which it is ambiguous
+ // whether the reply was for the first instance of the chunk or for a
+ // later instance)"
+ DurationMs rtt = now - it->second.time_sent();
+ on_new_rtt_(rtt);
+ }
+ }
+}
+
+void RetransmissionQueue::HandleT3RtxTimerExpiry() {
+ size_t old_cwnd = cwnd_;
+ size_t old_outstanding_bytes = outstanding_bytes_;
+ // https://tools.ietf.org/html/rfc4960#section-6.3.3
+ // "For the destination address for which the timer expires, adjust
+ // its ssthresh with rules defined in Section 7.2.3 and set the cwnd <- MTU."
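+ //
+ // For example (hypothetical numbers), with mtu=1280 and cwnd=10240, ssthresh
+ // becomes max(10240 / 2, 4 * 1280) = 5120 and cwnd is reset to 1280.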
+ ssthresh_ = std::max(cwnd_ / 2, 4 * options_.mtu);
+ cwnd_ = 1 * options_.mtu;
+
+ // https://tools.ietf.org/html/rfc4960#section-6.3.3
+ // "For the destination address for which the timer expires, set RTO
+ // <- RTO * 2 ("back off the timer"). The maximum value discussed in rule C7
+ // above (RTO.max) may be used to provide an upper bound to this doubling
+ // operation."
+
+ // Already done by the Timer implementation.
+
+ // https://tools.ietf.org/html/rfc4960#section-6.3.3
+ // "Determine how many of the earliest (i.e., lowest TSN) outstanding
+ // DATA chunks for the address for which the T3-rtx has expired will fit into
+ // a single packet"
+
+ // https://tools.ietf.org/html/rfc4960#section-6.3.3
+ // "Note: Any DATA chunks that were sent to the address for which the
+ // T3-rtx timer expired but did not fit in one MTU (rule E3 above) should be
+ // marked for retransmission and sent as soon as cwnd allows (normally, when a
+ // SACK arrives)."
+ int count = 0;
+ for (auto& elem : outstanding_data_) {
+ UnwrappedTSN tsn = elem.first;
+ TxData& item = elem.second;
+ if (!item.is_acked()) {
+ if (item.is_outstanding()) {
+ outstanding_bytes_ -= GetSerializedChunkSize(item.data());
+ }
+ if (item.Nack(/*retransmit_now=*/true)) {
+ to_be_retransmitted_.insert(tsn);
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Chunk " << *tsn.Wrap()
+ << " will be retransmitted due to T3-RTX";
+ ++count;
+ }
+ }
+ }
+
+ // https://tools.ietf.org/html/rfc4960#section-6.3.3
+ // "Start the retransmission timer T3-rtx on the destination address
+ // to which the retransmission is sent, if rule R1 above indicates to do so."
+
+ // Already done by the Timer implementation.
+
+ RTC_DLOG(LS_INFO) << log_prefix_ << "t3-rtx expired. new cwnd=" << cwnd_
+ << " (" << old_cwnd << "), ssthresh=" << ssthresh_
+ << ", rtx-packets=" << count << ", outstanding_bytes "
+ << outstanding_bytes_ << " (" << old_outstanding_bytes
+ << ")";
+ RTC_DCHECK(IsConsistent());
+}
+
+std::vector<std::pair<TSN, Data>>
+RetransmissionQueue::GetChunksToBeRetransmitted(size_t max_size) {
+ std::vector<std::pair<TSN, Data>> result;
+
+ for (auto it = to_be_retransmitted_.begin();
+ it != to_be_retransmitted_.end();) {
+ UnwrappedTSN tsn = *it;
+ auto elem = outstanding_data_.find(tsn);
+ RTC_DCHECK(elem != outstanding_data_.end());
+ TxData& item = elem->second;
+ RTC_DCHECK(item.should_be_retransmitted());
+ RTC_DCHECK(!item.is_outstanding());
+ RTC_DCHECK(!item.is_abandoned());
+ RTC_DCHECK(!item.is_acked());
+
+ size_t serialized_size = GetSerializedChunkSize(item.data());
+ if (serialized_size <= max_size) {
+ item.Retransmit();
+ result.emplace_back(tsn.Wrap(), item.data().Clone());
+ max_size -= serialized_size;
+ outstanding_bytes_ += serialized_size;
+ it = to_be_retransmitted_.erase(it);
+ } else {
+ ++it;
+ }
+ // No point in continuing if the packet is full.
+ if (max_size <= data_chunk_header_size_) {
+ break;
+ }
+ }
+
+ return result;
+}
+
+std::vector<std::pair<TSN, Data>> RetransmissionQueue::GetChunksToSend(
+ TimeMs now,
+ size_t bytes_remaining_in_packet) {
+ // Chunks are always padded to be evenly divisible by four.
+ RTC_DCHECK(IsDivisibleBy4(bytes_remaining_in_packet));
+
+ std::vector<std::pair<TSN, Data>> to_be_sent;
+ size_t old_outstanding_bytes = outstanding_bytes_;
+ size_t old_rwnd = rwnd_;
+ if (is_in_fast_retransmit()) {
+ // https://tools.ietf.org/html/rfc4960#section-7.2.4
+ // "Determine how many of the earliest (i.e., lowest TSN) DATA chunks
+ // marked for retransmission will fit into a single packet ... Retransmit
+ // those K DATA chunks in a single packet. When a Fast Retransmit is being
+ // performed, the sender SHOULD ignore the value of cwnd and SHOULD NOT
+ // delay retransmission for this single packet."
+ is_in_fast_retransmit_ = false;
+ to_be_sent = GetChunksToBeRetransmitted(bytes_remaining_in_packet);
+ size_t to_be_sent_bytes = absl::c_accumulate(
+ to_be_sent, 0, [&](size_t r, const std::pair<TSN, Data>& d) {
+ return r + GetSerializedChunkSize(d.second);
+ });
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "fast-retransmit: sending "
+ << to_be_sent.size() << " chunks, " << to_be_sent_bytes
+ << " bytes";
+ } else {
+ // Normal sending. Calculate the bandwidth budget (how many bytes are
+ // allowed to be sent), and fill it first with chunks that are scheduled to
+ // be retransmitted. If there is still budget, send new chunks (which will
+ // have their TSNs assigned here).
+ size_t remaining_cwnd_bytes =
+ outstanding_bytes_ >= cwnd_ ? 0 : cwnd_ - outstanding_bytes_;
+ size_t max_bytes = RoundDownTo4(std::min(
+ std::min(bytes_remaining_in_packet, rwnd()), remaining_cwnd_bytes));
+
+ to_be_sent = GetChunksToBeRetransmitted(max_bytes);
+ max_bytes -= absl::c_accumulate(
+ to_be_sent, 0, [&](size_t r, const std::pair<TSN, Data>& d) {
+ return r + GetSerializedChunkSize(d.second);
+ });
+
+ while (max_bytes > data_chunk_header_size_) {
+ RTC_DCHECK(IsDivisibleBy4(max_bytes));
+ absl::optional<SendQueue::DataToSend> chunk_opt =
+ send_queue_.Produce(now, max_bytes - data_chunk_header_size_);
+ if (!chunk_opt.has_value()) {
+ break;
+ }
+
+ UnwrappedTSN tsn = next_tsn_;
+ next_tsn_.Increment();
+ to_be_sent.emplace_back(tsn.Wrap(), chunk_opt->data.Clone());
+
+ // All chunks are always padded to be evenly divisible by 4.
+ size_t chunk_size = GetSerializedChunkSize(chunk_opt->data);
+ max_bytes -= chunk_size;
+ outstanding_bytes_ += chunk_size;
+ rwnd_ -= chunk_size;
+ outstanding_data_.emplace(
+ tsn, RetransmissionQueue::TxData(std::move(chunk_opt->data),
+ chunk_opt->max_retransmissions, now,
+ chunk_opt->expires_at));
+ }
+ }
+
+ if (!to_be_sent.empty()) {
+ // https://tools.ietf.org/html/rfc4960#section-6.3.2
+ // "Every time a DATA chunk is sent to any address (including a
+ // retransmission), if the T3-rtx timer of that address is not running,
+ // start it running so that it will expire after the RTO of that address."
+ if (!t3_rtx_.is_running()) {
+ t3_rtx_.Start();
+ }
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Sending TSN "
+ << StrJoin(to_be_sent, ",",
+ [&](rtc::StringBuilder& sb,
+ const std::pair<TSN, Data>& c) {
+ sb << *c.first;
+ })
+ << " - "
+ << absl::c_accumulate(
+ to_be_sent, 0,
+ [&](size_t r, const std::pair<TSN, Data>& d) {
+ return r + GetSerializedChunkSize(d.second);
+ })
+ << " bytes. outstanding_bytes=" << outstanding_bytes_
+ << " (" << old_outstanding_bytes << "), cwnd=" << cwnd_
+ << ", rwnd=" << rwnd_ << " (" << old_rwnd << ")";
+ }
+ RTC_DCHECK(IsConsistent());
+ return to_be_sent;
+}
+
+std::vector<std::pair<TSN, RetransmissionQueue::State>>
+RetransmissionQueue::GetChunkStatesForTesting() const {
+ std::vector<std::pair<TSN, RetransmissionQueue::State>> states;
+ states.emplace_back(last_cumulative_tsn_ack_.Wrap(), State::kAcked);
+ for (const auto& elem : outstanding_data_) {
+ State state;
+ if (elem.second.is_abandoned()) {
+ state = State::kAbandoned;
+ } else if (elem.second.should_be_retransmitted()) {
+ state = State::kToBeRetransmitted;
+ } else if (elem.second.is_acked()) {
+ state = State::kAcked;
+ } else if (elem.second.is_outstanding()) {
+ state = State::kInFlight;
+ } else {
+ state = State::kNacked;
+ }
+
+ states.emplace_back(elem.first.Wrap(), state);
+ }
+ return states;
+}
+
+bool RetransmissionQueue::ShouldSendForwardTsn(TimeMs now) {
+ if (!partial_reliability_) {
+ return false;
+ }
+ ExpireChunks(now);
+ if (!outstanding_data_.empty()) {
+ auto it = outstanding_data_.begin();
+ return it->first == last_cumulative_tsn_ack_.next_value() &&
+ it->second.is_abandoned();
+ }
+ RTC_DCHECK(IsConsistent());
+ return false;
+}
+
+void RetransmissionQueue::TxData::Ack() {
+ ack_state_ = AckState::kAcked;
+ should_be_retransmitted_ = false;
+}
+
+bool RetransmissionQueue::TxData::Nack(bool retransmit_now) {
+ ack_state_ = AckState::kNacked;
+ ++nack_count_;
+ if ((retransmit_now || nack_count_ >= kNumberOfNacksForRetransmission) &&
+ !is_abandoned_) {
+ should_be_retransmitted_ = true;
+ return true;
+ }
+ return false;
+}
+
+void RetransmissionQueue::TxData::Retransmit() {
+ ack_state_ = AckState::kUnacked;
+ should_be_retransmitted_ = false;
+
+ nack_count_ = 0;
+ ++num_retransmissions_;
+}
+
+void RetransmissionQueue::TxData::Abandon() {
+ is_abandoned_ = true;
+ should_be_retransmitted_ = false;
+}
+
+bool RetransmissionQueue::TxData::has_expired(TimeMs now) const {
+ if (ack_state_ != AckState::kAcked && !is_abandoned_) {
+ if (max_retransmissions_.has_value() &&
+ num_retransmissions_ >= *max_retransmissions_) {
+ return true;
+ } else if (expires_at_.has_value() && *expires_at_ <= now) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void RetransmissionQueue::ExpireChunks(TimeMs now) {
+ for (const auto& elem : outstanding_data_) {
+ UnwrappedTSN tsn = elem.first;
+ const TxData& item = elem.second;
+
+ // Chunks that are in-flight (possibly lost), nacked or to be retransmitted
+ // can be expired easily. There is always a risk of expiring a message that
+ // was already received by the peer but for which no SACK has been received
+ // yet. That's acceptable, and handled.
+ if (item.is_abandoned()) {
+ // Already abandoned.
+ } else if (item.has_expired(now)) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Marking chunk " << *tsn.Wrap()
+ << " and message " << *item.data().message_id
+ << " as expired";
+ ExpireAllFor(item);
+ } else {
+ // A non-expired chunk. No need to iterate any further.
+ break;
+ }
+ }
+}
+
+void RetransmissionQueue::ExpireAllFor(
+ const RetransmissionQueue::TxData& item) {
+ // Erase all remaining chunks from the producer, if any.
+ if (send_queue_.Discard(item.data().is_unordered, item.data().stream_id,
+ item.data().message_id)) {
+ // There were remaining chunks to be produced for this message. Since the
+ // receiver may have already received all chunks (up till now) for this
+ // message, we can't just FORWARD-TSN to the last fragment in this
+ // (abandoned) message and start sending a new message, as the receiver will
+ // then see a new message before the end of the previous one was seen (or
+ // skipped over). So create a new fragment, representing the end, that the
+ // receiver will never see, as it is abandoned immediately and used as cum
+ // TSN in the sent FORWARD-TSN.
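+ //
+ // For example (hypothetical numbers), if fragments with TSNs 11-12 of a
+ // three-fragment message were sent, and the remaining fragment is discarded
+ // from the send queue, an empty IsEnd fragment with TSN 13 is added here and
+ // immediately abandoned, so that a FORWARD-TSN can skip up to and
+ // including 13.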
+ UnwrappedTSN tsn = next_tsn_;
+ next_tsn_.Increment();
+ Data message_end(item.data().stream_id, item.data().ssn,
+ item.data().message_id, item.data().fsn, item.data().ppid,
+ std::vector<uint8_t>(), Data::IsBeginning(false),
+ Data::IsEnd(true), item.data().is_unordered);
+ TxData& added_item =
+ outstanding_data_
+ .emplace(tsn, RetransmissionQueue::TxData(std::move(message_end),
+ absl::nullopt, TimeMs(0),
+ absl::nullopt))
+ .first->second;
+ // The added chunk shouldn't be included in `outstanding_bytes`, so set it
+ // as acked.
+ added_item.Ack();
+ RTC_DLOG(LS_VERBOSE) << log_prefix_
+ << "Adding unsent end placeholder for message at tsn="
+ << *tsn.Wrap();
+ }
+ for (auto& elem : outstanding_data_) {
+ UnwrappedTSN tsn = elem.first;
+ TxData& other = elem.second;
+
+ if (!other.is_abandoned() &&
+ other.data().stream_id == item.data().stream_id &&
+ other.data().is_unordered == item.data().is_unordered &&
+ other.data().message_id == item.data().message_id) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Marking chunk " << *tsn.Wrap()
+ << " as abandoned";
+ if (other.should_be_retransmitted()) {
+ to_be_retransmitted_.erase(tsn);
+ }
+ other.Abandon();
+ }
+ }
+}
+
+ForwardTsnChunk RetransmissionQueue::CreateForwardTsn() const {
+ std::unordered_map<StreamID, SSN, StreamID::Hasher>
+ skipped_per_ordered_stream;
+ UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_;
+
+ for (const auto& elem : outstanding_data_) {
+ UnwrappedTSN tsn = elem.first;
+ const TxData& item = elem.second;
+
+ if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) {
+ break;
+ }
+ new_cumulative_ack = tsn;
+ if (!item.data().is_unordered &&
+ item.data().ssn > skipped_per_ordered_stream[item.data().stream_id]) {
+ skipped_per_ordered_stream[item.data().stream_id] = item.data().ssn;
+ }
+ }
+
+ std::vector<ForwardTsnChunk::SkippedStream> skipped_streams;
+ skipped_streams.reserve(skipped_per_ordered_stream.size());
+ for (const auto& elem : skipped_per_ordered_stream) {
+ skipped_streams.emplace_back(elem.first, elem.second);
+ }
+ return ForwardTsnChunk(new_cumulative_ack.Wrap(), std::move(skipped_streams));
+}
+
+IForwardTsnChunk RetransmissionQueue::CreateIForwardTsn() const {
+ std::unordered_map<std::pair<IsUnordered, StreamID>, MID, UnorderedStreamHash>
+ skipped_per_stream;
+ UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_;
+
+ for (const auto& elem : outstanding_data_) {
+ UnwrappedTSN tsn = elem.first;
+ const TxData& item = elem.second;
+
+ if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) {
+ break;
+ }
+ new_cumulative_ack = tsn;
+ std::pair<IsUnordered, StreamID> stream_id =
+ std::make_pair(item.data().is_unordered, item.data().stream_id);
+
+ if (item.data().message_id > skipped_per_stream[stream_id]) {
+ skipped_per_stream[stream_id] = item.data().message_id;
+ }
+ }
+
+ std::vector<IForwardTsnChunk::SkippedStream> skipped_streams;
+ skipped_streams.reserve(skipped_per_stream.size());
+ for (const auto& elem : skipped_per_stream) {
+ const std::pair<IsUnordered, StreamID>& stream = elem.first;
+ MID message_id = elem.second;
+ skipped_streams.emplace_back(stream.first, stream.second, message_id);
+ }
+
+ return IForwardTsnChunk(new_cumulative_ack.Wrap(),
+ std::move(skipped_streams));
+}
+
+void RetransmissionQueue::PrepareResetStreams(
+ rtc::ArrayView<const StreamID> streams) {
+ // TODO(boivie): These calls are now only affecting the send queue. The
+ // packet buffer can also change behavior - for example draining the chunk
+ // producer and eagerly assigning TSNs so that an "Outgoing SSN Reset Request"
+ // can be sent quickly, with a known `sender_last_assigned_tsn`.
+ send_queue_.PrepareResetStreams(streams);
+}
+bool RetransmissionQueue::CanResetStreams() const {
+ return send_queue_.CanResetStreams();
+}
+void RetransmissionQueue::CommitResetStreams() {
+ send_queue_.CommitResetStreams();
+}
+void RetransmissionQueue::RollbackResetStreams() {
+ send_queue_.RollbackResetStreams();
+}
+
+} // namespace dcsctp
diff --git a/net/dcsctp/tx/retransmission_queue.h b/net/dcsctp/tx/retransmission_queue.h
new file mode 100644
index 0000000000..7f5baf9fff
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_queue.h
@@ -0,0 +1,371 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_TX_RETRANSMISSION_QUEUE_H_
+#define NET_DCSCTP_TX_RETRANSMISSION_QUEUE_H_
+
+#include <cstdint>
+#include <functional>
+#include <map>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/sequence_numbers.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/sack_chunk.h"
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/retransmission_timeout.h"
+#include "net/dcsctp/tx/send_queue.h"
+
+namespace dcsctp {
+
+// The RetransmissionQueue manages all DATA/I-DATA chunks that are in-flight and
+// schedules them to be retransmitted if necessary. Chunks are retransmitted
+// when they have been reported missing in a number of consecutive SACKs, or
+// when the retransmission timer, `t3_rtx`, expires.
+//
+// As congestion control is tightly connected with the state of transmitted
+// packets, that's also managed here to limit the amount of data that is
+// in-flight (sent, but not yet acknowledged).
+class RetransmissionQueue {
+ public:
+ static constexpr size_t kMinimumFragmentedPayload = 10;
+ // State for DATA chunks (message fragments) in the queue - used in tests.
+ enum class State {
+ // The chunk has been sent but not received yet (from the sender's point of
+ // view, as no SACK has been received yet that references this chunk).
+ kInFlight,
+ // A SACK has been received which explicitly marked this chunk as missing -
+ // it's now NACKED and may be retransmitted if NACKED enough times.
+ kNacked,
+ // A chunk that will be retransmitted when possible.
+ kToBeRetransmitted,
+ // A SACK has been received which explicitly marked this chunk as received.
+ kAcked,
+ // A chunk whose message has expired or has been retransmitted too many
+ // times (RFC3758). It will not be retransmitted anymore.
+ kAbandoned,
+ };
+
+ // Creates a RetransmissionQueue which will send data using `initial_tsn` as
+ // the first TSN to use for sent fragments. It will poll data from
+ // `send_queue`. When SACKs are received, it will estimate the RTT and call
+ // `on_new_rtt`. When an outstanding chunk has been ACKed, it will call
+ // `on_clear_retransmission_counter`, and it will use `t3_rtx`, the SCTP
+ // retransmission timer, to manage retransmissions.
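+ //
+ // A minimal construction sketch (values are hypothetical; `send_queue` is
+ // any SendQueue implementation, `t3_rtx` is a caller-owned Timer and
+ // `options` is a DcSctpOptions):
+ //
+ //   RetransmissionQueue queue(
+ //       "conn: ", TSN(100), /*a_rwnd=*/100000, send_queue,
+ //       [](DurationMs rtt) { /* feed the RTO calculation */ },
+ //       []() { /* clear the retransmission error counter */ },
+ //       t3_rtx, options);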
+ RetransmissionQueue(absl::string_view log_prefix,
+ TSN initial_tsn,
+ size_t a_rwnd,
+ SendQueue& send_queue,
+ std::function<void(DurationMs rtt)> on_new_rtt,
+ std::function<void()> on_clear_retransmission_counter,
+ Timer& t3_rtx,
+ const DcSctpOptions& options,
+ bool supports_partial_reliability = true,
+ bool use_message_interleaving = false);
+
+ // Handles a received SACK. Returns true if the `sack` was processed and
+ // false if it was discarded because it was received out-of-order or is not
+ // relevant.
+ bool HandleSack(TimeMs now, const SackChunk& sack);
+
+ // Handles an expired retransmission timer.
+ void HandleT3RtxTimerExpiry();
+
+ // Returns a list of chunks to send that would fit in one SCTP packet with
+ // `bytes_remaining_in_packet` bytes available. This may be further limited by
+ // the congestion control windows. Note that `ShouldSendForwardTsn` must be
+ // called prior to this method, to abandon expired chunks, as this method will
+ // not expire any chunks.
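+ //
+ // A sketch of the typical per-packet flow (packet assembly omitted; names
+ // are hypothetical and `bytes_left` must be a multiple of 4):
+ //
+ //   if (queue.ShouldSendForwardTsn(now)) {
+ //     // Send queue.CreateForwardTsn() (or CreateIForwardTsn()).
+ //   }
+ //   for (const auto& chunk : queue.GetChunksToSend(now, bytes_left)) {
+ //     // chunk.first is the TSN, chunk.second the Data to wrap in a
+ //     // DATA/I-DATA chunk.
+ //   }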
+ std::vector<std::pair<TSN, Data>> GetChunksToSend(
+ TimeMs now,
+ size_t bytes_remaining_in_packet);
+
+ // Returns the internal state of all queued chunks. This is only used in
+ // unit-tests.
+ std::vector<std::pair<TSN, State>> GetChunkStatesForTesting() const;
+
+ // Returns the next TSN that will be allocated for sent DATA chunks.
+ TSN next_tsn() const { return next_tsn_.Wrap(); }
+
+ // Returns the size of the congestion window, in bytes. This is the number of
+ // bytes that may be in-flight.
+ size_t cwnd() const { return cwnd_; }
+
+ // Overrides the current congestion window size.
+ void set_cwnd(size_t cwnd) { cwnd_ = cwnd; }
+
+ // Returns the current receiver window size.
+ size_t rwnd() const { return rwnd_; }
+
+ // Returns the number of bytes of packets that are in-flight.
+ size_t outstanding_bytes() const { return outstanding_bytes_; }
+
+ // Given the current time `now`, it will evaluate if there are chunks that
+ // have expired and that need to be discarded. It returns true if a
+ // FORWARD-TSN should be sent.
+ bool ShouldSendForwardTsn(TimeMs now);
+
+ // Creates a FORWARD-TSN chunk.
+ ForwardTsnChunk CreateForwardTsn() const;
+
+ // Creates an I-FORWARD-TSN chunk.
+ IForwardTsnChunk CreateIForwardTsn() const;
+
+ // See the SendQueue for a longer description of these methods related
+ // to stream resetting.
+ void PrepareResetStreams(rtc::ArrayView<const StreamID> streams);
+ bool CanResetStreams() const;
+ void CommitResetStreams();
+ void RollbackResetStreams();
+
+ private:
+ enum class CongestionAlgorithmPhase {
+ kSlowStart,
+ kCongestionAvoidance,
+ };
+
+ // A fragmented message's DATA chunk while in the retransmission queue, and
+ // its associated metadata.
+ class TxData {
+ public:
+ explicit TxData(Data data,
+ absl::optional<size_t> max_retransmissions,
+ TimeMs time_sent,
+ absl::optional<TimeMs> expires_at)
+ : max_retransmissions_(max_retransmissions),
+ time_sent_(time_sent),
+ expires_at_(expires_at),
+ data_(std::move(data)) {}
+
+ TimeMs time_sent() const { return time_sent_; }
+
+ const Data& data() const { return data_; }
+
+ // Acks an item.
+ void Ack();
+
+ // Nacks an item. If it has been nacked enough times, or if `retransmit_now`
+ // is set, it might be marked for retransmission, which is indicated by the
+ // return value.
+ bool Nack(bool retransmit_now = false);
+
+ // Prepares the item to be retransmitted. Sets it as outstanding and
+ // clears all nack counters.
+ void Retransmit();
+
+ // Marks this item as abandoned.
+ void Abandon();
+
+ bool is_outstanding() const { return ack_state_ == AckState::kUnacked; }
+ bool is_acked() const { return ack_state_ == AckState::kAcked; }
+ bool is_abandoned() const { return is_abandoned_; }
+
+ // Indicates if this chunk should be retransmitted.
+ bool should_be_retransmitted() const { return should_be_retransmitted_; }
+ // Indicates if this chunk has ever been retransmitted.
+ bool has_been_retransmitted() const { return num_retransmissions_ > 0; }
+
+ // Given the current time, and the current state of this DATA chunk, it will
+ // indicate if it has expired (SCTP Partial Reliability Extension).
+ bool has_expired(TimeMs now) const;
+
+ private:
+ enum class AckState {
+ kUnacked,
+ kAcked,
+ kNacked,
+ };
+ // Indicates the ack state of this chunk: whether it's in flight (Unacked),
+ // has been received (Acked) or is lost (Nacked).
+ AckState ack_state_ = AckState::kUnacked;
+ // Indicates if this chunk has been abandoned, which is a terminal state.
+ bool is_abandoned_ = false;
+ // Indicates if this chunk should be retransmitted.
+ bool should_be_retransmitted_ = false;
+
+ // The number of times the DATA chunk has been nacked (by having received a
+ // SACK which doesn't include it). Will be cleared on retransmissions.
+ size_t nack_count_ = 0;
+ // The number of times the DATA chunk has been retransmitted.
+ size_t num_retransmissions_ = 0;
+ // If the message was sent with a maximum number of retransmissions, this is
+ // set to that number. The value zero (0) means that it will never be
+ // retransmitted.
+ const absl::optional<size_t> max_retransmissions_;
+ // When the packet was sent, and placed in this queue.
+ const TimeMs time_sent_;
+ // If the message was sent with an expiration time, this is set.
+ const absl::optional<TimeMs> expires_at_;
+ // The actual data to send/retransmit.
+ Data data_;
+ };
+
+ // Contains variables scoped to a processing of an incoming SACK.
+ struct AckInfo {
+ explicit AckInfo(UnwrappedTSN cumulative_tsn_ack)
+ : highest_tsn_acked(cumulative_tsn_ack) {}
+
+ // All TSNs that have been acked (for the first time) in this SACK.
+ std::vector<TSN> acked_tsns;
+
+ // Bytes acked by the increased cumulative_tsn_ack in this SACK.
+ size_t bytes_acked_by_cumulative_tsn_ack = 0;
+
+ // Bytes acked by gap blocks in this SACK.
+ size_t bytes_acked_by_new_gap_ack_blocks = 0;
+
+ // Set if this SACK indicates that packet loss has occurred. Just
+ // because a packet is missing in the SACK doesn't necessarily mean that
+ // there is packet loss as that packet might be in-flight and received
+ // out-of-order. But when it has been reported missing consecutive times, it
+ // will eventually be considered "lost" and this will be set.
+ bool has_packet_loss = false;
+
+ // Highest TSN Newly Acknowledged, an SCTP variable.
+ UnwrappedTSN highest_tsn_acked;
+ };
+
+ bool IsConsistent() const;
+
+ // Returns the serialized size of a chunk carrying `data`, including padding.
+ size_t GetSerializedChunkSize(const Data& data) const;
+
+ // Indicates if the congestion control algorithm is in "fast recovery".
+ bool is_in_fast_recovery() const {
+ return fast_recovery_exit_tsn_.has_value();
+ }
+
+ // Indicates if the congestion control algorithm is in "fast retransmit".
+ bool is_in_fast_retransmit() const { return is_in_fast_retransmit_; }
+
+ // Indicates if the provided SACK is valid given what has previously been
+ // received. If it returns false, the SACK is most likely a duplicate of
+ // something already seen, so this returning false doesn't necessarily mean
+ // that the SACK is illegal.
+ bool IsSackValid(const SackChunk& sack) const;
+
+ // Given a `cumulative_tsn_ack` from an incoming SACK, will remove those items
+ // in the retransmission queue up until this value and will update `ack_info`
+ // by setting `bytes_acked_by_cumulative_tsn_ack` and `acked_tsns`.
+ void RemoveAcked(UnwrappedTSN cumulative_tsn_ack, AckInfo& ack_info);
+
+ // Will mark the chunks covered by the `gap_ack_blocks` from an incoming SACK
+ // as "acked" and update `ack_info` by adding new TSNs to `added_tsns`.
+ void AckGapBlocks(UnwrappedTSN cumulative_tsn_ack,
+ rtc::ArrayView<const SackChunk::GapAckBlock> gap_ack_blocks,
+ AckInfo& ack_info);
+
+ // Mark chunks reported as "missing", as "nacked" or "to be retransmitted"
+ // depending on how many times this has happened. Only packets up until
+ // `ack_info.highest_tsn_acked` (highest TSN newly acknowledged) are
+ // nacked/retransmitted. The method will set `ack_info.has_packet_loss`.
+ void NackBetweenAckBlocks(
+ UnwrappedTSN cumulative_tsn_ack,
+ rtc::ArrayView<const SackChunk::GapAckBlock> gap_ack_blocks,
+ AckInfo& ack_info);
+
+ // When a SACK chunk is received, this method will be called which _may_ call
+ // into the `RetransmissionTimeout` to update the RTO.
+ void UpdateRTT(TimeMs now, UnwrappedTSN cumulative_tsn_ack);
+
+ // If the congestion control is in "fast recovery mode", this may be exited
+ // now.
+ void MaybeExitFastRecovery(UnwrappedTSN cumulative_tsn_ack);
+
+ // If chunks have been ACKed, stop the retransmission timer.
+ void StopT3RtxTimerOnIncreasedCumulativeTsnAck(
+ UnwrappedTSN cumulative_tsn_ack);
+
+ // Update the congestion control algorithm, given that the cumulative ack TSN
+ // value has increased, as reported in an incoming SACK chunk.
+ void HandleIncreasedCumulativeTsnAck(size_t outstanding_bytes,
+ size_t total_bytes_acked);
+ // Update the congestion control algorithm, given that packet loss has been
+ // detected, as reported in an incoming SACK chunk.
+ void HandlePacketLoss(UnwrappedTSN highest_tsn_acked);
+ // Update the view of the receiver window size.
+ void UpdateReceiverWindow(uint32_t a_rwnd);
+ // Given `max_size` bytes of space left in a packet, returns the chunks
+ // scheduled for retransmission that fit within it.
+ std::vector<std::pair<TSN, Data>> GetChunksToBeRetransmitted(size_t max_size);
+ // If there is data sent and not ACKED, ensure that the retransmission timer
+ // is running.
+ void StartT3RtxTimerIfOutstandingData();
+
+ // Given the current time `now`, expire chunks that have a limited
+ // lifetime.
+ void ExpireChunks(TimeMs now);
+ // Given that a message fragment, `item`, has expired, expire all other
+ // fragments that share the same message - even never-before-sent fragments
+ // that are still in the SendQueue.
+ void ExpireAllFor(const RetransmissionQueue::TxData& item);
+
+ // Returns the current congestion control algorithm phase.
+ CongestionAlgorithmPhase phase() const {
+ return (cwnd_ <= ssthresh_)
+ ? CongestionAlgorithmPhase::kSlowStart
+ : CongestionAlgorithmPhase::kCongestionAvoidance;
+ }
+
+ const DcSctpOptions options_;
+ // Whether the peer supports RFC 3758 (SCTP Partial Reliability Extension).
+ const bool partial_reliability_;
+ const std::string log_prefix_;
+ // The size of the data chunk (DATA/I-DATA) header that is used.
+ const size_t data_chunk_header_size_;
+ // Called when a new RTT measurement has been made.
+ const std::function<void(DurationMs rtt)> on_new_rtt_;
+ // Called when a SACK has been seen that cleared the retransmission counter.
+ const std::function<void()> on_clear_retransmission_counter_;
+ // The retransmission counter.
+ Timer& t3_rtx_;
+ // Unwraps TSNs
+ UnwrappedTSN::Unwrapper tsn_unwrapper_;
+
+ // Congestion Window. Number of bytes that may be in-flight (sent, not acked).
+ size_t cwnd_;
+ // Receive Window. Number of bytes available in the receiver's RX buffer.
+ size_t rwnd_;
+ // Slow Start Threshold. See RFC4960.
+ size_t ssthresh_;
+ // Partial Bytes Acked. See RFC4960.
+ size_t partial_bytes_acked_ = 0;
+ // If set, fast recovery is enabled until this TSN has been cumulative
+ // acked.
+ absl::optional<UnwrappedTSN> fast_recovery_exit_tsn_ = absl::nullopt;
+ // Indicates if the congestion algorithm is in fast retransmit.
+ bool is_in_fast_retransmit_ = false;
+
+ // Next TSN to be used.
+ UnwrappedTSN next_tsn_;
+ // The last cumulative TSN ack number
+ UnwrappedTSN last_cumulative_tsn_ack_;
+ // The send queue.
+ SendQueue& send_queue_;
+ // All the outstanding data chunks that are in-flight and that have not been
+ // cumulative acked. Note that it also contains chunks that have been acked in
+ // gap ack blocks.
+ std::map<UnwrappedTSN, TxData> outstanding_data_;
+ // Data chunks that are to be retransmitted.
+ std::set<UnwrappedTSN> to_be_retransmitted_;
+ // The number of bytes that are in-flight (sent but not yet acked or nacked).
+ size_t outstanding_bytes_ = 0;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_TX_RETRANSMISSION_QUEUE_H_
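The header above only declares the congestion control hooks; the phase() accessor splits behaviour between slow start (cwnd_ <= ssthresh_) and congestion avoidance, which is what HandleIncreasedCumulativeTsnAck acts on. As a point of reference, a minimal sketch of the RFC 4960 section 7.2 window update that this split corresponds to is shown below. The function and parameter names are hypothetical and only mirror the member names in the header; this illustrates the standard algorithm, not the implementation added by this commit.

    #include <algorithm>
    #include <cstddef>

    // Illustrative sketch of the RFC 4960 section 7.2 congestion window
    // update. Names mirror RetransmissionQueue's members but are hypothetical.
    void OnCumulativeTsnAckAdvanced(size_t outstanding_bytes,
                                    size_t total_bytes_acked,
                                    size_t mtu,
                                    size_t ssthresh,
                                    size_t& cwnd,
                                    size_t& partial_bytes_acked) {
      if (cwnd <= ssthresh) {
        // Slow start: grow cwnd by at most one MTU per SACK, and only when
        // the full window was in use.
        if (outstanding_bytes >= cwnd) {
          cwnd += std::min(total_bytes_acked, mtu);
        }
      } else {
        // Congestion avoidance: grow cwnd by one MTU per window's worth of
        // acknowledged data.
        partial_bytes_acked += total_bytes_acked;
        if (partial_bytes_acked >= cwnd && outstanding_bytes >= cwnd) {
          partial_bytes_acked -= cwnd;
          cwnd += mtu;
        }
      }
    }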
diff --git a/net/dcsctp/tx/retransmission_queue_test.cc b/net/dcsctp/tx/retransmission_queue_test.cc
new file mode 100644
index 0000000000..e02b111b5a
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_queue_test.cc
@@ -0,0 +1,1007 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/retransmission_queue.h"
+
+#include <cstddef>
+#include <cstdint>
+#include <functional>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/packet/chunk/data_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/forward_tsn_common.h"
+#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h"
+#include "net/dcsctp/packet/chunk/sack_chunk.h"
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/testing/data_generator.h"
+#include "net/dcsctp/timer/fake_timeout.h"
+#include "net/dcsctp/timer/timer.h"
+#include "net/dcsctp/tx/mock_send_queue.h"
+#include "net/dcsctp/tx/send_queue.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+using ::testing::MockFunction;
+using State = ::dcsctp::RetransmissionQueue::State;
+using ::testing::_;
+using ::testing::ElementsAre;
+using ::testing::IsEmpty;
+using ::testing::NiceMock;
+using ::testing::Pair;
+using ::testing::Return;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAre;
+
+constexpr uint32_t kArwnd = 100000;
+constexpr uint32_t kMaxMtu = 1191;
+
+class RetransmissionQueueTest : public testing::Test {
+ protected:
+ RetransmissionQueueTest()
+ : gen_(MID(42)),
+ timeout_manager_([this]() { return now_; }),
+ timer_manager_([this]() { return timeout_manager_.CreateTimeout(); }),
+ timer_(timer_manager_.CreateTimer(
+ "test/t3_rtx",
+ []() { return absl::nullopt; },
+ TimerOptions(DurationMs(0)))) {}
+
+ std::function<SendQueue::DataToSend(TimeMs, size_t)> CreateChunk() {
+ return [this](TimeMs now, size_t max_size) {
+ return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE"));
+ };
+ }
+
+ std::vector<TSN> GetSentPacketTSNs(RetransmissionQueue& queue) {
+ std::vector<TSN> tsns;
+ for (const auto& elem : queue.GetChunksToSend(now_, 10000)) {
+ tsns.push_back(elem.first);
+ }
+ return tsns;
+ }
+
+ RetransmissionQueue CreateQueue(bool supports_partial_reliability = true,
+ bool use_message_interleaving = false) {
+ DcSctpOptions options;
+ options.mtu = kMaxMtu;
+ return RetransmissionQueue(
+ "", TSN(10), kArwnd, producer_, on_rtt_.AsStdFunction(),
+ on_clear_retransmission_counter_.AsStdFunction(), *timer_, options,
+ supports_partial_reliability, use_message_interleaving);
+ }
+
+ DataGenerator gen_;
+ TimeMs now_ = TimeMs(0);
+ FakeTimeoutManager timeout_manager_;
+ TimerManager timer_manager_;
+ NiceMock<MockFunction<void(DurationMs rtt_ms)>> on_rtt_;
+ NiceMock<MockFunction<void()>> on_clear_retransmission_counter_;
+ NiceMock<MockSendQueue> producer_;
+ std::unique_ptr<Timer> timer_;
+};
+
+TEST_F(RetransmissionQueueTest, InitialAckedPrevTsn) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked)));
+}
+
+TEST_F(RetransmissionQueueTest, SendOneChunk) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10)));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+}
+
+TEST_F(RetransmissionQueueTest, SendOneChunkAndAck) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10)));
+
+ queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {}));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(10), State::kAcked)));
+}
+
+TEST_F(RetransmissionQueueTest, SendThreeChunksAndAckTwo) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue),
+ testing::ElementsAre(TSN(10), TSN(11), TSN(12)));
+
+ queue.HandleSack(now_, SackChunk(TSN(11), kArwnd, {}, {}));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(11), State::kAcked), //
+ Pair(TSN(12), State::kInFlight)));
+}
+
+TEST_F(RetransmissionQueueTest, AckWithGapBlocksFromRFC4960Section334) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue),
+ testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14),
+ TSN(15), TSN(16), TSN(17)));
+
+ queue.HandleSack(now_, SackChunk(TSN(12), kArwnd,
+ {SackChunk::GapAckBlock(2, 3),
+ SackChunk::GapAckBlock(5, 5)},
+ {}));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(12), State::kAcked), //
+ Pair(TSN(13), State::kNacked), //
+ Pair(TSN(14), State::kAcked), //
+ Pair(TSN(15), State::kAcked), //
+ Pair(TSN(16), State::kNacked), //
+ Pair(TSN(17), State::kAcked)));
+}
+
+TEST_F(RetransmissionQueueTest, ResendPacketsWhenNackedThreeTimes) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue),
+ testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14),
+ TSN(15), TSN(16), TSN(17)));
+
+ // Send more chunks, but leave some as gaps to force retransmission after
+ // three NACKs.
+
+ // Send 18
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+ EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(18)));
+
+ // Ack 12, 14-15, 17-18
+ queue.HandleSack(now_, SackChunk(TSN(12), kArwnd,
+ {SackChunk::GapAckBlock(2, 3),
+ SackChunk::GapAckBlock(5, 6)},
+ {}));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(12), State::kAcked), //
+ Pair(TSN(13), State::kNacked), //
+ Pair(TSN(14), State::kAcked), //
+ Pair(TSN(15), State::kAcked), //
+ Pair(TSN(16), State::kNacked), //
+ Pair(TSN(17), State::kAcked), //
+ Pair(TSN(18), State::kAcked)));
+
+ // Send 19
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+ EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(19)));
+
+ // Ack 12, 14-15, 17-19
+ queue.HandleSack(now_, SackChunk(TSN(12), kArwnd,
+ {SackChunk::GapAckBlock(2, 3),
+ SackChunk::GapAckBlock(5, 7)},
+ {}));
+
+ // Send 20
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+ EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(20)));
+
+ // Ack 12, 14-15, 17-20
+ queue.HandleSack(now_, SackChunk(TSN(12), kArwnd,
+ {SackChunk::GapAckBlock(2, 3),
+ SackChunk::GapAckBlock(5, 8)},
+ {}));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(12), State::kAcked), //
+ Pair(TSN(13), State::kToBeRetransmitted), //
+ Pair(TSN(14), State::kAcked), //
+ Pair(TSN(15), State::kAcked), //
+ Pair(TSN(16), State::kToBeRetransmitted), //
+ Pair(TSN(17), State::kAcked), //
+ Pair(TSN(18), State::kAcked), //
+ Pair(TSN(19), State::kAcked), //
+ Pair(TSN(20), State::kAcked)));
+
+ // This will trigger "fast retransmit" mode and only chunks 13 and 16 will be
+ // resent right now. The send queue will not even be queried.
+ EXPECT_CALL(producer_, Produce).Times(0);
+
+ EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(13), TSN(16)));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(12), State::kAcked), //
+ Pair(TSN(13), State::kInFlight), //
+ Pair(TSN(14), State::kAcked), //
+ Pair(TSN(15), State::kAcked), //
+ Pair(TSN(16), State::kInFlight), //
+ Pair(TSN(17), State::kAcked), //
+ Pair(TSN(18), State::kAcked), //
+ Pair(TSN(19), State::kAcked), //
+ Pair(TSN(20), State::kAcked)));
+}
+
+TEST_F(RetransmissionQueueTest, CanOnlyProduceTwoPacketsButWantsToSendThree) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE"));
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE"));
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _), Pair(TSN(11), _)));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight), //
+ Pair(TSN(11), State::kInFlight)));
+}
+
+TEST_F(RetransmissionQueueTest, RetransmitsOnT3Expiry) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE"));
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+
+ // Will force chunks to be retransmitted
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kToBeRetransmitted)));
+
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kToBeRetransmitted)));
+
+ std::vector<std::pair<TSN, Data>> chunks_to_rtx =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_rtx, ElementsAre(Pair(TSN(10), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+}
+
+TEST_F(RetransmissionQueueTest, LimitedRetransmissionOnlyWithRfc3758Support) {
+ RetransmissionQueue queue =
+ CreateQueue(/*supports_partial_reliability=*/false);
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+
+ // Will force chunks to be retransmitted
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kToBeRetransmitted)));
+
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .Times(0);
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+}
+
+TEST_F(RetransmissionQueueTest, LimitsRetransmissionsAsUdp) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+
+ // Will force chunks to be retransmitted
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kToBeRetransmitted)));
+
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .Times(1);
+
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kAbandoned)));
+
+ std::vector<std::pair<TSN, Data>> chunks_to_rtx =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_rtx, testing::IsEmpty());
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kAbandoned)));
+}
+
+TEST_F(RetransmissionQueueTest, LimitsRetransmissionsToThreeSends) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE"));
+ dts.max_retransmissions = 3;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .Times(0);
+
+ // Retransmission 1
+ queue.HandleT3RtxTimerExpiry();
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+ EXPECT_THAT(queue.GetChunksToSend(now_, 1000), SizeIs(1));
+
+ // Retransmission 2
+ queue.HandleT3RtxTimerExpiry();
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+ EXPECT_THAT(queue.GetChunksToSend(now_, 1000), SizeIs(1));
+
+ // Retransmission 3
+ queue.HandleT3RtxTimerExpiry();
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+ EXPECT_THAT(queue.GetChunksToSend(now_, 1000), SizeIs(1));
+
+ // Retransmission 4 - not allowed.
+ queue.HandleT3RtxTimerExpiry();
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .Times(1);
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+ EXPECT_THAT(queue.GetChunksToSend(now_, 1000), IsEmpty());
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kAbandoned)));
+}
+
+TEST_F(RetransmissionQueueTest, RetransmitsWhenSendBufferIsFullT3Expiry) {
+ RetransmissionQueue queue = CreateQueue();
+ static constexpr size_t kCwnd = 1200;
+ queue.set_cwnd(kCwnd);
+ EXPECT_EQ(queue.cwnd(), kCwnd);
+ EXPECT_EQ(queue.outstanding_bytes(), 0u);
+
+ std::vector<uint8_t> payload(1000);
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this, payload](TimeMs, size_t) {
+ return SendQueue::DataToSend(gen_.Ordered(payload, "BE"));
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1500);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+ EXPECT_EQ(queue.outstanding_bytes(), payload.size() + DataChunk::kHeaderSize);
+
+ // Will force chunks to be retransmitted
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kToBeRetransmitted)));
+ EXPECT_EQ(queue.outstanding_bytes(), 0u);
+
+ std::vector<std::pair<TSN, Data>> chunks_to_rtx =
+ queue.GetChunksToSend(now_, 1500);
+ EXPECT_THAT(chunks_to_rtx, ElementsAre(Pair(TSN(10), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight)));
+ EXPECT_EQ(queue.outstanding_bytes(), payload.size() + DataChunk::kHeaderSize);
+}
+
+TEST_F(RetransmissionQueueTest, ProducesValidForwardTsn) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, ""));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, ""));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ // Send three chunks (TSN 10 to 12).
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _), Pair(TSN(11), _),
+ Pair(TSN(12), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight), //
+ Pair(TSN(11), State::kInFlight), //
+ Pair(TSN(12), State::kInFlight)));
+
+ // Chunk 10 is acked, but the remaining ones are lost.
+ queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {}));
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(10), State::kAcked), //
+ Pair(TSN(11), State::kToBeRetransmitted), //
+ Pair(TSN(12), State::kToBeRetransmitted)));
+
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .WillOnce(Return(true));
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+
+ // NOTE: The TSN=13 represents the end fragment.
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(10), State::kAcked), //
+ Pair(TSN(11), State::kAbandoned), //
+ Pair(TSN(12), State::kAbandoned), //
+ Pair(TSN(13), State::kAbandoned)));
+
+ ForwardTsnChunk forward_tsn = queue.CreateForwardTsn();
+ EXPECT_EQ(forward_tsn.new_cumulative_tsn(), TSN(13));
+ EXPECT_THAT(forward_tsn.skipped_streams(),
+ UnorderedElementsAre(
+ ForwardTsnChunk::SkippedStream(StreamID(1), SSN(42))));
+}
+
+TEST_F(RetransmissionQueueTest, ProducesValidForwardTsnWhenFullySent) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, ""));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, "E"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ // Send three chunks (TSN 10 to 12).
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _), Pair(TSN(11), _),
+ Pair(TSN(12), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight), //
+ Pair(TSN(11), State::kInFlight), //
+ Pair(TSN(12), State::kInFlight)));
+
+ // Chunk 10 is acked, but the remaining ones are lost.
+ queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {}));
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(10), State::kAcked), //
+ Pair(TSN(11), State::kToBeRetransmitted), //
+ Pair(TSN(12), State::kToBeRetransmitted)));
+
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .WillOnce(Return(false));
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+
+ // NOTE: No additional TSN representing the end fragment, as that's TSN=12.
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(10), State::kAcked), //
+ Pair(TSN(11), State::kAbandoned), //
+ Pair(TSN(12), State::kAbandoned)));
+
+ ForwardTsnChunk forward_tsn = queue.CreateForwardTsn();
+ EXPECT_EQ(forward_tsn.new_cumulative_tsn(), TSN(12));
+ EXPECT_THAT(forward_tsn.skipped_streams(),
+ UnorderedElementsAre(
+ ForwardTsnChunk::SkippedStream(StreamID(1), SSN(42))));
+}
+
+TEST_F(RetransmissionQueueTest, ProducesValidIForwardTsn) {
+ RetransmissionQueue queue = CreateQueue(/*use_message_interleaving=*/true);
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ DataGeneratorOptions opts;
+ opts.stream_id = StreamID(1);
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B", opts));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ DataGeneratorOptions opts;
+ opts.stream_id = StreamID(2);
+ SendQueue::DataToSend dts(gen_.Unordered({1, 2, 3, 4}, "B", opts));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ DataGeneratorOptions opts;
+ opts.stream_id = StreamID(3);
+ SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, "B", opts));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ DataGeneratorOptions opts;
+ opts.stream_id = StreamID(4);
+ SendQueue::DataToSend dts(gen_.Ordered({13, 14, 15, 16}, "B", opts));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _), Pair(TSN(11), _),
+ Pair(TSN(12), _), Pair(TSN(13), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight), //
+ Pair(TSN(11), State::kInFlight), //
+ Pair(TSN(12), State::kInFlight), //
+ Pair(TSN(13), State::kInFlight)));
+
+ // Chunk 13 is acked, but the remaining ones are lost.
+ queue.HandleSack(
+ now_, SackChunk(TSN(9), kArwnd, {SackChunk::GapAckBlock(4, 4)}, {}));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kNacked), //
+ Pair(TSN(11), State::kNacked), //
+ Pair(TSN(12), State::kNacked), //
+ Pair(TSN(13), State::kAcked)));
+
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kToBeRetransmitted), //
+ Pair(TSN(11), State::kToBeRetransmitted), //
+ Pair(TSN(12), State::kToBeRetransmitted), //
+ Pair(TSN(13), State::kAcked)));
+
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(producer_, Discard(IsUnordered(true), StreamID(2), MID(42)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(3), MID(42)))
+ .WillOnce(Return(true));
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kAbandoned), //
+ Pair(TSN(11), State::kAbandoned), //
+ Pair(TSN(12), State::kAbandoned), //
+ Pair(TSN(13), State::kAcked),
+ // Representing end fragments of stream 1-3
+ Pair(TSN(14), State::kAbandoned), //
+ Pair(TSN(15), State::kAbandoned), //
+ Pair(TSN(16), State::kAbandoned)));
+
+ IForwardTsnChunk forward_tsn1 = queue.CreateIForwardTsn();
+ EXPECT_EQ(forward_tsn1.new_cumulative_tsn(), TSN(12));
+ EXPECT_THAT(
+ forward_tsn1.skipped_streams(),
+ UnorderedElementsAre(IForwardTsnChunk::SkippedStream(
+ IsUnordered(false), StreamID(1), MID(42)),
+ IForwardTsnChunk::SkippedStream(
+ IsUnordered(true), StreamID(2), MID(42)),
+ IForwardTsnChunk::SkippedStream(
+ IsUnordered(false), StreamID(3), MID(42))));
+
+ // When TSN 13 is acked, the placeholder "end fragments" must be skipped as
+ // well.
+
+ // A receiver would more likely ack TSN 13 directly, but here it is acked
+ // incrementally.
+ queue.HandleSack(now_, SackChunk(TSN(12), kArwnd, {}, {}));
+
+ EXPECT_CALL(producer_, Discard).Times(0);
+ EXPECT_FALSE(queue.ShouldSendForwardTsn(now_));
+
+ queue.HandleSack(now_, SackChunk(TSN(13), kArwnd, {}, {}));
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(13), State::kAcked), //
+ Pair(TSN(14), State::kAbandoned), //
+ Pair(TSN(15), State::kAbandoned), //
+ Pair(TSN(16), State::kAbandoned)));
+
+ IForwardTsnChunk forward_tsn2 = queue.CreateIForwardTsn();
+ EXPECT_EQ(forward_tsn2.new_cumulative_tsn(), TSN(16));
+ EXPECT_THAT(
+ forward_tsn2.skipped_streams(),
+ UnorderedElementsAre(IForwardTsnChunk::SkippedStream(
+ IsUnordered(false), StreamID(1), MID(42)),
+ IForwardTsnChunk::SkippedStream(
+ IsUnordered(true), StreamID(2), MID(42)),
+ IForwardTsnChunk::SkippedStream(
+ IsUnordered(false), StreamID(3), MID(42))));
+}
+
+TEST_F(RetransmissionQueueTest, MeasureRTT) {
+ RetransmissionQueue queue = CreateQueue(/*use_message_interleaving=*/true);
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _)));
+
+ now_ = now_ + DurationMs(123);
+
+ EXPECT_CALL(on_rtt_, Call(DurationMs(123))).Times(1);
+ queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {}));
+}
+
+TEST_F(RetransmissionQueueTest, ValidateCumTsnAtRest) {
+ RetransmissionQueue queue = CreateQueue(/*use_message_interleaving=*/true);
+
+ EXPECT_FALSE(queue.HandleSack(now_, SackChunk(TSN(8), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(9), kArwnd, {}, {})));
+ EXPECT_FALSE(queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})));
+}
+
+TEST_F(RetransmissionQueueTest, ValidateCumTsnAckOnInflightData) {
+ RetransmissionQueue queue = CreateQueue();
+
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue),
+ testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14),
+ TSN(15), TSN(16), TSN(17)));
+
+ EXPECT_FALSE(queue.HandleSack(now_, SackChunk(TSN(8), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(9), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(11), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(12), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(13), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(14), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(15), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(16), kArwnd, {}, {})));
+ EXPECT_TRUE(queue.HandleSack(now_, SackChunk(TSN(17), kArwnd, {}, {})));
+ EXPECT_FALSE(queue.HandleSack(now_, SackChunk(TSN(18), kArwnd, {}, {})));
+}
+
+TEST_F(RetransmissionQueueTest, HandleGapAckBlocksMatchingNoInflightData) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue),
+ testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14),
+ TSN(15), TSN(16), TSN(17)));
+
+ // Ack 9, 20-25. This is an invalid SACK, but should still be handled.
+ queue.HandleSack(
+ now_, SackChunk(TSN(9), kArwnd, {SackChunk::GapAckBlock(11, 16)}, {}));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight), //
+ Pair(TSN(11), State::kInFlight), //
+ Pair(TSN(12), State::kInFlight), //
+ Pair(TSN(13), State::kInFlight), //
+ Pair(TSN(14), State::kInFlight), //
+ Pair(TSN(15), State::kInFlight), //
+ Pair(TSN(16), State::kInFlight), //
+ Pair(TSN(17), State::kInFlight)));
+}
+
+TEST_F(RetransmissionQueueTest, HandleInvalidGapAckBlocks) {
+ RetransmissionQueue queue = CreateQueue();
+
+ // Nothing produced - nothing in retransmission queue
+
+ // Ack 9, 12-13
+ queue.HandleSack(
+ now_, SackChunk(TSN(9), kArwnd, {SackChunk::GapAckBlock(3, 4)}, {}));
+
+ // Gap ack blocks are just ignored.
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked)));
+}
+
+TEST_F(RetransmissionQueueTest, GapAckBlocksDoNotMoveCumTsnAck) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillOnce(CreateChunk())
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ EXPECT_THAT(GetSentPacketTSNs(queue),
+ testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14),
+ TSN(15), TSN(16), TSN(17)));
+
+ // Ack 9, 10-14. This is actually an invalid ACK as the first gap can't be
+ // adjacent to the cum-tsn-ack, but it's not strictly forbidden. However, the
+ // cum-tsn-ack should not move, as the gap-ack-blocks are just advisory.
+ queue.HandleSack(
+ now_, SackChunk(TSN(9), kArwnd, {SackChunk::GapAckBlock(1, 5)}, {}));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kAcked), //
+ Pair(TSN(11), State::kAcked), //
+ Pair(TSN(12), State::kAcked), //
+ Pair(TSN(13), State::kAcked), //
+ Pair(TSN(14), State::kAcked), //
+ Pair(TSN(15), State::kInFlight), //
+ Pair(TSN(16), State::kInFlight), //
+ Pair(TSN(17), State::kInFlight)));
+}
+
+TEST_F(RetransmissionQueueTest, StaysWithinAvailableSize) {
+ RetransmissionQueue queue = CreateQueue();
+
+ // See SctpPacketTest::ReturnsCorrectSpaceAvailableToStayWithinMTU for the
+ // magic numbers in this test.
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t size) {
+ EXPECT_EQ(size, 1176 - DataChunk::kHeaderSize);
+
+ std::vector<uint8_t> payload(183);
+ return SendQueue::DataToSend(gen_.Ordered(payload, "BE"));
+ })
+ .WillOnce([this](TimeMs, size_t size) {
+ EXPECT_EQ(size, 976 - DataChunk::kHeaderSize);
+
+ std::vector<uint8_t> payload(957);
+ return SendQueue::DataToSend(gen_.Ordered(payload, "BE"));
+ });
+
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1188 - 12);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _), Pair(TSN(11), _)));
+}
+
+TEST_F(RetransmissionQueueTest, AccountsInflightAbandonedChunksAsOutstanding) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, ""));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, ""));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ // Send three chunks (TSN 10 to 12).
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _), Pair(TSN(11), _),
+ Pair(TSN(12), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight), //
+ Pair(TSN(11), State::kInFlight), //
+ Pair(TSN(12), State::kInFlight)));
+ EXPECT_EQ(queue.outstanding_bytes(), (16 + 4) * 3u);
+
+ // Discard the message while it was outstanding.
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .Times(1);
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kAbandoned), //
+ Pair(TSN(11), State::kAbandoned), //
+ Pair(TSN(12), State::kAbandoned)));
+ EXPECT_EQ(queue.outstanding_bytes(), (16 + 4) * 3u);
+
+ // Now ACK those, one at a time.
+ queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {}));
+ EXPECT_EQ(queue.outstanding_bytes(), (16 + 4) * 2u);
+
+ queue.HandleSack(now_, SackChunk(TSN(11), kArwnd, {}, {}));
+ EXPECT_EQ(queue.outstanding_bytes(), (16 + 4) * 1u);
+
+ queue.HandleSack(now_, SackChunk(TSN(12), kArwnd, {}, {}));
+ EXPECT_EQ(queue.outstanding_bytes(), 0u);
+}
+
+TEST_F(RetransmissionQueueTest, AccountsNackedAbandonedChunksAsNotOutstanding) {
+ RetransmissionQueue queue = CreateQueue();
+ EXPECT_CALL(producer_, Produce)
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B"));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, ""));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillOnce([this](TimeMs, size_t) {
+ SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, ""));
+ dts.max_retransmissions = 0;
+ return dts;
+ })
+ .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; });
+
+ // Send three chunks (TSN 10 to 12).
+ std::vector<std::pair<TSN, Data>> chunks_to_send =
+ queue.GetChunksToSend(now_, 1000);
+ EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _), Pair(TSN(11), _),
+ Pair(TSN(12), _)));
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kInFlight), //
+ Pair(TSN(11), State::kInFlight), //
+ Pair(TSN(12), State::kInFlight)));
+ EXPECT_EQ(queue.outstanding_bytes(), (16 + 4) * 3u);
+
+ // Mark the message as lost.
+ queue.HandleT3RtxTimerExpiry();
+
+ EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42)))
+ .Times(1);
+ EXPECT_TRUE(queue.ShouldSendForwardTsn(now_));
+
+ EXPECT_THAT(queue.GetChunkStatesForTesting(),
+ ElementsAre(Pair(TSN(9), State::kAcked), //
+ Pair(TSN(10), State::kAbandoned), //
+ Pair(TSN(11), State::kAbandoned), //
+ Pair(TSN(12), State::kAbandoned)));
+ EXPECT_EQ(queue.outstanding_bytes(), 0u);
+
+ // Now ACK those, one at a time.
+ queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {}));
+ EXPECT_EQ(queue.outstanding_bytes(), 0u);
+
+ queue.HandleSack(now_, SackChunk(TSN(11), kArwnd, {}, {}));
+ EXPECT_EQ(queue.outstanding_bytes(), 0u);
+
+ queue.HandleSack(now_, SackChunk(TSN(12), kArwnd, {}, {}));
+ EXPECT_EQ(queue.outstanding_bytes(), 0u);
+}
+
+} // namespace
+} // namespace dcsctp
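Several of the tests above (e.g. ResendPacketsWhenNackedThreeTimes) exercise the RFC 4960 section 7.2.4 rule that a chunk becomes eligible for fast retransmission once it has been reported missing three times by gap ack blocks. A minimal sketch of that per-chunk bookkeeping is shown below; ChunkState and Nack() are hypothetical names and not the TxData implementation in the header above.

    #include <cstdint>

    // Hypothetical per-chunk nack bookkeeping illustrating the "three missed
    // reports" rule exercised by ResendPacketsWhenNackedThreeTimes.
    struct ChunkState {
      uint8_t nack_count = 0;
      bool to_be_retransmitted = false;

      // Called when a SACK reports this chunk as missing (not covered by the
      // cumulative TSN ack or any gap ack block). Returns true when the chunk
      // should be marked for (fast) retransmission.
      bool Nack() {
        if (++nack_count >= 3 && !to_be_retransmitted) {
          to_be_retransmitted = true;
          return true;
        }
        return false;
      }
    };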
diff --git a/net/dcsctp/tx/retransmission_timeout.cc b/net/dcsctp/tx/retransmission_timeout.cc
new file mode 100644
index 0000000000..7d545a07d0
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_timeout.cc
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/retransmission_timeout.h"
+
+#include <cmath>
+#include <cstdint>
+
+#include "net/dcsctp/public/dcsctp_options.h"
+
+namespace dcsctp {
+namespace {
+// https://tools.ietf.org/html/rfc4960#section-15
+constexpr double kRtoAlpha = 0.125;
+constexpr double kRtoBeta = 0.25;
+} // namespace
+
+RetransmissionTimeout::RetransmissionTimeout(const DcSctpOptions& options)
+ : min_rto_(*options.rto_min),
+ max_rto_(*options.rto_max),
+ max_rtt_(*options.rtt_max),
+ rto_(*options.rto_initial) {}
+
+void RetransmissionTimeout::ObserveRTT(DurationMs measured_rtt) {
+ double rtt = *measured_rtt;
+
+ // Unrealistic values will be skipped. If a wrongly measured (or otherwise
+ // corrupt) value were processed, it could change the state in a way that
+ // would take a very long time to recover from.
+ if (rtt < 0.0 || rtt > max_rtt_) {
+ return;
+ }
+
+ if (first_measurement_) {
+ // https://tools.ietf.org/html/rfc4960#section-6.3.1
+ // "When the first RTT measurement R is made, set
+ // SRTT <- R,
+ // RTTVAR <- R/2, and
+ // RTO <- SRTT + 4 * RTTVAR."
+ srtt_ = rtt;
+ rttvar_ = rtt * 0.5;
+ rto_ = srtt_ + 4 * rttvar_;
+ first_measurement_ = false;
+ } else {
+ // https://tools.ietf.org/html/rfc4960#section-6.3.1
+ // "When a new RTT measurement R' is made, set
+ // RTTVAR <- (1 - RTO.Beta) * RTTVAR + RTO.Beta * |SRTT - R'|
+ // SRTT <- (1 - RTO.Alpha) * SRTT + RTO.Alpha * R'
+ // RTO <- SRTT + 4 * RTTVAR."
+ rttvar_ = (1 - kRtoBeta) * rttvar_ + kRtoBeta * std::abs(srtt_ - rtt);
+ srtt_ = (1 - kRtoAlpha) * srtt_ + kRtoAlpha * rtt;
+ rto_ = srtt_ + 4 * rttvar_;
+ }
+
+ // If the RTO becomes smaller than or equal to the RTT, expiration timers
+ // will be scheduled at the same time as packets are expected. This only
+ // happens with extremely stable RTTs, e.g. in simulations.
+ rto_ = std::fmax(rto_, rtt + 1);
+
+ // Clamp RTO between min and max.
+ rto_ = std::fmin(std::fmax(rto_, min_rto_), max_rto_);
+}
+} // namespace dcsctp
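The arithmetic in ObserveRTT can be verified by hand against the unit tests further down. For a first measurement R = 124 ms: SRTT = 124, RTTVAR = 62, RTO = 124 + 4 * 62 = 372 ms. A second measurement R' = 128 ms gives RTTVAR = 0.75 * 62 + 0.25 * |124 - 128| = 47.5, SRTT = 0.875 * 124 + 0.125 * 128 = 124.5 and RTO = 124.5 + 4 * 47.5 = 314.5 ms, which is reported as the 372 and 314 expected by CalculatesRtoForStableRtt. The standalone sketch below reproduces just this arithmetic; it is not the class itself.

    #include <cmath>
    #include <cstdio>

    // Reproduces the RFC 4960 section 6.3.1 RTO arithmetic for the first two
    // samples of the "stable RTT" unit test (372 ms, then 314 ms).
    int main() {
      const double kAlpha = 0.125;
      const double kBeta = 0.25;

      // First measurement: R = 124 ms.
      double srtt = 124.0;
      double rttvar = srtt / 2.0;        // 62
      double rto = srtt + 4.0 * rttvar;  // 372
      std::printf("rto = %d ms\n", static_cast<int>(rto));

      // Second measurement: R' = 128 ms.
      double rtt = 128.0;
      rttvar = (1 - kBeta) * rttvar + kBeta * std::fabs(srtt - rtt);  // 47.5
      srtt = (1 - kAlpha) * srtt + kAlpha * rtt;                      // 124.5
      rto = srtt + 4.0 * rttvar;                                      // 314.5
      std::printf("rto = %d ms\n", static_cast<int>(rto));            // 314
      return 0;
    }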
diff --git a/net/dcsctp/tx/retransmission_timeout.h b/net/dcsctp/tx/retransmission_timeout.h
new file mode 100644
index 0000000000..0fac33e59c
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_timeout.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_TX_RETRANSMISSION_TIMEOUT_H_
+#define NET_DCSCTP_TX_RETRANSMISSION_TIMEOUT_H_
+
+#include <cstdint>
+#include <functional>
+
+#include "net/dcsctp/public/dcsctp_options.h"
+
+namespace dcsctp {
+
+// Manages updating of the Retransmission Timeout (RTO) SCTP variable, which is
+// used directly as the base timeout for T3-RTX and for other timers, such as
+// delayed ack.
+//
+// When a round-trip-time (RTT) is calculated (outside this class),
+// `ObserveRTT` is called, which recalculates the retransmission timeout (RTO)
+// value. The RTO value will become larger if the RTT is high and/or if the
+// RTT values vary a lot, which is an indicator of a bad connection.
+class RetransmissionTimeout {
+ public:
+ explicit RetransmissionTimeout(const DcSctpOptions& options);
+
+ // To be called when an RTT has been measured, to update the RTO value.
+ void ObserveRTT(DurationMs measured_rtt);
+
+ // Returns the Retransmission Timeout (RTO) value, in milliseconds.
+ DurationMs rto() const { return DurationMs(rto_); }
+
+ // Returns the smoothed RTT value, in milliseconds.
+ DurationMs srtt() const { return DurationMs(srtt_); }
+
+ private:
+ // Note that all intermediate state calculations are done in the floating
+ // point domain, to maintain precision.
+ const double min_rto_;
+ const double max_rto_;
+ const double max_rtt_;
+ // If this is the first measurement
+ bool first_measurement_ = true;
+ // Smoothed Round-Trip Time
+ double srtt_ = 0.0;
+ // Round-Trip Time Variation
+ double rttvar_ = 0.0;
+ // Retransmission Timeout
+ double rto_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_TX_RETRANSMISSION_TIMEOUT_H_
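A usage sketch, mirroring the unit tests that follow: the class is constructed from DcSctpOptions, starts at rto_initial, and is fed RTT samples as they are measured. The option values are the ones the tests use; the wrapper function name is made up for illustration.

    #include "net/dcsctp/public/dcsctp_options.h"
    #include "net/dcsctp/tx/retransmission_timeout.h"

    // Usage sketch of RetransmissionTimeout, mirroring the unit tests below.
    dcsctp::DurationMs RtoAfterTwoSamples() {
      dcsctp::DcSctpOptions options;
      options.rtt_max = dcsctp::DurationMs(8'000);
      options.rto_initial = dcsctp::DurationMs(200);
      options.rto_max = dcsctp::DurationMs(800);
      options.rto_min = dcsctp::DurationMs(120);

      dcsctp::RetransmissionTimeout rto(options);
      rto.ObserveRTT(dcsctp::DurationMs(124));  // RTO becomes 372 ms.
      rto.ObserveRTT(dcsctp::DurationMs(128));  // RTO becomes 314 ms.
      return rto.rto();
    }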
diff --git a/net/dcsctp/tx/retransmission_timeout_test.cc b/net/dcsctp/tx/retransmission_timeout_test.cc
new file mode 100644
index 0000000000..3b2e3399fe
--- /dev/null
+++ b/net/dcsctp/tx/retransmission_timeout_test.cc
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/retransmission_timeout.h"
+
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+
+constexpr DurationMs kMaxRtt = DurationMs(8'000);
+constexpr DurationMs kInitialRto = DurationMs(200);
+constexpr DurationMs kMaxRto = DurationMs(800);
+constexpr DurationMs kMinRto = DurationMs(120);
+
+DcSctpOptions MakeOptions() {
+ DcSctpOptions options;
+ options.rtt_max = kMaxRtt;
+ options.rto_initial = kInitialRto;
+ options.rto_max = kMaxRto;
+ options.rto_min = kMinRto;
+ return options;
+}
+
+TEST(RetransmissionTimeoutTest, HasValidInitialRto) {
+ RetransmissionTimeout rto_(MakeOptions());
+ EXPECT_EQ(rto_.rto(), kInitialRto);
+}
+
+TEST(RetransmissionTimeoutTest, NegativeValuesDoNotAffectRTO) {
+ RetransmissionTimeout rto_(MakeOptions());
+ // Initial negative value
+ rto_.ObserveRTT(DurationMs(-10));
+ EXPECT_EQ(rto_.rto(), kInitialRto);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 372);
+ // Subsequent negative value
+ rto_.ObserveRTT(DurationMs(-10));
+ EXPECT_EQ(*rto_.rto(), 372);
+}
+
+TEST(RetransmissionTimeoutTest, TooLargeValuesDoNotAffectRTO) {
+ RetransmissionTimeout rto_(MakeOptions());
+ // Initial too large value
+ rto_.ObserveRTT(kMaxRtt + DurationMs(100));
+ EXPECT_EQ(rto_.rto(), kInitialRto);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 372);
+ // Subsequent too large value
+ rto_.ObserveRTT(kMaxRtt + DurationMs(100));
+ EXPECT_EQ(*rto_.rto(), 372);
+}
+
+TEST(RetransmissionTimeoutTest, WillNeverGoBelowMinimumRto) {
+ RetransmissionTimeout rto_(MakeOptions());
+ for (int i = 0; i < 1000; ++i) {
+ rto_.ObserveRTT(DurationMs(1));
+ }
+ EXPECT_GE(rto_.rto(), kMinRto);
+}
+
+TEST(RetransmissionTimeoutTest, WillNeverGoAboveMaximumRto) {
+ RetransmissionTimeout rto_(MakeOptions());
+ for (int i = 0; i < 1000; ++i) {
+ rto_.ObserveRTT(kMaxRtt - DurationMs(1));
+ // Adding jitter, which would make the RTO be well above the RTT.
+ rto_.ObserveRTT(kMaxRtt - DurationMs(100));
+ }
+ EXPECT_LE(rto_.rto(), kMaxRto);
+}
+
+TEST(RetransmissionTimeoutTest, CalculatesRtoForStableRtt) {
+ RetransmissionTimeout rto_(MakeOptions());
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 372);
+ rto_.ObserveRTT(DurationMs(128));
+ EXPECT_EQ(*rto_.rto(), 314);
+ rto_.ObserveRTT(DurationMs(123));
+ EXPECT_EQ(*rto_.rto(), 268);
+ rto_.ObserveRTT(DurationMs(125));
+ EXPECT_EQ(*rto_.rto(), 233);
+ rto_.ObserveRTT(DurationMs(127));
+ EXPECT_EQ(*rto_.rto(), 208);
+}
+
+TEST(RetransmissionTimeoutTest, CalculatesRtoForUnstableRtt) {
+ RetransmissionTimeout rto_(MakeOptions());
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 372);
+ rto_.ObserveRTT(DurationMs(402));
+ EXPECT_EQ(*rto_.rto(), 622);
+ rto_.ObserveRTT(DurationMs(728));
+ EXPECT_EQ(*rto_.rto(), 800);
+ rto_.ObserveRTT(DurationMs(89));
+ EXPECT_EQ(*rto_.rto(), 800);
+ rto_.ObserveRTT(DurationMs(126));
+ EXPECT_EQ(*rto_.rto(), 800);
+}
+
+TEST(RetransmissionTimeoutTest, WillStabilizeAfterAWhile) {
+ RetransmissionTimeout rto_(MakeOptions());
+ rto_.ObserveRTT(DurationMs(124));
+ rto_.ObserveRTT(DurationMs(402));
+ rto_.ObserveRTT(DurationMs(728));
+ rto_.ObserveRTT(DurationMs(89));
+ rto_.ObserveRTT(DurationMs(126));
+ EXPECT_EQ(*rto_.rto(), 800);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 800);
+ rto_.ObserveRTT(DurationMs(122));
+ EXPECT_EQ(*rto_.rto(), 709);
+ rto_.ObserveRTT(DurationMs(123));
+ EXPECT_EQ(*rto_.rto(), 630);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 561);
+ rto_.ObserveRTT(DurationMs(122));
+ EXPECT_EQ(*rto_.rto(), 504);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 453);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 409);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 372);
+ rto_.ObserveRTT(DurationMs(124));
+ EXPECT_EQ(*rto_.rto(), 339);
+}
+
+TEST(RetransmissionTimeoutTest, WillAlwaysStayAboveRTT) {
+ // In simulations, it's quite common to have a very stable RTT, and having an
+ // RTO at the same value will cause issues as expiry timers will be scheduled
+ // to expire exactly when a packet is supposed to arrive. The RTO must be
+ // larger than the RTT. In non-simulated environments, this is a non-issue as
+ // any jitter will increase the RTO.
+ RetransmissionTimeout rto_(MakeOptions());
+
+ for (int i = 0; i < 100; ++i) {
+ rto_.ObserveRTT(DurationMs(124));
+ }
+ EXPECT_GT(*rto_.rto(), 124);
+}
+
+} // namespace
+} // namespace dcsctp
diff --git a/net/dcsctp/tx/rr_send_queue.cc b/net/dcsctp/tx/rr_send_queue.cc
new file mode 100644
index 0000000000..4bfbaf718b
--- /dev/null
+++ b/net/dcsctp/tx/rr_send_queue.cc
@@ -0,0 +1,432 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/rr_send_queue.h"
+
+#include <cstdint>
+#include <deque>
+#include <map>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/tx/send_queue.h"
+#include "rtc_base/logging.h"
+
+namespace dcsctp {
+
+bool RRSendQueue::OutgoingStream::HasDataToSend(TimeMs now) {
+ while (!items_.empty()) {
+ RRSendQueue::OutgoingStream::Item& item = items_.front();
+ if (item.message_id.has_value()) {
+ // Already partially sent messages can always continue to be sent.
+ return true;
+ }
+
+ // Message has expired. Remove it and inspect the next one.
+ if (item.expires_at.has_value() && *item.expires_at <= now) {
+ buffered_amount_.Decrease(item.remaining_size);
+ total_buffered_amount_.Decrease(item.remaining_size);
+ items_.pop_front();
+ RTC_DCHECK(IsConsistent());
+ continue;
+ }
+
+ if (is_paused_) {
+ // The stream is paused (and there is no partially sent message).
+ return false;
+ }
+ return true;
+ }
+ return false;
+}
+
+bool RRSendQueue::IsConsistent() const {
+ size_t total_buffered_amount = 0;
+ for (const auto& stream_entry : streams_) {
+ total_buffered_amount += stream_entry.second.buffered_amount().value();
+ }
+
+ if (previous_message_has_ended_) {
+ auto it = streams_.find(current_stream_id_);
+ if (it != streams_.end() && it->second.has_partially_sent_message()) {
+ RTC_DLOG(LS_ERROR)
+ << "Previous message has ended, but still partial message in stream";
+ return false;
+ }
+ } else {
+ auto it = streams_.find(current_stream_id_);
+ if (it == streams_.end() || !it->second.has_partially_sent_message()) {
+ RTC_DLOG(LS_ERROR)
+ << "Previous message has NOT ended, but there is no partial message";
+ return false;
+ }
+ }
+
+ return total_buffered_amount == total_buffered_amount_.value();
+}
+
+bool RRSendQueue::OutgoingStream::IsConsistent() const {
+ size_t bytes = 0;
+ for (const auto& item : items_) {
+ bytes += item.remaining_size;
+ }
+ return bytes == buffered_amount_.value();
+}
+
+void RRSendQueue::ThresholdWatcher::Decrease(size_t bytes) {
+ RTC_DCHECK(bytes <= value_);
+ size_t old_value = value_;
+ value_ -= bytes;
+
+ if (old_value > low_threshold_ && value_ <= low_threshold_) {
+ on_threshold_reached_();
+ }
+}
+
+void RRSendQueue::ThresholdWatcher::SetLowThreshold(size_t low_threshold) {
+ // Betting on https://github.com/w3c/webrtc-pc/issues/2654 being accepted.
+ if (low_threshold_ < value_ && low_threshold >= value_) {
+ on_threshold_reached_();
+ }
+ low_threshold_ = low_threshold;
+}
+
+void RRSendQueue::OutgoingStream::Add(DcSctpMessage message,
+ absl::optional<TimeMs> expires_at,
+ const SendOptions& send_options) {
+ buffered_amount_.Increase(message.payload().size());
+ total_buffered_amount_.Increase(message.payload().size());
+ items_.emplace_back(std::move(message), expires_at, send_options);
+
+ RTC_DCHECK(IsConsistent());
+}
+
+absl::optional<SendQueue::DataToSend> RRSendQueue::OutgoingStream::Produce(
+ TimeMs now,
+ size_t max_size) {
+ RTC_DCHECK(!items_.empty());
+
+ Item* item = &items_.front();
+ DcSctpMessage& message = item->message;
+
+ if (item->remaining_size > max_size && max_size < kMinimumFragmentedPayload) {
+ RTC_DCHECK(IsConsistent());
+ return absl::nullopt;
+ }
+
+ // Allocate Message ID and SSN when the first fragment is sent.
+ if (!item->message_id.has_value()) {
+ MID& mid =
+ item->send_options.unordered ? next_unordered_mid_ : next_ordered_mid_;
+ item->message_id = mid;
+ mid = MID(*mid + 1);
+ }
+ if (!item->send_options.unordered && !item->ssn.has_value()) {
+ item->ssn = next_ssn_;
+ next_ssn_ = SSN(*next_ssn_ + 1);
+ }
+
+ // Grab the next `max_size` fragment from this message and calculate flags.
+ rtc::ArrayView<const uint8_t> chunk_payload =
+ item->message.payload().subview(item->remaining_offset, max_size);
+ rtc::ArrayView<const uint8_t> message_payload = message.payload();
+ Data::IsBeginning is_beginning(chunk_payload.data() ==
+ message_payload.data());
+ Data::IsEnd is_end((chunk_payload.data() + chunk_payload.size()) ==
+ (message_payload.data() + message_payload.size()));
+
+ StreamID stream_id = message.stream_id();
+ PPID ppid = message.ppid();
+
+ // Zero-copy the payload if the message fits in a single chunk.
+ std::vector<uint8_t> payload =
+ is_beginning && is_end
+ ? std::move(message).ReleasePayload()
+ : std::vector<uint8_t>(chunk_payload.begin(), chunk_payload.end());
+
+ FSN fsn(item->current_fsn);
+ item->current_fsn = FSN(*item->current_fsn + 1);
+ buffered_amount_.Decrease(payload.size());
+ total_buffered_amount_.Decrease(payload.size());
+
+ SendQueue::DataToSend chunk(Data(stream_id, item->ssn.value_or(SSN(0)),
+ item->message_id.value(), fsn, ppid,
+ std::move(payload), is_beginning, is_end,
+ item->send_options.unordered));
+ chunk.max_retransmissions = item->send_options.max_retransmissions;
+ chunk.expires_at = item->expires_at;
+
+ if (is_end) {
+ // The entire message has been sent, and its last data copied to `chunk`, so
+ // it can safely be discarded.
+ items_.pop_front();
+ } else {
+ item->remaining_offset += chunk_payload.size();
+ item->remaining_size -= chunk_payload.size();
+ RTC_DCHECK(item->remaining_offset + item->remaining_size ==
+ item->message.payload().size());
+ RTC_DCHECK(item->remaining_size > 0);
+ }
+ RTC_DCHECK(IsConsistent());
+ return chunk;
+}
+
+bool RRSendQueue::OutgoingStream::Discard(IsUnordered unordered,
+ MID message_id) {
+ bool result = false;
+ if (!items_.empty()) {
+ Item& item = items_.front();
+ if (item.send_options.unordered == unordered &&
+ item.message_id.has_value() && *item.message_id == message_id) {
+ buffered_amount_.Decrease(item.remaining_size);
+ total_buffered_amount_.Decrease(item.remaining_size);
+ items_.pop_front();
+ // As the item still existed, it had unsent data.
+ result = true;
+ }
+ }
+ RTC_DCHECK(IsConsistent());
+ return result;
+}
+
+void RRSendQueue::OutgoingStream::Pause() {
+ is_paused_ = true;
+
+ // A stream is paused when it's about to be reset. In this implementation,
+ // it will throw away all messages that haven't been partially sent. This is
+ // subject to change. It will however not discard any partially sent messages
+ // - only whole messages. Partially delivered messages (at the time of
+ // receiving a Stream Reset command) will always deliver all the fragments
+ // before actually resetting the stream.
+ for (auto it = items_.begin(); it != items_.end();) {
+ if (it->remaining_offset == 0) {
+ buffered_amount_.Decrease(it->remaining_size);
+ total_buffered_amount_.Decrease(it->remaining_size);
+ it = items_.erase(it);
+ } else {
+ ++it;
+ }
+ }
+ RTC_DCHECK(IsConsistent());
+}
+
+void RRSendQueue::OutgoingStream::Reset() {
+ if (!items_.empty()) {
+ // If this message has been partially sent, reset it so that it will be
+ // re-sent.
+ auto& item = items_.front();
+ buffered_amount_.Increase(item.message.payload().size() -
+ item.remaining_size);
+ total_buffered_amount_.Increase(item.message.payload().size() -
+ item.remaining_size);
+ item.remaining_offset = 0;
+ item.remaining_size = item.message.payload().size();
+ item.message_id = absl::nullopt;
+ item.ssn = absl::nullopt;
+ item.current_fsn = FSN(0);
+ }
+ is_paused_ = false;
+ next_ordered_mid_ = MID(0);
+ next_unordered_mid_ = MID(0);
+ next_ssn_ = SSN(0);
+ RTC_DCHECK(IsConsistent());
+}
+
+bool RRSendQueue::OutgoingStream::has_partially_sent_message() const {
+ if (items_.empty()) {
+ return false;
+ }
+ return items_.front().message_id.has_value();
+}
+
+void RRSendQueue::Add(TimeMs now,
+ DcSctpMessage message,
+ const SendOptions& send_options) {
+ RTC_DCHECK(!message.payload().empty());
+ // Any limited lifetime should start counting from now - when the message
+ // has been added to the queue.
+ absl::optional<TimeMs> expires_at = absl::nullopt;
+ if (send_options.lifetime.has_value()) {
+ // `expires_at` is the time when the message expires, which is slightly
+ // later than now + lifetime, as the message should be alive during its
+ // entire lifetime (which may be zero).
+ expires_at = now + *send_options.lifetime + DurationMs(1);
+ }
+ GetOrCreateStreamInfo(message.stream_id())
+ .Add(std::move(message), expires_at, send_options);
+ RTC_DCHECK(IsConsistent());
+}
+
+bool RRSendQueue::IsFull() const {
+ return total_buffered_amount() >= buffer_size_;
+}
+
+bool RRSendQueue::IsEmpty() const {
+ return total_buffered_amount() == 0;
+}
+
+std::map<StreamID, RRSendQueue::OutgoingStream>::iterator
+RRSendQueue::GetNextStream(TimeMs now) {
+ auto start_it = streams_.lower_bound(StreamID(*current_stream_id_ + 1));
+
+ for (auto it = start_it; it != streams_.end(); ++it) {
+ if (it->second.HasDataToSend(now)) {
+ current_stream_id_ = it->first;
+ return it;
+ }
+ }
+
+ for (auto it = streams_.begin(); it != start_it; ++it) {
+ if (it->second.HasDataToSend(now)) {
+ current_stream_id_ = it->first;
+ return it;
+ }
+ }
+ return streams_.end();
+}
+
+absl::optional<SendQueue::DataToSend> RRSendQueue::Produce(TimeMs now,
+ size_t max_size) {
+ std::map<StreamID, RRSendQueue::OutgoingStream>::iterator stream_it;
+
+ if (previous_message_has_ended_) {
+ // Previous message has ended. Round-robin to a different stream, if there
+ // even is one with data to send.
+ stream_it = GetNextStream(now);
+ if (stream_it == streams_.end()) {
+ RTC_DLOG(LS_VERBOSE)
+ << log_prefix_
+ << "There is no stream with data; Can't produce any data.";
+ return absl::nullopt;
+ }
+ } else {
+ // The previous message has not ended; Continue from the current stream.
+ stream_it = streams_.find(current_stream_id_);
+ RTC_DCHECK(stream_it != streams_.end());
+ }
+
+ absl::optional<DataToSend> data = stream_it->second.Produce(now, max_size);
+ if (data.has_value()) {
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Producing DATA, type="
+ << (data->data.is_unordered ? "unordered" : "ordered")
+ << "::"
+ << (*data->data.is_beginning && *data->data.is_end
+ ? "complete"
+ : *data->data.is_beginning
+ ? "first"
+ : *data->data.is_end ? "last" : "middle")
+ << ", stream_id=" << *stream_it->first
+ << ", ppid=" << *data->data.ppid
+ << ", length=" << data->data.payload.size();
+
+ previous_message_has_ended_ = *data->data.is_end;
+ }
+
+ RTC_DCHECK(IsConsistent());
+ return data;
+}
+
+bool RRSendQueue::Discard(IsUnordered unordered,
+ StreamID stream_id,
+ MID message_id) {
+ bool has_discarded =
+ GetOrCreateStreamInfo(stream_id).Discard(unordered, message_id);
+ if (has_discarded) {
+ // Only partially sent messages are discarded, so if a message was
+ // discarded, then it was the currently sent message.
+ previous_message_has_ended_ = true;
+ }
+
+ return has_discarded;
+}
+
+void RRSendQueue::PrepareResetStreams(rtc::ArrayView<const StreamID> streams) {
+ for (StreamID stream_id : streams) {
+ GetOrCreateStreamInfo(stream_id).Pause();
+ }
+ RTC_DCHECK(IsConsistent());
+}
+
+bool RRSendQueue::CanResetStreams() const {
+ // Streams can be reset if those streams that are paused don't have any
+ // messages that are partially sent.
+ for (auto& stream : streams_) {
+ if (stream.second.is_paused() &&
+ stream.second.has_partially_sent_message()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void RRSendQueue::CommitResetStreams() {
+ Reset();
+ RTC_DCHECK(IsConsistent());
+}
+
+void RRSendQueue::RollbackResetStreams() {
+ for (auto& stream_entry : streams_) {
+ stream_entry.second.Resume();
+ }
+ RTC_DCHECK(IsConsistent());
+}
+
+void RRSendQueue::Reset() {
+ // Recalculate buffered amount, as partially sent messages may have been put
+ // fully back in the queue.
+ for (auto& stream_entry : streams_) {
+ OutgoingStream& stream = stream_entry.second;
+ stream.Reset();
+ }
+ previous_message_has_ended_ = true;
+}
+
+size_t RRSendQueue::buffered_amount(StreamID stream_id) const {
+ auto it = streams_.find(stream_id);
+ if (it == streams_.end()) {
+ return 0;
+ }
+ return it->second.buffered_amount().value();
+}
+
+size_t RRSendQueue::buffered_amount_low_threshold(StreamID stream_id) const {
+ auto it = streams_.find(stream_id);
+ if (it == streams_.end()) {
+ return 0;
+ }
+ return it->second.buffered_amount().low_threshold();
+}
+
+void RRSendQueue::SetBufferedAmountLowThreshold(StreamID stream_id,
+ size_t bytes) {
+ GetOrCreateStreamInfo(stream_id).buffered_amount().SetLowThreshold(bytes);
+}
+
+RRSendQueue::OutgoingStream& RRSendQueue::GetOrCreateStreamInfo(
+ StreamID stream_id) {
+ auto it = streams_.find(stream_id);
+ if (it != streams_.end()) {
+ return it->second;
+ }
+
+ return streams_
+ .emplace(stream_id,
+ OutgoingStream(
+ [this, stream_id]() { on_buffered_amount_low_(stream_id); },
+ total_buffered_amount_))
+ .first->second;
+}
+} // namespace dcsctp
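The Produce/GetNextStream pair above implements the round-robin scheduling: a new stream is only selected once the previously produced message has ended, and a request is skipped entirely when the available space is too small to be worth fragmenting into. As a rough illustration of how a caller might drive this (the function name, the packet-budget handling and the omission of chunk headers are assumptions, not part of this patch):

```cpp
#include <vector>

#include "absl/types/optional.h"
#include "net/dcsctp/tx/rr_send_queue.h"

// Illustrative only: repeatedly pull chunks from an RRSendQueue until either
// the remaining packet budget is exhausted or the queue has nothing to send.
// `remaining` is the payload budget of the packet being built; chunk header
// sizes are ignored for brevity.
std::vector<dcsctp::SendQueue::DataToSend> FillPacket(
    dcsctp::RRSendQueue& queue, dcsctp::TimeMs now, size_t remaining) {
  std::vector<dcsctp::SendQueue::DataToSend> chunks;
  while (remaining > 0) {
    absl::optional<dcsctp::SendQueue::DataToSend> chunk =
        queue.Produce(now, remaining);
    if (!chunk.has_value()) {
      // Nothing to send, or the remaining space is smaller than
      // kMinimumFragmentedPayload and fragmenting wouldn't be worthwhile.
      break;
    }
    remaining -= chunk->data.payload.size();
    chunks.push_back(std::move(*chunk));
  }
  return chunks;
}
```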
diff --git a/net/dcsctp/tx/rr_send_queue.h b/net/dcsctp/tx/rr_send_queue.h
new file mode 100644
index 0000000000..3ec45af17d
--- /dev/null
+++ b/net/dcsctp/tx/rr_send_queue.h
@@ -0,0 +1,238 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_TX_RR_SEND_QUEUE_H_
+#define NET_DCSCTP_TX_RR_SEND_QUEUE_H_
+
+#include <cstdint>
+#include <deque>
+#include <map>
+#include <string>
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/pair_hash.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/tx/send_queue.h"
+
+namespace dcsctp {
+
+// The Round Robin SendQueue holds all messages that the client wants to send,
+// but that haven't yet been split into chunks and fully sent on the wire.
+//
+// As defined in https://datatracker.ietf.org/doc/html/rfc8260#section-3.2,
+// it will cycle to send messages from different streams. It will send all
+// fragments from one message before continuing with a different message on
+// possibly a different stream, until support for message interleaving has been
+// implemented.
+//
+// As messages can be (requested to be) sent before the connection is properly
+// established, this send queue is always present - even for closed connections.
+class RRSendQueue : public SendQueue {
+ public:
+ // The minimum size of a data chunk's payload when a message has to be
+ // fragmented.
+ static constexpr size_t kMinimumFragmentedPayload = 10;
+
+ RRSendQueue(absl::string_view log_prefix,
+ size_t buffer_size,
+ std::function<void(StreamID)> on_buffered_amount_low,
+ size_t total_buffered_amount_low_threshold,
+ std::function<void()> on_total_buffered_amount_low)
+ : log_prefix_(std::string(log_prefix) + "rr: "),
+ buffer_size_(buffer_size),
+ on_buffered_amount_low_(std::move(on_buffered_amount_low)),
+ total_buffered_amount_(std::move(on_total_buffered_amount_low)) {
+ total_buffered_amount_.SetLowThreshold(total_buffered_amount_low_threshold);
+ }
+
+ // Indicates if the buffer is full. Note that it's up to the caller to ensure
+ // that the buffer is not full prior to adding new items to it.
+ bool IsFull() const;
+ // Indicates if the buffer is empty.
+ bool IsEmpty() const;
+
+ // Adds the message to be sent using the `send_options` provided. The current
+ // time should be in `now`. Note that it's the responsibility of the caller to
+ // ensure that the buffer is not full (by calling `IsFull`) before adding
+ // messages to it.
+ void Add(TimeMs now,
+ DcSctpMessage message,
+ const SendOptions& send_options = {});
+
+ // Implementation of `SendQueue`.
+ absl::optional<DataToSend> Produce(TimeMs now, size_t max_size) override;
+ bool Discard(IsUnordered unordered,
+ StreamID stream_id,
+ MID message_id) override;
+ void PrepareResetStreams(rtc::ArrayView<const StreamID> streams) override;
+ bool CanResetStreams() const override;
+ void CommitResetStreams() override;
+ void RollbackResetStreams() override;
+ void Reset() override;
+ size_t buffered_amount(StreamID stream_id) const override;
+ size_t total_buffered_amount() const override {
+ return total_buffered_amount_.value();
+ }
+ size_t buffered_amount_low_threshold(StreamID stream_id) const override;
+ void SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) override;
+
+ private:
+ // Represents a value and a "low threshold". When the value transitions from
+ // above the low threshold to at or below it, the `on_threshold_reached`
+ // callback is triggered.
+ class ThresholdWatcher {
+ public:
+ explicit ThresholdWatcher(std::function<void()> on_threshold_reached)
+ : on_threshold_reached_(std::move(on_threshold_reached)) {}
+ // Increases the value.
+ void Increase(size_t bytes) { value_ += bytes; }
+ // Decreases the value and triggers `on_threshold_reached` if it's at or
+ // below `low_threshold()`.
+ void Decrease(size_t bytes);
+
+ size_t value() const { return value_; }
+ size_t low_threshold() const { return low_threshold_; }
+ void SetLowThreshold(size_t low_threshold);
+
+ private:
+ const std::function<void()> on_threshold_reached_;
+ size_t value_ = 0;
+ size_t low_threshold_ = 0;
+ };
+
+ // Per-stream information.
+ class OutgoingStream {
+ public:
+ explicit OutgoingStream(std::function<void()> on_buffered_amount_low,
+ ThresholdWatcher& total_buffered_amount)
+ : buffered_amount_(std::move(on_buffered_amount_low)),
+ total_buffered_amount_(total_buffered_amount) {}
+
+ // Enqueues a message to this stream.
+ void Add(DcSctpMessage message,
+ absl::optional<TimeMs> expires_at,
+ const SendOptions& send_options);
+
+ // Possibly produces a data chunk to send.
+ absl::optional<DataToSend> Produce(TimeMs now, size_t max_size);
+
+ const ThresholdWatcher& buffered_amount() const { return buffered_amount_; }
+ ThresholdWatcher& buffered_amount() { return buffered_amount_; }
+
+ // Discards a partially sent message, see `SendQueue::Discard`.
+ bool Discard(IsUnordered unordered, MID message_id);
+
+ // Pauses this stream, which is used before resetting it.
+ void Pause();
+
+ // Resumes a paused stream.
+ void Resume() { is_paused_ = false; }
+
+ bool is_paused() const { return is_paused_; }
+
+ // Resets this stream, meaning MIDs and SSNs are set to zero.
+ void Reset();
+
+ // Indicates if this stream has a partially sent message in it.
+ bool has_partially_sent_message() const;
+
+ // Indicates if the stream has data to send. It will also try to remove any
+ // expired non-partially sent message.
+ bool HasDataToSend(TimeMs now);
+
+ private:
+ // An enqueued message and metadata.
+ struct Item {
+ explicit Item(DcSctpMessage msg,
+ absl::optional<TimeMs> expires_at,
+ const SendOptions& send_options)
+ : message(std::move(msg)),
+ expires_at(expires_at),
+ send_options(send_options),
+ remaining_offset(0),
+ remaining_size(message.payload().size()) {}
+ DcSctpMessage message;
+ absl::optional<TimeMs> expires_at;
+ SendOptions send_options;
+ // The remaining payload (offset and size) to be sent, when it has been
+ // fragmented.
+ size_t remaining_offset;
+ size_t remaining_size;
+ // If set, an allocated Message ID and SSN. Will be allocated when the
+ // first fragment is sent.
+ absl::optional<MID> message_id = absl::nullopt;
+ absl::optional<SSN> ssn = absl::nullopt;
+ // The current Fragment Sequence Number, incremented for each fragment.
+ FSN current_fsn = FSN(0);
+ };
+
+ bool IsConsistent() const;
+
+ // Streams are paused when they are about to be reset.
+ bool is_paused_ = false;
+ // MIDs are different for unordered and ordered messages sent on a stream.
+ MID next_unordered_mid_ = MID(0);
+ MID next_ordered_mid_ = MID(0);
+
+ SSN next_ssn_ = SSN(0);
+ // Enqueued messages, and metadata.
+ std::deque<Item> items_;
+
+ // The current amount of buffered data.
+ ThresholdWatcher buffered_amount_;
+
+ // Reference to the total buffered amount, which is updated directly by each
+ // stream.
+ ThresholdWatcher& total_buffered_amount_;
+ };
+
+ bool IsConsistent() const;
+ OutgoingStream& GetOrCreateStreamInfo(StreamID stream_id);
+ absl::optional<DataToSend> Produce(
+ std::map<StreamID, OutgoingStream>::iterator it,
+ TimeMs now,
+ size_t max_size);
+
+ // Return the next stream, in round-robin fashion.
+ std::map<StreamID, OutgoingStream>::iterator GetNextStream(TimeMs now);
+
+ const std::string log_prefix_;
+ const size_t buffer_size_;
+
+ // Called when the buffered amount is below what has been set using
+ // `SetBufferedAmountLowThreshold`.
+ const std::function<void(StreamID)> on_buffered_amount_low_;
+
+ // Called when the total buffered amount falls below the
+ // `total_buffered_amount_low_threshold` passed to the constructor.
+ const std::function<void()> on_total_buffered_amount_low_;
+
+ // The total amount of buffer data, for all streams.
+ ThresholdWatcher total_buffered_amount_;
+
+ // Indicates if the previous fragment sent was the end of a message. For
+ // non-interleaved sending, this means that the next message may come from a
+ // different stream. If not true, the next fragment must be produced from the
+ // same stream as last time.
+ bool previous_message_has_ended_ = true;
+
+ // The current stream to send chunks from. Modified by `GetNextStream`.
+ StreamID current_stream_id_ = StreamID(0);
+
+ // All streams, and messages added to those.
+ std::map<StreamID, OutgoingStream> streams_;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_TX_RR_SEND_QUEUE_H_
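The ThresholdWatcher declared above is edge-triggered: the callback only fires when the tracked value crosses from above the low threshold to at or below it, whether because the value decreased or because the threshold was raised past it (see the unit tests further down). The definitions of Decrease and SetLowThreshold live in rr_send_queue.cc; the following standalone sketch only illustrates those semantics and is not the patch's implementation:

```cpp
#include <cstddef>
#include <functional>
#include <utility>

// Standalone sketch of the edge-triggered "buffered amount low" semantics:
// the callback fires only on the transition from "above the low threshold"
// to "at or below it". Assumes Decrease is never called with more bytes than
// the current value.
class ThresholdWatcherSketch {
 public:
  explicit ThresholdWatcherSketch(std::function<void()> on_threshold_reached)
      : on_threshold_reached_(std::move(on_threshold_reached)) {}

  void Increase(size_t bytes) { value_ += bytes; }

  void Decrease(size_t bytes) {
    bool was_above = value_ > low_threshold_;
    value_ -= bytes;
    if (was_above && value_ <= low_threshold_) {
      on_threshold_reached_();
    }
  }

  void SetLowThreshold(size_t low_threshold) {
    // Raising the threshold past the current value also counts as a crossing,
    // matching the TriggersOnBufferedAmountLowOnThresholdChanged test below.
    bool was_above = value_ > low_threshold_;
    low_threshold_ = low_threshold;
    if (was_above && value_ <= low_threshold_) {
      on_threshold_reached_();
    }
  }

 private:
  const std::function<void()> on_threshold_reached_;
  size_t value_ = 0;
  size_t low_threshold_ = 0;
};
```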
diff --git a/net/dcsctp/tx/rr_send_queue_test.cc b/net/dcsctp/tx/rr_send_queue_test.cc
new file mode 100644
index 0000000000..682c16af0b
--- /dev/null
+++ b/net/dcsctp/tx/rr_send_queue_test.cc
@@ -0,0 +1,742 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/tx/rr_send_queue.h"
+
+#include <cstdint>
+#include <type_traits>
+#include <vector>
+
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/public/types.h"
+#include "net/dcsctp/testing/testing_macros.h"
+#include "net/dcsctp/tx/send_queue.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+
+namespace dcsctp {
+namespace {
+using ::testing::SizeIs;
+
+constexpr TimeMs kNow = TimeMs(0);
+constexpr StreamID kStreamID(1);
+constexpr PPID kPPID(53);
+constexpr size_t kMaxQueueSize = 1000;
+constexpr size_t kBufferedAmountLowThreshold = 500;
+constexpr size_t kOneFragmentPacketSize = 100;
+constexpr size_t kTwoFragmentPacketSize = 101;
+
+class RRSendQueueTest : public testing::Test {
+ protected:
+ RRSendQueueTest()
+ : buf_("log: ",
+ kMaxQueueSize,
+ on_buffered_amount_low_.AsStdFunction(),
+ kBufferedAmountLowThreshold,
+ on_total_buffered_amount_low_.AsStdFunction()) {}
+
+ const DcSctpOptions options_;
+ testing::NiceMock<testing::MockFunction<void(StreamID)>>
+ on_buffered_amount_low_;
+ testing::NiceMock<testing::MockFunction<void()>>
+ on_total_buffered_amount_low_;
+ RRSendQueue buf_;
+};
+
+TEST_F(RRSendQueueTest, EmptyBuffer) {
+ EXPECT_TRUE(buf_.IsEmpty());
+ EXPECT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize).has_value());
+ EXPECT_FALSE(buf_.IsFull());
+}
+
+TEST_F(RRSendQueueTest, AddAndGetSingleChunk) {
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, {1, 2, 4, 5, 6}));
+
+ EXPECT_FALSE(buf_.IsEmpty());
+ EXPECT_FALSE(buf_.IsFull());
+ absl::optional<SendQueue::DataToSend> chunk_opt =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_opt.has_value());
+ EXPECT_TRUE(chunk_opt->data.is_beginning);
+ EXPECT_TRUE(chunk_opt->data.is_end);
+}
+
+TEST_F(RRSendQueueTest, CarveOutBeginningMiddleAndEnd) {
+ std::vector<uint8_t> payload(60);
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+
+ absl::optional<SendQueue::DataToSend> chunk_beg =
+ buf_.Produce(kNow, /*max_size=*/20);
+ ASSERT_TRUE(chunk_beg.has_value());
+ EXPECT_TRUE(chunk_beg->data.is_beginning);
+ EXPECT_FALSE(chunk_beg->data.is_end);
+
+ absl::optional<SendQueue::DataToSend> chunk_mid =
+ buf_.Produce(kNow, /*max_size=*/20);
+ ASSERT_TRUE(chunk_mid.has_value());
+ EXPECT_FALSE(chunk_mid->data.is_beginning);
+ EXPECT_FALSE(chunk_mid->data.is_end);
+
+ absl::optional<SendQueue::DataToSend> chunk_end =
+ buf_.Produce(kNow, /*max_size=*/20);
+ ASSERT_TRUE(chunk_end.has_value());
+ EXPECT_FALSE(chunk_end->data.is_beginning);
+ EXPECT_TRUE(chunk_end->data.is_end);
+
+ EXPECT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize).has_value());
+}
+
+TEST_F(RRSendQueueTest, GetChunksFromTwoMessages) {
+ std::vector<uint8_t> payload(60);
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ buf_.Add(kNow, DcSctpMessage(StreamID(3), PPID(54), payload));
+
+ absl::optional<SendQueue::DataToSend> chunk_one =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_EQ(chunk_one->data.stream_id, kStreamID);
+ EXPECT_EQ(chunk_one->data.ppid, kPPID);
+ EXPECT_TRUE(chunk_one->data.is_beginning);
+ EXPECT_TRUE(chunk_one->data.is_end);
+
+ absl::optional<SendQueue::DataToSend> chunk_two =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_two.has_value());
+ EXPECT_EQ(chunk_two->data.stream_id, StreamID(3));
+ EXPECT_EQ(chunk_two->data.ppid, PPID(54));
+ EXPECT_TRUE(chunk_two->data.is_beginning);
+ EXPECT_TRUE(chunk_two->data.is_end);
+}
+
+TEST_F(RRSendQueueTest, BufferBecomesFullAndEmptied) {
+ std::vector<uint8_t> payload(600);
+ EXPECT_FALSE(buf_.IsFull());
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ EXPECT_FALSE(buf_.IsFull());
+ buf_.Add(kNow, DcSctpMessage(StreamID(3), PPID(54), payload));
+ EXPECT_TRUE(buf_.IsFull());
+ // However, it's still possible to add messages. It's a soft limit, and it
+ // might be necessary to forcefully add messages due to e.g. external
+ // fragmentation.
+ buf_.Add(kNow, DcSctpMessage(StreamID(5), PPID(55), payload));
+ EXPECT_TRUE(buf_.IsFull());
+
+ absl::optional<SendQueue::DataToSend> chunk_one = buf_.Produce(kNow, 1000);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_EQ(chunk_one->data.stream_id, kStreamID);
+ EXPECT_EQ(chunk_one->data.ppid, kPPID);
+
+ EXPECT_TRUE(buf_.IsFull());
+
+ absl::optional<SendQueue::DataToSend> chunk_two = buf_.Produce(kNow, 1000);
+ ASSERT_TRUE(chunk_two.has_value());
+ EXPECT_EQ(chunk_two->data.stream_id, StreamID(3));
+ EXPECT_EQ(chunk_two->data.ppid, PPID(54));
+
+ EXPECT_FALSE(buf_.IsFull());
+ EXPECT_FALSE(buf_.IsEmpty());
+
+ absl::optional<SendQueue::DataToSend> chunk_three = buf_.Produce(kNow, 1000);
+ ASSERT_TRUE(chunk_three.has_value());
+ EXPECT_EQ(chunk_three->data.stream_id, StreamID(5));
+ EXPECT_EQ(chunk_three->data.ppid, PPID(55));
+
+ EXPECT_FALSE(buf_.IsFull());
+ EXPECT_TRUE(buf_.IsEmpty());
+}
+
+TEST_F(RRSendQueueTest, WillNotSendTooSmallPacket) {
+ std::vector<uint8_t> payload(RRSendQueue::kMinimumFragmentedPayload + 1);
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+
+ // Wouldn't fit enough payload (wouldn't want to fragment)
+ EXPECT_FALSE(
+ buf_.Produce(kNow,
+ /*max_size=*/RRSendQueue::kMinimumFragmentedPayload - 1)
+ .has_value());
+
+ // Minimum fragment
+ absl::optional<SendQueue::DataToSend> chunk_one =
+ buf_.Produce(kNow,
+ /*max_size=*/RRSendQueue::kMinimumFragmentedPayload);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_EQ(chunk_one->data.stream_id, kStreamID);
+ EXPECT_EQ(chunk_one->data.ppid, kPPID);
+
+ // There is only one byte remaining - it can be fetched as it doesn't require
+ // additional fragmentation.
+ absl::optional<SendQueue::DataToSend> chunk_two =
+ buf_.Produce(kNow, /*max_size=*/1);
+ ASSERT_TRUE(chunk_two.has_value());
+ EXPECT_EQ(chunk_two->data.stream_id, kStreamID);
+ EXPECT_EQ(chunk_two->data.ppid, kPPID);
+
+ EXPECT_TRUE(buf_.IsEmpty());
+}
+
+TEST_F(RRSendQueueTest, DefaultsToOrderedSend) {
+ std::vector<uint8_t> payload(20);
+
+ // Default is ordered
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ absl::optional<SendQueue::DataToSend> chunk_one =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_FALSE(chunk_one->data.is_unordered);
+
+ // Explicitly unordered.
+ SendOptions opts;
+ opts.unordered = IsUnordered(true);
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload), opts);
+ absl::optional<SendQueue::DataToSend> chunk_two =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_two.has_value());
+ EXPECT_TRUE(chunk_two->data.is_unordered);
+}
+
+TEST_F(RRSendQueueTest, ProduceWithLifetimeExpiry) {
+ std::vector<uint8_t> payload(20);
+
+ // Default is no expiry
+ TimeMs now = kNow;
+ buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload));
+ now += DurationMs(1000000);
+ ASSERT_TRUE(buf_.Produce(now, kOneFragmentPacketSize));
+
+ SendOptions expires_2_seconds;
+ expires_2_seconds.lifetime = DurationMs(2000);
+
+ // Add and consume within lifetime
+ buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds);
+ now += DurationMs(2000);
+ ASSERT_TRUE(buf_.Produce(now, kOneFragmentPacketSize));
+
+ // Add and consume just outside lifetime
+ buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds);
+ now += DurationMs(2001);
+ ASSERT_FALSE(buf_.Produce(now, kOneFragmentPacketSize));
+
+ // A long time after expiry
+ buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds);
+ now += DurationMs(1000000);
+ ASSERT_FALSE(buf_.Produce(now, kOneFragmentPacketSize));
+
+ // Expire one message, but produce the second that is not expired.
+ buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds);
+
+ SendOptions expires_4_seconds;
+ expires_4_seconds.lifetime = DurationMs(4000);
+
+ buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_4_seconds);
+ now += DurationMs(2001);
+
+ ASSERT_TRUE(buf_.Produce(now, kOneFragmentPacketSize));
+ ASSERT_FALSE(buf_.Produce(now, kOneFragmentPacketSize));
+}
+
+TEST_F(RRSendQueueTest, DiscardPartialPackets) {
+ std::vector<uint8_t> payload(120);
+
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ buf_.Add(kNow, DcSctpMessage(StreamID(2), PPID(54), payload));
+
+ absl::optional<SendQueue::DataToSend> chunk_one =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_FALSE(chunk_one->data.is_end);
+ EXPECT_EQ(chunk_one->data.stream_id, kStreamID);
+ buf_.Discard(IsUnordered(false), chunk_one->data.stream_id,
+ chunk_one->data.message_id);
+
+ absl::optional<SendQueue::DataToSend> chunk_two =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_two.has_value());
+ EXPECT_FALSE(chunk_two->data.is_end);
+ EXPECT_EQ(chunk_two->data.stream_id, StreamID(2));
+
+ absl::optional<SendQueue::DataToSend> chunk_three =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_three.has_value());
+ EXPECT_TRUE(chunk_three->data.is_end);
+ EXPECT_EQ(chunk_three->data.stream_id, StreamID(2));
+ ASSERT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize));
+
+ // Calling it again shouldn't cause issues.
+ buf_.Discard(IsUnordered(false), chunk_one->data.stream_id,
+ chunk_one->data.message_id);
+ ASSERT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize));
+}
+
+TEST_F(RRSendQueueTest, PrepareResetStreamsDiscardsStream) {
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, {1, 2, 3}));
+ buf_.Add(kNow, DcSctpMessage(StreamID(2), PPID(54), {1, 2, 3, 4, 5}));
+ EXPECT_EQ(buf_.total_buffered_amount(), 8u);
+
+ buf_.PrepareResetStreams(std::vector<StreamID>({StreamID(1)}));
+ EXPECT_EQ(buf_.total_buffered_amount(), 5u);
+ buf_.CommitResetStreams();
+ buf_.PrepareResetStreams(std::vector<StreamID>({StreamID(2)}));
+ EXPECT_EQ(buf_.total_buffered_amount(), 0u);
+}
+
+TEST_F(RRSendQueueTest, PrepareResetStreamsNotPartialPackets) {
+ std::vector<uint8_t> payload(120);
+
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+
+ absl::optional<SendQueue::DataToSend> chunk_one = buf_.Produce(kNow, 50);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_EQ(chunk_one->data.stream_id, kStreamID);
+ EXPECT_EQ(buf_.total_buffered_amount(), 2 * payload.size() - 50);
+
+ StreamID stream_ids[] = {StreamID(1)};
+ buf_.PrepareResetStreams(stream_ids);
+ EXPECT_EQ(buf_.total_buffered_amount(), payload.size() - 50);
+}
+
+TEST_F(RRSendQueueTest, EnqueuedItemsArePausedDuringStreamReset) {
+ std::vector<uint8_t> payload(50);
+
+ buf_.PrepareResetStreams(std::vector<StreamID>({StreamID(1)}));
+ EXPECT_EQ(buf_.total_buffered_amount(), 0u);
+
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ EXPECT_EQ(buf_.total_buffered_amount(), payload.size());
+
+ EXPECT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize).has_value());
+ buf_.CommitResetStreams();
+ EXPECT_EQ(buf_.total_buffered_amount(), payload.size());
+
+ absl::optional<SendQueue::DataToSend> chunk_one = buf_.Produce(kNow, 50);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_EQ(chunk_one->data.stream_id, kStreamID);
+ EXPECT_EQ(buf_.total_buffered_amount(), 0u);
+}
+
+TEST_F(RRSendQueueTest, CommittingResetsSSN) {
+ std::vector<uint8_t> payload(50);
+
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+
+ absl::optional<SendQueue::DataToSend> chunk_one =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_EQ(chunk_one->data.ssn, SSN(0));
+
+ absl::optional<SendQueue::DataToSend> chunk_two =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_two.has_value());
+ EXPECT_EQ(chunk_two->data.ssn, SSN(1));
+
+ StreamID stream_ids[] = {StreamID(1)};
+ buf_.PrepareResetStreams(stream_ids);
+
+ // Buffered
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+
+ EXPECT_TRUE(buf_.CanResetStreams());
+ buf_.CommitResetStreams();
+
+ absl::optional<SendQueue::DataToSend> chunk_three =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_three.has_value());
+ EXPECT_EQ(chunk_three->data.ssn, SSN(0));
+}
+
+TEST_F(RRSendQueueTest, RollBackResumesSSN) {
+ std::vector<uint8_t> payload(50);
+
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+
+ absl::optional<SendQueue::DataToSend> chunk_one =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_one.has_value());
+ EXPECT_EQ(chunk_one->data.ssn, SSN(0));
+
+ absl::optional<SendQueue::DataToSend> chunk_two =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_two.has_value());
+ EXPECT_EQ(chunk_two->data.ssn, SSN(1));
+
+ buf_.PrepareResetStreams(std::vector<StreamID>({StreamID(1)}));
+
+ // Buffered
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+
+ EXPECT_TRUE(buf_.CanResetStreams());
+ buf_.RollbackResetStreams();
+
+ absl::optional<SendQueue::DataToSend> chunk_three =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+ ASSERT_TRUE(chunk_three.has_value());
+ EXPECT_EQ(chunk_three->data.ssn, SSN(2));
+}
+
+TEST_F(RRSendQueueTest, ReturnsFragmentsForOneMessageBeforeMovingToNext) {
+ std::vector<uint8_t> payload(200);
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload));
+ buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(1));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk3.data.stream_id, StreamID(2));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk4.data.stream_id, StreamID(2));
+}
+
+TEST_F(RRSendQueueTest, ReturnsAlsoSmallFragmentsBeforeMovingToNext) {
+ std::vector<uint8_t> payload(kTwoFragmentPacketSize);
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload));
+ buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(kOneFragmentPacketSize));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk2.data.payload,
+ SizeIs(kTwoFragmentPacketSize - kOneFragmentPacketSize));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk3.data.stream_id, StreamID(2));
+ EXPECT_THAT(chunk3.data.payload, SizeIs(kOneFragmentPacketSize));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk4.data.stream_id, StreamID(2));
+ EXPECT_THAT(chunk4.data.payload,
+ SizeIs(kTwoFragmentPacketSize - kOneFragmentPacketSize));
+}
+
+TEST_F(RRSendQueueTest, WillCycleInRoundRobinFashionBetweenStreams) {
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(1)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(2)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector<uint8_t>(3)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector<uint8_t>(4)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector<uint8_t>(5)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector<uint8_t>(6)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(4), kPPID, std::vector<uint8_t>(7)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(4), kPPID, std::vector<uint8_t>(8)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(1));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(2));
+ EXPECT_THAT(chunk2.data.payload, SizeIs(3));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk3.data.stream_id, StreamID(3));
+ EXPECT_THAT(chunk3.data.payload, SizeIs(5));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk4.data.stream_id, StreamID(4));
+ EXPECT_THAT(chunk4.data.payload, SizeIs(7));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk5,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk5.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk5.data.payload, SizeIs(2));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk6,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk6.data.stream_id, StreamID(2));
+ EXPECT_THAT(chunk6.data.payload, SizeIs(4));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk7,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk7.data.stream_id, StreamID(3));
+ EXPECT_THAT(chunk7.data.payload, SizeIs(6));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk8,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk8.data.stream_id, StreamID(4));
+ EXPECT_THAT(chunk8.data.payload, SizeIs(8));
+}
+
+TEST_F(RRSendQueueTest, DoesntTriggerOnBufferedAmountLowWhenSetToZero) {
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 0u);
+}
+
+TEST_F(RRSendQueueTest, TriggersOnBufferedAmountAtZeroLowWhenSent) {
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(1)));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 1u);
+
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(1));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 0u);
+}
+
+TEST_F(RRSendQueueTest, WillRetriggerOnBufferedAmountLowIfAddingMore) {
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(1)));
+
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(1));
+
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(1)));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 1u);
+
+ // Should now trigger again, as buffered_amount went above the threshold.
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk2.data.payload, SizeIs(1));
+}
+
+TEST_F(RRSendQueueTest, OnlyTriggersWhenTransitioningFromAboveToBelowOrEqual) {
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 1000);
+
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(10)));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 10u);
+
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(10));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 0u);
+
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(20)));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 20u);
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk2.data.payload, SizeIs(20));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 0u);
+}
+
+TEST_F(RRSendQueueTest, WillTriggerOnBufferedAmountLowSetAboveZero) {
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 700);
+
+ std::vector<uint8_t> payload(1000);
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(kOneFragmentPacketSize));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 900u);
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk2.data.payload, SizeIs(kOneFragmentPacketSize));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 800u);
+
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk3.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk3.data.payload, SizeIs(kOneFragmentPacketSize));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 700u);
+
+ // Doesn't trigger when reducing even further.
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk4.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk4.data.payload, SizeIs(kOneFragmentPacketSize));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 600u);
+}
+
+TEST_F(RRSendQueueTest, WillRetriggerOnBufferedAmountLowSetAboveZero) {
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 700);
+
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(1000)));
+
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, 400));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(400));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 600u);
+
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(200)));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 800u);
+
+ // Will trigger again, as it went above the limit.
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, 200));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk2.data.payload, SizeIs(200));
+ EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 600u);
+}
+
+TEST_F(RRSendQueueTest, TriggersOnBufferedAmountLowOnThresholdChanged) {
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+
+ buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector<uint8_t>(100)));
+
+ // Modifying the threshold, still under buffered_amount, should not trigger.
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 50);
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 99);
+
+ // When the threshold reaches buffered_amount, it will trigger.
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 100);
+
+ // But not when it's set low again.
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 50);
+
+ // But it will trigger when it overshoots.
+ EXPECT_CALL(on_buffered_amount_low_, Call(StreamID(1)));
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 150);
+
+ // But not when it's set low again.
+ EXPECT_CALL(on_buffered_amount_low_, Call).Times(0);
+ buf_.SetBufferedAmountLowThreshold(StreamID(1), 0);
+}
+
+TEST_F(RRSendQueueTest,
+ OnTotalBufferedAmountLowDoesNotTriggerOnBufferFillingUp) {
+ EXPECT_CALL(on_total_buffered_amount_low_, Call).Times(0);
+ std::vector<uint8_t> payload(kBufferedAmountLowThreshold - 1);
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ EXPECT_EQ(buf_.total_buffered_amount(), payload.size());
+
+ // Will not trigger if going above but never below.
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID,
+ std::vector<uint8_t>(kOneFragmentPacketSize)));
+}
+
+TEST_F(RRSendQueueTest, TriggersOnTotalBufferedAmountLowWhenCrossing) {
+ EXPECT_CALL(on_total_buffered_amount_low_, Call).Times(0);
+ std::vector<uint8_t> payload(kBufferedAmountLowThreshold);
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload));
+ EXPECT_EQ(buf_.total_buffered_amount(), payload.size());
+
+ // Reaches it.
+ buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, std::vector<uint8_t>(1)));
+
+ // Drain it a bit - will trigger.
+ EXPECT_CALL(on_total_buffered_amount_low_, Call).Times(1);
+ absl::optional<SendQueue::DataToSend> chunk_two =
+ buf_.Produce(kNow, kOneFragmentPacketSize);
+}
+
+TEST_F(RRSendQueueTest, WillStayInAStreamAsLongAsThatMessageIsSending) {
+ buf_.Add(kNow, DcSctpMessage(StreamID(5), kPPID, std::vector<uint8_t>(1)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(5));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(1));
+
+ // Next, it should pick a different stream.
+
+ buf_.Add(kNow,
+ DcSctpMessage(StreamID(1), kPPID,
+ std::vector<uint8_t>(kOneFragmentPacketSize * 2)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk2.data.payload, SizeIs(kOneFragmentPacketSize));
+
+ // It should still stay on stream 1, even if it might be tempted to switch
+ // to stream 6, as that is the stream following 5.
+ buf_.Add(kNow, DcSctpMessage(StreamID(6), kPPID, std::vector<uint8_t>(1)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk3.data.stream_id, StreamID(1));
+ EXPECT_THAT(chunk3.data.payload, SizeIs(kOneFragmentPacketSize));
+
+ // After stream id 1 is complete, it's time to do stream 6.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk4.data.stream_id, StreamID(6));
+ EXPECT_THAT(chunk4.data.payload, SizeIs(1));
+
+ EXPECT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize).has_value());
+}
+
+TEST_F(RRSendQueueTest, WillStayInStreamWhenOnlySmallFragmentRemaining) {
+ buf_.Add(kNow,
+ DcSctpMessage(StreamID(5), kPPID,
+ std::vector<uint8_t>(kOneFragmentPacketSize * 2)));
+ buf_.Add(kNow, DcSctpMessage(StreamID(6), kPPID, std::vector<uint8_t>(1)));
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk1.data.stream_id, StreamID(5));
+ EXPECT_THAT(chunk1.data.payload, SizeIs(kOneFragmentPacketSize));
+
+ // Now assume that there will be a lot of previous chunks that need to be
+ // retransmitted, which fills up the next packet and there is little space
+ // left in the packet for new chunks. What it should NOT do right now is to
+ // try to send a message from StreamID 6. And it should not try to send a very
+ // small fragment from StreamID 5 either. So just skip this one.
+ EXPECT_FALSE(buf_.Produce(kNow, 8).has_value());
+
+ // When the next produce request comes with a large buffer to fill, continue
+ // sending from StreamID 5.
+
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk2.data.stream_id, StreamID(5));
+ EXPECT_THAT(chunk2.data.payload, SizeIs(kOneFragmentPacketSize));
+
+ // Lastly, produce a message on StreamID 6.
+ ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3,
+ buf_.Produce(kNow, kOneFragmentPacketSize));
+ EXPECT_EQ(chunk3.data.stream_id, StreamID(6));
+ EXPECT_THAT(chunk3.data.payload, SizeIs(1));
+
+ EXPECT_FALSE(buf_.Produce(kNow, 8).has_value());
+}
+} // namespace
+} // namespace dcsctp
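A small worked example of the expiry arithmetic exercised by ProduceWithLifetimeExpiry above, matching the `expires_at = now + lifetime + DurationMs(1)` computation in RRSendQueue::Add; the helper name is illustrative only:

```cpp
#include "net/dcsctp/public/types.h"

// A message with lifetime L added at time `added_at` gets
// `expires_at = added_at + L + 1 ms`, so it can still be produced exactly
// L ms after being added, but is treated as expired L + 1 ms after.
dcsctp::TimeMs ExpiryFor(dcsctp::TimeMs added_at, dcsctp::DurationMs lifetime) {
  return added_at + lifetime + dcsctp::DurationMs(1);
}

// ExpiryFor(TimeMs(0), DurationMs(2000)) == TimeMs(2001):
//   Produce(TimeMs(2000), ...) still returns the message,
//   Produce(TimeMs(2001), ...) discards it as expired.
```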
diff --git a/net/dcsctp/tx/send_queue.h b/net/dcsctp/tx/send_queue.h
new file mode 100644
index 0000000000..877dbdda59
--- /dev/null
+++ b/net/dcsctp/tx/send_queue.h
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef NET_DCSCTP_TX_SEND_QUEUE_H_
+#define NET_DCSCTP_TX_SEND_QUEUE_H_
+
+#include <cstdint>
+#include <limits>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "net/dcsctp/common/internal_types.h"
+#include "net/dcsctp/packet/data.h"
+#include "net/dcsctp/public/types.h"
+
+namespace dcsctp {
+
+class SendQueue {
+ public:
+ // Container for a data chunk that is produced by the SendQueue
+ struct DataToSend {
+ explicit DataToSend(Data data) : data(std::move(data)) {}
+ // The data to send, including all parameters.
+ Data data;
+
+ // Partial reliability - RFC3758
+ absl::optional<int> max_retransmissions;
+ absl::optional<TimeMs> expires_at;
+ };
+
+ virtual ~SendQueue() = default;
+
+ // TODO(boivie): This interface is obviously missing an "Add" function, but
+ // that is postponed a bit until the story around how to model message
+ // prioritization, which is important for any advanced stream scheduler, is
+ // further clarified.
+
+ // Produce a chunk to be sent.
+ //
+ // `max_size` refers to how many payload bytes may be produced, not
+ // including any headers.
+ virtual absl::optional<DataToSend> Produce(TimeMs now, size_t max_size) = 0;
+
+ // Discards a partially sent message identified by the parameters `unordered`,
+ // `stream_id` and `message_id`. The `message_id` comes from the information
+ // returned when `Produce` was called. A partially sent message means that at
+ // least one of its fragments was returned by `Produce` prior to calling this
+ // method.
+ //
+ // This is used when a message has been found to be expired (by the partial
+ // reliability extension), and the retransmission queue will signal the
+ // receiver that any partially received message fragments should be skipped.
+ // This means that any remaining fragments in the Send Queue must be removed
+ // as well so that they are not sent.
+ //
+ // This function returns true if this message had unsent fragments still in
+ // the queue that were discarded, and false if there were no such fragments.
+ virtual bool Discard(IsUnordered unordered,
+ StreamID stream_id,
+ MID message_id) = 0;
+
+ // Prepares the streams to be reset. This is used to close a WebRTC data
+ // channel and will be signaled to the other side.
+ //
+ // Concretely, it discards all whole (not partly sent) messages in the given
+ // streams and pauses those streams so that messages added later aren't
+ // produced until the streams are resumed by `CommitResetStreams` or
+ // `RollbackResetStreams`.
+ //
+ // TODO(boivie): Investigate if it really should discard any message at all.
+ // RFC8831 only mentions that "[RFC6525] also guarantees that all the messages
+ // are delivered (or abandoned) before the stream is reset."
+ //
+ // This method can be called multiple times to add more streams to be reset;
+ // those streams remain paused while they are resetting. This is the first
+ // part of the two-phase commit protocol to reset streams, which the caller
+ // completes by calling either `CommitResetStreams` or `RollbackResetStreams`.
+ virtual void PrepareResetStreams(rtc::ArrayView<const StreamID> streams) = 0;
+
+ // Returns true if all non-discarded messages during `PrepareResetStreams`
+ // (which are those that were partially sent before that method was called)
+ // have been sent.
+ virtual bool CanResetStreams() const = 0;
+
+ // Called to commit to reset the streams provided to `PrepareResetStreams`.
+ // It will reset the stream sequence numbers (SSNs) and message identifiers
+ // (MIDs) and resume the paused streams.
+ virtual void CommitResetStreams() = 0;
+
+ // Called to abort the resetting of streams provided to `PrepareResetStreams`.
+ // Will resume the paused streams without resetting the stream sequence
+ // numbers (SSNs) or message identifiers (MIDs). Note that the non-partial
+ // messages that were discarded when calling `PrepareResetStreams` will not be
+ // recovered, to better match the intention from the sender to "close the
+ // channel".
+ virtual void RollbackResetStreams() = 0;
+
+ // Resets all message identifier counters (MID, SSN) and makes all partially
+ // sent messages ready to be re-sent in full. This is used when the peer has
+ // been detected to have restarted and is used to try to minimize the amount
+ // of data loss. However, data loss cannot be completely guaranteed when a
+ // peer restarts.
+ virtual void Reset() = 0;
+
+ // Returns the amount of buffered data. This doesn't include packets that are
+ // e.g. in flight.
+ virtual size_t buffered_amount(StreamID stream_id) const = 0;
+
+ // Returns the total amount of buffer data, for all streams.
+ virtual size_t total_buffered_amount() const = 0;
+
+ // Returns the limit for the `OnBufferedAmountLow` event. Default value is 0.
+ virtual size_t buffered_amount_low_threshold(StreamID stream_id) const = 0;
+
+ // Sets a limit for the `OnBufferedAmountLow` event.
+ virtual void SetBufferedAmountLowThreshold(StreamID stream_id,
+ size_t bytes) = 0;
+};
+} // namespace dcsctp
+
+#endif // NET_DCSCTP_TX_SEND_QUEUE_H_
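To make the two-phase reset contract above concrete, here is a hedged sketch of a caller driving it. In the real socket this is coordinated with the peer through RE-CONFIG signaling, which is elided here; the function name and the `peer_acknowledged_reset` flag are illustrative assumptions, not part of this interface:

```cpp
#include "api/array_view.h"
#include "net/dcsctp/public/types.h"
#include "net/dcsctp/tx/send_queue.h"

// Illustrative driver for the two-phase stream-reset protocol described by
// the SendQueue interface.
void ResetStreamsWhenQuiescent(dcsctp::SendQueue& queue,
                               rtc::ArrayView<const dcsctp::StreamID> streams,
                               bool peer_acknowledged_reset) {
  // Phase 1: discard whole unsent messages on these streams and pause them.
  queue.PrepareResetStreams(streams);

  // The reset may only be completed once every partially sent message on the
  // paused streams has been fully sent.
  if (!queue.CanResetStreams()) {
    return;  // Try again later, after more data has been produced.
  }

  // Phase 2: either commit (SSNs and MIDs restart from zero and the streams
  // are resumed) or roll back (streams are resumed with counters untouched).
  if (peer_acknowledged_reset) {
    queue.CommitResetStreams();
  } else {
    queue.RollbackResetStreams();
  }
}
```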
diff --git a/p2p/base/default_ice_transport_factory.cc b/p2p/base/default_ice_transport_factory.cc
index 7d2fdb8fb4..0a7175cfd8 100644
--- a/p2p/base/default_ice_transport_factory.cc
+++ b/p2p/base/default_ice_transport_factory.cc
@@ -44,7 +44,7 @@ DefaultIceTransportFactory::CreateIceTransport(
int component,
IceTransportInit init) {
BasicIceControllerFactory factory;
- return new rtc::RefCountedObject<DefaultIceTransport>(
+ return rtc::make_ref_counted<DefaultIceTransport>(
cricket::P2PTransportChannel::Create(
transport_name, component, init.port_allocator(),
init.async_dns_resolver_factory(), init.event_log(), &factory));
diff --git a/p2p/base/dtls_transport.cc b/p2p/base/dtls_transport.cc
index 99ee0f1a16..76b94a8d79 100644
--- a/p2p/base/dtls_transport.cc
+++ b/p2p/base/dtls_transport.cc
@@ -15,6 +15,7 @@
#include <utility>
#include "absl/memory/memory.h"
+#include "api/dtls_transport_interface.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h"
#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h"
@@ -148,7 +149,7 @@ DtlsTransport::DtlsTransport(IceTransportInternal* ice_transport,
DtlsTransport::~DtlsTransport() = default;
-DtlsTransportState DtlsTransport::dtls_state() const {
+webrtc::DtlsTransportState DtlsTransport::dtls_state() const {
return dtls_state_;
}
@@ -218,7 +219,7 @@ bool DtlsTransport::GetDtlsRole(rtc::SSLRole* role) const {
}
bool DtlsTransport::GetSslCipherSuite(int* cipher) {
- if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
+ if (dtls_state() != webrtc::DtlsTransportState::kConnected) {
return false;
}
@@ -276,7 +277,7 @@ bool DtlsTransport::SetRemoteFingerprint(const std::string& digest_alg,
remote_fingerprint_value_.size(), &err)) {
RTC_LOG(LS_ERROR) << ToString()
<< ": Couldn't set DTLS certificate digest.";
- set_dtls_state(DTLS_TRANSPORT_FAILED);
+ set_dtls_state(webrtc::DtlsTransportState::kFailed);
// If the error is "verification failed", don't return false, because
// this means the fingerprint was formatted correctly but didn't match
// the certificate from the DTLS handshake. Thus the DTLS state should go
@@ -290,12 +291,12 @@ bool DtlsTransport::SetRemoteFingerprint(const std::string& digest_alg,
// create a new one, resetting our state.
if (dtls_ && fingerprint_changing) {
dtls_.reset(nullptr);
- set_dtls_state(DTLS_TRANSPORT_NEW);
+ set_dtls_state(webrtc::DtlsTransportState::kNew);
set_writable(false);
}
if (!SetupDtls()) {
- set_dtls_state(DTLS_TRANSPORT_FAILED);
+ set_dtls_state(webrtc::DtlsTransportState::kFailed);
return false;
}
@@ -373,7 +374,7 @@ bool DtlsTransport::SetupDtls() {
}
bool DtlsTransport::GetSrtpCryptoSuite(int* cipher) {
- if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
+ if (dtls_state() != webrtc::DtlsTransportState::kConnected) {
return false;
}
@@ -381,7 +382,7 @@ bool DtlsTransport::GetSrtpCryptoSuite(int* cipher) {
}
bool DtlsTransport::GetSslVersionBytes(int* version) const {
- if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
+ if (dtls_state() != webrtc::DtlsTransportState::kConnected) {
return false;
}
@@ -399,14 +400,14 @@ int DtlsTransport::SendPacket(const char* data,
}
switch (dtls_state()) {
- case DTLS_TRANSPORT_NEW:
+ case webrtc::DtlsTransportState::kNew:
// Can't send data until the connection is active.
// TODO(ekr@rtfm.com): assert here if dtls_ is NULL?
return -1;
- case DTLS_TRANSPORT_CONNECTING:
+ case webrtc::DtlsTransportState::kConnecting:
// Can't send data until the connection is active.
return -1;
- case DTLS_TRANSPORT_CONNECTED:
+ case webrtc::DtlsTransportState::kConnected:
if (flags & PF_SRTP_BYPASS) {
RTC_DCHECK(!srtp_ciphers_.empty());
if (!IsRtpPacket(data, size)) {
@@ -419,17 +420,17 @@ int DtlsTransport::SendPacket(const char* data,
? static_cast<int>(size)
: -1;
}
- case DTLS_TRANSPORT_FAILED:
+ case webrtc::DtlsTransportState::kFailed:
// Can't send anything when we're failed.
- RTC_LOG(LS_ERROR)
- << ToString()
- << ": Couldn't send packet due to DTLS_TRANSPORT_FAILED.";
+ RTC_LOG(LS_ERROR) << ToString()
+ << ": Couldn't send packet due to "
+ "webrtc::DtlsTransportState::kFailed.";
return -1;
- case DTLS_TRANSPORT_CLOSED:
+ case webrtc::DtlsTransportState::kClosed:
// Can't send anything when we're closed.
- RTC_LOG(LS_ERROR)
- << ToString()
- << ": Couldn't send packet due to DTLS_TRANSPORT_CLOSED.";
+ RTC_LOG(LS_ERROR) << ToString()
+ << ": Couldn't send packet due to "
+ "webrtc::DtlsTransportState::kClosed.";
return -1;
default:
RTC_NOTREACHED();
@@ -508,27 +509,30 @@ void DtlsTransport::OnWritableState(rtc::PacketTransportInternal* transport) {
}
switch (dtls_state()) {
- case DTLS_TRANSPORT_NEW:
+ case webrtc::DtlsTransportState::kNew:
MaybeStartDtls();
break;
- case DTLS_TRANSPORT_CONNECTED:
+ case webrtc::DtlsTransportState::kConnected:
// Note: SignalWritableState fired by set_writable.
set_writable(ice_transport_->writable());
break;
- case DTLS_TRANSPORT_CONNECTING:
+ case webrtc::DtlsTransportState::kConnecting:
// Do nothing.
break;
- case DTLS_TRANSPORT_FAILED:
+ case webrtc::DtlsTransportState::kFailed:
// Should not happen. Do nothing.
- RTC_LOG(LS_ERROR)
- << ToString()
- << ": OnWritableState() called in state DTLS_TRANSPORT_FAILED.";
+ RTC_LOG(LS_ERROR) << ToString()
+ << ": OnWritableState() called in state "
+ "webrtc::DtlsTransportState::kFailed.";
break;
- case DTLS_TRANSPORT_CLOSED:
+ case webrtc::DtlsTransportState::kClosed:
// Should not happen. Do nothing.
- RTC_LOG(LS_ERROR)
- << ToString()
- << ": OnWritableState() called in state DTLS_TRANSPORT_CLOSED.";
+ RTC_LOG(LS_ERROR) << ToString()
+ << ": OnWritableState() called in state "
+ "webrtc::DtlsTransportState::kClosed.";
+ break;
+ case webrtc::DtlsTransportState::kNumValues:
+ RTC_NOTREACHED();
break;
}
}
@@ -540,7 +544,7 @@ void DtlsTransport::OnReceivingState(rtc::PacketTransportInternal* transport) {
<< ": ice_transport "
"receiving state changed to "
<< ice_transport_->receiving();
- if (!dtls_active_ || dtls_state() == DTLS_TRANSPORT_CONNECTED) {
+ if (!dtls_active_ || dtls_state() == webrtc::DtlsTransportState::kConnected) {
// Note: SignalReceivingState fired by set_receiving.
set_receiving(ice_transport_->receiving());
}
@@ -562,7 +566,7 @@ void DtlsTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
}
switch (dtls_state()) {
- case DTLS_TRANSPORT_NEW:
+ case webrtc::DtlsTransportState::kNew:
if (dtls_) {
RTC_LOG(LS_INFO) << ToString()
<< ": Packet received before DTLS started.";
@@ -591,8 +595,8 @@ void DtlsTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
}
break;
- case DTLS_TRANSPORT_CONNECTING:
- case DTLS_TRANSPORT_CONNECTED:
+ case webrtc::DtlsTransportState::kConnecting:
+ case webrtc::DtlsTransportState::kConnected:
// We should only get DTLS or SRTP packets; STUN's already been demuxed.
// Is this potentially a DTLS packet?
if (IsDtlsPacket(data, size)) {
@@ -602,7 +606,7 @@ void DtlsTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
}
} else {
// Not a DTLS packet; our handshake should be complete by now.
- if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
+ if (dtls_state() != webrtc::DtlsTransportState::kConnected) {
RTC_LOG(LS_ERROR) << ToString()
<< ": Received non-DTLS packet before DTLS "
"complete.";
@@ -623,8 +627,9 @@ void DtlsTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
SignalReadPacket(this, data, size, packet_time_us, PF_SRTP_BYPASS);
}
break;
- case DTLS_TRANSPORT_FAILED:
- case DTLS_TRANSPORT_CLOSED:
+ case webrtc::DtlsTransportState::kFailed:
+ case webrtc::DtlsTransportState::kClosed:
+ case webrtc::DtlsTransportState::kNumValues:
// This shouldn't be happening. Drop the packet.
break;
}
@@ -652,7 +657,7 @@ void DtlsTransport::OnDtlsEvent(rtc::StreamInterface* dtls, int sig, int err) {
if (dtls_->GetState() == rtc::SS_OPEN) {
// The check for OPEN shouldn't be necessary but let's make
// sure we don't accidentally frob the state if it's closed.
- set_dtls_state(DTLS_TRANSPORT_CONNECTED);
+ set_dtls_state(webrtc::DtlsTransportState::kConnected);
set_writable(true);
}
}
@@ -671,7 +676,7 @@ void DtlsTransport::OnDtlsEvent(rtc::StreamInterface* dtls, int sig, int err) {
// Remote peer shut down the association with no error.
RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed by remote";
set_writable(false);
- set_dtls_state(DTLS_TRANSPORT_CLOSED);
+ set_dtls_state(webrtc::DtlsTransportState::kClosed);
SignalClosed(this);
} else if (ret == rtc::SR_ERROR) {
// Remote peer shut down the association with an error.
@@ -680,7 +685,7 @@ void DtlsTransport::OnDtlsEvent(rtc::StreamInterface* dtls, int sig, int err) {
<< ": Closed by remote with DTLS transport error, code="
<< read_error;
set_writable(false);
- set_dtls_state(DTLS_TRANSPORT_FAILED);
+ set_dtls_state(webrtc::DtlsTransportState::kFailed);
SignalClosed(this);
}
} while (ret == rtc::SR_SUCCESS);
@@ -690,10 +695,10 @@ void DtlsTransport::OnDtlsEvent(rtc::StreamInterface* dtls, int sig, int err) {
set_writable(false);
if (!err) {
RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed";
- set_dtls_state(DTLS_TRANSPORT_CLOSED);
+ set_dtls_state(webrtc::DtlsTransportState::kClosed);
} else {
RTC_LOG(LS_INFO) << ToString() << ": DTLS transport error, code=" << err;
- set_dtls_state(DTLS_TRANSPORT_FAILED);
+ set_dtls_state(webrtc::DtlsTransportState::kFailed);
}
}
}
@@ -717,11 +722,11 @@ void DtlsTransport::MaybeStartDtls() {
// configuration and therefore are our fault.
RTC_NOTREACHED() << "StartSSL failed.";
RTC_LOG(LS_ERROR) << ToString() << ": Couldn't start DTLS handshake";
- set_dtls_state(DTLS_TRANSPORT_FAILED);
+ set_dtls_state(webrtc::DtlsTransportState::kFailed);
return;
}
RTC_LOG(LS_INFO) << ToString() << ": DtlsTransport: Started DTLS handshake";
- set_dtls_state(DTLS_TRANSPORT_CONNECTING);
+ set_dtls_state(webrtc::DtlsTransportState::kConnecting);
// Now that the handshake has started, we can process a cached ClientHello
// (if one exists).
if (cached_client_hello_.size()) {
@@ -789,16 +794,17 @@ void DtlsTransport::set_writable(bool writable) {
SignalWritableState(this);
}
-void DtlsTransport::set_dtls_state(DtlsTransportState state) {
+void DtlsTransport::set_dtls_state(webrtc::DtlsTransportState state) {
if (dtls_state_ == state) {
return;
}
if (event_log_) {
- event_log_->Log(std::make_unique<webrtc::RtcEventDtlsTransportState>(
- ConvertDtlsTransportState(state)));
+ event_log_->Log(
+ std::make_unique<webrtc::RtcEventDtlsTransportState>(state));
}
- RTC_LOG(LS_VERBOSE) << ToString() << ": set_dtls_state from:" << dtls_state_
- << " to " << state;
+ RTC_LOG(LS_VERBOSE) << ToString() << ": set_dtls_state from:"
+ << static_cast<int>(dtls_state_) << " to "
+ << static_cast<int>(state);
dtls_state_ = state;
SendDtlsState(this, state);
}
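Note that the new log statements above print the scoped enum numerically via static_cast<int>. Purely as an illustration (not part of this patch), a small helper could render the states by name; the function below is hypothetical and uses only the enumerators that appear in this diff:

const char* DtlsTransportStateName(webrtc::DtlsTransportState state) {
  // Hypothetical helper, not in the patch: maps the scoped enum to a
  // printable name for log output.
  switch (state) {
    case webrtc::DtlsTransportState::kNew:
      return "new";
    case webrtc::DtlsTransportState::kConnecting:
      return "connecting";
    case webrtc::DtlsTransportState::kConnected:
      return "connected";
    case webrtc::DtlsTransportState::kClosed:
      return "closed";
    case webrtc::DtlsTransportState::kFailed:
      return "failed";
    case webrtc::DtlsTransportState::kNumValues:
      return "num-values";  // Sentinel value; not expected at runtime.
  }
  return "unknown";
}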
diff --git a/p2p/base/dtls_transport.h b/p2p/base/dtls_transport.h
index f37e468571..0296a742c0 100644
--- a/p2p/base/dtls_transport.h
+++ b/p2p/base/dtls_transport.h
@@ -16,6 +16,7 @@
#include <vector>
#include "api/crypto/crypto_options.h"
+#include "api/dtls_transport_interface.h"
#include "api/sequence_checker.h"
#include "p2p/base/dtls_transport_internal.h"
#include "p2p/base/ice_transport_internal.h"
@@ -109,7 +110,7 @@ class DtlsTransport : public DtlsTransportInternal {
~DtlsTransport() override;
- DtlsTransportState dtls_state() const override;
+ webrtc::DtlsTransportState dtls_state() const override;
const std::string& transport_name() const override;
int component() const override;
@@ -219,12 +220,12 @@ class DtlsTransport : public DtlsTransportInternal {
void set_receiving(bool receiving);
void set_writable(bool writable);
// Sets the DTLS state, signaling if necessary.
- void set_dtls_state(DtlsTransportState state);
+ void set_dtls_state(webrtc::DtlsTransportState state);
webrtc::SequenceChecker thread_checker_;
const int component_;
- DtlsTransportState dtls_state_ = DTLS_TRANSPORT_NEW;
+ webrtc::DtlsTransportState dtls_state_ = webrtc::DtlsTransportState::kNew;
// Underlying ice_transport, not owned by this class.
IceTransportInternal* const ice_transport_;
std::unique_ptr<rtc::SSLStreamAdapter> dtls_; // The DTLS stream
diff --git a/p2p/base/dtls_transport_internal.cc b/p2p/base/dtls_transport_internal.cc
index dd23b1baa7..6997dbc702 100644
--- a/p2p/base/dtls_transport_internal.cc
+++ b/p2p/base/dtls_transport_internal.cc
@@ -16,22 +16,4 @@ DtlsTransportInternal::DtlsTransportInternal() = default;
DtlsTransportInternal::~DtlsTransportInternal() = default;
-webrtc::DtlsTransportState ConvertDtlsTransportState(
- cricket::DtlsTransportState cricket_state) {
- switch (cricket_state) {
- case DtlsTransportState::DTLS_TRANSPORT_NEW:
- return webrtc::DtlsTransportState::kNew;
- case DtlsTransportState::DTLS_TRANSPORT_CONNECTING:
- return webrtc::DtlsTransportState::kConnecting;
- case DtlsTransportState::DTLS_TRANSPORT_CONNECTED:
- return webrtc::DtlsTransportState::kConnected;
- case DtlsTransportState::DTLS_TRANSPORT_CLOSED:
- return webrtc::DtlsTransportState::kClosed;
- case DtlsTransportState::DTLS_TRANSPORT_FAILED:
- return webrtc::DtlsTransportState::kFailed;
- }
- RTC_NOTREACHED();
- return webrtc::DtlsTransportState::kNew;
-}
-
} // namespace cricket
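With cricket and webrtc now sharing webrtc::DtlsTransportState, the conversion helper deleted above has no callers left; the event-log call site earlier in this patch simply passes the state through. A hedged sketch of how a hypothetical downstream helper would migrate (the include paths are assumptions based on the tree layout at this revision):

#include <memory>

#include "api/rtc_event_log/rtc_event_log.h"
#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h"
#include "p2p/base/dtls_transport_internal.h"

// Hypothetical downstream helper: previously it would have wrapped
// ConvertDtlsTransportState(transport->dtls_state()); now the state can be
// logged directly.
void LogDtlsState(webrtc::RtcEventLog* event_log,
                  cricket::DtlsTransportInternal* transport) {
  event_log->Log(std::make_unique<webrtc::RtcEventDtlsTransportState>(
      transport->dtls_state()));
}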
diff --git a/p2p/base/dtls_transport_internal.h b/p2p/base/dtls_transport_internal.h
index bd4579028c..0b26a7fd7a 100644
--- a/p2p/base/dtls_transport_internal.h
+++ b/p2p/base/dtls_transport_internal.h
@@ -32,22 +32,6 @@
namespace cricket {
-enum DtlsTransportState {
- // Haven't started negotiating.
- DTLS_TRANSPORT_NEW = 0,
- // Have started negotiating.
- DTLS_TRANSPORT_CONNECTING,
- // Negotiated, and has a secure connection.
- DTLS_TRANSPORT_CONNECTED,
- // Transport is closed.
- DTLS_TRANSPORT_CLOSED,
- // Failed due to some error in the handshake process.
- DTLS_TRANSPORT_FAILED,
-};
-
-webrtc::DtlsTransportState ConvertDtlsTransportState(
- cricket::DtlsTransportState cricket_state);
-
enum PacketFlags {
PF_NORMAL = 0x00, // A normal packet.
PF_SRTP_BYPASS = 0x01, // An encrypted SRTP packet; bypass any additional
@@ -64,7 +48,7 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal {
public:
~DtlsTransportInternal() override;
- virtual DtlsTransportState dtls_state() const = 0;
+ virtual webrtc::DtlsTransportState dtls_state() const = 0;
virtual int component() const = 0;
@@ -115,24 +99,25 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal {
// Expose the underneath IceTransport.
virtual IceTransportInternal* ice_transport() = 0;
- // F: void(DtlsTransportInternal*, const DtlsTransportState)
+ // F: void(DtlsTransportInternal*, const webrtc::DtlsTransportState)
template <typename F>
- void SubscribeDtlsState(F&& callback) {
- dtls_state_callback_list_.AddReceiver(std::forward<F>(callback));
+ void SubscribeDtlsTransportState(F&& callback) {
+ dtls_transport_state_callback_list_.AddReceiver(std::forward<F>(callback));
}
template <typename F>
- void SubscribeDtlsState(const void* id, F&& callback) {
- dtls_state_callback_list_.AddReceiver(id, std::forward<F>(callback));
+ void SubscribeDtlsTransportState(const void* id, F&& callback) {
+ dtls_transport_state_callback_list_.AddReceiver(id,
+ std::forward<F>(callback));
}
// Unsubscribe the subscription with given id.
- void UnsubscribeDtlsState(const void* id) {
- dtls_state_callback_list_.RemoveReceivers(id);
+ void UnsubscribeDtlsTransportState(const void* id) {
+ dtls_transport_state_callback_list_.RemoveReceivers(id);
}
void SendDtlsState(DtlsTransportInternal* transport,
- DtlsTransportState state) {
- dtls_state_callback_list_.Send(transport, state);
+ webrtc::DtlsTransportState state) {
+ dtls_transport_state_callback_list_.Send(transport, state);
}
// Emitted whenever the Dtls handshake failed on some transport channel.
@@ -153,8 +138,8 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal {
RTC_DISALLOW_COPY_AND_ASSIGN(DtlsTransportInternal);
webrtc::CallbackList<const rtc::SSLHandshakeError>
dtls_handshake_error_callback_list_;
- webrtc::CallbackList<DtlsTransportInternal*, const DtlsTransportState>
- dtls_state_callback_list_;
+ webrtc::CallbackList<DtlsTransportInternal*, const webrtc::DtlsTransportState>
+ dtls_transport_state_callback_list_;
};
} // namespace cricket
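As a usage illustration for the renamed subscription API above (not taken from the patch; the observer class and its members are hypothetical), a consumer registers with an id and a lambda matching the CallbackList signature, and removes itself on destruction:

#include "api/dtls_transport_interface.h"
#include "p2p/base/dtls_transport_internal.h"

// Hypothetical observer of DTLS transport state changes.
class DtlsStateObserver {
 public:
  explicit DtlsStateObserver(cricket::DtlsTransportInternal* dtls)
      : dtls_(dtls) {
    // The id-based overload allows targeted removal later.
    dtls_->SubscribeDtlsTransportState(
        this, [this](cricket::DtlsTransportInternal* transport,
                     webrtc::DtlsTransportState state) {
          OnDtlsState(transport, state);
        });
  }
  ~DtlsStateObserver() { dtls_->UnsubscribeDtlsTransportState(this); }

 private:
  void OnDtlsState(cricket::DtlsTransportInternal* transport,
                   webrtc::DtlsTransportState state) {
    // React to the new state, e.g. tear down on kFailed or kClosed.
  }

  cricket::DtlsTransportInternal* const dtls_;
};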
diff --git a/p2p/base/dtls_transport_unittest.cc b/p2p/base/dtls_transport_unittest.cc
index a2ad213435..f01566d263 100644
--- a/p2p/base/dtls_transport_unittest.cc
+++ b/p2p/base/dtls_transport_unittest.cc
@@ -15,6 +15,7 @@
#include <set>
#include <utility>
+#include "api/dtls_transport_interface.h"
#include "p2p/base/fake_ice_transport.h"
#include "p2p/base/packet_transport_internal.h"
#include "rtc_base/checks.h"
@@ -668,18 +669,19 @@ class DtlsEventOrderingTest
// Sanity check that the handshake hasn't already finished.
EXPECT_FALSE(client1_.dtls_transport()->IsDtlsConnected() ||
client1_.dtls_transport()->dtls_state() ==
- DTLS_TRANSPORT_FAILED);
+ webrtc::DtlsTransportState::kFailed);
EXPECT_TRUE_SIMULATED_WAIT(
client1_.dtls_transport()->IsDtlsConnected() ||
client1_.dtls_transport()->dtls_state() ==
- DTLS_TRANSPORT_FAILED,
+ webrtc::DtlsTransportState::kFailed,
kTimeout, fake_clock_);
break;
}
}
- DtlsTransportState expected_final_state =
- valid_fingerprint ? DTLS_TRANSPORT_CONNECTED : DTLS_TRANSPORT_FAILED;
+ webrtc::DtlsTransportState expected_final_state =
+ valid_fingerprint ? webrtc::DtlsTransportState::kConnected
+ : webrtc::DtlsTransportState::kFailed;
EXPECT_EQ_SIMULATED_WAIT(expected_final_state,
client1_.dtls_transport()->dtls_state(), kTimeout,
fake_clock_);
diff --git a/p2p/base/fake_dtls_transport.h b/p2p/base/fake_dtls_transport.h
index 0628c4ce00..e02755c68f 100644
--- a/p2p/base/fake_dtls_transport.h
+++ b/p2p/base/fake_dtls_transport.h
@@ -17,6 +17,7 @@
#include <vector>
#include "api/crypto/crypto_options.h"
+#include "api/dtls_transport_interface.h"
#include "p2p/base/dtls_transport_internal.h"
#include "p2p/base/fake_ice_transport.h"
#include "rtc_base/fake_ssl_identity.h"
@@ -89,7 +90,7 @@ class FakeDtlsTransport : public DtlsTransportInternal {
ice_transport_->SetReceiving(receiving);
set_receiving(receiving);
}
- void SetDtlsState(DtlsTransportState state) {
+ void SetDtlsState(webrtc::DtlsTransportState state) {
dtls_state_ = state;
SendDtlsState(this, dtls_state_);
}
@@ -121,7 +122,7 @@ class FakeDtlsTransport : public DtlsTransportInternal {
if (!dtls_role_) {
dtls_role_ = std::move(rtc::SSL_CLIENT);
}
- SetDtlsState(DTLS_TRANSPORT_CONNECTED);
+ SetDtlsState(webrtc::DtlsTransportState::kConnected);
ice_transport_->SetDestination(
static_cast<FakeIceTransport*>(dest->ice_transport()), asymmetric);
} else {
@@ -133,7 +134,7 @@ class FakeDtlsTransport : public DtlsTransportInternal {
}
// Fake DtlsTransportInternal implementation.
- DtlsTransportState dtls_state() const override { return dtls_state_; }
+ webrtc::DtlsTransportState dtls_state() const override { return dtls_state_; }
const std::string& transport_name() const override { return transport_name_; }
int component() const override { return component_; }
const rtc::SSLFingerprint& dtls_fingerprint() const {
@@ -295,7 +296,7 @@ class FakeDtlsTransport : public DtlsTransportInternal {
int crypto_suite_ = rtc::SRTP_AES128_CM_SHA1_80;
absl::optional<int> ssl_cipher_suite_;
- DtlsTransportState dtls_state_ = DTLS_TRANSPORT_NEW;
+ webrtc::DtlsTransportState dtls_state_ = webrtc::DtlsTransportState::kNew;
bool receiving_ = false;
bool writable_ = false;
diff --git a/p2p/base/fake_port_allocator.h b/p2p/base/fake_port_allocator.h
index 9e0e333041..efe9a53a16 100644
--- a/p2p/base/fake_port_allocator.h
+++ b/p2p/base/fake_port_allocator.h
@@ -238,10 +238,19 @@ class FakePortAllocator : public cricket::PortAllocator {
bool initialized() const { return initialized_; }
+ // For testing: Manipulate MdnsObfuscationEnabled()
+ bool MdnsObfuscationEnabled() const override {
+ return mdns_obfuscation_enabled_;
+ }
+ void SetMdnsObfuscationEnabledForTesting(bool enabled) {
+ mdns_obfuscation_enabled_ = enabled;
+ }
+
private:
rtc::Thread* network_thread_;
rtc::PacketSocketFactory* factory_;
std::unique_ptr<rtc::BasicPacketSocketFactory> owned_factory_;
+ bool mdns_obfuscation_enabled_ = false;
};
} // namespace cricket
diff --git a/p2p/base/p2p_transport_channel.cc b/p2p/base/p2p_transport_channel.cc
index eff79ab9be..836721c151 100644
--- a/p2p/base/p2p_transport_channel.cc
+++ b/p2p/base/p2p_transport_channel.cc
@@ -43,6 +43,7 @@
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
@@ -193,6 +194,7 @@ P2PTransportChannel::P2PTransportChannel(
true /* presume_writable_when_fully_relayed */,
REGATHER_ON_FAILED_NETWORKS_INTERVAL,
RECEIVING_SWITCHING_DELAY) {
+ TRACE_EVENT0("webrtc", "P2PTransportChannel::P2PTransportChannel");
RTC_DCHECK(allocator_ != nullptr);
weak_ping_interval_ = GetWeakPingIntervalInFieldTrial();
// Validate IceConfig even for mostly built-in constant default values in case
@@ -247,6 +249,7 @@ P2PTransportChannel::P2PTransportChannel(
ice_controller_factory) {}
P2PTransportChannel::~P2PTransportChannel() {
+ TRACE_EVENT0("webrtc", "P2PTransportChannel::~P2PTransportChannel");
RTC_DCHECK_RUN_ON(network_thread_);
std::vector<Connection*> copy(connections().begin(), connections().end());
for (Connection* con : copy) {
diff --git a/p2p/base/port.cc b/p2p/base/port.cc
index 79b83b7f2e..9b2adaf484 100644
--- a/p2p/base/port.cc
+++ b/p2p/base/port.cc
@@ -32,7 +32,9 @@
#include "rtc_base/string_encode.h"
#include "rtc_base/string_utils.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/third_party/base64/base64.h"
+#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
namespace {
@@ -104,16 +106,6 @@ std::string Port::ComputeFoundation(const std::string& type,
return rtc::ToString(rtc::ComputeCrc32(sb.Release()));
}
-CandidateStats::CandidateStats() = default;
-
-CandidateStats::CandidateStats(const CandidateStats&) = default;
-
-CandidateStats::CandidateStats(Candidate candidate) {
- this->candidate = candidate;
-}
-
-CandidateStats::~CandidateStats() = default;
-
Port::Port(rtc::Thread* thread,
const std::string& type,
rtc::PacketSocketFactory* factory,
@@ -137,6 +129,7 @@ Port::Port(rtc::Thread* thread,
tiebreaker_(0),
shared_socket_(true),
weak_factory_(this) {
+ RTC_DCHECK(factory_ != NULL);
Construct();
}
@@ -181,15 +174,13 @@ void Port::Construct() {
network_->SignalTypeChanged.connect(this, &Port::OnNetworkTypeChanged);
network_cost_ = network_->GetCost();
- thread_->PostDelayed(RTC_FROM_HERE, timeout_delay_, this,
- MSG_DESTROY_IF_DEAD);
+ ScheduleDelayedDestructionIfDead();
RTC_LOG(LS_INFO) << ToString() << ": Port created with network cost "
<< network_cost_;
}
Port::~Port() {
RTC_DCHECK_RUN_ON(thread_);
- CancelPendingTasks();
// Delete all of the remaining connections. We copy the list up front
// because each deletion will cause it to be modified.
@@ -830,18 +821,11 @@ void Port::KeepAliveUntilPruned() {
void Port::Prune() {
state_ = State::PRUNED;
- thread_->Post(RTC_FROM_HERE, this, MSG_DESTROY_IF_DEAD);
+ thread_->PostTask(webrtc::ToQueuedTask(safety_, [this] { DestroyIfDead(); }));
}
-// Call to stop any currently pending operations from running.
-void Port::CancelPendingTasks() {
+void Port::DestroyIfDead() {
RTC_DCHECK_RUN_ON(thread_);
- thread_->Clear(this);
-}
-
-void Port::OnMessage(rtc::Message* pmsg) {
- RTC_DCHECK_RUN_ON(thread_);
- RTC_DCHECK(pmsg->message_id == MSG_DESTROY_IF_DEAD);
bool dead =
(state_ == State::INIT || state_ == State::PRUNED) &&
connections_.empty() &&
@@ -865,6 +849,12 @@ void Port::OnNetworkTypeChanged(const rtc::Network* network) {
UpdateNetworkCost();
}
+void Port::ScheduleDelayedDestructionIfDead() {
+ thread_->PostDelayedTask(
+ webrtc::ToQueuedTask(safety_, [this] { DestroyIfDead(); }),
+ timeout_delay_);
+}
+
std::string Port::ToString() const {
rtc::StringBuilder ss;
ss << "Port[" << rtc::ToHex(reinterpret_cast<uintptr_t>(this)) << ":"
@@ -915,8 +905,7 @@ void Port::OnConnectionDestroyed(Connection* conn) {
// not cause the Port to be destroyed.
if (connections_.empty()) {
last_time_all_connections_removed_ = rtc::TimeMillis();
- thread_->PostDelayed(RTC_FROM_HERE, timeout_delay_, this,
- MSG_DESTROY_IF_DEAD);
+ ScheduleDelayedDestructionIfDead();
}
}
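The port.cc changes above drop the rtc::MessageHandler round-trip in favour of lambdas posted with webrtc::ToQueuedTask and guarded by a ScopedTaskSafety flag, so queued work is silently dropped once the Port is gone and CancelPendingTasks() is no longer needed. A minimal standalone sketch of the same pattern (the class below is hypothetical; the helpers are the ones included above):

#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"

// Hypothetical illustration of the pattern Port now uses: delayed tasks are
// bound to |safety_|, so they become no-ops if this object is destroyed
// before they run.
class DelayedSelfCheck {
 public:
  explicit DelayedSelfCheck(rtc::Thread* thread) : thread_(thread) {}

  void Schedule(int delay_ms) {
    thread_->PostDelayedTask(
        webrtc::ToQueuedTask(safety_, [this] { CheckIfDead(); }), delay_ms);
  }

 private:
  void CheckIfDead() {
    // Runs only while this object is alive; otherwise the safety flag drops
    // the task.
  }

  rtc::Thread* const thread_;
  webrtc::ScopedTaskSafety safety_;
};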
diff --git a/p2p/base/port.h b/p2p/base/port.h
index 66da9b841c..9a0073a5da 100644
--- a/p2p/base/port.h
+++ b/p2p/base/port.h
@@ -41,6 +41,7 @@
#include "rtc_base/rate_tracker.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
#include "rtc_base/weak_ptr.h"
@@ -99,14 +100,24 @@ class StunStats {
// Stats that we can return about a candidate.
class CandidateStats {
public:
- CandidateStats();
- explicit CandidateStats(Candidate candidate);
- CandidateStats(const CandidateStats&);
- ~CandidateStats();
+ CandidateStats() = default;
+ CandidateStats(const CandidateStats&) = default;
+ CandidateStats(CandidateStats&&) = default;
+ CandidateStats(Candidate candidate,
+ absl::optional<StunStats> stats = absl::nullopt)
+ : candidate_(std::move(candidate)), stun_stats_(std::move(stats)) {}
+ ~CandidateStats() = default;
- Candidate candidate;
+ CandidateStats& operator=(const CandidateStats& other) = default;
+
+ const Candidate& candidate() const { return candidate_; }
+
+ const absl::optional<StunStats>& stun_stats() const { return stun_stats_; }
+
+ private:
+ Candidate candidate_;
// STUN port stats if this candidate is a STUN candidate.
- absl::optional<StunStats> stun_stats;
+ absl::optional<StunStats> stun_stats_;
};
typedef std::vector<CandidateStats> CandidateStatsList;
@@ -161,7 +172,6 @@ typedef std::set<rtc::SocketAddress> ServerAddresses;
// connections to similar mechanisms of the other client. Subclasses of this
// one add support for specific mechanisms like local UDP ports.
class Port : public PortInterface,
- public rtc::MessageHandler,
public sigslot::has_slots<> {
public:
// INIT: The state when a port is just created.
@@ -210,17 +220,11 @@ class Port : public PortInterface,
// Allows a port to be destroyed if no connection is using it.
void Prune();
- // Call to stop any currently pending operations from running.
- void CancelPendingTasks();
-
// The thread on which this port performs its I/O.
rtc::Thread* thread() { return thread_; }
// The factory used to create the sockets of this port.
rtc::PacketSocketFactory* socket_factory() const { return factory_; }
- void set_socket_factory(rtc::PacketSocketFactory* factory) {
- factory_ = factory;
- }
// For debugging purposes.
const std::string& content_name() const { return content_name_; }
@@ -321,8 +325,6 @@ class Port : public PortInterface,
// Called if the port has no connections and is no longer useful.
void Destroy();
- void OnMessage(rtc::Message* pmsg) override;
-
// Debugging description of this port
std::string ToString() const override;
uint16_t min_port() { return min_port_; }
@@ -373,8 +375,6 @@ class Port : public PortInterface,
const rtc::SocketAddress& base_address);
protected:
- enum { MSG_DESTROY_IF_DEAD = 0, MSG_FIRST_AVAILABLE };
-
virtual void UpdateNetworkCost();
void set_type(const std::string& type) { type_ = type; }
@@ -441,11 +441,12 @@ class Port : public PortInterface,
void Construct();
// Called when one of our connections deletes itself.
void OnConnectionDestroyed(Connection* conn);
-
void OnNetworkTypeChanged(const rtc::Network* network);
+ void ScheduleDelayedDestructionIfDead();
+ void DestroyIfDead();
rtc::Thread* const thread_;
- rtc::PacketSocketFactory* factory_;
+ rtc::PacketSocketFactory* const factory_;
std::string type_;
bool send_retransmit_count_attribute_;
rtc::Network* network_;
@@ -492,6 +493,7 @@ class Port : public PortInterface,
friend class Connection;
webrtc::CallbackList<PortInterface*> port_destroyed_callback_list_;
+ webrtc::ScopedTaskSafety safety_;
};
} // namespace cricket
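CandidateStats above turns from a plain struct into a class with a (candidate, optional StunStats) constructor and read-only accessors. An illustrative helper mirroring the call site in basic_port_allocator.cc further down (the helper function itself is hypothetical):

#include <utility>

#include "absl/types/optional.h"
#include "p2p/base/port.h"

// Hypothetical helper: build a CandidateStats entry and read it back
// through the new accessors instead of public members.
cricket::CandidateStats MakeCandidateStats(
    const cricket::Candidate& candidate,
    absl::optional<cricket::StunStats> stun_stats) {
  cricket::CandidateStats entry(candidate, std::move(stun_stats));
  if (entry.stun_stats().has_value()) {
    // STUN binding statistics are available for this candidate.
  }
  return entry;  // Copy/move construction and copy assignment are defaulted.
}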
diff --git a/p2p/base/port_allocator.cc b/p2p/base/port_allocator.cc
index b13896c4bc..d8ff637e2c 100644
--- a/p2p/base/port_allocator.cc
+++ b/p2p/base/port_allocator.cc
@@ -317,7 +317,8 @@ Candidate PortAllocator::SanitizeCandidate(const Candidate& c) const {
// For a local host candidate, we need to conceal its IP address candidate if
// the mDNS obfuscation is enabled.
bool use_hostname_address =
- c.type() == LOCAL_PORT_TYPE && MdnsObfuscationEnabled();
+ (c.type() == LOCAL_PORT_TYPE || c.type() == PRFLX_PORT_TYPE) &&
+ MdnsObfuscationEnabled();
// If adapter enumeration is disabled or host candidates are disabled,
// clear the raddr of STUN candidates to avoid local address leakage.
bool filter_stun_related_address =
diff --git a/p2p/base/port_allocator_unittest.cc b/p2p/base/port_allocator_unittest.cc
index 70946a3d81..cbac5cccaf 100644
--- a/p2p/base/port_allocator_unittest.cc
+++ b/p2p/base/port_allocator_unittest.cc
@@ -305,3 +305,56 @@ TEST_F(PortAllocatorTest, RestrictIceCredentialsChange) {
credentials[0].pwd));
allocator_->DiscardCandidatePool();
}
+
+// Constants for testing candidates
+const char kIpv4Address[] = "12.34.56.78";
+const char kIpv4AddressWithPort[] = "12.34.56.78:443";
+
+TEST_F(PortAllocatorTest, SanitizeEmptyCandidateDefaultConfig) {
+ cricket::Candidate input;
+ cricket::Candidate output = allocator_->SanitizeCandidate(input);
+ EXPECT_EQ("", output.address().ipaddr().ToString());
+}
+
+TEST_F(PortAllocatorTest, SanitizeIpv4CandidateDefaultConfig) {
+ cricket::Candidate input(1, "udp", rtc::SocketAddress(kIpv4Address, 443), 1,
+ "username", "password", cricket::LOCAL_PORT_TYPE, 1,
+ "foundation", 1, 1);
+ cricket::Candidate output = allocator_->SanitizeCandidate(input);
+ EXPECT_EQ(kIpv4AddressWithPort, output.address().ToString());
+ EXPECT_EQ(kIpv4Address, output.address().ipaddr().ToString());
+}
+
+TEST_F(PortAllocatorTest, SanitizeIpv4CandidateMdnsObfuscationEnabled) {
+ allocator_->SetMdnsObfuscationEnabledForTesting(true);
+ cricket::Candidate input(1, "udp", rtc::SocketAddress(kIpv4Address, 443), 1,
+ "username", "password", cricket::LOCAL_PORT_TYPE, 1,
+ "foundation", 1, 1);
+ cricket::Candidate output = allocator_->SanitizeCandidate(input);
+ EXPECT_NE(kIpv4AddressWithPort, output.address().ToString());
+ EXPECT_EQ("", output.address().ipaddr().ToString());
+}
+
+TEST_F(PortAllocatorTest, SanitizePrflxCandidateMdnsObfuscationEnabled) {
+ allocator_->SetMdnsObfuscationEnabledForTesting(true);
+ // Create the candidate from an IP literal. This populates the hostname.
+ cricket::Candidate input(1, "udp", rtc::SocketAddress(kIpv4Address, 443), 1,
+ "username", "password", cricket::PRFLX_PORT_TYPE, 1,
+ "foundation", 1, 1);
+ cricket::Candidate output = allocator_->SanitizeCandidate(input);
+ EXPECT_NE(kIpv4AddressWithPort, output.address().ToString());
+ EXPECT_EQ("", output.address().ipaddr().ToString());
+}
+
+TEST_F(PortAllocatorTest, SanitizeIpv4NonLiteralMdnsObfuscationEnabled) {
+ // Create the candidate with an empty hostname.
+ allocator_->SetMdnsObfuscationEnabledForTesting(true);
+ rtc::IPAddress ip;
+ EXPECT_TRUE(IPFromString(kIpv4Address, &ip));
+ cricket::Candidate input(1, "udp", rtc::SocketAddress(ip, 443), 1, "username",
+ "password", cricket::LOCAL_PORT_TYPE, 1,
+ "foundation", 1, 1);
+ cricket::Candidate output = allocator_->SanitizeCandidate(input);
+ EXPECT_NE(kIpv4AddressWithPort, output.address().ToString());
+ EXPECT_EQ("", output.address().ipaddr().ToString());
+}
diff --git a/p2p/base/stun_port.cc b/p2p/base/stun_port.cc
index 7a76af5fa1..7b1a2a83a2 100644
--- a/p2p/base/stun_port.cc
+++ b/p2p/base/stun_port.cc
@@ -306,7 +306,9 @@ int UDPPort::SendTo(const void* data,
if (send_error_count_ < kSendErrorLogLimit) {
++send_error_count_;
RTC_LOG(LS_ERROR) << ToString() << ": UDP send of " << size
- << " bytes failed with error " << error_;
+ << " bytes to host " << addr.ToSensitiveString() << " ("
+ << addr.ToResolvedSensitiveString()
+ << ") failed with error " << error_;
}
} else {
send_error_count_ = 0;
@@ -593,7 +595,11 @@ void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) {
options.info_signaled_after_sent.packet_type = rtc::PacketType::kStunMessage;
CopyPortInformationToPacketInfo(&options.info_signaled_after_sent);
if (socket_->SendTo(data, size, sreq->server_addr(), options) < 0) {
- RTC_LOG_ERR_EX(LERROR, socket_->GetError()) << "sendto";
+ RTC_LOG_ERR_EX(LERROR, socket_->GetError())
+ << "UDP send of " << size << " bytes to host "
+ << sreq->server_addr().ToSensitiveString() << " ("
+ << sreq->server_addr().ToResolvedSensitiveString()
+ << ") failed with error " << error_;
}
stats_.stun_binding_requests_sent++;
}
diff --git a/p2p/base/turn_port.cc b/p2p/base/turn_port.cc
index 33925d43e7..a018caafa7 100644
--- a/p2p/base/turn_port.cc
+++ b/p2p/base/turn_port.cc
@@ -990,7 +990,7 @@ void TurnPort::OnMessage(rtc::Message* message) {
Close();
break;
default:
- Port::OnMessage(message);
+ RTC_NOTREACHED();
}
}
diff --git a/p2p/base/turn_port.h b/p2p/base/turn_port.h
index 55dbda5ece..8ed7cefa8e 100644
--- a/p2p/base/turn_port.h
+++ b/p2p/base/turn_port.h
@@ -25,6 +25,7 @@
#include "p2p/client/basic_port_allocator.h"
#include "rtc_base/async_packet_socket.h"
#include "rtc_base/async_resolver_interface.h"
+#include "rtc_base/message_handler.h"
#include "rtc_base/ssl_certificate.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
@@ -41,7 +42,7 @@ extern const char TURN_PORT_TYPE[];
class TurnAllocateRequest;
class TurnEntry;
-class TurnPort : public Port {
+class TurnPort : public Port, public rtc::MessageHandler {
public:
enum PortState {
STATE_CONNECTING, // Initial state, cannot send any packets.
@@ -298,7 +299,7 @@ class TurnPort : public Port {
private:
enum {
- MSG_ALLOCATE_ERROR = MSG_FIRST_AVAILABLE,
+ MSG_ALLOCATE_ERROR,
MSG_ALLOCATE_MISMATCH,
MSG_TRY_ALTERNATE_SERVER,
MSG_REFRESH_ERROR,
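Since Port no longer derives from rtc::MessageHandler, TurnPort above inherits it directly, its message IDs start at zero rather than at a Port-reserved MSG_FIRST_AVAILABLE, and the default branch of its OnMessage becomes RTC_NOTREACHED() instead of forwarding to Port. A hedged standalone sketch of that dispatch shape (the class, IDs, and includes are illustrative assumptions, not code from the patch):

#include "rtc_base/checks.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/thread.h"

// Hypothetical handler showing the post-change shape: the subclass owns its
// message IDs outright and never delegates dispatch to a base class.
class ExampleTurnLikeHandler : public rtc::MessageHandler {
 public:
  enum { MSG_EXAMPLE_ALLOCATE_ERROR = 0, MSG_EXAMPLE_REFRESH_ERROR };

  void OnMessage(rtc::Message* message) override {
    switch (message->message_id) {
      case MSG_EXAMPLE_ALLOCATE_ERROR:
        // Handle the allocation error here.
        break;
      case MSG_EXAMPLE_REFRESH_ERROR:
        // Handle the refresh error here.
        break;
      default:
        RTC_NOTREACHED();  // No base-class forwarding any more.
    }
  }
};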
diff --git a/p2p/client/basic_port_allocator.cc b/p2p/client/basic_port_allocator.cc
index 7e1f970fad..1d38a4c19f 100644
--- a/p2p/client/basic_port_allocator.cc
+++ b/p2p/client/basic_port_allocator.cc
@@ -12,12 +12,14 @@
#include <algorithm>
#include <functional>
+#include <memory>
#include <set>
#include <string>
#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
#include "p2p/base/basic_packet_socket_factory.h"
#include "p2p/base/port.h"
#include "p2p/base/stun_port.h"
@@ -27,6 +29,8 @@
#include "rtc_base/checks.h"
#include "rtc_base/helpers.h"
#include "rtc_base/logging.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
@@ -35,15 +39,6 @@ using rtc::CreateRandomId;
namespace cricket {
namespace {
-enum {
- MSG_CONFIG_START,
- MSG_CONFIG_READY,
- MSG_ALLOCATE,
- MSG_ALLOCATION_PHASE,
- MSG_SEQUENCEOBJECTS_CREATED,
- MSG_CONFIG_STOP,
-};
-
const int PHASE_UDP = 0;
const int PHASE_RELAY = 1;
const int PHASE_TCP = 2;
@@ -268,16 +263,18 @@ BasicPortAllocatorSession::BasicPortAllocatorSession(
network_manager_started_(false),
allocation_sequences_created_(false),
turn_port_prune_policy_(allocator->turn_port_prune_policy()) {
+ TRACE_EVENT0("webrtc",
+ "BasicPortAllocatorSession::BasicPortAllocatorSession");
allocator_->network_manager()->SignalNetworksChanged.connect(
this, &BasicPortAllocatorSession::OnNetworksChanged);
allocator_->network_manager()->StartUpdating();
}
BasicPortAllocatorSession::~BasicPortAllocatorSession() {
+ TRACE_EVENT0("webrtc",
+ "BasicPortAllocatorSession::~BasicPortAllocatorSession");
RTC_DCHECK_RUN_ON(network_thread_);
allocator_->network_manager()->StopUpdating();
- if (network_thread_ != NULL)
- network_thread_->Clear(this);
for (uint32_t i = 0; i < sequences_.size(); ++i) {
// AllocationSequence should clear its map entry for turn ports before
@@ -289,8 +286,7 @@ BasicPortAllocatorSession::~BasicPortAllocatorSession() {
for (it = ports_.begin(); it != ports_.end(); it++)
delete it->port();
- for (uint32_t i = 0; i < configs_.size(); ++i)
- delete configs_[i];
+ configs_.clear();
for (uint32_t i = 0; i < sequences_.size(); ++i)
delete sequences_[i];
@@ -370,7 +366,8 @@ void BasicPortAllocatorSession::StartGettingPorts() {
socket_factory_ = owned_socket_factory_.get();
}
- network_thread_->Post(RTC_FROM_HERE, this, MSG_CONFIG_START);
+ network_thread_->PostTask(webrtc::ToQueuedTask(
+ network_safety_, [this] { GetPortConfigurations(); }));
RTC_LOG(LS_INFO) << "Start getting ports with turn_port_prune_policy "
<< turn_port_prune_policy_;
@@ -386,11 +383,12 @@ void BasicPortAllocatorSession::StopGettingPorts() {
void BasicPortAllocatorSession::ClearGettingPorts() {
RTC_DCHECK_RUN_ON(network_thread_);
- network_thread_->Clear(this, MSG_ALLOCATE);
+ ++allocation_epoch_;
for (uint32_t i = 0; i < sequences_.size(); ++i) {
sequences_[i]->Stop();
}
- network_thread_->Post(RTC_FROM_HERE, this, MSG_CONFIG_STOP);
+ network_thread_->PostTask(
+ webrtc::ToQueuedTask(network_safety_, [this] { OnConfigStop(); }));
state_ = SessionState::CLEARED;
}
@@ -489,8 +487,10 @@ void BasicPortAllocatorSession::GetCandidateStatsFromReadyPorts(
for (auto* port : ports) {
auto candidates = port->Candidates();
for (const auto& candidate : candidates) {
- CandidateStats candidate_stats(allocator_->SanitizeCandidate(candidate));
- port->GetStunStats(&candidate_stats.stun_stats);
+ absl::optional<StunStats> stun_stats;
+ port->GetStunStats(&stun_stats);
+ CandidateStats candidate_stats(allocator_->SanitizeCandidate(candidate),
+ std::move(stun_stats));
candidate_stats_list->push_back(std::move(candidate_stats));
}
}
@@ -574,28 +574,6 @@ bool BasicPortAllocatorSession::CandidatesAllocationDone() const {
ports_, [](const PortData& port) { return port.inprogress(); });
}
-void BasicPortAllocatorSession::OnMessage(rtc::Message* message) {
- switch (message->message_id) {
- case MSG_CONFIG_START:
- GetPortConfigurations();
- break;
- case MSG_CONFIG_READY:
- OnConfigReady(static_cast<PortConfiguration*>(message->pdata));
- break;
- case MSG_ALLOCATE:
- OnAllocate();
- break;
- case MSG_SEQUENCEOBJECTS_CREATED:
- OnAllocationSequenceObjectsCreated();
- break;
- case MSG_CONFIG_STOP:
- OnConfigStop();
- break;
- default:
- RTC_NOTREACHED();
- }
-}
-
void BasicPortAllocatorSession::UpdateIceParametersInternal() {
RTC_DCHECK_RUN_ON(network_thread_);
for (PortData& port : ports_) {
@@ -607,26 +585,35 @@ void BasicPortAllocatorSession::UpdateIceParametersInternal() {
void BasicPortAllocatorSession::GetPortConfigurations() {
RTC_DCHECK_RUN_ON(network_thread_);
- PortConfiguration* config =
- new PortConfiguration(allocator_->stun_servers(), username(), password());
+ auto config = std::make_unique<PortConfiguration>(allocator_->stun_servers(),
+ username(), password());
for (const RelayServerConfig& turn_server : allocator_->turn_servers()) {
config->AddRelay(turn_server);
}
- ConfigReady(config);
+ ConfigReady(std::move(config));
}
void BasicPortAllocatorSession::ConfigReady(PortConfiguration* config) {
RTC_DCHECK_RUN_ON(network_thread_);
- network_thread_->Post(RTC_FROM_HERE, this, MSG_CONFIG_READY, config);
+ ConfigReady(absl::WrapUnique(config));
+}
+
+void BasicPortAllocatorSession::ConfigReady(
+ std::unique_ptr<PortConfiguration> config) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ network_thread_->PostTask(webrtc::ToQueuedTask(
+ network_safety_, [this, config = std::move(config)]() mutable {
+ OnConfigReady(std::move(config));
+ }));
}
// Adds a configuration to the list.
-void BasicPortAllocatorSession::OnConfigReady(PortConfiguration* config) {
+void BasicPortAllocatorSession::OnConfigReady(
+ std::unique_ptr<PortConfiguration> config) {
RTC_DCHECK_RUN_ON(network_thread_);
- if (config) {
- configs_.push_back(config);
- }
+ if (config)
+ configs_.push_back(std::move(config));
AllocatePorts();
}
@@ -664,11 +651,16 @@ void BasicPortAllocatorSession::OnConfigStop() {
void BasicPortAllocatorSession::AllocatePorts() {
RTC_DCHECK_RUN_ON(network_thread_);
- network_thread_->Post(RTC_FROM_HERE, this, MSG_ALLOCATE);
+ network_thread_->PostTask(webrtc::ToQueuedTask(
+ network_safety_, [this, allocation_epoch = allocation_epoch_] {
+ OnAllocate(allocation_epoch);
+ }));
}
-void BasicPortAllocatorSession::OnAllocate() {
+void BasicPortAllocatorSession::OnAllocate(int allocation_epoch) {
RTC_DCHECK_RUN_ON(network_thread_);
+ if (allocation_epoch != allocation_epoch_)
+ return;
if (network_manager_started_ && !IsStopped()) {
bool disable_equivalent_phases = true;
@@ -774,7 +766,8 @@ void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) {
done_signal_needed = true;
} else {
RTC_LOG(LS_INFO) << "Allocate ports on " << networks.size() << " networks";
- PortConfiguration* config = configs_.empty() ? nullptr : configs_.back();
+ PortConfiguration* config =
+ configs_.empty() ? nullptr : configs_.back().get();
for (uint32_t i = 0; i < networks.size(); ++i) {
uint32_t sequence_flags = flags();
if ((sequence_flags & DISABLE_ALL_PHASES) == DISABLE_ALL_PHASES) {
@@ -814,9 +807,11 @@ void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) {
}
AllocationSequence* sequence =
- new AllocationSequence(this, networks[i], config, sequence_flags);
- sequence->SignalPortAllocationComplete.connect(
- this, &BasicPortAllocatorSession::OnPortAllocationComplete);
+ new AllocationSequence(this, networks[i], config, sequence_flags,
+ [this, safety_flag = network_safety_.flag()] {
+ if (safety_flag->alive())
+ OnPortAllocationComplete();
+ });
sequence->Init();
sequence->Start();
sequences_.push_back(sequence);
@@ -824,7 +819,8 @@ void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) {
}
}
if (done_signal_needed) {
- network_thread_->Post(RTC_FROM_HERE, this, MSG_SEQUENCEOBJECTS_CREATED);
+ network_thread_->PostTask(webrtc::ToQueuedTask(
+ network_safety_, [this] { OnAllocationSequenceObjectsCreated(); }));
}
}
@@ -1128,8 +1124,7 @@ bool BasicPortAllocatorSession::CandidatePairable(const Candidate& c,
!host_candidates_disabled);
}
-void BasicPortAllocatorSession::OnPortAllocationComplete(
- AllocationSequence* seq) {
+void BasicPortAllocatorSession::OnPortAllocationComplete() {
RTC_DCHECK_RUN_ON(network_thread_);
// Send candidate allocation complete signal if all ports are done.
MaybeSignalCandidatesAllocationDone();
@@ -1220,10 +1215,12 @@ void BasicPortAllocatorSession::PrunePortsAndRemoveCandidates(
// AllocationSequence
-AllocationSequence::AllocationSequence(BasicPortAllocatorSession* session,
- rtc::Network* network,
- PortConfiguration* config,
- uint32_t flags)
+AllocationSequence::AllocationSequence(
+ BasicPortAllocatorSession* session,
+ rtc::Network* network,
+ PortConfiguration* config,
+ uint32_t flags,
+ std::function<void()> port_allocation_complete_callback)
: session_(session),
network_(network),
config_(config),
@@ -1231,7 +1228,9 @@ AllocationSequence::AllocationSequence(BasicPortAllocatorSession* session,
flags_(flags),
udp_socket_(),
udp_port_(NULL),
- phase_(0) {}
+ phase_(0),
+ port_allocation_complete_callback_(
+ std::move(port_allocation_complete_callback)) {}
void AllocationSequence::Init() {
if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET)) {
@@ -1248,6 +1247,7 @@ void AllocationSequence::Init() {
}
void AllocationSequence::Clear() {
+ TRACE_EVENT0("webrtc", "AllocationSequence::Clear");
udp_port_ = NULL;
relay_ports_.clear();
}
@@ -1259,10 +1259,6 @@ void AllocationSequence::OnNetworkFailed() {
Stop();
}
-AllocationSequence::~AllocationSequence() {
- session_->network_thread()->Clear(this);
-}
-
void AllocationSequence::DisableEquivalentPhases(rtc::Network* network,
PortConfiguration* config,
uint32_t* flags) {
@@ -1337,7 +1333,9 @@ void AllocationSequence::DisableEquivalentPhases(rtc::Network* network,
void AllocationSequence::Start() {
state_ = kRunning;
- session_->network_thread()->Post(RTC_FROM_HERE, this, MSG_ALLOCATION_PHASE);
+
+ session_->network_thread()->PostTask(webrtc::ToQueuedTask(
+ safety_, [this, epoch = epoch_] { Process(epoch); }));
// Take a snapshot of the best IP, so that when DisableEquivalentPhases is
// called next time, we enable all phases if the best IP has since changed.
previous_best_ip_ = network_->GetBestIP();
@@ -1347,16 +1345,18 @@ void AllocationSequence::Stop() {
// If the port is completed, don't set it to stopped.
if (state_ == kRunning) {
state_ = kStopped;
- session_->network_thread()->Clear(this, MSG_ALLOCATION_PHASE);
+ // Cause further Process calls in the previous epoch to be ignored.
+ ++epoch_;
}
}
-void AllocationSequence::OnMessage(rtc::Message* msg) {
+void AllocationSequence::Process(int epoch) {
RTC_DCHECK(rtc::Thread::Current() == session_->network_thread());
- RTC_DCHECK(msg->message_id == MSG_ALLOCATION_PHASE);
-
const char* const PHASE_NAMES[kNumPhases] = {"Udp", "Relay", "Tcp"};
+ if (epoch != epoch_)
+ return;
+
// Perform all of the phases in the current step.
RTC_LOG(LS_INFO) << network_->ToString()
<< ": Allocation Phase=" << PHASE_NAMES[phase_];
@@ -1382,14 +1382,16 @@ void AllocationSequence::OnMessage(rtc::Message* msg) {
if (state() == kRunning) {
++phase_;
- session_->network_thread()->PostDelayed(RTC_FROM_HERE,
- session_->allocator()->step_delay(),
- this, MSG_ALLOCATION_PHASE);
+ session_->network_thread()->PostDelayedTask(
+ webrtc::ToQueuedTask(safety_,
+ [this, epoch = epoch_] { Process(epoch); }),
+ session_->allocator()->step_delay());
} else {
- // If all phases in AllocationSequence are completed, no allocation
- // steps needed further. Canceling pending signal.
- session_->network_thread()->Clear(this, MSG_ALLOCATION_PHASE);
- SignalPortAllocationComplete(this);
+ // No further allocation steps are needed once all phases in
+ // AllocationSequence have completed. Cause further Process calls in the
+ // previous epoch to be ignored.
+ ++epoch_;
+ port_allocation_complete_callback_();
}
}
@@ -1657,8 +1659,6 @@ PortConfiguration::PortConfiguration(const ServerAddresses& stun_servers,
webrtc::field_trial::IsDisabled("WebRTC-UseTurnServerAsStunServer");
}
-PortConfiguration::~PortConfiguration() = default;
-
ServerAddresses PortConfiguration::StunServers() {
if (!stun_address.IsNil() &&
stun_servers.find(stun_address) == stun_servers.end()) {
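Two cancellation mechanisms replace the Clear(this, ...) calls removed above: a ScopedTaskSafety flag that drops tasks once their owner is gone, and an epoch counter that is sampled when a task is posted and rechecked when it runs, so bumping the counter invalidates everything already queued. A minimal sketch of the epoch idea in isolation (hypothetical class, using the same task-utils helpers as the code above):

#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"

// Hypothetical illustration of the epoch pattern used by AllocationSequence
// and BasicPortAllocatorSession above: tasks capture the epoch at post time
// and return early if it no longer matches when they run.
class EpochGatedWorker {
 public:
  explicit EpochGatedWorker(rtc::Thread* thread) : thread_(thread) {}

  void ScheduleStep(int delay_ms) {
    thread_->PostDelayedTask(
        webrtc::ToQueuedTask(safety_,
                             [this, epoch = epoch_] { Step(epoch); }),
        delay_ms);
  }

  // Invalidate every step that is already queued.
  void Cancel() { ++epoch_; }

 private:
  void Step(int epoch) {
    if (epoch != epoch_)
      return;  // Posted before the last Cancel(); ignore.
    // ... do one unit of work, possibly calling ScheduleStep() again ...
  }

  rtc::Thread* const thread_;
  int epoch_ = 0;
  webrtc::ScopedTaskSafety safety_;
};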
diff --git a/p2p/client/basic_port_allocator.h b/p2p/client/basic_port_allocator.h
index b27016a1dc..77aceb1e9c 100644
--- a/p2p/client/basic_port_allocator.h
+++ b/p2p/client/basic_port_allocator.h
@@ -22,7 +22,9 @@
#include "rtc_base/checks.h"
#include "rtc_base/network.h"
#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
namespace cricket {
@@ -106,8 +108,9 @@ enum class SessionState {
// process will be started.
};
-class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
- public rtc::MessageHandler {
+// This class is thread-compatible and assumes it's created, operated upon and
+// destroyed on the network thread.
+class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession {
public:
BasicPortAllocatorSession(BasicPortAllocator* allocator,
const std::string& content_name,
@@ -155,10 +158,11 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
// Adds a port configuration that is now ready. Once we have one for each
// network (or a timeout occurs), we will start allocating ports.
- virtual void ConfigReady(PortConfiguration* config);
-
- // MessageHandler. Can be overriden if message IDs do not conflict.
- void OnMessage(rtc::Message* message) override;
+ void ConfigReady(std::unique_ptr<PortConfiguration> config);
+ // TODO(bugs.webrtc.org/12840) Remove once unused in downstream projects.
+ ABSL_DEPRECATED(
+ "Use ConfigReady(std::unique_ptr<PortConfiguration>) instead!")
+ void ConfigReady(PortConfiguration* config);
private:
class PortData {
@@ -213,10 +217,10 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
State state_ = STATE_INPROGRESS;
};
- void OnConfigReady(PortConfiguration* config);
+ void OnConfigReady(std::unique_ptr<PortConfiguration> config);
void OnConfigStop();
void AllocatePorts();
- void OnAllocate();
+ void OnAllocate(int allocation_epoch);
void DoAllocate(bool disable_equivalent_phases);
void OnNetworksChanged();
void OnAllocationSequenceObjectsCreated();
@@ -233,7 +237,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
void OnProtocolEnabled(AllocationSequence* seq, ProtocolType proto);
void OnPortDestroyed(PortInterface* port);
void MaybeSignalCandidatesAllocationDone();
- void OnPortAllocationComplete(AllocationSequence* seq);
+ void OnPortAllocationComplete();
PortData* FindPort(Port* port);
std::vector<rtc::Network*> GetNetworks();
std::vector<rtc::Network*> GetFailedNetworks();
@@ -266,7 +270,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
bool allocation_started_;
bool network_manager_started_;
bool allocation_sequences_created_;
- std::vector<PortConfiguration*> configs_;
+ std::vector<std::unique_ptr<PortConfiguration>> configs_;
std::vector<AllocationSequence*> sequences_;
std::vector<PortData> ports_;
std::vector<IceCandidateErrorEvent> candidate_error_events_;
@@ -274,13 +278,15 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
// Policy on how to prune turn ports, taken from the port allocator.
webrtc::PortPrunePolicy turn_port_prune_policy_;
SessionState state_ = SessionState::CLEARED;
+ int allocation_epoch_ RTC_GUARDED_BY(network_thread_) = 0;
+ webrtc::ScopedTaskSafety network_safety_;
friend class AllocationSequence;
};
// Records configuration information useful in creating ports.
// TODO(deadbeef): Rename "relay" to "turn_server" in this struct.
-struct RTC_EXPORT PortConfiguration : public rtc::MessageData {
+struct RTC_EXPORT PortConfiguration {
// TODO(jiayl): remove |stun_address| when Chrome is updated.
rtc::SocketAddress stun_address;
ServerAddresses stun_servers;
@@ -300,8 +306,6 @@ struct RTC_EXPORT PortConfiguration : public rtc::MessageData {
const std::string& username,
const std::string& password);
- ~PortConfiguration() override;
-
// Returns addresses of both the explicitly configured STUN servers,
// and TURN servers that should be used as STUN servers.
ServerAddresses StunServers();
@@ -323,8 +327,8 @@ class TurnPort;
// Performs the allocation of ports, in a sequenced (timed) manner, for a given
// network and IP address.
-class AllocationSequence : public rtc::MessageHandler,
- public sigslot::has_slots<> {
+// This class is thread-compatible.
+class AllocationSequence : public sigslot::has_slots<> {
public:
enum State {
kInit, // Initial state.
@@ -334,11 +338,18 @@ class AllocationSequence : public rtc::MessageHandler,
// kInit --> kRunning --> {kCompleted|kStopped}
};
+ // |port_allocation_complete_callback| is called when AllocationSequence is
+ // done allocating ports. This callback is useful when port allocation
+ // fails without producing any candidates: it gives
+ // BasicPortAllocatorSession an event on which to send its candidate
+ // discovery conclusion signal, which it otherwise would not have. The same
+ // could be achieved by starting a timer in BPAS, but that would be less
+ // deterministic.
AllocationSequence(BasicPortAllocatorSession* session,
rtc::Network* network,
PortConfiguration* config,
- uint32_t flags);
- ~AllocationSequence() override;
+ uint32_t flags,
+ std::function<void()> port_allocation_complete_callback);
void Init();
void Clear();
void OnNetworkFailed();
@@ -360,17 +371,6 @@ class AllocationSequence : public rtc::MessageHandler,
void Start();
void Stop();
- // MessageHandler
- void OnMessage(rtc::Message* msg) override;
-
- // Signal from AllocationSequence, when it's done with allocating ports.
- // This signal is useful, when port allocation fails which doesn't result
- // in any candidates. Using this signal BasicPortAllocatorSession can send
- // its candidate discovery conclusion signal. Without this signal,
- // BasicPortAllocatorSession doesn't have any event to trigger signal. This
- // can also be achieved by starting timer in BPAS.
- sigslot::signal1<AllocationSequence*> SignalPortAllocationComplete;
-
protected:
// For testing.
void CreateTurnPort(const RelayServerConfig& config);
@@ -378,6 +378,7 @@ class AllocationSequence : public rtc::MessageHandler,
private:
typedef std::vector<ProtocolType> ProtocolList;
+ void Process(int epoch);
bool IsFlagSet(uint32_t flag) { return ((flags_ & flag) != 0); }
void CreateUDPPorts();
void CreateTCPPorts();
@@ -406,6 +407,12 @@ class AllocationSequence : public rtc::MessageHandler,
UDPPort* udp_port_;
std::vector<Port*> relay_ports_;
int phase_;
+ std::function<void()> port_allocation_complete_callback_;
+ // This counter is sampled and passed together with tasks when tasks are
+ // posted. If the sampled counter doesn't match |epoch_| on reception, the
+ // posted task is ignored.
+ int epoch_ = 0;
+ webrtc::ScopedTaskSafety safety_;
};
} // namespace cricket
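ConfigReady now takes ownership through std::unique_ptr; the raw-pointer overload above survives only as a deprecated shim that wraps its argument with absl::WrapUnique. A hedged sketch of a migrating caller, modelled on GetPortConfigurations() in the .cc hunk earlier (ExampleSession is a hypothetical BasicPortAllocatorSession subclass whose declaration is omitted):

#include <memory>
#include <utility>

// Hypothetical subclass that builds its own configuration; modelled on
// BasicPortAllocatorSession::GetPortConfigurations() above.
void ExampleSession::GetPortConfigurations() {
  auto config = std::make_unique<cricket::PortConfiguration>(
      allocator()->stun_servers(), username(), password());
  // Old style, now deprecated: ConfigReady(config.release());
  ConfigReady(std::move(config));  // The session stores it in configs_.
}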
diff --git a/p2p/g3doc/ice.md b/p2p/g3doc/ice.md
index 08a25ba8b8..be81ff9e22 100644
--- a/p2p/g3doc/ice.md
+++ b/p2p/g3doc/ice.md
@@ -1,6 +1,7 @@
# ICE
-<?% config.freshness.owner = 'jonaso' %?> <?% config.freshness.reviewed = '2021-04-12' %?>
+<?% config.freshness.owner = 'jonaso' %?>
+<?% config.freshness.reviewed = '2021-04-12' %?>
## Overview
diff --git a/pc/BUILD.gn b/pc/BUILD.gn
index 2a18414981..59b0558bbd 100644
--- a/pc/BUILD.gn
+++ b/pc/BUILD.gn
@@ -23,6 +23,20 @@ config("rtc_pc_config") {
}
}
+rtc_library("proxy") {
+ sources = [
+ "proxy.cc",
+ "proxy.h",
+ ]
+ deps = [
+ "../api:scoped_refptr",
+ "../api/task_queue",
+ "../rtc_base:rtc_base_approved",
+ "../rtc_base:threading",
+ "../rtc_base/system:rtc_export",
+ ]
+}
+
rtc_library("rtc_pc_base") {
visibility = [ "*" ]
defines = []
@@ -42,14 +56,22 @@ rtc_library("rtc_pc_base") {
"ice_transport.h",
"jsep_transport.cc",
"jsep_transport.h",
+ "jsep_transport_collection.cc",
+ "jsep_transport_collection.h",
"jsep_transport_controller.cc",
"jsep_transport_controller.h",
"media_session.cc",
"media_session.h",
+ "media_stream_proxy.h",
+ "media_stream_track_proxy.h",
+ "peer_connection_factory_proxy.h",
+ "peer_connection_proxy.h",
"rtcp_mux_filter.cc",
"rtcp_mux_filter.h",
"rtp_media_utils.cc",
"rtp_media_utils.h",
+ "rtp_receiver_proxy.h",
+ "rtp_sender_proxy.h",
"rtp_transport.cc",
"rtp_transport.h",
"rtp_transport_internal.h",
@@ -59,10 +81,6 @@ rtc_library("rtc_pc_base") {
"sctp_transport.h",
"sctp_utils.cc",
"sctp_utils.h",
- "session_description.cc",
- "session_description.h",
- "simulcast_description.cc",
- "simulcast_description.h",
"srtp_filter.cc",
"srtp_filter.h",
"srtp_session.cc",
@@ -72,10 +90,15 @@ rtc_library("rtc_pc_base") {
"transport_stats.cc",
"transport_stats.h",
"used_ids.h",
+ "video_track_source_proxy.cc",
+ "video_track_source_proxy.h",
]
deps = [
":media_protocol_names",
+ ":proxy",
+ ":session_description",
+ ":simulcast_description",
"../api:array_view",
"../api:async_dns_resolver",
"../api:audio_options_api",
@@ -83,6 +106,7 @@ rtc_library("rtc_pc_base") {
"../api:function_view",
"../api:ice_transport_factory",
"../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
"../api:packet_socket_factory",
"../api:priority",
"../api:rtc_error",
@@ -127,6 +151,7 @@ rtc_library("rtc_pc_base") {
"../rtc_base/network:sent_packet",
"../rtc_base/synchronization:mutex",
"../rtc_base/system:file_wrapper",
+ "../rtc_base/system:no_unique_address",
"../rtc_base/system:rtc_export",
"../rtc_base/task_utils:pending_task_safety_flag",
"../rtc_base/task_utils:to_queued_task",
@@ -150,6 +175,43 @@ rtc_library("rtc_pc_base") {
public_configs = [ ":rtc_pc_config" ]
}
+rtc_source_set("session_description") {
+ visibility = [ "*" ]
+ sources = [
+ "session_description.cc",
+ "session_description.h",
+ ]
+ deps = [
+ ":media_protocol_names",
+ ":simulcast_description",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtp_parameters",
+ "../api:rtp_transceiver_direction",
+ "../media:rtc_media_base",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:socket_address",
+ "../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory:memory",
+ ]
+}
+
+rtc_source_set("simulcast_description") {
+ visibility = [ "*" ]
+ sources = [
+ "simulcast_description.cc",
+ "simulcast_description.h",
+ ]
+ deps = [
+ "../rtc_base:checks",
+ "../rtc_base:socket_address",
+ "../rtc_base/system:rtc_export",
+ ]
+}
+
rtc_source_set("rtc_pc") {
visibility = [ "*" ]
allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
@@ -216,11 +278,10 @@ rtc_library("peerconnection") {
":connection_context",
":dtmf_sender",
":jitter_buffer_delay",
- ":jitter_buffer_delay_interface",
- ":jitter_buffer_delay_proxy",
":media_protocol_names",
":media_stream",
":peer_connection_message_handler",
+ ":proxy",
":remote_audio_source",
":rtc_pc_base",
":rtp_parameters_conversion",
@@ -229,6 +290,8 @@ rtc_library("peerconnection") {
":rtp_transceiver",
":rtp_transmission_manager",
":sdp_state_provider",
+ ":session_description",
+ ":simulcast_description",
":stats_collector_interface",
":transceiver_list",
":usage_pattern",
@@ -278,6 +341,8 @@ rtc_library("peerconnection") {
"../api/video:video_rtp_headers",
"../api/video_codecs:video_codecs_api",
"../call:call_interfaces",
+ "../call:rtp_interfaces",
+ "../call:rtp_sender",
"../common_video",
"../logging:ice_log",
"../media:rtc_data_sctp_transport_internal",
@@ -329,6 +394,7 @@ rtc_library("connection_context") {
"../api:callfactory_api",
"../api:libjingle_peerconnection_api",
"../api:media_stream_interface",
+ "../api:refcountedbase",
"../api:scoped_refptr",
"../api:sequence_checker",
"../api/neteq:neteq_api",
@@ -381,10 +447,12 @@ rtc_library("rtp_transceiver") {
"rtp_transceiver.h",
]
deps = [
+ ":proxy",
":rtc_pc_base",
":rtp_parameters_conversion",
":rtp_receiver",
":rtp_sender",
+ ":session_description",
"../api:array_view",
"../api:libjingle_peerconnection_api",
"../api:rtc_error",
@@ -399,6 +467,8 @@ rtc_library("rtp_transceiver") {
"../rtc_base:macromagic",
"../rtc_base:refcount",
"../rtc_base:threading",
+ "../rtc_base/task_utils:pending_task_safety_flag",
+ "../rtc_base/task_utils:to_queued_task",
"../rtc_base/third_party/sigslot",
]
absl_deps = [
@@ -470,6 +540,7 @@ rtc_library("rtp_receiver") {
]
deps = [
":media_stream",
+ ":rtc_pc_base",
":video_track_source",
"../api:libjingle_peerconnection_api",
"../api:media_stream_interface",
@@ -499,10 +570,9 @@ rtc_library("audio_rtp_receiver") {
deps = [
":audio_track",
":jitter_buffer_delay",
- ":jitter_buffer_delay_interface",
- ":jitter_buffer_delay_proxy",
":media_stream",
":remote_audio_source",
+ ":rtc_pc_base",
":rtp_receiver",
"../api:frame_transformer_interface",
"../api:libjingle_peerconnection_api",
@@ -517,6 +587,9 @@ rtc_library("audio_rtp_receiver") {
"../rtc_base:checks",
"../rtc_base:refcount",
"../rtc_base:threading",
+ "../rtc_base/system:no_unique_address",
+ "../rtc_base/task_utils:pending_task_safety_flag",
+ "../rtc_base/task_utils:to_queued_task",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
@@ -532,9 +605,8 @@ rtc_library("video_rtp_receiver") {
]
deps = [
":jitter_buffer_delay",
- ":jitter_buffer_delay_interface",
- ":jitter_buffer_delay_proxy",
":media_stream",
+ ":rtc_pc_base",
":rtp_receiver",
":video_rtp_track_source",
":video_track",
@@ -553,6 +625,7 @@ rtc_library("video_rtp_receiver") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base:threading",
+ "../rtc_base/system:no_unique_address",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
@@ -612,19 +685,6 @@ rtc_library("video_track") {
]
}
-rtc_source_set("jitter_buffer_delay_interface") {
- sources = [ "jitter_buffer_delay_interface.h" ]
- deps = [
- "../media:rtc_media_base",
- "../rtc_base:refcount",
- ]
- absl_deps = [
- "//third_party/abseil-cpp/absl/algorithm:container",
- "//third_party/abseil-cpp/absl/strings",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
-}
-
rtc_source_set("sdp_state_provider") {
sources = [ "sdp_state_provider.h" ]
deps = [
@@ -633,35 +693,19 @@ rtc_source_set("sdp_state_provider") {
]
}
-rtc_source_set("jitter_buffer_delay_proxy") {
- sources = [ "jitter_buffer_delay_proxy.h" ]
- deps = [
- ":jitter_buffer_delay_interface",
- "../api:libjingle_peerconnection_api",
- "../media:rtc_media_base",
- ]
-}
-
rtc_library("jitter_buffer_delay") {
sources = [
"jitter_buffer_delay.cc",
"jitter_buffer_delay.h",
]
deps = [
- ":jitter_buffer_delay_interface",
"../api:sequence_checker",
- "../media:rtc_media_base",
- "../rtc_base",
"../rtc_base:checks",
- "../rtc_base:refcount",
+ "../rtc_base:safe_conversions",
"../rtc_base:safe_minmax",
- "../rtc_base:threading",
- ]
- absl_deps = [
- "//third_party/abseil-cpp/absl/algorithm:container",
- "//third_party/abseil-cpp/absl/strings",
- "//third_party/abseil-cpp/absl/types:optional",
+ "../rtc_base/system:no_unique_address",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("remote_audio_source") {
@@ -730,6 +774,7 @@ rtc_library("rtp_parameters_conversion") {
]
deps = [
":rtc_pc_base",
+ ":session_description",
"../api:array_view",
"../api:libjingle_peerconnection_api",
"../api:rtc_error",
@@ -751,6 +796,7 @@ rtc_library("dtmf_sender") {
"dtmf_sender.h",
]
deps = [
+ ":proxy",
"../api:libjingle_peerconnection_api",
"../api:scoped_refptr",
"../rtc_base:checks",
@@ -859,6 +905,7 @@ if (rtc_include_tests && !build_with_chromium) {
":peerconnection",
":rtc_pc",
":rtc_pc_base",
+ ":session_description",
":video_rtp_receiver",
"../api:array_view",
"../api:audio_options_api",
@@ -885,6 +932,8 @@ if (rtc_include_tests && !build_with_chromium) {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_base_tests_utils",
"../rtc_base:threading",
+ "../rtc_base/task_utils:pending_task_safety_flag",
+ "../rtc_base/task_utils:to_queued_task",
"../rtc_base/third_party/sigslot",
"../system_wrappers:metrics",
"../test:field_trial",
@@ -1019,15 +1068,16 @@ if (rtc_include_tests && !build_with_chromium) {
":dtmf_sender",
":integration_test_helpers",
":jitter_buffer_delay",
- ":jitter_buffer_delay_interface",
":media_stream",
":peerconnection",
+ ":proxy",
":remote_audio_source",
":rtc_pc_base",
":rtp_parameters_conversion",
":rtp_receiver",
":rtp_sender",
":rtp_transceiver",
+ ":session_description",
":usage_pattern",
":video_rtp_receiver",
":video_rtp_track_source",
@@ -1083,10 +1133,12 @@ if (rtc_include_tests && !build_with_chromium) {
"../rtc_base/synchronization:mutex",
"../rtc_base/third_party/base64",
"../rtc_base/third_party/sigslot",
+ "../system_wrappers:field_trial",
"../system_wrappers:metrics",
"../test:field_trial",
"../test:fileutils",
"../test:rtp_test_utils",
+ "../test:test_common",
"../test/pc/sctp:fake_sctp_transport",
"./scenario_tests:pc_scenario_tests",
"//third_party/abseil-cpp/absl/algorithm:container",
@@ -1176,7 +1228,6 @@ if (rtc_include_tests && !build_with_chromium) {
":audio_track",
":dtmf_sender",
":jitter_buffer_delay",
- ":jitter_buffer_delay_interface",
":media_stream",
":pc_test_utils",
":peerconnection",
@@ -1186,6 +1237,7 @@ if (rtc_include_tests && !build_with_chromium) {
":rtp_receiver",
":rtp_sender",
":rtp_transceiver",
+ ":session_description",
":usage_pattern",
":video_rtp_receiver",
":video_rtp_track_source",
@@ -1286,7 +1338,6 @@ if (rtc_include_tests && !build_with_chromium) {
"test/frame_generator_capturer_video_track_source.h",
"test/mock_channel_interface.h",
"test/mock_data_channel.h",
- "test/mock_delayable.h",
"test/mock_peer_connection_observers.h",
"test/mock_rtp_receiver_internal.h",
"test/mock_rtp_sender_internal.h",
@@ -1298,7 +1349,6 @@ if (rtc_include_tests && !build_with_chromium) {
deps = [
":jitter_buffer_delay",
- ":jitter_buffer_delay_interface",
":libjingle_peerconnection",
":peerconnection",
":rtc_pc_base",
diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc
index 48553ba9f5..4efab24d15 100644
--- a/pc/audio_rtp_receiver.cc
+++ b/pc/audio_rtp_receiver.cc
@@ -15,14 +15,13 @@
#include <utility>
#include <vector>
-#include "api/media_stream_track_proxy.h"
#include "api/sequence_checker.h"
#include "pc/audio_track.h"
-#include "pc/jitter_buffer_delay.h"
-#include "pc/jitter_buffer_delay_proxy.h"
+#include "pc/media_stream_track_proxy.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
+#include "rtc_base/task_utils/to_queued_task.h"
namespace webrtc {
@@ -42,7 +41,7 @@ AudioRtpReceiver::AudioRtpReceiver(
bool is_unified_plan)
: worker_thread_(worker_thread),
id_(receiver_id),
- source_(new rtc::RefCountedObject<RemoteAudioSource>(
+ source_(rtc::make_ref_counted<RemoteAudioSource>(
worker_thread,
is_unified_plan
? RemoteAudioSource::OnAudioChannelGoneAction::kSurvive
@@ -52,10 +51,7 @@ AudioRtpReceiver::AudioRtpReceiver(
AudioTrack::Create(receiver_id, source_))),
cached_track_enabled_(track_->enabled()),
attachment_id_(GenerateUniqueId()),
- delay_(JitterBufferDelayProxy::Create(
- rtc::Thread::Current(),
- worker_thread_,
- new rtc::RefCountedObject<JitterBufferDelay>(worker_thread))) {
+ worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) {
RTC_DCHECK(worker_thread_);
RTC_DCHECK(track_->GetSource()->remote());
track_->RegisterObserver(this);
@@ -64,140 +60,188 @@ AudioRtpReceiver::AudioRtpReceiver(
}
AudioRtpReceiver::~AudioRtpReceiver() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK(stopped_);
+ RTC_DCHECK(!media_channel_);
+
track_->GetSource()->UnregisterAudioObserver(this);
track_->UnregisterObserver(this);
- Stop();
}
void AudioRtpReceiver::OnChanged() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
- Reconfigure();
+ worker_thread_->PostTask(ToQueuedTask(
+ worker_thread_safety_,
+ [this, enabled = cached_track_enabled_, volume = cached_volume_]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ Reconfigure(enabled, volume);
+ }));
}
}
-bool AudioRtpReceiver::SetOutputVolume(double volume) {
+// RTC_RUN_ON(worker_thread_)
+void AudioRtpReceiver::SetOutputVolume_w(double volume) {
RTC_DCHECK_GE(volume, 0.0);
RTC_DCHECK_LE(volume, 10.0);
- RTC_DCHECK(media_channel_);
- RTC_DCHECK(!stopped_);
- return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
- return ssrc_ ? media_channel_->SetOutputVolume(*ssrc_, volume)
- : media_channel_->SetDefaultOutputVolume(volume);
- });
+ ssrc_ ? media_channel_->SetOutputVolume(*ssrc_, volume)
+ : media_channel_->SetDefaultOutputVolume(volume);
}
void AudioRtpReceiver::OnSetVolume(double volume) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
RTC_DCHECK_GE(volume, 0);
RTC_DCHECK_LE(volume, 10);
- cached_volume_ = volume;
- if (!media_channel_ || stopped_) {
- RTC_LOG(LS_ERROR)
- << "AudioRtpReceiver::OnSetVolume: No audio channel exists.";
+ if (stopped_)
return;
- }
+
+ cached_volume_ = volume;
+
// When the track is disabled, the volume of the source, which is the
// corresponding WebRtc Voice Engine channel will be 0. So we do not allow
// setting the volume to the source when the track is disabled.
- if (!stopped_ && track_->enabled()) {
- if (!SetOutputVolume(cached_volume_)) {
- RTC_NOTREACHED();
- }
+ if (track_->enabled()) {
+ worker_thread_->PostTask(
+ ToQueuedTask(worker_thread_safety_, [this, volume = cached_volume_]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ SetOutputVolume_w(volume);
+ }));
}
}
+rtc::scoped_refptr<DtlsTransportInterface> AudioRtpReceiver::dtls_transport()
+ const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return dtls_transport_;
+}
+
std::vector<std::string> AudioRtpReceiver::stream_ids() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
std::vector<std::string> stream_ids(streams_.size());
for (size_t i = 0; i < streams_.size(); ++i)
stream_ids[i] = streams_[i]->id();
return stream_ids;
}
+std::vector<rtc::scoped_refptr<MediaStreamInterface>>
+AudioRtpReceiver::streams() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return streams_;
+}
+
RtpParameters AudioRtpReceiver::GetParameters() const {
- if (!media_channel_ || stopped_) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_)
return RtpParameters();
- }
- return worker_thread_->Invoke<RtpParameters>(RTC_FROM_HERE, [&] {
- return ssrc_ ? media_channel_->GetRtpReceiveParameters(*ssrc_)
- : media_channel_->GetDefaultRtpReceiveParameters();
- });
+ return ssrc_ ? media_channel_->GetRtpReceiveParameters(*ssrc_)
+ : media_channel_->GetDefaultRtpReceiveParameters();
}
void AudioRtpReceiver::SetFrameDecryptor(
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
frame_decryptor_ = std::move(frame_decryptor);
// Special Case: Set the frame decryptor to any value on any existing channel.
- if (media_channel_ && ssrc_.has_value() && !stopped_) {
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
- media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_);
- });
+ if (media_channel_ && ssrc_) {
+ media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_);
}
}
rtc::scoped_refptr<FrameDecryptorInterface>
AudioRtpReceiver::GetFrameDecryptor() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
return frame_decryptor_;
}
void AudioRtpReceiver::Stop() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
// TODO(deadbeef): Need to do more here to fully stop receiving packets.
- if (stopped_) {
- return;
- }
- source_->SetState(MediaSourceInterface::kEnded);
- if (media_channel_) {
- // Allow that SetOutputVolume fail. This is the normal case when the
- // underlying media channel has already been deleted.
- SetOutputVolume(0.0);
+ if (!stopped_) {
+ source_->SetState(MediaSourceInterface::kEnded);
+ stopped_ = true;
}
- stopped_ = true;
+
+ worker_thread_->Invoke<void>(RTC_FROM_HERE, [&]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (media_channel_)
+ SetOutputVolume_w(0.0);
+ SetMediaChannel_w(nullptr);
+ });
}
void AudioRtpReceiver::StopAndEndTrack() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
Stop();
track_->internal()->set_ended();
}
void AudioRtpReceiver::RestartMediaChannel(absl::optional<uint32_t> ssrc) {
- RTC_DCHECK(media_channel_);
- if (!stopped_ && ssrc_ == ssrc) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ bool ok = worker_thread_->Invoke<bool>(
+ RTC_FROM_HERE, [&, enabled = cached_track_enabled_,
+ volume = cached_volume_, was_stopped = stopped_]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_) {
+ RTC_DCHECK(was_stopped);
+ return false; // Can't restart.
+ }
+
+ if (!was_stopped && ssrc_ == ssrc) {
+ // Already running with that ssrc.
+ RTC_DCHECK(worker_thread_safety_->alive());
+ return true;
+ }
+
+ if (!was_stopped) {
+ source_->Stop(media_channel_, ssrc_);
+ }
+
+ ssrc_ = std::move(ssrc);
+ source_->Start(media_channel_, ssrc_);
+ if (ssrc_) {
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs());
+ }
+
+ Reconfigure(enabled, volume);
+ return true;
+ });
+
+ if (!ok)
return;
- }
- if (!stopped_) {
- source_->Stop(media_channel_, ssrc_);
- delay_->OnStop();
- }
- ssrc_ = ssrc;
stopped_ = false;
- source_->Start(media_channel_, ssrc);
- delay_->OnStart(media_channel_, ssrc.value_or(0));
- Reconfigure();
}
void AudioRtpReceiver::SetupMediaChannel(uint32_t ssrc) {
- if (!media_channel_) {
- RTC_LOG(LS_ERROR)
- << "AudioRtpReceiver::SetupMediaChannel: No audio channel exists.";
- return;
- }
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
RestartMediaChannel(ssrc);
}
void AudioRtpReceiver::SetupUnsignaledMediaChannel() {
- if (!media_channel_) {
- RTC_LOG(LS_ERROR) << "AudioRtpReceiver::SetupUnsignaledMediaChannel: No "
- "audio channel exists.";
- }
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
RestartMediaChannel(absl::nullopt);
}
+uint32_t AudioRtpReceiver::ssrc() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return ssrc_.value_or(0);
+}
+
void AudioRtpReceiver::set_stream_ids(std::vector<std::string> stream_ids) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
SetStreams(CreateStreamsFromIds(std::move(stream_ids)));
}
+void AudioRtpReceiver::set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ dtls_transport_ = std::move(dtls_transport);
+}
+
void AudioRtpReceiver::SetStreams(
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
// Remove remote track from any streams that are going away.
for (const auto& existing_stream : streams_) {
bool removed = true;
@@ -230,51 +274,42 @@ void AudioRtpReceiver::SetStreams(
}
std::vector<RtpSource> AudioRtpReceiver::GetSources() const {
- if (!media_channel_ || !ssrc_ || stopped_) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_ || !ssrc_) {
return {};
}
- return worker_thread_->Invoke<std::vector<RtpSource>>(
- RTC_FROM_HERE, [&] { return media_channel_->GetSources(*ssrc_); });
+ return media_channel_->GetSources(*ssrc_);
}
void AudioRtpReceiver::SetDepacketizerToDecoderFrameTransformer(
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {
- worker_thread_->Invoke<void>(
- RTC_FROM_HERE, [this, frame_transformer = std::move(frame_transformer)] {
- RTC_DCHECK_RUN_ON(worker_thread_);
- frame_transformer_ = frame_transformer;
- if (media_channel_ && ssrc_.has_value() && !stopped_) {
- media_channel_->SetDepacketizerToDecoderFrameTransformer(
- *ssrc_, frame_transformer);
- }
- });
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (media_channel_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(ssrc_.value_or(0),
+ frame_transformer);
+ }
+ frame_transformer_ = std::move(frame_transformer);
}
-void AudioRtpReceiver::Reconfigure() {
- if (!media_channel_ || stopped_) {
- RTC_LOG(LS_ERROR)
- << "AudioRtpReceiver::Reconfigure: No audio channel exists.";
- return;
- }
- if (!SetOutputVolume(track_->enabled() ? cached_volume_ : 0)) {
- RTC_NOTREACHED();
+// RTC_RUN_ON(worker_thread_)
+void AudioRtpReceiver::Reconfigure(bool track_enabled, double volume) {
+ RTC_DCHECK(media_channel_);
+
+ SetOutputVolume_w(track_enabled ? volume : 0);
+
+ if (ssrc_ && frame_decryptor_) {
+ // Reattach the frame decryptor if we were reconfigured.
+ media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_);
}
- // Reattach the frame decryptor if we were reconfigured.
- MaybeAttachFrameDecryptorToMediaChannel(
- ssrc_, worker_thread_, frame_decryptor_, media_channel_, stopped_);
-
- if (media_channel_ && ssrc_.has_value() && !stopped_) {
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [this] {
- RTC_DCHECK_RUN_ON(worker_thread_);
- if (!frame_transformer_)
- return;
- media_channel_->SetDepacketizerToDecoderFrameTransformer(
- *ssrc_, frame_transformer_);
- });
+
+ if (frame_transformer_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ ssrc_.value_or(0), frame_transformer_);
}
}
void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
observer_ = observer;
// Deliver any notifications the observer may have missed by being set late.
if (received_first_packet_ && observer_) {
@@ -284,16 +319,35 @@ void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
void AudioRtpReceiver::SetJitterBufferMinimumDelay(
absl::optional<double> delay_seconds) {
- delay_->Set(delay_seconds);
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ delay_.Set(delay_seconds);
+ if (media_channel_ && ssrc_)
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs());
}
void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
RTC_DCHECK(media_channel == nullptr ||
media_channel->media_type() == media_type());
+
+ if (stopped_ && !media_channel)
+ return;
+
+ worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ SetMediaChannel_w(media_channel);
+ });
+}
+
+// RTC_RUN_ON(worker_thread_)
+void AudioRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) {
+ media_channel ? worker_thread_safety_->SetAlive()
+ : worker_thread_safety_->SetNotAlive();
media_channel_ = static_cast<cricket::VoiceMediaChannel*>(media_channel);
}
void AudioRtpReceiver::NotifyFirstPacketReceived() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
if (observer_) {
observer_->OnFirstPacketReceived(media_type());
}
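
Note: the receiver changes above replace most blocking Invoke() calls with tasks posted to the worker thread and guarded by a PendingTaskSafetyFlag, so work that lands after SetMediaChannel_w(nullptr) is silently dropped instead of touching a dead channel. A minimal sketch of that pattern, using only the task_utils headers added to the deps earlier in this patch (the Example class and its methods are illustrative, not part of the change):

#include "api/scoped_refptr.h"
#include "api/sequence_checker.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"

class Example {
 public:
  explicit Example(rtc::Thread* worker)
      : worker_(worker),
        // Starts out inactive: posted tasks are dropped until SetAlive().
        safety_(webrtc::PendingTaskSafetyFlag::CreateDetachedInactive()) {}

  // Called on the worker thread when a media channel is attached/detached,
  // mirroring SetMediaChannel_w() above.
  void AttachOnWorker() { safety_->SetAlive(); }
  void DetachOnWorker() { safety_->SetNotAlive(); }

  // Fire-and-forget from any thread: no blocking hop, and a no-op if the
  // flag was marked not-alive before the task runs.
  void PostVolume(double volume) {
    worker_->PostTask(webrtc::ToQueuedTask(safety_, [this, volume]() {
      RTC_DCHECK_RUN_ON(worker_);
      ApplyVolume(volume);
    }));
  }

 private:
  void ApplyVolume(double /*volume*/) { /* worker-thread work */ }

  rtc::Thread* const worker_;
  const rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety_;
};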
diff --git a/pc/audio_rtp_receiver.h b/pc/audio_rtp_receiver.h
index 789d4a0f52..c3468721d8 100644
--- a/pc/audio_rtp_receiver.h
+++ b/pc/audio_rtp_receiver.h
@@ -12,6 +12,7 @@
#define PC_AUDIO_RTP_RECEIVER_H_
#include <stdint.h>
+
#include <string>
#include <vector>
@@ -20,18 +21,21 @@
#include "api/dtls_transport_interface.h"
#include "api/frame_transformer_interface.h"
#include "api/media_stream_interface.h"
-#include "api/media_stream_track_proxy.h"
#include "api/media_types.h"
#include "api/rtp_parameters.h"
#include "api/rtp_receiver_interface.h"
#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
#include "api/transport/rtp/rtp_source.h"
#include "media/base/media_channel.h"
#include "pc/audio_track.h"
-#include "pc/jitter_buffer_delay_interface.h"
+#include "pc/jitter_buffer_delay.h"
+#include "pc/media_stream_track_proxy.h"
#include "pc/remote_audio_source.h"
#include "pc/rtp_receiver.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
@@ -39,7 +43,7 @@ namespace webrtc {
class AudioRtpReceiver : public ObserverInterface,
public AudioSourceInterface::AudioObserver,
- public rtc::RefCountedObject<RtpReceiverInternal> {
+ public RtpReceiverInternal {
public:
AudioRtpReceiver(rtc::Thread* worker_thread,
std::string receiver_id,
@@ -59,22 +63,16 @@ class AudioRtpReceiver : public ObserverInterface,
// AudioSourceInterface::AudioObserver implementation
void OnSetVolume(double volume) override;
- rtc::scoped_refptr<AudioTrackInterface> audio_track() const {
- return track_.get();
- }
+ rtc::scoped_refptr<AudioTrackInterface> audio_track() const { return track_; }
// RtpReceiverInterface implementation
rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
- return track_.get();
- }
- rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override {
- return dtls_transport_;
+ return track_;
}
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override;
std::vector<std::string> stream_ids() const override;
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams()
- const override {
- return streams_;
- }
+ const override;
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_AUDIO;
@@ -95,13 +93,11 @@ class AudioRtpReceiver : public ObserverInterface,
void StopAndEndTrack() override;
void SetupMediaChannel(uint32_t ssrc) override;
void SetupUnsignaledMediaChannel() override;
- uint32_t ssrc() const override { return ssrc_.value_or(0); }
+ uint32_t ssrc() const override;
void NotifyFirstPacketReceived() override;
void set_stream_ids(std::vector<std::string> stream_ids) override;
void set_transport(
- rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override {
- dtls_transport_ = dtls_transport;
- }
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override;
void SetStreams(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
streams) override;
void SetObserver(RtpReceiverObserverInterface* observer) override;
@@ -119,29 +115,40 @@ class AudioRtpReceiver : public ObserverInterface,
private:
void RestartMediaChannel(absl::optional<uint32_t> ssrc);
- void Reconfigure();
- bool SetOutputVolume(double volume);
+ void Reconfigure(bool track_enabled, double volume)
+ RTC_RUN_ON(worker_thread_);
+ void SetOutputVolume_w(double volume) RTC_RUN_ON(worker_thread_);
+ void SetMediaChannel_w(cricket::MediaChannel* media_channel)
+ RTC_RUN_ON(worker_thread_);
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_;
rtc::Thread* const worker_thread_;
const std::string id_;
const rtc::scoped_refptr<RemoteAudioSource> source_;
const rtc::scoped_refptr<AudioTrackProxyWithInternal<AudioTrack>> track_;
- cricket::VoiceMediaChannel* media_channel_ = nullptr;
- absl::optional<uint32_t> ssrc_;
- std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_;
- bool cached_track_enabled_;
- double cached_volume_ = 1;
- bool stopped_ = true;
- RtpReceiverObserverInterface* observer_ = nullptr;
- bool received_first_packet_ = false;
- int attachment_id_ = 0;
- rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_;
- rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_;
- // Allows to thread safely change playout delay. Handles caching cases if
+ cricket::VoiceMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) =
+ nullptr;
+ absl::optional<uint32_t> ssrc_ RTC_GUARDED_BY(worker_thread_);
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
+ bool cached_track_enabled_ RTC_GUARDED_BY(&signaling_thread_checker_);
+ double cached_volume_ RTC_GUARDED_BY(&signaling_thread_checker_) = 1.0;
+ bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true;
+ RtpReceiverObserverInterface* observer_
+ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr;
+ bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) =
+ false;
+ const int attachment_id_;
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_
+ RTC_GUARDED_BY(worker_thread_);
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
+ // Stores and updates the playout delay. Handles caching cases if
// |SetJitterBufferMinimumDelay| is called before start.
- rtc::scoped_refptr<JitterBufferDelayInterface> delay_;
+ JitterBufferDelay delay_ RTC_GUARDED_BY(worker_thread_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
RTC_GUARDED_BY(worker_thread_);
+ const rtc::scoped_refptr<PendingTaskSafetyFlag> worker_thread_safety_;
};
} // namespace webrtc
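
Note: every member in the header above is now tagged with the thread or sequence that owns it, which lets Clang's thread-safety analysis and the RTC_DCHECK_RUN_ON runtime checks catch cross-thread access. A small illustration of the annotation style (hypothetical class, not part of the patch):

#include "api/sequence_checker.h"
#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"

class AnnotatedExample {
 public:
  explicit AnnotatedExample(rtc::Thread* worker) : worker_(worker) {}

  void SetCachedValue(int v) {
    // Runtime check (debug builds) that we are on the construction sequence.
    RTC_DCHECK_RUN_ON(&signaling_checker_);
    cached_value_ = v;  // Compile-time check via RTC_GUARDED_BY.
  }

 private:
  // Declaring the requirement on the function satisfies the analysis for
  // every guarded member it touches.
  void WorkerOnlyHelper() RTC_RUN_ON(worker_) { ++worker_value_; }

  RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_checker_;
  rtc::Thread* const worker_;
  int cached_value_ RTC_GUARDED_BY(&signaling_checker_) = 0;
  int worker_value_ RTC_GUARDED_BY(worker_) = 0;
};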
diff --git a/pc/audio_track.cc b/pc/audio_track.cc
index 4f4c6b4757..be087f693b 100644
--- a/pc/audio_track.cc
+++ b/pc/audio_track.cc
@@ -19,7 +19,7 @@ namespace webrtc {
rtc::scoped_refptr<AudioTrack> AudioTrack::Create(
const std::string& id,
const rtc::scoped_refptr<AudioSourceInterface>& source) {
- return new rtc::RefCountedObject<AudioTrack>(id, source);
+ return rtc::make_ref_counted<AudioTrack>(id, source);
}
AudioTrack::AudioTrack(const std::string& label,
@@ -32,7 +32,7 @@ AudioTrack::AudioTrack(const std::string& label,
}
AudioTrack::~AudioTrack() {
- RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&thread_checker_);
set_state(MediaStreamTrackInterface::kEnded);
if (audio_source_)
audio_source_->UnregisterObserver(this);
@@ -43,24 +43,24 @@ std::string AudioTrack::kind() const {
}
AudioSourceInterface* AudioTrack::GetSource() const {
- RTC_DCHECK(thread_checker_.IsCurrent());
+ // Callable from any thread.
return audio_source_.get();
}
void AudioTrack::AddSink(AudioTrackSinkInterface* sink) {
- RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&thread_checker_);
if (audio_source_)
audio_source_->AddSink(sink);
}
void AudioTrack::RemoveSink(AudioTrackSinkInterface* sink) {
- RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&thread_checker_);
if (audio_source_)
audio_source_->RemoveSink(sink);
}
void AudioTrack::OnChanged() {
- RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK_RUN_ON(&thread_checker_);
if (audio_source_->state() == MediaSourceInterface::kEnded) {
set_state(kEnded);
} else {
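
Note: this file (and audio_rtp_receiver.cc above) switches from spelling out new rtc::RefCountedObject<T>(...) to the rtc::make_ref_counted<T>(...) helper. A minimal sketch of the call site, assuming the helper declared alongside rtc::RefCountedObject (the Foo type is illustrative):

#include <string>
#include <utility>

#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counted_object.h"

namespace example {

// The class only declares the ref-count interface; make_ref_counted wraps it
// in rtc::RefCountedObject<Foo>, which supplies AddRef()/Release().
class Foo : public rtc::RefCountInterface {
 public:
  explicit Foo(std::string name) : name_(std::move(name)) {}
  const std::string& name() const { return name_; }

 protected:
  ~Foo() override = default;

 private:
  const std::string name_;
};

rtc::scoped_refptr<Foo> CreateFoo() {
  // Pre-patch spelling: new rtc::RefCountedObject<Foo>("demo").
  return rtc::make_ref_counted<Foo>("demo");
}

}  // namespace example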
diff --git a/pc/audio_track.h b/pc/audio_track.h
index 07511a5c94..8a705cf8fb 100644
--- a/pc/audio_track.h
+++ b/pc/audio_track.h
@@ -41,13 +41,13 @@ class AudioTrack : public MediaStreamTrack<AudioTrackInterface>,
// MediaStreamTrack implementation.
std::string kind() const override;
- private:
// AudioTrackInterface implementation.
AudioSourceInterface* GetSource() const override;
void AddSink(AudioTrackSinkInterface* sink) override;
void RemoveSink(AudioTrackSinkInterface* sink) override;
+ private:
// ObserverInterface implementation.
void OnChanged() override;
diff --git a/pc/channel.cc b/pc/channel.cc
index 5c0b204cf0..8630703be1 100644
--- a/pc/channel.cc
+++ b/pc/channel.cc
@@ -44,11 +44,6 @@ using ::webrtc::PendingTaskSafetyFlag;
using ::webrtc::SdpType;
using ::webrtc::ToQueuedTask;
-struct SendPacketMessageData : public rtc::MessageData {
- rtc::CopyOnWriteBuffer packet;
- rtc::PacketOptions options;
-};
-
// Finds a stream based on target's Primary SSRC or RIDs.
// This struct is used in BaseChannel::UpdateLocalStreams_w.
struct StreamFinder {
@@ -84,14 +79,6 @@ struct StreamFinder {
} // namespace
-enum {
- MSG_SEND_RTP_PACKET = 1,
- MSG_SEND_RTCP_PACKET,
- MSG_READYTOSENDDATA,
- MSG_DATARECEIVED,
- MSG_FIRSTPACKETRECEIVED,
-};
-
static void SafeSetError(const std::string& message, std::string* error_desc) {
if (error_desc) {
*error_desc = message;
@@ -156,7 +143,6 @@ BaseChannel::~BaseChannel() {
// Eats any outstanding messages or packets.
alive_->SetNotAlive();
- signaling_thread_->Clear(this);
// The media channel is destroyed at the end of the destructor, since it
// is a std::unique_ptr. The transport channel (rtp_transport) must outlive
// the media channel.
@@ -210,33 +196,31 @@ void BaseChannel::DisconnectFromRtpTransport() {
void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
RTC_DCHECK_RUN_ON(worker_thread());
- network_thread_->Invoke<void>(
- RTC_FROM_HERE, [this, rtp_transport] { SetRtpTransport(rtp_transport); });
-
- // Both RTP and RTCP channels should be set, we can call SetInterface on
- // the media channel and it can set network options.
- media_channel_->SetInterface(this);
+ network_thread_->Invoke<void>(RTC_FROM_HERE, [this, rtp_transport] {
+ SetRtpTransport(rtp_transport);
+ // Both RTP and RTCP channels should be set, we can call SetInterface on
+ // the media channel and it can set network options.
+ media_channel_->SetInterface(this);
+ });
}
void BaseChannel::Deinit() {
RTC_DCHECK_RUN_ON(worker_thread());
- media_channel_->SetInterface(/*iface=*/nullptr);
// Packets arrive on the network thread, processing packets calls virtual
// functions, so need to stop this process in Deinit that is called in
// derived classes destructor.
network_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
RTC_DCHECK_RUN_ON(network_thread());
- FlushRtcpMessages_n();
+ media_channel_->SetInterface(/*iface=*/nullptr);
if (rtp_transport_) {
DisconnectFromRtpTransport();
}
- // Clear pending read packets/messages.
- network_thread_->Clear(this);
});
}
bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) {
+ TRACE_EVENT0("webrtc", "BaseChannel::SetRtpTransport");
RTC_DCHECK_RUN_ON(network_thread());
if (rtp_transport == rtp_transport_) {
return true;
@@ -271,56 +255,59 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) {
return true;
}
-bool BaseChannel::Enable(bool enable) {
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [this, enable] {
+void BaseChannel::Enable(bool enable) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+
+ if (enable == enabled_s_)
+ return;
+
+ enabled_s_ = enable;
+
+ worker_thread_->PostTask(ToQueuedTask(alive_, [this, enable] {
RTC_DCHECK_RUN_ON(worker_thread());
+ // Sanity check to make sure that enabled_ and enabled_s_
+ // stay in sync.
+ RTC_DCHECK_NE(enabled_, enable);
if (enable) {
EnableMedia_w();
} else {
DisableMedia_w();
}
- });
- return true;
+ }));
}
bool BaseChannel::SetLocalContent(const MediaContentDescription* content,
SdpType type,
std::string* error_desc) {
+ RTC_DCHECK_RUN_ON(worker_thread());
TRACE_EVENT0("webrtc", "BaseChannel::SetLocalContent");
- return InvokeOnWorker<bool>(RTC_FROM_HERE, [this, content, type, error_desc] {
- RTC_DCHECK_RUN_ON(worker_thread());
- return SetLocalContent_w(content, type, error_desc);
- });
+ return SetLocalContent_w(content, type, error_desc);
}
bool BaseChannel::SetRemoteContent(const MediaContentDescription* content,
SdpType type,
std::string* error_desc) {
+ RTC_DCHECK_RUN_ON(worker_thread());
TRACE_EVENT0("webrtc", "BaseChannel::SetRemoteContent");
- return InvokeOnWorker<bool>(RTC_FROM_HERE, [this, content, type, error_desc] {
- RTC_DCHECK_RUN_ON(worker_thread());
- return SetRemoteContent_w(content, type, error_desc);
- });
+ return SetRemoteContent_w(content, type, error_desc);
}
bool BaseChannel::SetPayloadTypeDemuxingEnabled(bool enabled) {
+ RTC_DCHECK_RUN_ON(worker_thread());
TRACE_EVENT0("webrtc", "BaseChannel::SetPayloadTypeDemuxingEnabled");
- return InvokeOnWorker<bool>(RTC_FROM_HERE, [this, enabled] {
- RTC_DCHECK_RUN_ON(worker_thread());
- return SetPayloadTypeDemuxingEnabled_w(enabled);
- });
+ return SetPayloadTypeDemuxingEnabled_w(enabled);
}
bool BaseChannel::IsReadyToReceiveMedia_w() const {
// Receive data if we are enabled and have local content,
- return enabled() &&
+ return enabled_ &&
webrtc::RtpTransceiverDirectionHasRecv(local_content_direction_);
}
bool BaseChannel::IsReadyToSendMedia_w() const {
// Send outgoing data if we are enabled, have local and remote content,
// and we have had some form of connectivity.
- return enabled() &&
+ return enabled_ &&
webrtc::RtpTransceiverDirectionHasRecv(remote_content_direction_) &&
webrtc::RtpTransceiverDirectionHasSend(local_content_direction_) &&
was_ever_writable();
@@ -339,15 +326,7 @@ bool BaseChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet,
int BaseChannel::SetOption(SocketType type,
rtc::Socket::Option opt,
int value) {
- return network_thread_->Invoke<int>(RTC_FROM_HERE, [this, type, opt, value] {
- RTC_DCHECK_RUN_ON(network_thread());
- return SetOption_n(type, opt, value);
- });
-}
-
-int BaseChannel::SetOption_n(SocketType type,
- rtc::Socket::Option opt,
- int value) {
+ RTC_DCHECK_RUN_ON(network_thread());
RTC_DCHECK(rtp_transport_);
switch (type) {
case ST_RTP:
@@ -387,9 +366,11 @@ void BaseChannel::OnNetworkRouteChanged(
media_channel_->OnNetworkRouteChanged(transport_name_, new_route);
}
-sigslot::signal1<ChannelInterface*>& BaseChannel::SignalFirstPacketReceived() {
- RTC_DCHECK_RUN_ON(signaling_thread_);
- return SignalFirstPacketReceived_;
+void BaseChannel::SetFirstPacketReceivedCallback(
+ std::function<void()> callback) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ RTC_DCHECK(!on_first_packet_received_ || !callback);
+ on_first_packet_received_ = std::move(callback);
}
void BaseChannel::OnTransportReadyToSend(bool ready) {
@@ -400,6 +381,7 @@ void BaseChannel::OnTransportReadyToSend(bool ready) {
bool BaseChannel::SendPacket(bool rtcp,
rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options) {
+ RTC_DCHECK_RUN_ON(network_thread());
// Until all the code is migrated to use RtpPacketType instead of bool.
RtpPacketType packet_type = rtcp ? RtpPacketType::kRtcp : RtpPacketType::kRtp;
// SendPacket gets called from MediaEngine, on a pacer or an encoder thread.
@@ -409,16 +391,6 @@ bool BaseChannel::SendPacket(bool rtcp,
// SRTP and the inner workings of the transport channels.
// The only downside is that we can't return a proper failure code if
// needed. Since UDP is unreliable anyway, this should be a non-issue.
- if (!network_thread_->IsCurrent()) {
- // Avoid a copy by transferring the ownership of the packet data.
- int message_id = rtcp ? MSG_SEND_RTCP_PACKET : MSG_SEND_RTP_PACKET;
- SendPacketMessageData* data = new SendPacketMessageData;
- data->packet = std::move(*packet);
- data->options = options;
- network_thread_->Post(RTC_FROM_HERE, this, message_id, data);
- return true;
- }
- RTC_DCHECK_RUN_ON(network_thread());
TRACE_EVENT0("webrtc", "BaseChannel::SendPacket");
@@ -466,16 +438,11 @@ bool BaseChannel::SendPacket(bool rtcp,
}
void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) {
- // Take packet time from the |parsed_packet|.
- // RtpPacketReceived.arrival_time_ms = (timestamp_us + 500) / 1000;
- int64_t packet_time_us = -1;
- if (parsed_packet.arrival_time_ms() > 0) {
- packet_time_us = parsed_packet.arrival_time_ms() * 1000;
- }
+ RTC_DCHECK_RUN_ON(network_thread());
- if (!has_received_packet_) {
- has_received_packet_ = true;
- signaling_thread()->Post(RTC_FROM_HERE, this, MSG_FIRSTPACKETRECEIVED);
+ if (on_first_packet_received_) {
+ on_first_packet_received_();
+ on_first_packet_received_ = nullptr;
}
if (!srtp_active() && srtp_required_) {
@@ -496,7 +463,10 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) {
return;
}
- media_channel_->OnPacketReceived(parsed_packet.Buffer(), packet_time_us);
+ webrtc::Timestamp packet_time = parsed_packet.arrival_time();
+ media_channel_->OnPacketReceived(
+ parsed_packet.Buffer(),
+ packet_time.IsMinusInfinity() ? -1 : packet_time.us());
}
void BaseChannel::UpdateRtpHeaderExtensionMap(
@@ -555,6 +525,7 @@ void BaseChannel::DisableMedia_w() {
}
void BaseChannel::UpdateWritableState_n() {
+ TRACE_EVENT0("webrtc", "BaseChannel::UpdateWritableState_n");
if (rtp_transport_->IsWritable(/*rtcp=*/true) &&
rtp_transport_->IsWritable(/*rtcp=*/false)) {
ChannelWritable_n();
@@ -564,6 +535,7 @@ void BaseChannel::UpdateWritableState_n() {
}
void BaseChannel::ChannelWritable_n() {
+ TRACE_EVENT0("webrtc", "BaseChannel::ChannelWritable_n");
if (writable_) {
return;
}
@@ -583,6 +555,7 @@ void BaseChannel::ChannelWritable_n() {
}
void BaseChannel::ChannelNotWritable_n() {
+ TRACE_EVENT0("webrtc", "BaseChannel::ChannelNotWritable_n");
if (!writable_) {
return;
}
@@ -779,39 +752,12 @@ bool BaseChannel::UpdateRemoteStreams_w(
return ret;
}
-RtpHeaderExtensions BaseChannel::GetFilteredRtpHeaderExtensions(
+RtpHeaderExtensions BaseChannel::GetDeduplicatedRtpHeaderExtensions(
const RtpHeaderExtensions& extensions) {
- if (crypto_options_.srtp.enable_encrypted_rtp_header_extensions) {
- RtpHeaderExtensions filtered;
- absl::c_copy_if(extensions, std::back_inserter(filtered),
- [](const webrtc::RtpExtension& extension) {
- return !extension.encrypt;
- });
- return filtered;
- }
-
- return webrtc::RtpExtension::FilterDuplicateNonEncrypted(extensions);
-}
-
-void BaseChannel::OnMessage(rtc::Message* pmsg) {
- TRACE_EVENT0("webrtc", "BaseChannel::OnMessage");
- switch (pmsg->message_id) {
- case MSG_SEND_RTP_PACKET:
- case MSG_SEND_RTCP_PACKET: {
- RTC_DCHECK_RUN_ON(network_thread());
- SendPacketMessageData* data =
- static_cast<SendPacketMessageData*>(pmsg->pdata);
- bool rtcp = pmsg->message_id == MSG_SEND_RTCP_PACKET;
- SendPacket(rtcp, &data->packet, data->options);
- delete data;
- break;
- }
- case MSG_FIRSTPACKETRECEIVED: {
- RTC_DCHECK_RUN_ON(signaling_thread_);
- SignalFirstPacketReceived_(this);
- break;
- }
- }
+ return webrtc::RtpExtension::DeduplicateHeaderExtensions(
+ extensions, crypto_options_.srtp.enable_encrypted_rtp_header_extensions
+ ? webrtc::RtpExtension::kPreferEncryptedExtension
+ : webrtc::RtpExtension::kDiscardEncryptedExtension);
}
void BaseChannel::MaybeAddHandledPayloadType(int payload_type) {
@@ -828,35 +774,11 @@ void BaseChannel::ClearHandledPayloadTypes() {
payload_types_.clear();
}
-void BaseChannel::FlushRtcpMessages_n() {
- // Flush all remaining RTCP messages. This should only be called in
- // destructor.
- rtc::MessageList rtcp_messages;
- network_thread_->Clear(this, MSG_SEND_RTCP_PACKET, &rtcp_messages);
- for (const auto& message : rtcp_messages) {
- network_thread_->Send(RTC_FROM_HERE, this, MSG_SEND_RTCP_PACKET,
- message.pdata);
- }
-}
-
void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) {
RTC_DCHECK_RUN_ON(network_thread());
media_channel()->OnPacketSent(sent_packet);
}
-void BaseChannel::SetNegotiatedHeaderExtensions_w(
- const RtpHeaderExtensions& extensions) {
- TRACE_EVENT0("webrtc", __func__);
- webrtc::MutexLock lock(&negotiated_header_extensions_lock_);
- negotiated_header_extensions_ = extensions;
-}
-
-RtpHeaderExtensions BaseChannel::GetNegotiatedRtpHeaderExtensions() const {
- RTC_DCHECK_RUN_ON(signaling_thread());
- webrtc::MutexLock lock(&negotiated_header_extensions_lock_);
- return negotiated_header_extensions_;
-}
-
VoiceChannel::VoiceChannel(rtc::Thread* worker_thread,
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
@@ -904,26 +826,19 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
RTC_DCHECK_RUN_ON(worker_thread());
RTC_LOG(LS_INFO) << "Setting local voice description for " << ToString();
- RTC_DCHECK(content);
- if (!content) {
- SafeSetError("Can't find audio content in local description.", error_desc);
- return false;
- }
-
- const AudioContentDescription* audio = content->as_audio();
-
- if (type == SdpType::kAnswer)
- SetNegotiatedHeaderExtensions_w(audio->rtp_header_extensions());
-
RtpHeaderExtensions rtp_header_extensions =
- GetFilteredRtpHeaderExtensions(audio->rtp_header_extensions());
+ GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions());
+ // TODO(tommi): There's a hop to the network thread here.
+ // Some of the code below is also network-thread related.
UpdateRtpHeaderExtensionMap(rtp_header_extensions);
- media_channel()->SetExtmapAllowMixed(audio->extmap_allow_mixed());
+ media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed());
AudioRecvParameters recv_params = last_recv_params_;
RtpParametersFromMediaDescription(
- audio, rtp_header_extensions,
- webrtc::RtpTransceiverDirectionHasRecv(audio->direction()), &recv_params);
+ content->as_audio(), rtp_header_extensions,
+ webrtc::RtpTransceiverDirectionHasRecv(content->direction()),
+ &recv_params);
+
if (!media_channel()->SetRecvParameters(recv_params)) {
SafeSetError(
"Failed to set local audio description recv parameters for m-section "
@@ -933,8 +848,8 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
return false;
}
- if (webrtc::RtpTransceiverDirectionHasRecv(audio->direction())) {
- for (const AudioCodec& codec : audio->codecs()) {
+ if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) {
+ for (const AudioCodec& codec : content->as_audio()->codecs()) {
MaybeAddHandledPayloadType(codec.id);
}
// Need to re-register the sink to update the handled payload.
@@ -950,7 +865,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
// only give it to the media channel once we have a remote
// description too (without a remote description, we won't be able
// to send them anyway).
- if (!UpdateLocalStreams_w(audio->streams(), type, error_desc)) {
+ if (!UpdateLocalStreams_w(content->as_audio()->streams(), type, error_desc)) {
SafeSetError(
"Failed to set local audio description streams for m-section with "
"mid='" +
@@ -971,19 +886,10 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
RTC_DCHECK_RUN_ON(worker_thread());
RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString();
- RTC_DCHECK(content);
- if (!content) {
- SafeSetError("Can't find audio content in remote description.", error_desc);
- return false;
- }
-
const AudioContentDescription* audio = content->as_audio();
- if (type == SdpType::kAnswer)
- SetNegotiatedHeaderExtensions_w(audio->rtp_header_extensions());
-
RtpHeaderExtensions rtp_header_extensions =
- GetFilteredRtpHeaderExtensions(audio->rtp_header_extensions());
+ GetDeduplicatedRtpHeaderExtensions(audio->rtp_header_extensions());
AudioSendParameters send_params = last_send_params_;
RtpSendParametersFromMediaDescription(
@@ -1070,9 +976,9 @@ void VideoChannel::UpdateMediaSendRecvState_w() {
}
void VideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) {
+ RTC_DCHECK_RUN_ON(worker_thread());
VideoMediaChannel* mc = media_channel();
- InvokeOnWorker<void>(RTC_FROM_HERE,
- [mc, bwe_info] { mc->FillBitrateInfo(bwe_info); });
+ mc->FillBitrateInfo(bwe_info);
}
bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
@@ -1082,26 +988,17 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
RTC_DCHECK_RUN_ON(worker_thread());
RTC_LOG(LS_INFO) << "Setting local video description for " << ToString();
- RTC_DCHECK(content);
- if (!content) {
- SafeSetError("Can't find video content in local description.", error_desc);
- return false;
- }
-
- const VideoContentDescription* video = content->as_video();
-
- if (type == SdpType::kAnswer)
- SetNegotiatedHeaderExtensions_w(video->rtp_header_extensions());
-
RtpHeaderExtensions rtp_header_extensions =
- GetFilteredRtpHeaderExtensions(video->rtp_header_extensions());
+ GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions());
UpdateRtpHeaderExtensionMap(rtp_header_extensions);
- media_channel()->SetExtmapAllowMixed(video->extmap_allow_mixed());
+ media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed());
VideoRecvParameters recv_params = last_recv_params_;
+
RtpParametersFromMediaDescription(
- video, rtp_header_extensions,
- webrtc::RtpTransceiverDirectionHasRecv(video->direction()), &recv_params);
+ content->as_video(), rtp_header_extensions,
+ webrtc::RtpTransceiverDirectionHasRecv(content->direction()),
+ &recv_params);
VideoSendParameters send_params = last_send_params_;
@@ -1134,8 +1031,8 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
return false;
}
- if (webrtc::RtpTransceiverDirectionHasRecv(video->direction())) {
- for (const VideoCodec& codec : video->codecs()) {
+ if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) {
+ for (const VideoCodec& codec : content->as_video()->codecs()) {
MaybeAddHandledPayloadType(codec.id);
}
// Need to re-register the sink to update the handled payload.
@@ -1161,7 +1058,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
// only give it to the media channel once we have a remote
// description too (without a remote description, we won't be able
// to send them anyway).
- if (!UpdateLocalStreams_w(video->streams(), type, error_desc)) {
+ if (!UpdateLocalStreams_w(content->as_video()->streams(), type, error_desc)) {
SafeSetError(
"Failed to set local video description streams for m-section with "
"mid='" +
@@ -1182,19 +1079,10 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
RTC_DCHECK_RUN_ON(worker_thread());
RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString();
- RTC_DCHECK(content);
- if (!content) {
- SafeSetError("Can't find video content in remote description.", error_desc);
- return false;
- }
-
const VideoContentDescription* video = content->as_video();
- if (type == SdpType::kAnswer)
- SetNegotiatedHeaderExtensions_w(video->rtp_header_extensions());
-
RtpHeaderExtensions rtp_header_extensions =
- GetFilteredRtpHeaderExtensions(video->rtp_header_extensions());
+ GetDeduplicatedRtpHeaderExtensions(video->rtp_header_extensions());
VideoSendParameters send_params = last_send_params_;
RtpSendParametersFromMediaDescription(
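
Note: GetDeduplicatedRtpHeaderExtensions() above collapses offers that list the same extension URI twice (encrypted and unencrypted) into a single entry, keeping or dropping the encrypted variant based on the crypto options. A sketch of how a caller might use the same RtpExtension helper directly (the Deduplicate wrapper itself is hypothetical):

#include <vector>

#include "api/rtp_parameters.h"

// Returns one entry per extension URI. When encrypted header extensions are
// enabled, the encrypted variant wins; otherwise encrypted entries are
// discarded, mirroring the crypto_options_ check in channel.cc above.
std::vector<webrtc::RtpExtension> Deduplicate(
    const std::vector<webrtc::RtpExtension>& extensions,
    bool encrypted_extensions_enabled) {
  return webrtc::RtpExtension::DeduplicateHeaderExtensions(
      extensions, encrypted_extensions_enabled
                      ? webrtc::RtpExtension::kPreferEncryptedExtension
                      : webrtc::RtpExtension::kDiscardEncryptedExtension);
}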
diff --git a/pc/channel.h b/pc/channel.h
index 7dd3f8baf6..d1dbe2cd6c 100644
--- a/pc/channel.h
+++ b/pc/channel.h
@@ -54,12 +54,10 @@
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/location.h"
-#include "rtc_base/message_handler.h"
#include "rtc_base/network.h"
#include "rtc_base/network/sent_packet.h"
#include "rtc_base/network_route.h"
#include "rtc_base/socket.h"
-#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
@@ -94,8 +92,10 @@ struct CryptoParams;
// NetworkInterface.
class BaseChannel : public ChannelInterface,
- public rtc::MessageHandlerAutoCleanup,
+ // TODO(tommi): Remove has_slots inheritance.
public sigslot::has_slots<>,
+ // TODO(tommi): Consider implementing these interfaces
+ // via composition.
public MediaChannel::NetworkInterface,
public webrtc::RtpPacketSinkInterface {
public:
@@ -131,7 +131,6 @@ class BaseChannel : public ChannelInterface,
// TODO(tommi): Delete this variable.
return transport_name_;
}
- bool enabled() const override { return enabled_; }
// This function returns true if using SRTP (DTLS-based keying or SDES).
bool srtp_active() const {
@@ -167,7 +166,7 @@ class BaseChannel : public ChannelInterface,
// actually belong to a new channel. See: crbug.com/webrtc/11477
bool SetPayloadTypeDemuxingEnabled(bool enabled) override;
- bool Enable(bool enable) override;
+ void Enable(bool enable) override;
const std::vector<StreamParams>& local_streams() const override {
return local_streams_;
@@ -177,15 +176,13 @@ class BaseChannel : public ChannelInterface,
}
// Used for latency measurements.
- sigslot::signal1<ChannelInterface*>& SignalFirstPacketReceived() override;
+ void SetFirstPacketReceivedCallback(std::function<void()> callback) override;
// From RtpTransport - public for testing only
void OnTransportReadyToSend(bool ready);
// Only public for unit tests. Otherwise, consider protected.
int SetOption(SocketType type, rtc::Socket::Option o, int val) override;
- int SetOption_n(SocketType type, rtc::Socket::Option o, int val)
- RTC_RUN_ON(network_thread());
// RtpPacketSinkInterface overrides.
void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override;
@@ -221,8 +218,6 @@ class BaseChannel : public ChannelInterface,
bool IsReadyToSendMedia_w() const RTC_RUN_ON(worker_thread());
rtc::Thread* signaling_thread() const { return signaling_thread_; }
- void FlushRtcpMessages_n() RTC_RUN_ON(network_thread());
-
// NetworkInterface implementation, called by MediaEngine
bool SendPacket(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options) override;
@@ -277,21 +272,12 @@ class BaseChannel : public ChannelInterface,
webrtc::SdpType type,
std::string* error_desc)
RTC_RUN_ON(worker_thread()) = 0;
- // Return a list of RTP header extensions with the non-encrypted extensions
- // removed depending on the current crypto_options_ and only if both the
- // non-encrypted and encrypted extension is present for the same URI.
- RtpHeaderExtensions GetFilteredRtpHeaderExtensions(
- const RtpHeaderExtensions& extensions);
-
- // From MessageHandler
- void OnMessage(rtc::Message* pmsg) override;
- // Helper function template for invoking methods on the worker thread.
- template <class T>
- T InvokeOnWorker(const rtc::Location& posted_from,
- rtc::FunctionView<T()> functor) {
- return worker_thread_->Invoke<T>(posted_from, functor);
- }
+ // Returns a list of RTP header extensions where any extension URI is unique.
+ // Encrypted extensions will be either preferred or discarded, depending on
+ // the current crypto_options_.
+ RtpHeaderExtensions GetDeduplicatedRtpHeaderExtensions(
+ const RtpHeaderExtensions& extensions);
// Add |payload_type| to |demuxer_criteria_| if payload type demuxing is
// enabled.
@@ -307,12 +293,6 @@ class BaseChannel : public ChannelInterface,
// Return description of media channel to facilitate logging
std::string ToString() const;
- void SetNegotiatedHeaderExtensions_w(const RtpHeaderExtensions& extensions)
- RTC_RUN_ON(worker_thread());
-
- // ChannelInterface overrides
- RtpHeaderExtensions GetNegotiatedRtpHeaderExtensions() const override;
-
private:
bool ConnectToRtpTransport() RTC_RUN_ON(network_thread());
void DisconnectFromRtpTransport() RTC_RUN_ON(network_thread());
@@ -322,12 +302,11 @@ class BaseChannel : public ChannelInterface,
rtc::Thread* const network_thread_;
rtc::Thread* const signaling_thread_;
rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> alive_;
- sigslot::signal1<ChannelInterface*> SignalFirstPacketReceived_
- RTC_GUARDED_BY(signaling_thread_);
const std::string content_name_;
- bool has_received_packet_ = false;
+ std::function<void()> on_first_packet_received_
+ RTC_GUARDED_BY(network_thread());
// Won't be set when using raw packet transports. SDP-specific thing.
// TODO(bugs.webrtc.org/12230): Written on network thread, read on
@@ -348,6 +327,24 @@ class BaseChannel : public ChannelInterface,
bool was_ever_writable_n_ RTC_GUARDED_BY(network_thread()) = false;
bool was_ever_writable_ RTC_GUARDED_BY(worker_thread()) = false;
const bool srtp_required_ = true;
+
+ // TODO(tommi): This field shouldn't be necessary. It's a copy of
+ // PeerConnection::GetCryptoOptions(), which is const state. It's also only
+ // used to filter header extensions when calling
+ // `rtp_transport_->UpdateRtpHeaderExtensionMap()` when the local/remote
+ // content description is updated. Since the transport is actually owned
+ // by the transport controller that also gets updated whenever the content
+ // description changes, it seems we have two paths into the transports, along
+ // with several thread hops via various classes (such as the Channel classes)
+ // that only serve as additional layers and store duplicate state. The Jsep*
+ // family of classes already apply session description updates on the network
+ // thread every time it changes.
+ // For the Channel classes, we should be able to get rid of:
+ // * crypto_options_ (and fewer construction parameters)
+ // * UpdateRtpHeaderExtensionMap
+ // * GetDeduplicatedRtpHeaderExtensions
+ // * The blocking thread hop to the network thread every time the
+ // local/remote content is updated.
const webrtc::CryptoOptions crypto_options_;
// MediaChannel related members that should be accessed from the worker
@@ -356,7 +353,8 @@ class BaseChannel : public ChannelInterface,
// Currently the |enabled_| flag is accessed from the signaling thread as
// well, but it can be changed only when signaling thread does a synchronous
// call to the worker thread, so it should be safe.
- bool enabled_ = false;
+ bool enabled_ RTC_GUARDED_BY(worker_thread()) = false;
+ bool enabled_s_ RTC_GUARDED_BY(signaling_thread()) = false;
bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true;
std::vector<StreamParams> local_streams_ RTC_GUARDED_BY(worker_thread());
std::vector<StreamParams> remote_streams_ RTC_GUARDED_BY(worker_thread());
@@ -381,14 +379,6 @@ class BaseChannel : public ChannelInterface,
// like in Simulcast.
// This object is not owned by the channel so it must outlive it.
rtc::UniqueRandomIdGenerator* const ssrc_generator_;
-
- // |negotiated_header_extensions_| is read on the signaling thread, but
- // written on the worker thread while being sync-invoked from the signal
- // thread in SdpOfferAnswerHandler::PushdownMediaDescription(). Hence the lock
- // isn't strictly needed, but it's anyway placed here for future safeness.
- mutable webrtc::Mutex negotiated_header_extensions_lock_;
- RtpHeaderExtensions negotiated_header_extensions_
- RTC_GUARDED_BY(negotiated_header_extensions_lock_);
};
// VoiceChannel is a specialization that adds support for early media, DTMF,
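
Note: the new on_first_packet_received_ member above replaces the sigslot signal with a one-shot std::function that lives entirely on the network thread: it may only be installed while unset (or cleared with nullptr), and OnRtpPacket() in channel.cc fires it once and then drops it. An illustrative stand-alone version of that holder (not part of the patch):

#include <functional>
#include <utility>

#include "api/sequence_checker.h"
#include "rtc_base/checks.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"

class FirstPacketNotifier {
 public:
  explicit FirstPacketNotifier(rtc::Thread* network) : network_(network) {}

  void SetFirstPacketReceivedCallback(std::function<void()> callback) {
    RTC_DCHECK_RUN_ON(network_);
    // Either install a callback while none is set, or clear the current one.
    RTC_DCHECK(!callback_ || !callback);
    callback_ = std::move(callback);
  }

  void OnRtpPacket() {
    RTC_DCHECK_RUN_ON(network_);
    if (callback_) {
      callback_();
      callback_ = nullptr;  // One-shot: later packets do not re-notify.
    }
  }

 private:
  rtc::Thread* const network_;
  std::function<void()> callback_ RTC_GUARDED_BY(network_);
};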
diff --git a/pc/channel_interface.h b/pc/channel_interface.h
index d3da37ac23..3b71f0f8b5 100644
--- a/pc/channel_interface.h
+++ b/pc/channel_interface.h
@@ -37,13 +37,12 @@ class ChannelInterface {
virtual const std::string& content_name() const = 0;
- virtual bool enabled() const = 0;
-
// Enables or disables this channel
- virtual bool Enable(bool enable) = 0;
+ virtual void Enable(bool enable) = 0;
// Used for latency measurements.
- virtual sigslot::signal1<ChannelInterface*>& SignalFirstPacketReceived() = 0;
+ virtual void SetFirstPacketReceivedCallback(
+ std::function<void()> callback) = 0;
// Channel control
virtual bool SetLocalContent(const MediaContentDescription* content,
@@ -65,9 +64,6 @@ class ChannelInterface {
// * A DtlsSrtpTransport for DTLS-SRTP.
virtual bool SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) = 0;
- // Returns the last negotiated header extensions.
- virtual RtpHeaderExtensions GetNegotiatedRtpHeaderExtensions() const = 0;
-
protected:
virtual ~ChannelInterface() = default;
};
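
Note: removing enabled() from ChannelInterface and making Enable() void works because callers on the signaling thread no longer read worker-thread state: BaseChannel keeps a signaling-thread mirror (enabled_s_) and forwards the change with a posted task, as seen in channel.cc above. A stripped-down sketch of that mirrored-flag idea (illustrative class; the real code additionally guards the posted task with the channel's alive_ safety flag):

#include "api/sequence_checker.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"

class MirroredEnableFlag {
 public:
  MirroredEnableFlag(rtc::Thread* signaling, rtc::Thread* worker)
      : signaling_(signaling), worker_(worker) {}

  void Enable(bool enable) {
    RTC_DCHECK_RUN_ON(signaling_);
    if (enable == enabled_s_)
      return;  // Early-out on the signaling thread; no blocking Invoke().
    enabled_s_ = enable;
    worker_->PostTask(webrtc::ToQueuedTask([this, enable]() {
      RTC_DCHECK_RUN_ON(worker_);
      enabled_ = enable;  // Worker-side state catches up asynchronously.
    }));
  }

  bool enabled_on_signaling_thread() const {
    RTC_DCHECK_RUN_ON(signaling_);
    return enabled_s_;
  }

 private:
  rtc::Thread* const signaling_;
  rtc::Thread* const worker_;
  bool enabled_s_ RTC_GUARDED_BY(signaling_) = false;
  bool enabled_ RTC_GUARDED_BY(worker_) = false;
};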
diff --git a/pc/channel_manager.h b/pc/channel_manager.h
index 69ccf6a585..43fa27935f 100644
--- a/pc/channel_manager.h
+++ b/pc/channel_manager.h
@@ -67,7 +67,6 @@ class ChannelManager final {
void GetSupportedAudioReceiveCodecs(std::vector<AudioCodec>* codecs) const;
void GetSupportedVideoSendCodecs(std::vector<VideoCodec>* codecs) const;
void GetSupportedVideoReceiveCodecs(std::vector<VideoCodec>* codecs) const;
- void GetSupportedDataCodecs(std::vector<DataCodec>* codecs) const;
RtpHeaderExtensions GetDefaultEnabledAudioRtpHeaderExtensions() const;
std::vector<webrtc::RtpHeaderExtensionCapability>
GetSupportedAudioRtpHeaderExtensions() const;
diff --git a/pc/channel_unittest.cc b/pc/channel_unittest.cc
index 4322219c8d..581f6de7ac 100644
--- a/pc/channel_unittest.cc
+++ b/pc/channel_unittest.cc
@@ -35,6 +35,8 @@
#include "rtc_base/checks.h"
#include "rtc_base/rtc_certificate.h"
#include "rtc_base/ssl_identity.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -52,7 +54,6 @@ const cricket::AudioCodec kPcmaCodec(8, "PCMA", 64000, 8000, 1);
const cricket::AudioCodec kIsacCodec(103, "ISAC", 40000, 16000, 1);
const cricket::VideoCodec kH264Codec(97, "H264");
const cricket::VideoCodec kH264SvcCodec(99, "H264-SVC");
-const cricket::DataCodec kGoogleDataCodec(101, "google-data");
const uint32_t kSsrc1 = 0x1111;
const uint32_t kSsrc2 = 0x2222;
const uint32_t kSsrc3 = 0x3333;
@@ -93,7 +94,7 @@ class VideoTraits : public Traits<cricket::VideoChannel,
cricket::VideoMediaInfo,
cricket::VideoOptions> {};
-// Base class for Voice/Video/RtpDataChannel tests
+// Base class for Voice/Video tests
template <class T>
class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
public:
@@ -120,19 +121,30 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
network_thread_keeper_->SetName("Network", nullptr);
network_thread_ = network_thread_keeper_.get();
}
+ RTC_DCHECK(network_thread_);
+ }
+
+ ~ChannelTest() {
+ if (network_thread_) {
+ network_thread_->Invoke<void>(
+ RTC_FROM_HERE, [this]() { network_thread_safety_->SetNotAlive(); });
+ }
}
void CreateChannels(int flags1, int flags2) {
CreateChannels(std::make_unique<typename T::MediaChannel>(
- nullptr, typename T::Options()),
+ nullptr, typename T::Options(), network_thread_),
std::make_unique<typename T::MediaChannel>(
- nullptr, typename T::Options()),
+ nullptr, typename T::Options(), network_thread_),
flags1, flags2);
}
void CreateChannels(std::unique_ptr<typename T::MediaChannel> ch1,
std::unique_ptr<typename T::MediaChannel> ch2,
int flags1,
int flags2) {
+ RTC_DCHECK(!channel1_);
+ RTC_DCHECK(!channel2_);
+
// Network thread is started in CreateChannels, to allow the test to
// configure a fake clock before any threads are spawned and attempt to
// access the time.
@@ -144,8 +156,6 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
// channels.
RTC_DCHECK_EQ(flags1 & RAW_PACKET_TRANSPORT, flags2 & RAW_PACKET_TRANSPORT);
rtc::Thread* worker_thread = rtc::Thread::Current();
- media_channel1_ = ch1.get();
- media_channel2_ = ch2.get();
rtc::PacketTransportInternal* rtp1 = nullptr;
rtc::PacketTransportInternal* rtcp1 = nullptr;
rtc::PacketTransportInternal* rtp2 = nullptr;
@@ -337,6 +347,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
SdpType::kOffer, NULL);
if (result) {
channel1_->Enable(true);
+ FlushCurrentThread();
result = channel2_->SetRemoteContent(&remote_media_content1_,
SdpType::kOffer, NULL);
if (result) {
@@ -350,6 +361,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
bool SendAccept() {
channel2_->Enable(true);
+ FlushCurrentThread();
return channel1_->SetRemoteContent(&remote_media_content2_,
SdpType::kAnswer, NULL);
}
@@ -386,55 +398,52 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
return result;
}
- bool Terminate() {
- channel1_.reset();
- channel2_.reset();
- fake_rtp_dtls_transport1_.reset();
- fake_rtcp_dtls_transport1_.reset();
- fake_rtp_dtls_transport2_.reset();
- fake_rtcp_dtls_transport2_.reset();
- fake_rtp_packet_transport1_.reset();
- fake_rtcp_packet_transport1_.reset();
- fake_rtp_packet_transport2_.reset();
- fake_rtcp_packet_transport2_.reset();
- if (network_thread_keeper_) {
- network_thread_keeper_.reset();
- }
- return true;
+ void SendRtp(typename T::MediaChannel* media_channel, rtc::Buffer data) {
+ network_thread_->PostTask(webrtc::ToQueuedTask(
+ network_thread_safety_, [media_channel, data = std::move(data)]() {
+ media_channel->SendRtp(data.data(), data.size(),
+ rtc::PacketOptions());
+ }));
}
void SendRtp1() {
- media_channel1_->SendRtp(rtp_packet_.data(), rtp_packet_.size(),
- rtc::PacketOptions());
+ SendRtp1(rtc::Buffer(rtp_packet_.data(), rtp_packet_.size()));
+ }
+
+ void SendRtp1(rtc::Buffer data) {
+ SendRtp(media_channel1(), std::move(data));
}
+
void SendRtp2() {
- media_channel2_->SendRtp(rtp_packet_.data(), rtp_packet_.size(),
- rtc::PacketOptions());
+ SendRtp2(rtc::Buffer(rtp_packet_.data(), rtp_packet_.size()));
+ }
+
+ void SendRtp2(rtc::Buffer data) {
+ SendRtp(media_channel2(), std::move(data));
}
+
// Methods to send custom data.
void SendCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) {
- rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
- media_channel1_->SendRtp(data.data(), data.size(), rtc::PacketOptions());
+ SendRtp1(CreateRtpData(ssrc, sequence_number, pl_type));
}
void SendCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) {
- rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
- media_channel2_->SendRtp(data.data(), data.size(), rtc::PacketOptions());
+ SendRtp2(CreateRtpData(ssrc, sequence_number, pl_type));
}
bool CheckRtp1() {
- return media_channel1_->CheckRtp(rtp_packet_.data(), rtp_packet_.size());
+ return media_channel1()->CheckRtp(rtp_packet_.data(), rtp_packet_.size());
}
bool CheckRtp2() {
- return media_channel2_->CheckRtp(rtp_packet_.data(), rtp_packet_.size());
+ return media_channel2()->CheckRtp(rtp_packet_.data(), rtp_packet_.size());
}
// Methods to check custom data.
bool CheckCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) {
rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
- return media_channel1_->CheckRtp(data.data(), data.size());
+ return media_channel1()->CheckRtp(data.data(), data.size());
}
bool CheckCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) {
rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
- return media_channel2_->CheckRtp(data.data(), data.size());
+ return media_channel2()->CheckRtp(data.data(), data.size());
}
rtc::Buffer CreateRtpData(uint32_t ssrc, int sequence_number, int pl_type) {
rtc::Buffer data(rtp_packet_.data(), rtp_packet_.size());
@@ -447,8 +456,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
return data;
}
- bool CheckNoRtp1() { return media_channel1_->CheckNoRtp(); }
- bool CheckNoRtp2() { return media_channel2_->CheckNoRtp(); }
+ bool CheckNoRtp1() { return media_channel1()->CheckNoRtp(); }
+ bool CheckNoRtp2() { return media_channel2()->CheckNoRtp(); }
void CreateContent(int flags,
const cricket::AudioCodec& audio_codec,
@@ -528,13 +537,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
void TestInit() {
CreateChannels(0, 0);
EXPECT_FALSE(IsSrtpActive(channel1_));
- EXPECT_FALSE(media_channel1_->sending());
+ EXPECT_FALSE(media_channel1()->sending());
if (verify_playout_) {
- EXPECT_FALSE(media_channel1_->playout());
+ EXPECT_FALSE(media_channel1()->playout());
}
- EXPECT_TRUE(media_channel1_->codecs().empty());
- EXPECT_TRUE(media_channel1_->recv_streams().empty());
- EXPECT_TRUE(media_channel1_->rtp_packets().empty());
+ EXPECT_TRUE(media_channel1()->codecs().empty());
+ EXPECT_TRUE(media_channel1()->recv_streams().empty());
+ EXPECT_TRUE(media_channel1()->rtp_packets().empty());
}
// Test that SetLocalContent and SetRemoteContent properly configure
@@ -544,11 +553,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
typename T::Content content;
CreateContent(0, kPcmuCodec, kH264Codec, &content);
EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, NULL));
- EXPECT_EQ(0U, media_channel1_->codecs().size());
+ EXPECT_EQ(0U, media_channel1()->codecs().size());
EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, NULL));
- ASSERT_EQ(1U, media_channel1_->codecs().size());
+ ASSERT_EQ(1U, media_channel1()->codecs().size());
EXPECT_TRUE(
- CodecMatches(content.codecs()[0], media_channel1_->codecs()[0]));
+ CodecMatches(content.codecs()[0], media_channel1()->codecs()[0]));
}
// Test that SetLocalContent and SetRemoteContent properly configure
@@ -565,7 +574,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, NULL));
content.set_extmap_allow_mixed_enum(answer_enum);
EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, NULL));
- EXPECT_EQ(answer, media_channel1_->ExtmapAllowMixed());
+ EXPECT_EQ(answer, media_channel1()->ExtmapAllowMixed());
}
void TestSetContentsExtmapAllowMixedCallee(bool offer, bool answer) {
// For a callee, SetRemoteContent() is called first with an offer and next
@@ -579,7 +588,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kOffer, NULL));
content.set_extmap_allow_mixed_enum(answer_enum);
EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kAnswer, NULL));
- EXPECT_EQ(answer, media_channel1_->ExtmapAllowMixed());
+ EXPECT_EQ(answer, media_channel1()->ExtmapAllowMixed());
}
// Test that SetLocalContent and SetRemoteContent properly deals
@@ -589,11 +598,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
typename T::Content content;
EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, NULL));
CreateContent(0, kPcmuCodec, kH264Codec, &content);
- EXPECT_EQ(0U, media_channel1_->codecs().size());
+ EXPECT_EQ(0U, media_channel1()->codecs().size());
EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, NULL));
- ASSERT_EQ(1U, media_channel1_->codecs().size());
+ ASSERT_EQ(1U, media_channel1()->codecs().size());
EXPECT_TRUE(
- CodecMatches(content.codecs()[0], media_channel1_->codecs()[0]));
+ CodecMatches(content.codecs()[0], media_channel1()->codecs()[0]));
}
// Test that SetLocalContent and SetRemoteContent properly set RTCP
@@ -634,21 +643,21 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateContent(0, kPcmuCodec, kH264Codec, &content1);
content1.AddStream(stream1);
EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, NULL));
- EXPECT_TRUE(channel1_->Enable(true));
- EXPECT_EQ(1u, media_channel1_->send_streams().size());
+ channel1_->Enable(true);
+ EXPECT_EQ(1u, media_channel1()->send_streams().size());
EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, NULL));
- EXPECT_EQ(1u, media_channel2_->recv_streams().size());
+ EXPECT_EQ(1u, media_channel2()->recv_streams().size());
ConnectFakeTransports();
// Channel 2 does not send anything.
typename T::Content content2;
CreateContent(0, kPcmuCodec, kH264Codec, &content2);
EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, NULL));
- EXPECT_EQ(0u, media_channel1_->recv_streams().size());
+ EXPECT_EQ(0u, media_channel1()->recv_streams().size());
EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kAnswer, NULL));
- EXPECT_TRUE(channel2_->Enable(true));
- EXPECT_EQ(0u, media_channel2_->send_streams().size());
+ channel2_->Enable(true);
+ EXPECT_EQ(0u, media_channel2()->send_streams().size());
SendCustomRtp1(kSsrc1, 0);
WaitForThreads();
@@ -659,21 +668,21 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateContent(0, kPcmuCodec, kH264Codec, &content3);
content3.AddStream(stream2);
EXPECT_TRUE(channel2_->SetLocalContent(&content3, SdpType::kOffer, NULL));
- ASSERT_EQ(1u, media_channel2_->send_streams().size());
- EXPECT_EQ(stream2, media_channel2_->send_streams()[0]);
+ ASSERT_EQ(1u, media_channel2()->send_streams().size());
+ EXPECT_EQ(stream2, media_channel2()->send_streams()[0]);
EXPECT_TRUE(channel1_->SetRemoteContent(&content3, SdpType::kOffer, NULL));
- ASSERT_EQ(1u, media_channel1_->recv_streams().size());
- EXPECT_EQ(stream2, media_channel1_->recv_streams()[0]);
+ ASSERT_EQ(1u, media_channel1()->recv_streams().size());
+ EXPECT_EQ(stream2, media_channel1()->recv_streams()[0]);
// Channel 1 replies but stops sending stream1.
typename T::Content content4;
CreateContent(0, kPcmuCodec, kH264Codec, &content4);
EXPECT_TRUE(channel1_->SetLocalContent(&content4, SdpType::kAnswer, NULL));
- EXPECT_EQ(0u, media_channel1_->send_streams().size());
+ EXPECT_EQ(0u, media_channel1()->send_streams().size());
EXPECT_TRUE(channel2_->SetRemoteContent(&content4, SdpType::kAnswer, NULL));
- EXPECT_EQ(0u, media_channel2_->recv_streams().size());
+ EXPECT_EQ(0u, media_channel2()->recv_streams().size());
SendCustomRtp2(kSsrc2, 0);
WaitForThreads();
@@ -684,56 +693,58 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
void TestPlayoutAndSendingStates() {
CreateChannels(0, 0);
if (verify_playout_) {
- EXPECT_FALSE(media_channel1_->playout());
+ EXPECT_FALSE(media_channel1()->playout());
}
- EXPECT_FALSE(media_channel1_->sending());
+ EXPECT_FALSE(media_channel1()->sending());
if (verify_playout_) {
- EXPECT_FALSE(media_channel2_->playout());
+ EXPECT_FALSE(media_channel2()->playout());
}
- EXPECT_FALSE(media_channel2_->sending());
- EXPECT_TRUE(channel1_->Enable(true));
+ EXPECT_FALSE(media_channel2()->sending());
+ channel1_->Enable(true);
+ FlushCurrentThread();
if (verify_playout_) {
- EXPECT_FALSE(media_channel1_->playout());
+ EXPECT_FALSE(media_channel1()->playout());
}
- EXPECT_FALSE(media_channel1_->sending());
+ EXPECT_FALSE(media_channel1()->sending());
EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_,
SdpType::kOffer, NULL));
if (verify_playout_) {
- EXPECT_TRUE(media_channel1_->playout());
+ EXPECT_TRUE(media_channel1()->playout());
}
- EXPECT_FALSE(media_channel1_->sending());
+ EXPECT_FALSE(media_channel1()->sending());
EXPECT_TRUE(channel2_->SetRemoteContent(&local_media_content1_,
SdpType::kOffer, NULL));
if (verify_playout_) {
- EXPECT_FALSE(media_channel2_->playout());
+ EXPECT_FALSE(media_channel2()->playout());
}
- EXPECT_FALSE(media_channel2_->sending());
+ EXPECT_FALSE(media_channel2()->sending());
EXPECT_TRUE(channel2_->SetLocalContent(&local_media_content2_,
SdpType::kAnswer, NULL));
if (verify_playout_) {
- EXPECT_FALSE(media_channel2_->playout());
+ EXPECT_FALSE(media_channel2()->playout());
}
- EXPECT_FALSE(media_channel2_->sending());
+ EXPECT_FALSE(media_channel2()->sending());
ConnectFakeTransports();
if (verify_playout_) {
- EXPECT_TRUE(media_channel1_->playout());
+ EXPECT_TRUE(media_channel1()->playout());
}
- EXPECT_FALSE(media_channel1_->sending());
+ EXPECT_FALSE(media_channel1()->sending());
if (verify_playout_) {
- EXPECT_FALSE(media_channel2_->playout());
+ EXPECT_FALSE(media_channel2()->playout());
}
- EXPECT_FALSE(media_channel2_->sending());
- EXPECT_TRUE(channel2_->Enable(true));
+ EXPECT_FALSE(media_channel2()->sending());
+ channel2_->Enable(true);
+ FlushCurrentThread();
if (verify_playout_) {
- EXPECT_TRUE(media_channel2_->playout());
+ EXPECT_TRUE(media_channel2()->playout());
}
- EXPECT_TRUE(media_channel2_->sending());
+ EXPECT_TRUE(media_channel2()->sending());
EXPECT_TRUE(channel1_->SetRemoteContent(&local_media_content2_,
SdpType::kAnswer, NULL));
if (verify_playout_) {
- EXPECT_TRUE(media_channel1_->playout());
+ EXPECT_TRUE(media_channel1()->playout());
}
- EXPECT_TRUE(media_channel1_->sending());
+ EXPECT_TRUE(media_channel1()->sending());
}
// Test that changing the MediaContentDirection in the local and remote
@@ -747,16 +758,17 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
// Set |content2| to be InActive.
content2.set_direction(RtpTransceiverDirection::kInactive);
- EXPECT_TRUE(channel1_->Enable(true));
- EXPECT_TRUE(channel2_->Enable(true));
+ channel1_->Enable(true);
+ channel2_->Enable(true);
+ FlushCurrentThread();
if (verify_playout_) {
- EXPECT_FALSE(media_channel1_->playout());
+ EXPECT_FALSE(media_channel1()->playout());
}
- EXPECT_FALSE(media_channel1_->sending());
+ EXPECT_FALSE(media_channel1()->sending());
if (verify_playout_) {
- EXPECT_FALSE(media_channel2_->playout());
+ EXPECT_FALSE(media_channel2()->playout());
}
- EXPECT_FALSE(media_channel2_->sending());
+ EXPECT_FALSE(media_channel2()->sending());
EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, NULL));
EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, NULL));
@@ -767,13 +779,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
ConnectFakeTransports();
if (verify_playout_) {
- EXPECT_TRUE(media_channel1_->playout());
+ EXPECT_TRUE(media_channel1()->playout());
}
- EXPECT_FALSE(media_channel1_->sending()); // remote InActive
+ EXPECT_FALSE(media_channel1()->sending()); // remote InActive
if (verify_playout_) {
- EXPECT_FALSE(media_channel2_->playout()); // local InActive
+ EXPECT_FALSE(media_channel2()->playout()); // local InActive
}
- EXPECT_FALSE(media_channel2_->sending()); // local InActive
+ EXPECT_FALSE(media_channel2()->sending()); // local InActive
// Update |content2| to be RecvOnly.
content2.set_direction(RtpTransceiverDirection::kRecvOnly);
@@ -783,13 +795,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
channel1_->SetRemoteContent(&content2, SdpType::kPrAnswer, NULL));
if (verify_playout_) {
- EXPECT_TRUE(media_channel1_->playout());
+ EXPECT_TRUE(media_channel1()->playout());
}
- EXPECT_TRUE(media_channel1_->sending());
+ EXPECT_TRUE(media_channel1()->sending());
if (verify_playout_) {
- EXPECT_TRUE(media_channel2_->playout()); // local RecvOnly
+ EXPECT_TRUE(media_channel2()->playout()); // local RecvOnly
}
- EXPECT_FALSE(media_channel2_->sending()); // local RecvOnly
+ EXPECT_FALSE(media_channel2()->sending()); // local RecvOnly
// Update |content2| to be SendRecv.
content2.set_direction(RtpTransceiverDirection::kSendRecv);
@@ -797,13 +809,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, NULL));
if (verify_playout_) {
- EXPECT_TRUE(media_channel1_->playout());
+ EXPECT_TRUE(media_channel1()->playout());
}
- EXPECT_TRUE(media_channel1_->sending());
+ EXPECT_TRUE(media_channel1()->sending());
if (verify_playout_) {
- EXPECT_TRUE(media_channel2_->playout());
+ EXPECT_TRUE(media_channel2()->playout());
}
- EXPECT_TRUE(media_channel2_->sending());
+ EXPECT_TRUE(media_channel2()->sending());
}
// Tests that when the transport channel signals a candidate pair change
@@ -872,40 +884,18 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
EXPECT_FALSE(IsSrtpActive(channel1_));
EXPECT_TRUE(SendInitiate());
if (verify_playout_) {
- EXPECT_TRUE(media_channel1_->playout());
+ EXPECT_TRUE(media_channel1()->playout());
}
- EXPECT_FALSE(media_channel1_->sending());
+ EXPECT_FALSE(media_channel1()->sending());
EXPECT_TRUE(SendAccept());
EXPECT_FALSE(IsSrtpActive(channel1_));
- EXPECT_TRUE(media_channel1_->sending());
- EXPECT_EQ(1U, media_channel1_->codecs().size());
+ EXPECT_TRUE(media_channel1()->sending());
+ EXPECT_EQ(1U, media_channel1()->codecs().size());
if (verify_playout_) {
- EXPECT_TRUE(media_channel2_->playout());
+ EXPECT_TRUE(media_channel2()->playout());
}
- EXPECT_TRUE(media_channel2_->sending());
- EXPECT_EQ(1U, media_channel2_->codecs().size());
- }
-
- // Test that we don't crash if packets are sent during call teardown
- // when RTCP mux is enabled. This is a regression test against a specific
- // race condition that would only occur when a RTCP packet was sent during
- // teardown of a channel on which RTCP mux was enabled.
- void TestCallTeardownRtcpMux() {
- class LastWordMediaChannel : public T::MediaChannel {
- public:
- LastWordMediaChannel() : T::MediaChannel(NULL, typename T::Options()) {}
- ~LastWordMediaChannel() {
- T::MediaChannel::SendRtp(kPcmuFrame, sizeof(kPcmuFrame),
- rtc::PacketOptions());
- T::MediaChannel::SendRtcp(kRtcpReport, sizeof(kRtcpReport));
- }
- };
- CreateChannels(std::make_unique<LastWordMediaChannel>(),
- std::make_unique<LastWordMediaChannel>(), RTCP_MUX,
- RTCP_MUX);
- EXPECT_TRUE(SendInitiate());
- EXPECT_TRUE(SendAccept());
- EXPECT_TRUE(Terminate());
+ EXPECT_TRUE(media_channel2()->sending());
+ EXPECT_EQ(1U, media_channel2()->codecs().size());
}
// Send voice RTP data to the other side and ensure it gets there.
@@ -1031,7 +1021,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
network_thread_->Invoke<void>(RTC_FROM_HERE, [this] {
fake_rtp_dtls_transport1_->SetWritable(true);
});
- EXPECT_TRUE(media_channel1_->sending());
+ EXPECT_TRUE(media_channel1()->sending());
SendRtp1();
SendRtp2();
WaitForThreads();
@@ -1045,7 +1035,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
bool asymmetric = true;
fake_rtp_dtls_transport1_->SetDestination(nullptr, asymmetric);
});
- EXPECT_TRUE(media_channel1_->sending());
+ EXPECT_TRUE(media_channel1()->sending());
// Should fail also.
SendRtp1();
@@ -1061,7 +1051,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
fake_rtp_dtls_transport1_->SetDestination(fake_rtp_dtls_transport2_.get(),
asymmetric);
});
- EXPECT_TRUE(media_channel1_->sending());
+ EXPECT_TRUE(media_channel1()->sending());
SendRtp1();
SendRtp2();
WaitForThreads();
@@ -1114,17 +1104,17 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
std::unique_ptr<typename T::Content> content(
CreateMediaContentWithStream(1));
- media_channel1_->set_fail_set_recv_codecs(true);
+ media_channel1()->set_fail_set_recv_codecs(true);
EXPECT_FALSE(
channel1_->SetLocalContent(content.get(), SdpType::kOffer, &err));
EXPECT_FALSE(
channel1_->SetLocalContent(content.get(), SdpType::kAnswer, &err));
- media_channel1_->set_fail_set_send_codecs(true);
+ media_channel1()->set_fail_set_send_codecs(true);
EXPECT_FALSE(
channel1_->SetRemoteContent(content.get(), SdpType::kOffer, &err));
- media_channel1_->set_fail_set_send_codecs(true);
+ media_channel1()->set_fail_set_send_codecs(true);
EXPECT_FALSE(
channel1_->SetRemoteContent(content.get(), SdpType::kAnswer, &err));
}
@@ -1137,14 +1127,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateMediaContentWithStream(1));
EXPECT_TRUE(
channel1_->SetLocalContent(content1.get(), SdpType::kOffer, &err));
- EXPECT_TRUE(media_channel1_->HasSendStream(1));
+ EXPECT_TRUE(media_channel1()->HasSendStream(1));
std::unique_ptr<typename T::Content> content2(
CreateMediaContentWithStream(2));
EXPECT_TRUE(
channel1_->SetLocalContent(content2.get(), SdpType::kOffer, &err));
- EXPECT_FALSE(media_channel1_->HasSendStream(1));
- EXPECT_TRUE(media_channel1_->HasSendStream(2));
+ EXPECT_FALSE(media_channel1()->HasSendStream(1));
+ EXPECT_TRUE(media_channel1()->HasSendStream(2));
}
void TestReceiveTwoOffers() {
@@ -1155,14 +1145,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateMediaContentWithStream(1));
EXPECT_TRUE(
channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, &err));
- EXPECT_TRUE(media_channel1_->HasRecvStream(1));
+ EXPECT_TRUE(media_channel1()->HasRecvStream(1));
std::unique_ptr<typename T::Content> content2(
CreateMediaContentWithStream(2));
EXPECT_TRUE(
channel1_->SetRemoteContent(content2.get(), SdpType::kOffer, &err));
- EXPECT_FALSE(media_channel1_->HasRecvStream(1));
- EXPECT_TRUE(media_channel1_->HasRecvStream(2));
+ EXPECT_FALSE(media_channel1()->HasRecvStream(1));
+ EXPECT_TRUE(media_channel1()->HasRecvStream(2));
}
void TestSendPrAnswer() {
@@ -1174,24 +1164,24 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateMediaContentWithStream(1));
EXPECT_TRUE(
channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, &err));
- EXPECT_TRUE(media_channel1_->HasRecvStream(1));
+ EXPECT_TRUE(media_channel1()->HasRecvStream(1));
// Send PR answer
std::unique_ptr<typename T::Content> content2(
CreateMediaContentWithStream(2));
EXPECT_TRUE(
channel1_->SetLocalContent(content2.get(), SdpType::kPrAnswer, &err));
- EXPECT_TRUE(media_channel1_->HasRecvStream(1));
- EXPECT_TRUE(media_channel1_->HasSendStream(2));
+ EXPECT_TRUE(media_channel1()->HasRecvStream(1));
+ EXPECT_TRUE(media_channel1()->HasSendStream(2));
// Send answer
std::unique_ptr<typename T::Content> content3(
CreateMediaContentWithStream(3));
EXPECT_TRUE(
channel1_->SetLocalContent(content3.get(), SdpType::kAnswer, &err));
- EXPECT_TRUE(media_channel1_->HasRecvStream(1));
- EXPECT_FALSE(media_channel1_->HasSendStream(2));
- EXPECT_TRUE(media_channel1_->HasSendStream(3));
+ EXPECT_TRUE(media_channel1()->HasRecvStream(1));
+ EXPECT_FALSE(media_channel1()->HasSendStream(2));
+ EXPECT_TRUE(media_channel1()->HasSendStream(3));
}
void TestReceivePrAnswer() {
@@ -1203,39 +1193,39 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateMediaContentWithStream(1));
EXPECT_TRUE(
channel1_->SetLocalContent(content1.get(), SdpType::kOffer, &err));
- EXPECT_TRUE(media_channel1_->HasSendStream(1));
+ EXPECT_TRUE(media_channel1()->HasSendStream(1));
// Receive PR answer
std::unique_ptr<typename T::Content> content2(
CreateMediaContentWithStream(2));
EXPECT_TRUE(
channel1_->SetRemoteContent(content2.get(), SdpType::kPrAnswer, &err));
- EXPECT_TRUE(media_channel1_->HasSendStream(1));
- EXPECT_TRUE(media_channel1_->HasRecvStream(2));
+ EXPECT_TRUE(media_channel1()->HasSendStream(1));
+ EXPECT_TRUE(media_channel1()->HasRecvStream(2));
// Receive answer
std::unique_ptr<typename T::Content> content3(
CreateMediaContentWithStream(3));
EXPECT_TRUE(
channel1_->SetRemoteContent(content3.get(), SdpType::kAnswer, &err));
- EXPECT_TRUE(media_channel1_->HasSendStream(1));
- EXPECT_FALSE(media_channel1_->HasRecvStream(2));
- EXPECT_TRUE(media_channel1_->HasRecvStream(3));
+ EXPECT_TRUE(media_channel1()->HasSendStream(1));
+ EXPECT_FALSE(media_channel1()->HasRecvStream(2));
+ EXPECT_TRUE(media_channel1()->HasRecvStream(3));
}
void TestOnTransportReadyToSend() {
CreateChannels(0, 0);
- EXPECT_FALSE(media_channel1_->ready_to_send());
+ EXPECT_FALSE(media_channel1()->ready_to_send());
network_thread_->PostTask(
RTC_FROM_HERE, [this] { channel1_->OnTransportReadyToSend(true); });
WaitForThreads();
- EXPECT_TRUE(media_channel1_->ready_to_send());
+ EXPECT_TRUE(media_channel1()->ready_to_send());
network_thread_->PostTask(
RTC_FROM_HERE, [this] { channel1_->OnTransportReadyToSend(false); });
WaitForThreads();
- EXPECT_FALSE(media_channel1_->ready_to_send());
+ EXPECT_FALSE(media_channel1()->ready_to_send());
}
bool SetRemoteContentWithBitrateLimit(int remote_limit) {
@@ -1263,8 +1253,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateChannels(0, 0);
EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_,
SdpType::kOffer, NULL));
- EXPECT_EQ(media_channel1_->max_bps(), -1);
- VerifyMaxBitrate(media_channel1_->GetRtpSendParameters(kSsrc1),
+ EXPECT_EQ(media_channel1()->max_bps(), -1);
+ VerifyMaxBitrate(media_channel1()->GetRtpSendParameters(kSsrc1),
absl::nullopt);
}
@@ -1281,17 +1271,16 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
CreateChannels(DTLS, DTLS);
- channel1_->SetOption(cricket::BaseChannel::ST_RTP,
- rtc::Socket::Option::OPT_SNDBUF, kSndBufSize);
- channel2_->SetOption(cricket::BaseChannel::ST_RTP,
- rtc::Socket::Option::OPT_RCVBUF, kRcvBufSize);
-
new_rtp_transport_ = CreateDtlsSrtpTransport(
fake_rtp_dtls_transport2_.get(), fake_rtcp_dtls_transport2_.get());
bool rcv_success, send_success;
int rcv_buf, send_buf;
network_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
+ channel1_->SetOption(cricket::BaseChannel::ST_RTP,
+ rtc::Socket::Option::OPT_SNDBUF, kSndBufSize);
+ channel2_->SetOption(cricket::BaseChannel::ST_RTP,
+ rtc::Socket::Option::OPT_RCVBUF, kRcvBufSize);
channel1_->SetRtpTransport(new_rtp_transport_.get());
send_success = fake_rtp_dtls_transport2_->GetOption(
rtc::Socket::Option::OPT_SNDBUF, &send_buf);
@@ -1366,6 +1355,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
thread->ProcessMessages(0);
}
}
+ static void FlushCurrentThread() {
+ rtc::Thread::Current()->ProcessMessages(0);
+ }
void WaitForThreads(rtc::ArrayView<rtc::Thread*> threads) {
// |threads| and current thread post packets to network thread.
for (rtc::Thread* thread : threads) {
@@ -1381,9 +1373,24 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
// Worker thread = current Thread process received messages.
ProcessThreadQueue(rtc::Thread::Current());
}
+
+ typename T::MediaChannel* media_channel1() {
+ RTC_DCHECK(channel1_);
+ RTC_DCHECK(channel1_->media_channel());
+ return static_cast<typename T::MediaChannel*>(channel1_->media_channel());
+ }
+
+ typename T::MediaChannel* media_channel2() {
+ RTC_DCHECK(channel2_);
+ RTC_DCHECK(channel2_->media_channel());
+ return static_cast<typename T::MediaChannel*>(channel2_->media_channel());
+ }
+
// TODO(pbos): Remove playout from all media channels and let renderers mute
// themselves.
const bool verify_playout_;
+ rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_thread_safety_ =
+ webrtc::PendingTaskSafetyFlag::CreateDetached();
std::unique_ptr<rtc::Thread> network_thread_keeper_;
rtc::Thread* network_thread_;
std::unique_ptr<cricket::FakeDtlsTransport> fake_rtp_dtls_transport1_;
@@ -1398,9 +1405,6 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
std::unique_ptr<webrtc::RtpTransportInternal> rtp_transport2_;
std::unique_ptr<webrtc::RtpTransportInternal> new_rtp_transport_;
cricket::FakeMediaEngine media_engine_;
- // The media channels are owned by the voice channel objects below.
- typename T::MediaChannel* media_channel1_ = nullptr;
- typename T::MediaChannel* media_channel2_ = nullptr;
std::unique_ptr<typename T::Channel> channel1_;
std::unique_ptr<typename T::Channel> channel2_;
typename T::Content local_media_content1_;
@@ -1560,8 +1564,8 @@ class VideoChannelDoubleThreadTest : public ChannelTest<VideoTraits> {
TEST_F(VoiceChannelSingleThreadTest, TestInit) {
Base::TestInit();
- EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
- EXPECT_TRUE(media_channel1_->dtmf_info_queue().empty());
+ EXPECT_FALSE(media_channel1()->IsStreamMuted(0));
+ EXPECT_TRUE(media_channel1()->dtmf_info_queue().empty());
}
TEST_F(VoiceChannelSingleThreadTest, TestDeinit) {
@@ -1622,10 +1626,6 @@ TEST_F(VoiceChannelSingleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
-TEST_F(VoiceChannelSingleThreadTest, TestCallTeardownRtcpMux) {
- Base::TestCallTeardownRtcpMux();
-}
-
TEST_F(VoiceChannelSingleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
@@ -1701,8 +1701,8 @@ TEST_F(VoiceChannelSingleThreadTest, SocketOptionsMergedOnSetTransport) {
// VoiceChannelDoubleThreadTest
TEST_F(VoiceChannelDoubleThreadTest, TestInit) {
Base::TestInit();
- EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
- EXPECT_TRUE(media_channel1_->dtmf_info_queue().empty());
+ EXPECT_FALSE(media_channel1()->IsStreamMuted(0));
+ EXPECT_TRUE(media_channel1()->dtmf_info_queue().empty());
}
TEST_F(VoiceChannelDoubleThreadTest, TestDeinit) {
@@ -1763,10 +1763,6 @@ TEST_F(VoiceChannelDoubleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
-TEST_F(VoiceChannelDoubleThreadTest, TestCallTeardownRtcpMux) {
- Base::TestCallTeardownRtcpMux();
-}
-
TEST_F(VoiceChannelDoubleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
@@ -1902,10 +1898,6 @@ TEST_F(VideoChannelSingleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
-TEST_F(VideoChannelSingleThreadTest, TestCallTeardownRtcpMux) {
- Base::TestCallTeardownRtcpMux();
-}
-
TEST_F(VideoChannelSingleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
@@ -1992,12 +1984,12 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalOfferWithPacketization) {
CreateChannels(0, 0);
EXPECT_TRUE(channel1_->SetLocalContent(&video, SdpType::kOffer, NULL));
- EXPECT_THAT(media_channel1_->send_codecs(), testing::IsEmpty());
- ASSERT_THAT(media_channel1_->recv_codecs(), testing::SizeIs(2));
- EXPECT_TRUE(media_channel1_->recv_codecs()[0].Matches(kVp8Codec));
- EXPECT_EQ(media_channel1_->recv_codecs()[0].packetization, absl::nullopt);
- EXPECT_TRUE(media_channel1_->recv_codecs()[1].Matches(vp9_codec));
- EXPECT_EQ(media_channel1_->recv_codecs()[1].packetization,
+ EXPECT_THAT(media_channel1()->send_codecs(), testing::IsEmpty());
+ ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_channel1()->recv_codecs()[0].Matches(kVp8Codec));
+ EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt);
+ EXPECT_TRUE(media_channel1()->recv_codecs()[1].Matches(vp9_codec));
+ EXPECT_EQ(media_channel1()->recv_codecs()[1].packetization,
cricket::kPacketizationParamRaw);
}
@@ -2011,12 +2003,12 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteOfferWithPacketization) {
CreateChannels(0, 0);
EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kOffer, NULL));
- EXPECT_THAT(media_channel1_->recv_codecs(), testing::IsEmpty());
- ASSERT_THAT(media_channel1_->send_codecs(), testing::SizeIs(2));
- EXPECT_TRUE(media_channel1_->send_codecs()[0].Matches(kVp8Codec));
- EXPECT_EQ(media_channel1_->send_codecs()[0].packetization, absl::nullopt);
- EXPECT_TRUE(media_channel1_->send_codecs()[1].Matches(vp9_codec));
- EXPECT_EQ(media_channel1_->send_codecs()[1].packetization,
+ EXPECT_THAT(media_channel1()->recv_codecs(), testing::IsEmpty());
+ ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_channel1()->send_codecs()[0].Matches(kVp8Codec));
+ EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt);
+ EXPECT_TRUE(media_channel1()->send_codecs()[1].Matches(vp9_codec));
+ EXPECT_EQ(media_channel1()->send_codecs()[1].packetization,
cricket::kPacketizationParamRaw);
}
@@ -2031,17 +2023,17 @@ TEST_F(VideoChannelSingleThreadTest, TestSetAnswerWithPacketization) {
EXPECT_TRUE(channel1_->SetLocalContent(&video, SdpType::kOffer, NULL));
EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kAnswer, NULL));
- ASSERT_THAT(media_channel1_->recv_codecs(), testing::SizeIs(2));
- EXPECT_TRUE(media_channel1_->recv_codecs()[0].Matches(kVp8Codec));
- EXPECT_EQ(media_channel1_->recv_codecs()[0].packetization, absl::nullopt);
- EXPECT_TRUE(media_channel1_->recv_codecs()[1].Matches(vp9_codec));
- EXPECT_EQ(media_channel1_->recv_codecs()[1].packetization,
+ ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_channel1()->recv_codecs()[0].Matches(kVp8Codec));
+ EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt);
+ EXPECT_TRUE(media_channel1()->recv_codecs()[1].Matches(vp9_codec));
+ EXPECT_EQ(media_channel1()->recv_codecs()[1].packetization,
cricket::kPacketizationParamRaw);
- EXPECT_THAT(media_channel1_->send_codecs(), testing::SizeIs(2));
- EXPECT_TRUE(media_channel1_->send_codecs()[0].Matches(kVp8Codec));
- EXPECT_EQ(media_channel1_->send_codecs()[0].packetization, absl::nullopt);
- EXPECT_TRUE(media_channel1_->send_codecs()[1].Matches(vp9_codec));
- EXPECT_EQ(media_channel1_->send_codecs()[1].packetization,
+ EXPECT_THAT(media_channel1()->send_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_channel1()->send_codecs()[0].Matches(kVp8Codec));
+ EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt);
+ EXPECT_TRUE(media_channel1()->send_codecs()[1].Matches(vp9_codec));
+ EXPECT_EQ(media_channel1()->send_codecs()[1].packetization,
cricket::kPacketizationParamRaw);
}
@@ -2059,10 +2051,10 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalAnswerWithoutPacketization) {
EXPECT_TRUE(
channel1_->SetRemoteContent(&remote_video, SdpType::kOffer, NULL));
EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, NULL));
- ASSERT_THAT(media_channel1_->recv_codecs(), testing::SizeIs(1));
- EXPECT_EQ(media_channel1_->recv_codecs()[0].packetization, absl::nullopt);
- ASSERT_THAT(media_channel1_->send_codecs(), testing::SizeIs(1));
- EXPECT_EQ(media_channel1_->send_codecs()[0].packetization, absl::nullopt);
+ ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt);
+ ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt);
}
TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) {
@@ -2079,10 +2071,10 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) {
EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kOffer, NULL));
EXPECT_TRUE(
channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, NULL));
- ASSERT_THAT(media_channel1_->recv_codecs(), testing::SizeIs(1));
- EXPECT_EQ(media_channel1_->recv_codecs()[0].packetization, absl::nullopt);
- ASSERT_THAT(media_channel1_->send_codecs(), testing::SizeIs(1));
- EXPECT_EQ(media_channel1_->send_codecs()[0].packetization, absl::nullopt);
+ ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt);
+ ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt);
}
TEST_F(VideoChannelSingleThreadTest,
@@ -2101,10 +2093,10 @@ TEST_F(VideoChannelSingleThreadTest,
EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kOffer, NULL));
EXPECT_FALSE(
channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, NULL));
- ASSERT_THAT(media_channel1_->recv_codecs(), testing::SizeIs(1));
- EXPECT_EQ(media_channel1_->recv_codecs()[0].packetization,
+ ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization,
cricket::kPacketizationParamRaw);
- EXPECT_THAT(media_channel1_->send_codecs(), testing::IsEmpty());
+ EXPECT_THAT(media_channel1()->send_codecs(), testing::IsEmpty());
}
TEST_F(VideoChannelSingleThreadTest,
@@ -2123,9 +2115,9 @@ TEST_F(VideoChannelSingleThreadTest,
channel1_->SetRemoteContent(&remote_video, SdpType::kOffer, NULL));
EXPECT_FALSE(
channel1_->SetLocalContent(&local_video, SdpType::kAnswer, NULL));
- EXPECT_THAT(media_channel1_->recv_codecs(), testing::IsEmpty());
- ASSERT_THAT(media_channel1_->send_codecs(), testing::SizeIs(1));
- EXPECT_EQ(media_channel1_->send_codecs()[0].packetization, absl::nullopt);
+ EXPECT_THAT(media_channel1()->recv_codecs(), testing::IsEmpty());
+ ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt);
}
// VideoChannelDoubleThreadTest
@@ -2191,10 +2183,6 @@ TEST_F(VideoChannelDoubleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
-TEST_F(VideoChannelDoubleThreadTest, TestCallTeardownRtcpMux) {
- Base::TestCallTeardownRtcpMux();
-}
-
TEST_F(VideoChannelDoubleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
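
The channel_unittest.cc changes above follow one pattern: the cached media_channel1_/media_channel2_ raw pointers are replaced by accessors that fetch the pointer from the owning channel on demand, and Enable() is treated as fire-and-forget, so the tests flush the current thread before asserting state. The standalone sketch below is a simplified illustration of why that flush is needed once Enable() merely posts work; PendingWork and FakeChannel are invented stand-ins, not WebRTC types.

// Standalone sketch, assuming simplified stand-ins for rtc::Thread and the
// channel under test; none of these names exist in WebRTC.
#include <functional>
#include <queue>

class PendingWork {  // stand-in for the current thread's message queue
 public:
  void Post(std::function<void()> task) { tasks_.push(std::move(task)); }
  void ProcessMessages() {  // what FlushCurrentThread() amounts to
    while (!tasks_.empty()) {
      tasks_.front()();
      tasks_.pop();
    }
  }

 private:
  std::queue<std::function<void()>> tasks_;
};

class FakeChannel {
 public:
  explicit FakeChannel(PendingWork* work) : work_(work) {}
  // Fire-and-forget, like the Enable(true) calls in the updated tests.
  void Enable(bool enabled) {
    work_->Post([this, enabled] { enabled_ = enabled; });
  }
  bool enabled() const { return enabled_; }

 private:
  PendingWork* work_;
  bool enabled_ = false;
};

int main() {
  PendingWork work;
  FakeChannel channel(&work);
  channel.Enable(true);    // nothing observable yet; the work is only queued
  work.ProcessMessages();  // flush, as FlushCurrentThread() does in the tests
  return channel.enabled() ? 0 : 1;
}
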
diff --git a/pc/connection_context.cc b/pc/connection_context.cc
index 2904714c87..1bb7908f5c 100644
--- a/pc/connection_context.cc
+++ b/pc/connection_context.cc
@@ -17,7 +17,6 @@
#include "api/transport/field_trial_based_config.h"
#include "media/sctp/sctp_transport_factory.h"
#include "rtc_base/helpers.h"
-#include "rtc_base/ref_counted_object.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
@@ -76,7 +75,7 @@ std::unique_ptr<SctpTransportFactoryInterface> MaybeCreateSctpFactory(
// Static
rtc::scoped_refptr<ConnectionContext> ConnectionContext::Create(
PeerConnectionFactoryDependencies* dependencies) {
- return new rtc::RefCountedObject<ConnectionContext>(dependencies);
+ return new ConnectionContext(dependencies);
}
ConnectionContext::ConnectionContext(
@@ -104,11 +103,13 @@ ConnectionContext::ConnectionContext(
signaling_thread_->AllowInvokesToThread(network_thread_);
worker_thread_->AllowInvokesToThread(network_thread_);
if (network_thread_->IsCurrent()) {
- network_thread_->DisallowAllInvokes();
+ // TODO(https://crbug.com/webrtc/12802) switch to DisallowAllInvokes
+ network_thread_->AllowInvokesToThread(network_thread_);
} else {
network_thread_->PostTask(ToQueuedTask([thread = network_thread_] {
thread->DisallowBlockingCalls();
- thread->DisallowAllInvokes();
+ // TODO(https://crbug.com/webrtc/12802) switch to DisallowAllInvokes
+ thread->AllowInvokesToThread(thread);
}));
}
diff --git a/pc/connection_context.h b/pc/connection_context.h
index 0c69c17a5b..8fad13c10c 100644
--- a/pc/connection_context.h
+++ b/pc/connection_context.h
@@ -17,6 +17,7 @@
#include "api/call/call_factory_interface.h"
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
+#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "api/sequence_checker.h"
#include "api/transport/sctp_transport_factory_interface.h"
@@ -27,7 +28,6 @@
#include "rtc_base/checks.h"
#include "rtc_base/network.h"
#include "rtc_base/network_monitor_factory.h"
-#include "rtc_base/ref_count.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
@@ -47,7 +47,8 @@ class RtcEventLog;
// interferes with the operation of other PeerConnections.
//
// This class must be created and destroyed on the signaling thread.
-class ConnectionContext : public rtc::RefCountInterface {
+class ConnectionContext final
+ : public rtc::RefCountedNonVirtual<ConnectionContext> {
public:
// Creates a ConnectionContext. May return null if initialization fails.
// The Dependencies class allows simple management of all new dependencies
@@ -92,7 +93,8 @@ class ConnectionContext : public rtc::RefCountInterface {
protected:
explicit ConnectionContext(PeerConnectionFactoryDependencies* dependencies);
- virtual ~ConnectionContext();
+ friend class rtc::RefCountedNonVirtual<ConnectionContext>;
+ ~ConnectionContext();
private:
// The following three variables are used to communicate between the
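
For context on the refcounting change above: rtc::RefCountedNonVirtual is a CRTP base that deletes through the most derived type, which is why ConnectionContext can drop the virtual destructor and instead befriend the base. The sketch below is a simplified, self-contained analogue of that pattern, not the actual api/ref_counted_base.h implementation; RefCountedNonVirtualBase and Context are invented names.

// Simplified analogue only; the real base lives in api/ref_counted_base.h.
#include <atomic>

template <typename Derived>
class RefCountedNonVirtualBase {
 public:
  void AddRef() const { ref_count_.fetch_add(1, std::memory_order_relaxed); }
  void Release() const {
    if (ref_count_.fetch_sub(1, std::memory_order_acq_rel) == 1) {
      // Deleting through Derived* is what makes a virtual destructor
      // unnecessary: the static type is already the most derived type.
      delete static_cast<const Derived*>(this);
    }
  }

 protected:
  RefCountedNonVirtualBase() = default;
  ~RefCountedNonVirtualBase() = default;

 private:
  mutable std::atomic<int> ref_count_{0};
};

class Context final : public RefCountedNonVirtualBase<Context> {
 public:
  static Context* Create() {
    Context* ctx = new Context();
    ctx->AddRef();  // caller owns one reference and must Release() it
    return ctx;
  }

 protected:
  // The base deletes the object, so it needs access to the non-public,
  // non-virtual destructor; this mirrors the friend declaration above.
  friend class RefCountedNonVirtualBase<Context>;
  ~Context() = default;
};

int main() {
  Context* ctx = Context::Create();
  ctx->Release();  // drops the last reference; the base deletes ctx
}
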
diff --git a/pc/data_channel_controller.cc b/pc/data_channel_controller.cc
index 6b3500cbff..d8e6b39895 100644
--- a/pc/data_channel_controller.cc
+++ b/pc/data_channel_controller.cc
@@ -31,11 +31,12 @@ bool DataChannelController::HasDataChannels() const {
return !sctp_data_channels_.empty();
}
-bool DataChannelController::SendData(const cricket::SendDataParams& params,
+bool DataChannelController::SendData(int sid,
+ const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) {
if (data_channel_transport())
- return DataChannelSendData(params, payload, result);
+ return DataChannelSendData(sid, params, payload, result);
RTC_LOG(LS_ERROR) << "SendData called before transport is ready";
return false;
}
@@ -106,7 +107,7 @@ void DataChannelController::OnDataReceived(
RTC_DCHECK_RUN_ON(network_thread());
cricket::ReceiveDataParams params;
params.sid = channel_id;
- params.type = ToCricketDataMessageType(type);
+ params.type = type;
signaling_thread()->PostTask(
ToQueuedTask([self = weak_factory_.GetWeakPtr(), params, buffer] {
if (self) {
@@ -222,7 +223,7 @@ std::vector<DataChannelStats> DataChannelController::GetDataChannelStats()
bool DataChannelController::HandleOpenMessage_s(
const cricket::ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
- if (params.type == cricket::DMT_CONTROL && IsOpenMessage(buffer)) {
+ if (params.type == DataMessageType::kControl && IsOpenMessage(buffer)) {
// Received OPEN message; parse and signal that a new data channel should
// be created.
std::string label;
@@ -386,7 +387,8 @@ void DataChannelController::set_data_channel_transport(
}
bool DataChannelController::DataChannelSendData(
- const cricket::SendDataParams& params,
+ int sid,
+ const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) {
// TODO(bugs.webrtc.org/11547): Expect method to be called on the network
@@ -395,19 +397,9 @@ bool DataChannelController::DataChannelSendData(
RTC_DCHECK_RUN_ON(signaling_thread());
RTC_DCHECK(data_channel_transport());
- SendDataParams send_params;
- send_params.type = ToWebrtcDataMessageType(params.type);
- send_params.ordered = params.ordered;
- if (params.max_rtx_count >= 0) {
- send_params.max_rtx_count = params.max_rtx_count;
- } else if (params.max_rtx_ms >= 0) {
- send_params.max_rtx_ms = params.max_rtx_ms;
- }
-
RTCError error = network_thread()->Invoke<RTCError>(
- RTC_FROM_HERE, [this, params, send_params, payload] {
- return data_channel_transport()->SendData(params.sid, send_params,
- payload);
+ RTC_FROM_HERE, [this, sid, params, payload] {
+ return data_channel_transport()->SendData(sid, params, payload);
});
if (error.ok()) {
diff --git a/pc/data_channel_controller.h b/pc/data_channel_controller.h
index 4c42b8a345..05fcff0e03 100644
--- a/pc/data_channel_controller.h
+++ b/pc/data_channel_controller.h
@@ -53,7 +53,8 @@ class DataChannelController : public SctpDataChannelProviderInterface,
// Implements
// SctpDataChannelProviderInterface.
- bool SendData(const cricket::SendDataParams& params,
+ bool SendData(int sid,
+ const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) override;
bool ConnectDataChannel(SctpDataChannel* webrtc_data_channel) override;
@@ -131,7 +132,8 @@ class DataChannelController : public SctpDataChannelProviderInterface,
RTC_RUN_ON(signaling_thread());
// Called from SendData when data_channel_transport() is true.
- bool DataChannelSendData(const cricket::SendDataParams& params,
+ bool DataChannelSendData(int sid,
+ const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result);
diff --git a/pc/data_channel_integrationtest.cc b/pc/data_channel_integrationtest.cc
index 0b69726697..47ea74a4b2 100644
--- a/pc/data_channel_integrationtest.cc
+++ b/pc/data_channel_integrationtest.cc
@@ -27,21 +27,25 @@
#include "rtc_base/gunit.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/virtual_socket_server.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/gtest.h"
namespace webrtc {
namespace {
-class DataChannelIntegrationTest
- : public PeerConnectionIntegrationBaseTest,
- public ::testing::WithParamInterface<SdpSemantics> {
+// All tests in this file require SCTP support.
+#ifdef WEBRTC_HAVE_SCTP
+
+class DataChannelIntegrationTest : public PeerConnectionIntegrationBaseTest,
+ public ::testing::WithParamInterface<
+ std::tuple<SdpSemantics, std::string>> {
protected:
DataChannelIntegrationTest()
- : PeerConnectionIntegrationBaseTest(GetParam()) {}
+ : PeerConnectionIntegrationBaseTest(std::get<0>(GetParam()),
+ std::get<1>(GetParam())) {}
};
-GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(DataChannelIntegrationTest);
-
// Fake clock must be set before threads are started to prevent race on
// Set/GetClockForTesting().
// To achieve that, multiple inheritance is used as a mixin pattern
@@ -61,11 +65,6 @@ class FakeClockForTest : public rtc::ScopedFakeClock {
ScopedFakeClock& FakeClock() { return *this; }
};
-// Ensure FakeClockForTest is constructed first (see class for rationale).
-class DataChannelIntegrationTestWithFakeClock
- : public FakeClockForTest,
- public DataChannelIntegrationTest {};
-
class DataChannelIntegrationTestPlanB
: public PeerConnectionIntegrationBaseTest {
protected:
@@ -73,9 +72,6 @@ class DataChannelIntegrationTestPlanB
: PeerConnectionIntegrationBaseTest(SdpSemantics::kPlanB) {}
};
-GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(
- DataChannelIntegrationTestWithFakeClock);
-
class DataChannelIntegrationTestUnifiedPlan
: public PeerConnectionIntegrationBaseTest {
protected:
@@ -83,8 +79,6 @@ class DataChannelIntegrationTestUnifiedPlan
: PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {}
};
-#ifdef WEBRTC_HAVE_SCTP
-
// This test causes a PeerConnection to enter Disconnected state, and
// sends data on a DataChannel while disconnected.
// The data should be surfaced when the connection reestablishes.
@@ -218,6 +212,52 @@ TEST_P(DataChannelIntegrationTest,
}
}
+// This test sets up a call between two parties with an SCTP
+// data channel only, and sends empty messages.
+TEST_P(DataChannelIntegrationTest,
+ EndToEndCallWithSctpDataChannelEmptyMessages) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Expect that data channel created on caller side will show up for callee as
+ // well.
+ caller()->CreateDataChannel();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Caller data channel should already exist (it created one). Callee data
+ // channel may not exist yet, since negotiation happens in-band, not in SDP.
+ ASSERT_NE(nullptr, caller()->data_channel());
+ ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+ EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+ EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+ // Ensure data can be sent in both directions.
+ // Sending empty string data
+ std::string data = "";
+ caller()->data_channel()->Send(DataBuffer(data));
+ EXPECT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ EXPECT_TRUE(callee()->data_observer()->last_message().empty());
+ EXPECT_FALSE(callee()->data_observer()->messages().back().binary);
+ callee()->data_channel()->Send(DataBuffer(data));
+ EXPECT_EQ_WAIT(1u, caller()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ EXPECT_TRUE(caller()->data_observer()->last_message().empty());
+ EXPECT_FALSE(caller()->data_observer()->messages().back().binary);
+
+ // Sending empty binary data
+ rtc::CopyOnWriteBuffer empty_buffer;
+ caller()->data_channel()->Send(DataBuffer(empty_buffer, true));
+ EXPECT_EQ_WAIT(2u, callee()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ EXPECT_TRUE(callee()->data_observer()->last_message().empty());
+ EXPECT_TRUE(callee()->data_observer()->messages().back().binary);
+ callee()->data_channel()->Send(DataBuffer(empty_buffer, true));
+ EXPECT_EQ_WAIT(2u, caller()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ EXPECT_TRUE(caller()->data_observer()->last_message().empty());
+ EXPECT_TRUE(caller()->data_observer()->messages().back().binary);
+}
+
TEST_P(DataChannelIntegrationTest,
EndToEndCallWithSctpDataChannelLowestSafeMtu) {
// The lowest payload size limit that's tested and found safe for this
@@ -316,8 +356,22 @@ TEST_P(DataChannelIntegrationTest, CalleeClosesSctpDataChannel) {
// Close the data channel on the callee side, and wait for it to reach the
// "closed" state on both sides.
callee()->data_channel()->Close();
- EXPECT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+ DataChannelInterface::DataState expected_states[] = {
+ DataChannelInterface::DataState::kConnecting,
+ DataChannelInterface::DataState::kOpen,
+ DataChannelInterface::DataState::kClosing,
+ DataChannelInterface::DataState::kClosed};
+
+ EXPECT_EQ_WAIT(DataChannelInterface::DataState::kClosed,
+ caller()->data_observer()->state(), kDefaultTimeout);
+ EXPECT_THAT(caller()->data_observer()->states(),
+ ::testing::ElementsAreArray(expected_states));
+
+ EXPECT_EQ_WAIT(DataChannelInterface::DataState::kClosed,
+ callee()->data_observer()->state(), kDefaultTimeout);
+ EXPECT_THAT(callee()->data_observer()->states(),
+ ::testing::ElementsAreArray(expected_states));
}
TEST_P(DataChannelIntegrationTest, SctpDataChannelConfigSentToOtherSide) {
@@ -386,10 +440,16 @@ TEST_P(DataChannelIntegrationTest, StressTestUnorderedSctpDataChannel) {
kDefaultTimeout);
// Sort and compare to make sure none of the messages were corrupted.
- std::vector<std::string> caller_received_messages =
- caller()->data_observer()->messages();
- std::vector<std::string> callee_received_messages =
- callee()->data_observer()->messages();
+ std::vector<std::string> caller_received_messages;
+ absl::c_transform(caller()->data_observer()->messages(),
+ std::back_inserter(caller_received_messages),
+ [](const auto& a) { return a.data; });
+
+ std::vector<std::string> callee_received_messages;
+ absl::c_transform(callee()->data_observer()->messages(),
+ std::back_inserter(callee_received_messages),
+ [](const auto& a) { return a.data; });
+
absl::c_sort(sent_messages);
absl::c_sort(caller_received_messages);
absl::c_sort(callee_received_messages);
@@ -488,8 +548,6 @@ TEST_P(DataChannelIntegrationTest,
kDefaultTimeout);
}
-#endif // WEBRTC_HAVE_SCTP
-
// Test that after closing PeerConnections, they stop sending any packets (ICE,
// DTLS, RTP...).
TEST_P(DataChannelIntegrationTest, ClosingConnectionStopsPacketFlow) {
@@ -497,9 +555,7 @@ TEST_P(DataChannelIntegrationTest, ClosingConnectionStopsPacketFlow) {
ASSERT_TRUE(CreatePeerConnectionWrappers());
ConnectFakeSignaling();
caller()->AddAudioVideoTracks();
-#ifdef WEBRTC_HAVE_SCTP
caller()->CreateDataChannel();
-#endif
caller()->CreateAndSetAndSignalOffer();
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
MediaExpectations media_expectations;
@@ -517,7 +573,6 @@ TEST_P(DataChannelIntegrationTest, ClosingConnectionStopsPacketFlow) {
// Test that transport stats are generated by the RTCStatsCollector for a
// connection that only involves data channels. This is a regression test for
// crbug.com/826972.
-#ifdef WEBRTC_HAVE_SCTP
TEST_P(DataChannelIntegrationTest,
TransportStatsReportedForDataChannelOnlyConnection) {
ASSERT_TRUE(CreatePeerConnectionWrappers());
@@ -534,15 +589,187 @@ TEST_P(DataChannelIntegrationTest,
EXPECT_EQ(1u, callee_report->GetStatsOfType<RTCTransportStats>().size());
}
-INSTANTIATE_TEST_SUITE_P(DataChannelIntegrationTest,
- DataChannelIntegrationTest,
- Values(SdpSemantics::kPlanB,
- SdpSemantics::kUnifiedPlan));
+TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDeliveredInReliableMode) {
+ CreatePeerConnectionWrappers();
+ ConnectFakeSignaling();
+ caller()->CreateDataChannel();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+
+ caller()->data_channel()->Send(DataBuffer("hello first"));
+ ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ // Cause a temporary network outage
+ virtual_socket_server()->set_drop_probability(1.0);
+ for (int i = 1; i <= 10; i++) {
+ caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+ }
+ // Nothing should be delivered during outage. Short wait.
+ EXPECT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), 10);
+ // Reverse the network outage.
+ virtual_socket_server()->set_drop_probability(0.0);
+ // All packets should be delivered.
+ EXPECT_EQ_WAIT(11u, callee()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+}
+
+TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDroppedInUnreliableMode) {
+ CreatePeerConnectionWrappers();
+ ConnectFakeSignaling();
+ DataChannelInit init;
+ init.maxRetransmits = 0;
+ init.ordered = false;
+ caller()->CreateDataChannel(&init);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+ caller()->data_channel()->Send(DataBuffer("hello first"));
+ ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ // Cause a temporary network outage
+ virtual_socket_server()->set_drop_probability(1.0);
+ // Send a few packets. Note that they are all dropped only if every packet
+ // fits into the receiver's receive window / the congestion window, so that
+ // they are actually put on the wire.
+ for (int i = 1; i <= 10; i++) {
+ caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+ }
+ // Nothing should be delivered during outage.
+ // We do a short wait to verify that delivery count is still 1.
+ WAIT(false, 10);
+ EXPECT_EQ(1u, callee()->data_observer()->received_message_count());
+ // Reverse the network outage.
+ virtual_socket_server()->set_drop_probability(0.0);
+ // Send a new packet, and wait for it to be delivered.
+ caller()->data_channel()->Send(DataBuffer("After block"));
+ EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(),
+ kDefaultTimeout);
+ // Some messages should be lost, but the first and last messages should
+ // have been delivered.
+ // First, check that the protocol guarantee is preserved.
+ EXPECT_GT(11u, callee()->data_observer()->received_message_count());
+ EXPECT_LE(2u, callee()->data_observer()->received_message_count());
+ // Then, check that observed behavior (lose all messages) has not changed
+ EXPECT_EQ(2u, callee()->data_observer()->received_message_count());
+}
+
+TEST_P(DataChannelIntegrationTest,
+ QueuedPacketsGetDroppedInLifetimeLimitedMode) {
+ CreatePeerConnectionWrappers();
+ ConnectFakeSignaling();
+ DataChannelInit init;
+ init.maxRetransmitTime = 1;
+ init.ordered = false;
+ caller()->CreateDataChannel(&init);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+ caller()->data_channel()->Send(DataBuffer("hello first"));
+ ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ // Cause a temporary network outage
+ virtual_socket_server()->set_drop_probability(1.0);
+ for (int i = 1; i <= 200; i++) {
+ caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+ }
+ // Nothing should be delivered during outage.
+ // We do a short wait to verify that delivery count is still 1,
+ // and to make sure max packet lifetime (which is in ms) is exceeded.
+ WAIT(false, 10);
+ EXPECT_EQ(1u, callee()->data_observer()->received_message_count());
+ // Reverse the network outage.
+ virtual_socket_server()->set_drop_probability(0.0);
+ // Send a new packet, and wait for it to be delivered.
+ caller()->data_channel()->Send(DataBuffer("After block"));
+ EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(),
+ kDefaultTimeout);
+ // Some messages should be lost, but the first and last messages should
+ // have been delivered.
+ // First, check that the protocol guarantee is preserved.
+ EXPECT_GT(202u, callee()->data_observer()->received_message_count());
+ EXPECT_LE(2u, callee()->data_observer()->received_message_count());
+ // Then, check that observed behavior (lose some messages) has not changed
+ if (webrtc::field_trial::IsEnabled("WebRTC-DataChannel-Dcsctp")) {
+ // DcSctp loses all messages. This is correct.
+ EXPECT_EQ(2u, callee()->data_observer()->received_message_count());
+ } else {
+ // Usrsctp loses some messages, but keeps messages not attempted.
+ // THIS IS THE WRONG BEHAVIOR. According to discussion in
+ // https://github.com/sctplab/usrsctp/issues/584, all these packets
+ // should be discarded.
+ // TODO(bugs.webrtc.org/12731): Fix this.
+ EXPECT_EQ(90u, callee()->data_observer()->received_message_count());
+ }
+}
+
+TEST_P(DataChannelIntegrationTest,
+ SomeQueuedPacketsGetDroppedInMaxRetransmitsMode) {
+ CreatePeerConnectionWrappers();
+ ConnectFakeSignaling();
+ DataChannelInit init;
+ init.maxRetransmits = 0;
+ init.ordered = false;
+ caller()->CreateDataChannel(&init);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+ caller()->data_channel()->Send(DataBuffer("hello first"));
+ ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+ kDefaultTimeout);
+ // Cause a temporary network outage
+ virtual_socket_server()->set_drop_probability(1.0);
+ // Fill the buffer until queued data starts to build
+ size_t packet_counter = 0;
+ while (caller()->data_channel()->buffered_amount() < 1 &&
+ packet_counter < 10000) {
+ packet_counter++;
+ caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+ }
+ if (caller()->data_channel()->buffered_amount()) {
+ RTC_LOG(LS_INFO) << "Buffered data after " << packet_counter << " packets";
+ } else {
+ RTC_LOG(LS_INFO) << "No buffered data after " << packet_counter
+ << " packets";
+ }
+ // Nothing should be delivered during outage.
+ // We do a short wait to verify that delivery count is still 1.
+ WAIT(false, 10);
+ EXPECT_EQ(1u, callee()->data_observer()->received_message_count());
+ // Reverse the network outage.
+ virtual_socket_server()->set_drop_probability(0.0);
+ // Send a new packet, and wait for it to be delivered.
+ caller()->data_channel()->Send(DataBuffer("After block"));
+ EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(),
+ kDefaultTimeout);
+ // Some messages should be lost, but the first and last messages should
+ // have been delivered.
+ // Because retransmissions are only counted when a packet goes on the wire,
+ // NOT while it is stalled in the queue due to congestion, we expect some of
+ // the packets to be delivered: congestion kept them from being sent (and
+ // therefore from being dropped) during the outage.
+ // Citation: https://tools.ietf.org/html/rfc7496#section-3.1
+
+ // First, check that the protocol guarantee is preserved.
+ EXPECT_GT(packet_counter,
+ callee()->data_observer()->received_message_count());
+ EXPECT_LE(2u, callee()->data_observer()->received_message_count());
+ // Then, check that observed behavior (lose between 100 and 200 messages)
+ // has not changed.
+ // Usrsctp behavior is different on Android (177) and other platforms (122).
+ // Dcsctp loses 432 packets.
+ EXPECT_GT(2 + packet_counter - 100,
+ callee()->data_observer()->received_message_count());
+ EXPECT_LT(2 + packet_counter - 500,
+ callee()->data_observer()->received_message_count());
+}
-INSTANTIATE_TEST_SUITE_P(DataChannelIntegrationTest,
- DataChannelIntegrationTestWithFakeClock,
- Values(SdpSemantics::kPlanB,
- SdpSemantics::kUnifiedPlan));
+INSTANTIATE_TEST_SUITE_P(
+ DataChannelIntegrationTest,
+ DataChannelIntegrationTest,
+ Combine(Values(SdpSemantics::kPlanB, SdpSemantics::kUnifiedPlan),
+ Values("WebRTC-DataChannel-Dcsctp/Enabled/",
+ "WebRTC-DataChannel-Dcsctp/Disabled/")));
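
The suite is now instantiated over the cross product of SDP semantics and a dcSCTP field-trial string via testing::Combine, so each test body runs once per (semantics, field trial) pair and reads its half of the tuple with std::get. A minimal, hypothetical GoogleTest example of the same mechanics (not the WebRTC fixture; Semantics and the trial strings are placeholders) looks like this:

#include <string>
#include <tuple>

#include <gtest/gtest.h>

// Hypothetical fixture showing tuple-parameterized instantiation.
enum class Semantics { kPlanB, kUnifiedPlan };

class TupleParamTest
    : public ::testing::TestWithParam<std::tuple<Semantics, std::string>> {
 protected:
  Semantics semantics() const { return std::get<0>(GetParam()); }
  const std::string& field_trials() const { return std::get<1>(GetParam()); }
};

TEST_P(TupleParamTest, ParamsAreForwarded) {
  // Runs once per (semantics, field-trial) combination produced below.
  EXPECT_FALSE(field_trials().empty());
}

INSTANTIATE_TEST_SUITE_P(
    All,
    TupleParamTest,
    ::testing::Combine(::testing::Values(Semantics::kPlanB,
                                         Semantics::kUnifiedPlan),
                       ::testing::Values("SomeTrial/Enabled/",
                                         "SomeTrial/Disabled/")));
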
TEST_F(DataChannelIntegrationTestUnifiedPlan,
EndToEndCallWithBundledSctpDataChannel) {
diff --git a/pc/data_channel_unittest.cc b/pc/data_channel_unittest.cc
index 2dc003fd11..98c44f26fe 100644
--- a/pc/data_channel_unittest.cc
+++ b/pc/data_channel_unittest.cc
@@ -286,8 +286,9 @@ TEST_F(SctpDataChannelTest, OpenMessageSent) {
SetChannelReady();
EXPECT_GE(webrtc_data_channel_->id(), 0);
- EXPECT_EQ(cricket::DMT_CONTROL, provider_->last_send_data_params().type);
- EXPECT_EQ(provider_->last_send_data_params().sid, webrtc_data_channel_->id());
+ EXPECT_EQ(webrtc::DataMessageType::kControl,
+ provider_->last_send_data_params().type);
+ EXPECT_EQ(provider_->last_sid(), webrtc_data_channel_->id());
}
TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
@@ -295,8 +296,9 @@ TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
SetChannelReady();
provider_->set_send_blocked(false);
- EXPECT_EQ(cricket::DMT_CONTROL, provider_->last_send_data_params().type);
- EXPECT_EQ(provider_->last_send_data_params().sid, webrtc_data_channel_->id());
+ EXPECT_EQ(webrtc::DataMessageType::kControl,
+ provider_->last_send_data_params().type);
+ EXPECT_EQ(provider_->last_sid(), webrtc_data_channel_->id());
}
// Tests that the DataChannel created after transport gets ready can enter OPEN
@@ -333,7 +335,7 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
// Emulates receiving an OPEN_ACK message.
cricket::ReceiveDataParams params;
params.sid = init.id;
- params.type = cricket::DMT_CONTROL;
+ params.type = webrtc::DataMessageType::kControl;
rtc::CopyOnWriteBuffer payload;
webrtc::WriteDataChannelOpenAckMessage(&payload);
dc->OnDataReceived(params, payload);
@@ -359,7 +361,7 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
// Emulates receiving a DATA message.
cricket::ReceiveDataParams params;
params.sid = init.id;
- params.type = cricket::DMT_TEXT;
+ params.type = webrtc::DataMessageType::kText;
webrtc::DataBuffer buffer("data");
dc->OnDataReceived(params, buffer.data);
@@ -380,7 +382,8 @@ TEST_F(SctpDataChannelTest, OpenWaitsForOpenMesssage) {
provider_->set_send_blocked(false);
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen,
webrtc_data_channel_->state(), 1000);
- EXPECT_EQ(cricket::DMT_CONTROL, provider_->last_send_data_params().type);
+ EXPECT_EQ(webrtc::DataMessageType::kControl,
+ provider_->last_send_data_params().type);
}
// Tests that close first makes sure all queued data gets sent.
@@ -401,7 +404,8 @@ TEST_F(SctpDataChannelTest, QueuedCloseFlushes) {
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed,
webrtc_data_channel_->state(), 1000);
EXPECT_TRUE(webrtc_data_channel_->error().ok());
- EXPECT_EQ(cricket::DMT_TEXT, provider_->last_send_data_params().type);
+ EXPECT_EQ(webrtc::DataMessageType::kText,
+ provider_->last_send_data_params().type);
}
// Tests that messages are sent with the right id.
@@ -410,7 +414,7 @@ TEST_F(SctpDataChannelTest, SendDataId) {
SetChannelReady();
webrtc::DataBuffer buffer("data");
EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
- EXPECT_EQ(1, provider_->last_send_data_params().sid);
+ EXPECT_EQ(1, provider_->last_sid());
}
// Tests that the incoming messages with wrong ids are rejected.
@@ -457,7 +461,7 @@ TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
- EXPECT_EQ(0, provider_->last_send_data_params().sid);
+ EXPECT_EQ(0, provider_->last_sid());
}
// Tests that DataChannel::messages_received() and DataChannel::bytes_received()
@@ -522,8 +526,9 @@ TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
- EXPECT_EQ(config.id, provider_->last_send_data_params().sid);
- EXPECT_EQ(cricket::DMT_CONTROL, provider_->last_send_data_params().type);
+ EXPECT_EQ(config.id, provider_->last_sid());
+ EXPECT_EQ(webrtc::DataMessageType::kControl,
+ provider_->last_send_data_params().type);
}
// Tests the OPEN_ACK role assigned by InternalDataChannelInit.
diff --git a/pc/dtls_srtp_transport.cc b/pc/dtls_srtp_transport.cc
index f272ab79cd..ac091c6131 100644
--- a/pc/dtls_srtp_transport.cc
+++ b/pc/dtls_srtp_transport.cc
@@ -15,6 +15,7 @@
#include <string>
#include <utility>
+#include "api/dtls_transport_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/ssl_stream_adapter.h"
@@ -114,10 +115,9 @@ bool DtlsSrtpTransport::IsDtlsConnected() {
auto rtcp_dtls_transport =
rtcp_mux_enabled() ? nullptr : rtcp_dtls_transport_;
return (rtp_dtls_transport_ &&
- rtp_dtls_transport_->dtls_state() ==
- cricket::DTLS_TRANSPORT_CONNECTED &&
+ rtp_dtls_transport_->dtls_state() == DtlsTransportState::kConnected &&
(!rtcp_dtls_transport || rtcp_dtls_transport->dtls_state() ==
- cricket::DTLS_TRANSPORT_CONNECTED));
+ DtlsTransportState::kConnected));
}
bool DtlsSrtpTransport::IsDtlsWritable() {
@@ -275,17 +275,16 @@ void DtlsSrtpTransport::SetDtlsTransport(
}
if (*old_dtls_transport) {
- (*old_dtls_transport)->UnsubscribeDtlsState(this);
+ (*old_dtls_transport)->UnsubscribeDtlsTransportState(this);
}
*old_dtls_transport = new_dtls_transport;
if (new_dtls_transport) {
- new_dtls_transport->SubscribeDtlsState(
- this, [this](cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state) {
- OnDtlsState(transport, state);
- });
+ new_dtls_transport->SubscribeDtlsTransportState(
+ this,
+ [this](cricket::DtlsTransportInternal* transport,
+ DtlsTransportState state) { OnDtlsState(transport, state); });
}
}
@@ -300,7 +299,7 @@ void DtlsSrtpTransport::SetRtcpDtlsTransport(
}
void DtlsSrtpTransport::OnDtlsState(cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state) {
+ DtlsTransportState state) {
RTC_DCHECK(transport == rtp_dtls_transport_ ||
transport == rtcp_dtls_transport_);
@@ -308,7 +307,7 @@ void DtlsSrtpTransport::OnDtlsState(cricket::DtlsTransportInternal* transport,
on_dtls_state_change_();
}
- if (state != cricket::DTLS_TRANSPORT_CONNECTED) {
+ if (state != DtlsTransportState::kConnected) {
ResetParams();
return;
}
diff --git a/pc/dtls_srtp_transport.h b/pc/dtls_srtp_transport.h
index bc82fd5a9a..9c52dcf809 100644
--- a/pc/dtls_srtp_transport.h
+++ b/pc/dtls_srtp_transport.h
@@ -16,6 +16,7 @@
#include "absl/types/optional.h"
#include "api/crypto_params.h"
+#include "api/dtls_transport_interface.h"
#include "api/rtc_error.h"
#include "p2p/base/dtls_transport_internal.h"
#include "p2p/base/packet_transport_internal.h"
@@ -82,7 +83,7 @@ class DtlsSrtpTransport : public SrtpTransport {
cricket::DtlsTransportInternal* rtcp_dtls_transport);
void OnDtlsState(cricket::DtlsTransportInternal* dtls_transport,
- cricket::DtlsTransportState state);
+ DtlsTransportState state);
// Override the SrtpTransport::OnWritableState.
void OnWritableState(rtc::PacketTransportInternal* packet_transport) override;
diff --git a/pc/dtls_transport.cc b/pc/dtls_transport.cc
index 0b6d367184..074f44e22b 100644
--- a/pc/dtls_transport.cc
+++ b/pc/dtls_transport.cc
@@ -13,6 +13,7 @@
#include <utility>
#include "absl/types/optional.h"
+#include "api/dtls_transport_interface.h"
#include "api/sequence_checker.h"
#include "pc/ice_transport.h"
#include "rtc_base/checks.h"
@@ -22,38 +23,18 @@
namespace webrtc {
-namespace {
-
-DtlsTransportState TranslateState(cricket::DtlsTransportState internal_state) {
- switch (internal_state) {
- case cricket::DTLS_TRANSPORT_NEW:
- return DtlsTransportState::kNew;
- case cricket::DTLS_TRANSPORT_CONNECTING:
- return DtlsTransportState::kConnecting;
- case cricket::DTLS_TRANSPORT_CONNECTED:
- return DtlsTransportState::kConnected;
- case cricket::DTLS_TRANSPORT_CLOSED:
- return DtlsTransportState::kClosed;
- case cricket::DTLS_TRANSPORT_FAILED:
- return DtlsTransportState::kFailed;
- }
- RTC_CHECK_NOTREACHED();
-}
-
-} // namespace
-
// Implementation of DtlsTransportInterface
DtlsTransport::DtlsTransport(
std::unique_ptr<cricket::DtlsTransportInternal> internal)
: owner_thread_(rtc::Thread::Current()),
info_(DtlsTransportState::kNew),
internal_dtls_transport_(std::move(internal)),
- ice_transport_(new rtc::RefCountedObject<IceTransportWithPointer>(
+ ice_transport_(rtc::make_ref_counted<IceTransportWithPointer>(
internal_dtls_transport_->ice_transport())) {
RTC_DCHECK(internal_dtls_transport_.get());
- internal_dtls_transport_->SubscribeDtlsState(
+ internal_dtls_transport_->SubscribeDtlsTransportState(
[this](cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state) {
+ DtlsTransportState state) {
OnInternalDtlsState(transport, state);
});
UpdateInformation();
@@ -90,7 +71,7 @@ void DtlsTransport::Clear() {
RTC_DCHECK_RUN_ON(owner_thread_);
RTC_DCHECK(internal());
bool must_send_event =
- (internal()->dtls_state() != cricket::DTLS_TRANSPORT_CLOSED);
+ (internal()->dtls_state() != DtlsTransportState::kClosed);
// The destructor of cricket::DtlsTransportInternal calls back
// into DtlsTransport, so we can't hold the lock while releasing.
std::unique_ptr<cricket::DtlsTransportInternal> transport_to_release;
@@ -107,7 +88,7 @@ void DtlsTransport::Clear() {
void DtlsTransport::OnInternalDtlsState(
cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state) {
+ DtlsTransportState state) {
RTC_DCHECK_RUN_ON(owner_thread_);
RTC_DCHECK(transport == internal());
RTC_DCHECK(state == internal()->dtls_state());
@@ -122,7 +103,7 @@ void DtlsTransport::UpdateInformation() {
MutexLock lock(&lock_);
if (internal_dtls_transport_) {
if (internal_dtls_transport_->dtls_state() ==
- cricket::DTLS_TRANSPORT_CONNECTED) {
+ DtlsTransportState::kConnected) {
bool success = true;
int ssl_cipher_suite;
int tls_version;
@@ -132,20 +113,19 @@ void DtlsTransport::UpdateInformation() {
success &= internal_dtls_transport_->GetSrtpCryptoSuite(&srtp_cipher);
if (success) {
info_ = DtlsTransportInformation(
- TranslateState(internal_dtls_transport_->dtls_state()), tls_version,
+ internal_dtls_transport_->dtls_state(), tls_version,
ssl_cipher_suite, srtp_cipher,
internal_dtls_transport_->GetRemoteSSLCertChain());
} else {
RTC_LOG(LS_ERROR) << "DtlsTransport in connected state has incomplete "
"TLS information";
info_ = DtlsTransportInformation(
- TranslateState(internal_dtls_transport_->dtls_state()),
- absl::nullopt, absl::nullopt, absl::nullopt,
+ internal_dtls_transport_->dtls_state(), absl::nullopt,
+ absl::nullopt, absl::nullopt,
internal_dtls_transport_->GetRemoteSSLCertChain());
}
} else {
- info_ = DtlsTransportInformation(
- TranslateState(internal_dtls_transport_->dtls_state()));
+ info_ = DtlsTransportInformation(internal_dtls_transport_->dtls_state());
}
} else {
info_ = DtlsTransportInformation(DtlsTransportState::kClosed);
diff --git a/pc/dtls_transport.h b/pc/dtls_transport.h
index 893b1263ae..cca4cc980a 100644
--- a/pc/dtls_transport.h
+++ b/pc/dtls_transport.h
@@ -60,7 +60,7 @@ class DtlsTransport : public DtlsTransportInterface {
private:
void OnInternalDtlsState(cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state);
+ DtlsTransportState state);
void UpdateInformation();
DtlsTransportObserverInterface* observer_ = nullptr;
diff --git a/pc/dtls_transport_unittest.cc b/pc/dtls_transport_unittest.cc
index a3f0a7ce8b..f80d99b05e 100644
--- a/pc/dtls_transport_unittest.cc
+++ b/pc/dtls_transport_unittest.cc
@@ -63,7 +63,7 @@ class DtlsTransportTest : public ::testing::Test {
}
cricket_transport->SetSslCipherSuite(kNonsenseCipherSuite);
transport_ =
- new rtc::RefCountedObject<DtlsTransport>(std::move(cricket_transport));
+ rtc::make_ref_counted<DtlsTransport>(std::move(cricket_transport));
}
void CompleteDtlsHandshake() {
@@ -86,8 +86,8 @@ class DtlsTransportTest : public ::testing::Test {
TEST_F(DtlsTransportTest, CreateClearDelete) {
auto cricket_transport = std::make_unique<FakeDtlsTransport>(
"audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
- rtc::scoped_refptr<DtlsTransport> webrtc_transport =
- new rtc::RefCountedObject<DtlsTransport>(std::move(cricket_transport));
+ auto webrtc_transport =
+ rtc::make_ref_counted<DtlsTransport>(std::move(cricket_transport));
ASSERT_TRUE(webrtc_transport->internal());
ASSERT_EQ(DtlsTransportState::kNew, webrtc_transport->Information().state());
webrtc_transport->Clear();
diff --git a/pc/dtmf_sender.cc b/pc/dtmf_sender.cc
index 5af05c8964..67c3fac134 100644
--- a/pc/dtmf_sender.cc
+++ b/pc/dtmf_sender.cc
@@ -65,9 +65,7 @@ rtc::scoped_refptr<DtmfSender> DtmfSender::Create(
if (!signaling_thread) {
return nullptr;
}
- rtc::scoped_refptr<DtmfSender> dtmf_sender(
- new rtc::RefCountedObject<DtmfSender>(signaling_thread, provider));
- return dtmf_sender;
+ return rtc::make_ref_counted<DtmfSender>(signaling_thread, provider);
}
DtmfSender::DtmfSender(rtc::Thread* signaling_thread,
diff --git a/pc/dtmf_sender.h b/pc/dtmf_sender.h
index 5cf7b2eba1..b64b50e09c 100644
--- a/pc/dtmf_sender.h
+++ b/pc/dtmf_sender.h
@@ -16,8 +16,8 @@
#include <string>
#include "api/dtmf_sender_interface.h"
-#include "api/proxy.h"
#include "api/scoped_refptr.h"
+#include "pc/proxy.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/location.h"
#include "rtc_base/ref_count.h"
@@ -102,7 +102,6 @@ class DtmfSender : public DtmfSenderInterface, public sigslot::has_slots<> {
// Define proxy for DtmfSenderInterface.
BEGIN_PRIMARY_PROXY_MAP(DtmfSender)
-
PROXY_PRIMARY_THREAD_DESTRUCTOR()
PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*)
PROXY_METHOD0(void, UnregisterObserver)
@@ -112,7 +111,7 @@ PROXY_CONSTMETHOD0(std::string, tones)
PROXY_CONSTMETHOD0(int, duration)
PROXY_CONSTMETHOD0(int, inter_tone_gap)
PROXY_CONSTMETHOD0(int, comma_delay)
-END_PROXY_MAP()
+END_PROXY_MAP(DtmfSender)
// Get DTMF code from the DTMF event character.
bool GetDtmfCode(char tone, int* code);
diff --git a/pc/dtmf_sender_unittest.cc b/pc/dtmf_sender_unittest.cc
index f7f229a887..261cbd0303 100644
--- a/pc/dtmf_sender_unittest.cc
+++ b/pc/dtmf_sender_unittest.cc
@@ -18,7 +18,6 @@
#include "rtc_base/fake_clock.h"
#include "rtc_base/gunit.h"
-#include "rtc_base/ref_counted_object.h"
#include "rtc_base/time_utils.h"
#include "test/gtest.h"
@@ -118,8 +117,7 @@ class FakeDtmfProvider : public DtmfProviderInterface {
class DtmfSenderTest : public ::testing::Test {
protected:
DtmfSenderTest()
- : observer_(new rtc::RefCountedObject<FakeDtmfObserver>()),
- provider_(new FakeDtmfProvider()) {
+ : observer_(new FakeDtmfObserver()), provider_(new FakeDtmfProvider()) {
provider_->SetCanInsertDtmf(true);
dtmf_ = DtmfSender::Create(rtc::Thread::Current(), provider_.get());
dtmf_->RegisterObserver(observer_.get());
diff --git a/pc/g3doc/dtls_transport.md b/pc/g3doc/dtls_transport.md
new file mode 100644
index 0000000000..65206dff5d
--- /dev/null
+++ b/pc/g3doc/dtls_transport.md
@@ -0,0 +1,53 @@
+<?% config.freshness.reviewed = '2021-05-07' %?>
+<?% config.freshness.owner = 'hta' %?>
+
+## Overview
+
+WebRTC uses DTLS in two ways:
+
+* to negotiate keys for SRTP encryption using
+ [DTLS-SRTP](https://www.rfc-editor.org/info/rfc5763)
+* as a transport for SCTP, which is used by the DataChannel API
+
+The W3C WebRTC API represents this as the
+[DtlsTransport](https://w3c.github.io/webrtc-pc/#rtcdtlstransport-interface).
+
+The DTLS handshake happens after the ICE transport becomes writable and has
+found a valid pair. It results in a set of keys being derived for DTLS-SRTP as
+well as a fingerprint of the remote certificate, which is compared to the one
+given in the SDP `a=fingerprint:` line.
+
+This documentation provides an overview of how DTLS is implemented, i.e. how the
+following classes interact.
+
+## webrtc::DtlsTransport
+
+The [`webrtc::DtlsTransport`][1] class is a wrapper around the
+`cricket::DtlsTransportInternal` and allows registering observers implementing
+the `webrtc::DtlsTransportObserverInterface`. The
+[`webrtc::DtlsTransportObserverInterface`][2] will provide updates to the
+observers, passing around a snapshot of the transport's state, such as the
+connection state, the remote certificate(s) and the SRTP ciphers as
+[`DtlsTransportInformation`][3].
+
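+A minimal observer sketch is shown below. It is illustrative only: the callback
+names follow the `DtlsTransportObserverInterface` referenced above, but treat
+the exact signatures as an assumption and check `api/dtls_transport_interface.h`
+before relying on them.
+
+```cpp
+#include "api/dtls_transport_interface.h"
+#include "rtc_base/logging.h"
+
+// Logs every DTLS state snapshot it is handed (sketch, not production code).
+class LoggingDtlsObserver : public webrtc::DtlsTransportObserverInterface {
+ public:
+  void OnStateChange(webrtc::DtlsTransportInformation info) override {
+    RTC_LOG(LS_INFO) << "DTLS state changed to "
+                     << static_cast<int>(info.state());
+  }
+  void OnError(webrtc::RTCError error) override {
+    RTC_LOG(LS_ERROR) << "DTLS error: " << error.message();
+  }
+};
+
+// Usage (assumed API): dtls_transport->RegisterObserver(&observer);
+```
+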
+## cricket::DtlsTransportInternal
+
+The [`cricket::DtlsTransportInternal`][4] class is an interface. Its
+implementation is [`cricket::DtlsTransport`][5]. The `cricket::DtlsTransport`
+sends and receives network packets via an ICE transport. It also demultiplexes
+DTLS packets and SRTP packets according to the scheme described in
+[RFC 5764](https://tools.ietf.org/html/rfc5764#section-5.1.2).
+
+## webrtc::DtlsSrtpTransport
+
+The [`webrtc::DtlsSrtpTransport`][6] class is responsible for extracting the
+SRTP keys after the DTLS handshake, as well as for protecting and unprotecting
+SRTP packets via its [`cricket::SrtpSession`][7].
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/pc/dtls_transport.h;l=32;drc=6a55e7307b78edb50f94a1ff1ef8393d58218369
+[2]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/dtls_transport_interface.h;l=76;drc=34437d5660a80393d631657329ef74c6538be25a
+[3]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/dtls_transport_interface.h;l=41;drc=34437d5660a80393d631657329ef74c6538be25a
+[4]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/p2p/base/dtls_transport_internal.h;l=63;drc=34437d5660a80393d631657329ef74c6538be25a
+[5]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/p2p/base/dtls_transport.h;l=94;drc=653bab6790ac92c513b7cf4cd3ad59039c589a95
+[6]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/pc/dtls_srtp_transport.h;l=31;drc=c32f00ea9ddf3267257fe6b45d4d79c6f6bcb829
+[7]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=33;drc=be66d95ab7f9428028806bbf66cb83800bda9241
diff --git a/pc/g3doc/peer_connection.md b/pc/g3doc/peer_connection.md
new file mode 100644
index 0000000000..1eae135991
--- /dev/null
+++ b/pc/g3doc/peer_connection.md
@@ -0,0 +1,59 @@
+<?% config.freshness.reviewed = '2021-05-07' %?>
+<?% config.freshness.owner = 'hta' %?>
+
+# PeerConnection and friends
+
+The PeerConnection is the C++-level implementation of the JavaScript
+object "RTCPeerConnection" from the
+[WebRTC specification](https://w3c.github.io/webrtc-pc/).
+
+Like many objects in WebRTC, the PeerConnection is used via a factory and an
+observer:
+
+ * PeerConnectionFactory, which is created via a static Create method and takes
+ a PeerConnectionFactoryDependencies structure listing such things as
+ non-default threads and factories for use by all PeerConnections using
+ the same factory. (Using more than one factory should be avoided, since
+ it takes more resources.)
+ * PeerConnection itself, which is created by the method called
+ PeerConnectionFactory::CreatePeerConnectionOrError, and takes a
+ PeerConnectionInterface::RTCConfiguration argument, as well as
+ a PeerConnectionDependencies (even more factories, plus other stuff).
+ * PeerConnectionObserver (a member of PeerConnectionDependencies), which
+   contains the functions that will be called on events in the PeerConnection.
+
+These types are visible in the API.
+
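+As a rough orientation, the sketch below shows how these pieces fit together.
+It is a minimal, hand-wavy example: the factory dependencies are left empty
+(a real application must supply threads, codec factories, an audio device and
+so on), and only the observer callbacks needed to compile are stubbed out.
+
+```cpp
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+
+// A do-nothing observer; a real application reacts to these callbacks.
+class NullObserver : public webrtc::PeerConnectionObserver {
+ public:
+  void OnSignalingChange(
+      webrtc::PeerConnectionInterface::SignalingState) override {}
+  void OnDataChannel(
+      rtc::scoped_refptr<webrtc::DataChannelInterface>) override {}
+  void OnIceGatheringChange(
+      webrtc::PeerConnectionInterface::IceGatheringState) override {}
+  void OnIceCandidate(const webrtc::IceCandidateInterface*) override {}
+  void OnRenegotiationNeeded() override {}
+};
+
+rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreateExamplePc(
+    webrtc::PeerConnectionObserver* observer) {
+  webrtc::PeerConnectionFactoryDependencies factory_deps;
+  // ... populate threads, codec factories, audio device module, etc. ...
+  auto factory =
+      webrtc::CreateModularPeerConnectionFactory(std::move(factory_deps));
+
+  webrtc::PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionDependencies pc_deps(observer);
+  auto result =
+      factory->CreatePeerConnectionOrError(config, std::move(pc_deps));
+  return result.ok() ? result.MoveValue() : nullptr;
+}
+```
+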
+## Internal structure of PeerConnection and friends
+
+The PeerConnection is, to a large extent, a "God object" - most things
+that are done in WebRTC require a PeerConnection.
+
+Internally, it is divided into several objects, each with its own
+responsibilities, all of which are owned by the PeerConnection and live
+as long as the PeerConnection:
+
+ * SdpOfferAnswerHandler takes care of negotiating configurations with
+ a remote peer, using SDP-formatted descriptions.
+ * RtpTransmissionManager takes care of the lists of RtpSenders,
+ RtpReceivers and RtpTransceivers that form the heart of the transmission
+ service.
+ * DataChannelController takes care of managing the PeerConnection's
+ DataChannels and its SctpTransport.
+ * JsepTransportController takes care of configuring the details of senders
+ and receivers.
+ * Call manages the overall call state.
+ * RtcStatsCollector (and its obsolete sibling, StatsCollector) collects
+ statistics from all the objects comprising the PeerConnection when
+ requested.
+
+There are a number of other smaller objects that are also owned by
+the PeerConnection, but it would take too much space to describe them
+all here; please consult the .h files.
+
+PeerConnectionFactory owns an object called ConnectionContext, and a
+reference to this is passed to each PeerConnection. It is referenced
+via an rtc::scoped_refptr, which means that it is guaranteed to be
+alive as long as either the factory or one of the PeerConnections
+is using it.
+
diff --git a/pc/g3doc/rtp.md b/pc/g3doc/rtp.md
new file mode 100644
index 0000000000..38c1702ad3
--- /dev/null
+++ b/pc/g3doc/rtp.md
@@ -0,0 +1,56 @@
+<?% config.freshness.reviewed = '2021-06-03' %?>
+<?% config.freshness.owner = 'hta' %?>
+
+# RTP in WebRTC
+
+WebRTC uses the RTP protocol described in
+[RFC3550](https://datatracker.ietf.org/doc/html/rfc3550) for transporting audio
+and video. Media is encrypted using [SRTP](./srtp.md).
+
+## Allocation of payload types
+
+RTP packets have a payload type field that describes which media codec can be
+used to handle a packet. For some (older) codecs like PCMU the payload type is
+assigned statically as described in
+[RFC3551](https://datatracker.ietf.org/doc/html/rfc3551). For others, it is
+assigned dynamically through the SDP. **Note:** there are no guarantees on the
+stability of a payload type assignment.
+
+For this allocation, the range from 96 to 127 is used. When this range is
+exhausted, the allocation falls back to the range from 35 to 63 as permitted by
+[section 5.1 of RFC3550][1]. Note that older versions of WebRTC failed to
+recognize payload types in the lower range. Newer codecs (such as flexfec-03 and
+AV1) will by default be allocated in that range.
+
+Payload types in the range 64 to 95 are not used to avoid confusion with RTCP as
+described in [RFC5761](https://datatracker.ietf.org/doc/html/rfc5761).
+
+## Allocation of audio payload types
+
+Audio payload types are assigned from a table by the [PayloadTypeMapper][2]
+class. New audio codecs should be allocated in the lower dynamic range [35,63],
+starting at 63, to reduce collisions with payload types.
+
+## Allocation of video payload types
+
+Video payload types are allocated by the
+[GetPayloadTypesAndDefaultCodecs method][3]. The set of codecs depends on the
+platform, in particular for H264 codecs and their different profiles. Payload
+numbers are assigned ascending from 96 for video codecs and their
+[associated retransmission format](https://datatracker.ietf.org/doc/html/rfc4588).
+Some codecs like flexfec-03 and AV1 are assigned to the lower range [35,63] for
+reasons explained above. When the upper range [96,127] is exhausted, payload
+types are assigned to the lower range [35,63], starting at 35.
+
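+The allocation order described above can be summarised with a small, purely
+illustrative helper. This is not the real WebRTC allocator and the name is
+made up; it only mirrors the "96..127 first, then 35..63" rule.
+
+```cpp
+#include <optional>
+
+// Hands out dynamic payload types: [96, 127] first, then fall back to
+// [35, 63] as permitted by RFC 3550 section 5.1 (illustrative only).
+class ExamplePayloadTypeAllocator {
+ public:
+  std::optional<int> Allocate() {
+    if (next_upper_ <= 127) return next_upper_++;
+    if (next_lower_ <= 63) return next_lower_++;
+    return std::nullopt;  // Both dynamic ranges are exhausted.
+  }
+
+ private:
+  int next_upper_ = 96;  // Primary dynamic range.
+  int next_lower_ = 35;  // Fallback range, avoiding 64-95 (RTCP conflict).
+};
+```
+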
+## Handling of payload type collisions
+
+Due to the requirement that payload types must be uniquely identifiable when
+using [BUNDLE](https://datatracker.ietf.org/doc/html/rfc8829), collisions between
+the audio and video payload type assignments may arise. These are
+resolved by the [UsedPayloadTypes][4] class which will reassign payload type
+numbers descending from 127.
+
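+A tiny sketch of that resolution strategy follows (again illustrative, not the
+actual `UsedPayloadTypes` implementation):
+
+```cpp
+#include <set>
+
+// Picks a replacement payload type by counting down from 127 past values
+// that are already taken (illustrative only).
+int ResolveCollision(const std::set<int>& used_payload_types) {
+  for (int pt = 127; pt >= 96; --pt) {
+    if (used_payload_types.count(pt) == 0) return pt;
+  }
+  return -1;  // No free dynamic payload type left in the upper range.
+}
+```
+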
+[1]: https://datatracker.ietf.org/doc/html/rfc3550#section-5.1
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/media/engine/payload_type_mapper.cc;l=25;drc=4f26a3c7e8e20e0e0ca4ca67a6ebdf3f5543dc3f
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/media/engine/webrtc_video_engine.cc;l=119;drc=b412efdb780c86e6530493afa403783d14985347
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/used_ids.h;l=94;drc=b412efdb780c86e6530493afa403783d14985347
diff --git a/pc/g3doc/srtp.md b/pc/g3doc/srtp.md
new file mode 100644
index 0000000000..47446157c9
--- /dev/null
+++ b/pc/g3doc/srtp.md
@@ -0,0 +1,72 @@
+<?% config.freshness.reviewed = '2021-05-13' %?>
+<?% config.freshness.owner = 'hta' %?>
+
+# SRTP in WebRTC
+
+WebRTC mandates encryption of media by means of the Secure Real-time Transport
+Protocol (SRTP), which is described in
+[RFC 3711](https://datatracker.ietf.org/doc/html/rfc3711).
+
+The key negotiation in WebRTC happens using DTLS-SRTP which is described in
+[RFC 5764](https://datatracker.ietf.org/doc/html/rfc5764). The older
+[SDES protocol](https://datatracker.ietf.org/doc/html/rfc4568) is implemented
+but not enabled by default.
+
+Unencrypted RTP can be enabled for debugging purposes by setting the
+PeerConnection's [`disable_encryption`][1] option to true.
+
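+A minimal sketch of flipping that switch is shown below; it assumes the
+`disable_encryption` field referenced above lives on
+`PeerConnectionFactoryInterface::Options` and that `factory` is an existing
+factory instance.
+
+```cpp
+// Debugging only: never ship a build with encryption disabled.
+webrtc::PeerConnectionFactoryInterface::Options options;
+options.disable_encryption = true;
+factory->SetOptions(options);
+```
+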
+## Supported cipher suites
+
+The implementation supports the following cipher suites:
+
+* SRTP_AES128_CM_HMAC_SHA1_80
+* SRTP_AEAD_AES_128_GCM
+* SRTP_AEAD_AES_256_GCM
+
+The SRTP_AES128_CM_HMAC_SHA1_32 cipher suite is accepted for audio-only
+connections if offered by the other side. It is not actively supported; see
+[SelectCrypto][2] for details.
+
+The cipher suite ordering allows a non-WebRTC peer to prefer GCM cipher suites;
+however, they are not selected by default between two instances of the WebRTC library.
+
+## cricket::SrtpSession
+
+The [`cricket::SrtpSession`][3] class provides encryption and decryption of SRTP
+packets using [`libsrtp`](https://github.com/cisco/libsrtp). Keys will be
+provided by `SrtpTransport` or `DtlsSrtpTransport` in the [`SetSend`][4] and
+[`SetRecv`][5] methods.
+
+Encryption and decryption happens in-place in the [`ProtectRtp`][6],
+[`ProtectRtcp`][7], [`UnprotectRtp`][8] and [`UnprotectRtcp`][9] methods. The
+`SrtpSession` class also takes care of initializing and deinitializing `libsrtp`
+by keeping track of how many instances are being used.
+
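+A sketch of the in-place protection flow described above follows. The
+signatures are approximations; consult `pc/srtp_session.h` for the
+authoritative declarations before relying on them.
+
+```cpp
+#include <cstdint>
+
+#include "pc/srtp_session.h"
+
+// Protects one RTP packet in place (sketch). |buffer| must have spare
+// capacity after the packet for the authentication tag.
+bool ProtectInPlace(cricket::SrtpSession& session,
+                    uint8_t* buffer,
+                    int rtp_len,
+                    int buffer_capacity,
+                    int* protected_len) {
+  // ProtectRtp encrypts in place and appends the auth tag, so the output
+  // length written to |protected_len| will be larger than |rtp_len|.
+  return session.ProtectRtp(buffer, rtp_len, buffer_capacity, protected_len);
+}
+```
+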
+## webrtc::SrtpTransport and webrtc::DtlsSrtpTransport
+
+The [`webrtc::SrtpTransport`][10] class controls the `SrtpSession`
+instances for RTP and RTCP. When
+[rtcp-mux](https://datatracker.ietf.org/doc/html/rfc5761) is used, the
+`SrtpSession` for RTCP is not needed.
+
+[`webrtc::DtlsSrtpTransport`][11] is a subclass of `SrtpTransport` that
+extracts the keying material when the DTLS handshake is done and configures it
+in its base class. It also becomes writable only once the DTLS handshake has
+completed.
+
+## cricket::SrtpFilter
+
+The [`cricket::SrtpFilter`][12] class is used to negotiate SDES.
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/peer_connection_interface.h;l=1413;drc=f467b445631189557d44de86a77ca6a0c3e2108d
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/media_session.cc;l=297;drc=3ac73bd0aa5322abee98f1ff8705af64a184bf61
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=33;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=40;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[5]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=51;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[6]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=62;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[7]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=69;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[8]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=72;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[9]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=73;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[10]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_transport.h;l=37;drc=a4d873786f10eedd72de25ad0d94ad7c53c1f68a
+[11]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/dtls_srtp_transport.h;l=31;drc=2f8e0536eb97ce2131e7a74e3ca06077aa0b64b3
+[12]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_filter.h;drc=d15a575ec3528c252419149d35977e55269d8a41
diff --git a/pc/ice_transport_unittest.cc b/pc/ice_transport_unittest.cc
index 3711a86d5d..ebb46cb5d5 100644
--- a/pc/ice_transport_unittest.cc
+++ b/pc/ice_transport_unittest.cc
@@ -28,9 +28,8 @@ class IceTransportTest : public ::testing::Test {};
TEST_F(IceTransportTest, CreateNonSelfDeletingTransport) {
auto cricket_transport =
std::make_unique<cricket::FakeIceTransport>("name", 0, nullptr);
- rtc::scoped_refptr<IceTransportWithPointer> ice_transport =
- new rtc::RefCountedObject<IceTransportWithPointer>(
- cricket_transport.get());
+ auto ice_transport =
+ rtc::make_ref_counted<IceTransportWithPointer>(cricket_transport.get());
EXPECT_EQ(ice_transport->internal(), cricket_transport.get());
ice_transport->Clear();
EXPECT_NE(ice_transport->internal(), cricket_transport.get());
diff --git a/pc/jitter_buffer_delay.cc b/pc/jitter_buffer_delay.cc
index 3fdf823d24..801cef7215 100644
--- a/pc/jitter_buffer_delay.cc
+++ b/pc/jitter_buffer_delay.cc
@@ -14,7 +14,6 @@
#include "rtc_base/checks.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/numerics/safe_minmax.h"
-#include "rtc_base/thread.h"
namespace {
constexpr int kDefaultDelay = 0;
@@ -23,43 +22,21 @@ constexpr int kMaximumDelayMs = 10000;
namespace webrtc {
-JitterBufferDelay::JitterBufferDelay(rtc::Thread* worker_thread)
- : signaling_thread_(rtc::Thread::Current()), worker_thread_(worker_thread) {
- RTC_DCHECK(worker_thread_);
-}
-
-void JitterBufferDelay::OnStart(cricket::Delayable* media_channel,
- uint32_t ssrc) {
- RTC_DCHECK_RUN_ON(signaling_thread_);
-
- media_channel_ = media_channel;
- ssrc_ = ssrc;
-
- // Trying to apply cached delay for the audio stream.
- if (cached_delay_seconds_) {
- Set(cached_delay_seconds_.value());
- }
-}
-
-void JitterBufferDelay::OnStop() {
- RTC_DCHECK_RUN_ON(signaling_thread_);
- // Assume that audio stream is no longer present.
- media_channel_ = nullptr;
- ssrc_ = absl::nullopt;
+JitterBufferDelay::JitterBufferDelay() {
+ worker_thread_checker_.Detach();
}
void JitterBufferDelay::Set(absl::optional<double> delay_seconds) {
- RTC_DCHECK_RUN_ON(worker_thread_);
-
- // TODO(kuddai) propagate absl::optional deeper down as default preference.
- int delay_ms =
- rtc::saturated_cast<int>(delay_seconds.value_or(kDefaultDelay) * 1000);
- delay_ms = rtc::SafeClamp(delay_ms, 0, kMaximumDelayMs);
-
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
cached_delay_seconds_ = delay_seconds;
- if (media_channel_ && ssrc_) {
- media_channel_->SetBaseMinimumPlayoutDelayMs(ssrc_.value(), delay_ms);
- }
+}
+
+int JitterBufferDelay::GetMs() const {
+ RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+ return rtc::SafeClamp(
+ rtc::saturated_cast<int>(cached_delay_seconds_.value_or(kDefaultDelay) *
+ 1000),
+ 0, kMaximumDelayMs);
}
} // namespace webrtc
diff --git a/pc/jitter_buffer_delay.h b/pc/jitter_buffer_delay.h
index 8edfc6ce20..dc10e3d2ba 100644
--- a/pc/jitter_buffer_delay.h
+++ b/pc/jitter_buffer_delay.h
@@ -14,36 +14,25 @@
#include <stdint.h>
#include "absl/types/optional.h"
-#include "media/base/delayable.h"
-#include "pc/jitter_buffer_delay_interface.h"
-#include "rtc_base/thread.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/system/no_unique_address.h"
namespace webrtc {
// JitterBufferDelay converts delay from seconds to milliseconds for the
// underlying media channel. It also handles cases when user sets delay before
-// the start of media_channel by caching its request. Note, this class is not
-// thread safe. Its thread safe version is defined in
-// pc/jitter_buffer_delay_proxy.h
-class JitterBufferDelay : public JitterBufferDelayInterface {
+// the start of media_channel by caching its request.
+class JitterBufferDelay {
public:
- // Must be called on signaling thread.
- explicit JitterBufferDelay(rtc::Thread* worker_thread);
+ JitterBufferDelay();
- void OnStart(cricket::Delayable* media_channel, uint32_t ssrc) override;
-
- void OnStop() override;
-
- void Set(absl::optional<double> delay_seconds) override;
+ void Set(absl::optional<double> delay_seconds);
+ int GetMs() const;
private:
- // Throughout webrtc source, sometimes it is also called as |main_thread_|.
- rtc::Thread* const signaling_thread_;
- rtc::Thread* const worker_thread_;
- // Media channel and ssrc together uniqely identify audio stream.
- cricket::Delayable* media_channel_ = nullptr;
- absl::optional<uint32_t> ssrc_;
- absl::optional<double> cached_delay_seconds_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_;
+ absl::optional<double> cached_delay_seconds_
+ RTC_GUARDED_BY(&worker_thread_checker_);
};
} // namespace webrtc
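A short usage sketch of the refactored class, mirroring the unit tests further
down in this change: the delay is cached in seconds and read back, clamped, in
milliseconds. All calls must come from the same sequence.

```cpp
#include "absl/types/optional.h"
#include "pc/jitter_buffer_delay.h"

void Example() {
  webrtc::JitterBufferDelay delay;
  delay.Set(3.0);                // Cache three seconds.
  int ms = delay.GetMs();        // 3000
  delay.Set(10.5);               // Clamped to the 10000 ms maximum.
  ms = delay.GetMs();            // 10000
  delay.Set(absl::nullopt);      // Falls back to the 0 ms default.
  ms = delay.GetMs();            // 0
  (void)ms;
}
```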
diff --git a/pc/jitter_buffer_delay_interface.h b/pc/jitter_buffer_delay_interface.h
deleted file mode 100644
index f2132d318d..0000000000
--- a/pc/jitter_buffer_delay_interface.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef PC_JITTER_BUFFER_DELAY_INTERFACE_H_
-#define PC_JITTER_BUFFER_DELAY_INTERFACE_H_
-
-#include <stdint.h>
-
-#include "absl/types/optional.h"
-#include "media/base/delayable.h"
-#include "rtc_base/ref_count.h"
-
-namespace webrtc {
-
-// JitterBufferDelay delivers user's queries to the underlying media channel. It
-// can describe either video or audio delay for receiving stream. "Interface"
-// suffix in the interface name is required to be compatible with api/proxy.cc
-class JitterBufferDelayInterface : public rtc::RefCountInterface {
- public:
- // OnStart allows to uniqely identify to which receiving stream playout
- // delay must correpond through |media_channel| and |ssrc| pair.
- virtual void OnStart(cricket::Delayable* media_channel, uint32_t ssrc) = 0;
-
- // Indicates that underlying receiving stream is stopped.
- virtual void OnStop() = 0;
-
- virtual void Set(absl::optional<double> delay_seconds) = 0;
-};
-
-} // namespace webrtc
-
-#endif // PC_JITTER_BUFFER_DELAY_INTERFACE_H_
diff --git a/pc/jitter_buffer_delay_proxy.h b/pc/jitter_buffer_delay_proxy.h
deleted file mode 100644
index 91729d6935..0000000000
--- a/pc/jitter_buffer_delay_proxy.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef PC_JITTER_BUFFER_DELAY_PROXY_H_
-#define PC_JITTER_BUFFER_DELAY_PROXY_H_
-
-#include <stdint.h>
-
-#include "api/proxy.h"
-#include "media/base/delayable.h"
-#include "pc/jitter_buffer_delay_interface.h"
-
-namespace webrtc {
-
-BEGIN_PROXY_MAP(JitterBufferDelay)
-PROXY_PRIMARY_THREAD_DESTRUCTOR()
-PROXY_METHOD2(void, OnStart, cricket::Delayable*, uint32_t)
-PROXY_METHOD0(void, OnStop)
-PROXY_SECONDARY_METHOD1(void, Set, absl::optional<double>)
-END_PROXY_MAP()
-
-} // namespace webrtc
-
-#endif // PC_JITTER_BUFFER_DELAY_PROXY_H_
diff --git a/pc/jitter_buffer_delay_unittest.cc b/pc/jitter_buffer_delay_unittest.cc
index 7edd09acd2..b00075ceb5 100644
--- a/pc/jitter_buffer_delay_unittest.cc
+++ b/pc/jitter_buffer_delay_unittest.cc
@@ -13,79 +13,47 @@
#include <stdint.h>
#include "absl/types/optional.h"
-#include "api/scoped_refptr.h"
-#include "pc/test/mock_delayable.h"
-#include "rtc_base/ref_counted_object.h"
-#include "rtc_base/thread.h"
-#include "test/gmock.h"
#include "test/gtest.h"
-using ::testing::Return;
-
-namespace {
-constexpr int kSsrc = 1234;
-} // namespace
-
namespace webrtc {
class JitterBufferDelayTest : public ::testing::Test {
public:
- JitterBufferDelayTest()
- : delay_(new rtc::RefCountedObject<JitterBufferDelay>(
- rtc::Thread::Current())) {}
+ JitterBufferDelayTest() {}
protected:
- rtc::scoped_refptr<JitterBufferDelayInterface> delay_;
- MockDelayable delayable_;
+ JitterBufferDelay delay_;
};
TEST_F(JitterBufferDelayTest, Set) {
- delay_->OnStart(&delayable_, kSsrc);
-
- EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 3000))
- .WillOnce(Return(true));
-
// Delay in seconds.
- delay_->Set(3.0);
+ delay_.Set(3.0);
+ EXPECT_EQ(delay_.GetMs(), 3000);
}
-TEST_F(JitterBufferDelayTest, Caching) {
- // Check that value is cached before start.
- delay_->Set(4.0);
-
- // Check that cached value applied on the start.
- EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 4000))
- .WillOnce(Return(true));
- delay_->OnStart(&delayable_, kSsrc);
+TEST_F(JitterBufferDelayTest, DefaultValue) {
+ EXPECT_EQ(delay_.GetMs(), 0); // Default value is 0ms.
}
TEST_F(JitterBufferDelayTest, Clamping) {
- delay_->OnStart(&delayable_, kSsrc);
-
// In current Jitter Buffer implementation (Audio or Video) maximum supported
// value is 10000 milliseconds.
- EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 10000))
- .WillOnce(Return(true));
- delay_->Set(10.5);
+ delay_.Set(10.5);
+ EXPECT_EQ(delay_.GetMs(), 10000);
// Test int overflow.
- EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 10000))
- .WillOnce(Return(true));
- delay_->Set(21474836470.0);
+ delay_.Set(21474836470.0);
+ EXPECT_EQ(delay_.GetMs(), 10000);
- EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 0))
- .WillOnce(Return(true));
- delay_->Set(-21474836470.0);
+ delay_.Set(-21474836470.0);
+ EXPECT_EQ(delay_.GetMs(), 0);
// Boundary value in seconds to milliseconds conversion.
- EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 0))
- .WillOnce(Return(true));
- delay_->Set(0.0009);
-
- EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 0))
- .WillOnce(Return(true));
+ delay_.Set(0.0009);
+ EXPECT_EQ(delay_.GetMs(), 0);
- delay_->Set(-2.0);
+ delay_.Set(-2.0);
+ EXPECT_EQ(delay_.GetMs(), 0);
}
} // namespace webrtc
diff --git a/pc/jsep_transport.cc b/pc/jsep_transport.cc
index 1bdcafd7ec..e72088885f 100644
--- a/pc/jsep_transport.cc
+++ b/pc/jsep_transport.cc
@@ -26,6 +26,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/trace_event.h"
using webrtc::SdpType;
@@ -88,23 +89,23 @@ JsepTransport::JsepTransport(
unencrypted_rtp_transport_(std::move(unencrypted_rtp_transport)),
sdes_transport_(std::move(sdes_transport)),
dtls_srtp_transport_(std::move(dtls_srtp_transport)),
- rtp_dtls_transport_(
- rtp_dtls_transport ? new rtc::RefCountedObject<webrtc::DtlsTransport>(
- std::move(rtp_dtls_transport))
- : nullptr),
- rtcp_dtls_transport_(
- rtcp_dtls_transport
- ? new rtc::RefCountedObject<webrtc::DtlsTransport>(
- std::move(rtcp_dtls_transport))
- : nullptr),
+ rtp_dtls_transport_(rtp_dtls_transport
+ ? rtc::make_ref_counted<webrtc::DtlsTransport>(
+ std::move(rtp_dtls_transport))
+ : nullptr),
+ rtcp_dtls_transport_(rtcp_dtls_transport
+ ? rtc::make_ref_counted<webrtc::DtlsTransport>(
+ std::move(rtcp_dtls_transport))
+ : nullptr),
sctp_data_channel_transport_(
sctp_transport ? std::make_unique<webrtc::SctpDataChannelTransport>(
sctp_transport.get())
: nullptr),
sctp_transport_(sctp_transport
- ? new rtc::RefCountedObject<webrtc::SctpTransport>(
+ ? rtc::make_ref_counted<webrtc::SctpTransport>(
std::move(sctp_transport))
: nullptr) {
+ TRACE_EVENT0("webrtc", "JsepTransport::JsepTransport");
RTC_DCHECK(ice_transport_);
RTC_DCHECK(rtp_dtls_transport_);
// |rtcp_ice_transport_| must be present iff |rtcp_dtls_transport_| is
@@ -130,6 +131,7 @@ JsepTransport::JsepTransport(
}
JsepTransport::~JsepTransport() {
+ TRACE_EVENT0("webrtc", "JsepTransport::~JsepTransport");
if (sctp_transport_) {
sctp_transport_->Clear();
}
@@ -148,7 +150,7 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription(
const JsepTransportDescription& jsep_description,
SdpType type) {
webrtc::RTCError error;
-
+ TRACE_EVENT0("webrtc", "JsepTransport::SetLocalJsepTransportDescription");
RTC_DCHECK_RUN_ON(network_thread_);
IceParameters ice_parameters =
@@ -234,6 +236,7 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription(
webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription(
const JsepTransportDescription& jsep_description,
webrtc::SdpType type) {
+  TRACE_EVENT0("webrtc", "JsepTransport::SetRemoteJsepTransportDescription");
webrtc::RTCError error;
RTC_DCHECK_RUN_ON(network_thread_);
@@ -345,6 +348,7 @@ absl::optional<rtc::SSLRole> JsepTransport::GetDtlsRole() const {
}
bool JsepTransport::GetStats(TransportStats* stats) {
+ TRACE_EVENT0("webrtc", "JsepTransport::GetStats");
RTC_DCHECK_RUN_ON(network_thread_);
stats->transport_name = mid();
stats->channel_stats.clear();
@@ -363,6 +367,7 @@ bool JsepTransport::GetStats(TransportStats* stats) {
webrtc::RTCError JsepTransport::VerifyCertificateFingerprint(
const rtc::RTCCertificate* certificate,
const rtc::SSLFingerprint* fingerprint) const {
+ TRACE_EVENT0("webrtc", "JsepTransport::VerifyCertificateFingerprint");
RTC_DCHECK_RUN_ON(network_thread_);
if (!fingerprint) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
@@ -401,6 +406,7 @@ void JsepTransport::SetActiveResetSrtpParams(bool active_reset_srtp_params) {
void JsepTransport::SetRemoteIceParameters(
const IceParameters& ice_parameters,
IceTransportInternal* ice_transport) {
+ TRACE_EVENT0("webrtc", "JsepTransport::SetRemoteIceParameters");
RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(ice_transport);
RTC_DCHECK(remote_description_);
diff --git a/pc/jsep_transport_collection.cc b/pc/jsep_transport_collection.cc
new file mode 100644
index 0000000000..ce068d99fc
--- /dev/null
+++ b/pc/jsep_transport_collection.cc
@@ -0,0 +1,255 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jsep_transport_collection.h"
+
+#include <algorithm>
+#include <map>
+#include <type_traits>
+#include <utility>
+
+#include "p2p/base/p2p_constants.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+void BundleManager::Update(const cricket::SessionDescription* description) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ bundle_groups_.clear();
+ for (const cricket::ContentGroup* new_bundle_group :
+ description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)) {
+ bundle_groups_.push_back(
+ std::make_unique<cricket::ContentGroup>(*new_bundle_group));
+ RTC_DLOG(LS_VERBOSE) << "Establishing bundle group "
+ << new_bundle_group->ToString();
+ }
+ established_bundle_groups_by_mid_.clear();
+ for (const auto& bundle_group : bundle_groups_) {
+ for (const std::string& content_name : bundle_group->content_names()) {
+ established_bundle_groups_by_mid_[content_name] = bundle_group.get();
+ }
+ }
+}
+
+const cricket::ContentGroup* BundleManager::LookupGroupByMid(
+ const std::string& mid) const {
+ auto it = established_bundle_groups_by_mid_.find(mid);
+ return it != established_bundle_groups_by_mid_.end() ? it->second : nullptr;
+}
+bool BundleManager::IsFirstMidInGroup(const std::string& mid) const {
+ auto group = LookupGroupByMid(mid);
+ if (!group) {
+ return true; // Unbundled MIDs are considered group leaders
+ }
+ return mid == *(group->FirstContentName());
+}
+
+cricket::ContentGroup* BundleManager::LookupGroupByMid(const std::string& mid) {
+ auto it = established_bundle_groups_by_mid_.find(mid);
+ return it != established_bundle_groups_by_mid_.end() ? it->second : nullptr;
+}
+
+void BundleManager::DeleteMid(const cricket::ContentGroup* bundle_group,
+ const std::string& mid) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_LOG(LS_VERBOSE) << "Deleting mid " << mid << " from bundle group "
+ << bundle_group->ToString();
+ // Remove the rejected content from the |bundle_group|.
+ // The const pointer arg is used to identify the group, we verify
+ // it before we use it to make a modification.
+ auto bundle_group_it = std::find_if(
+ bundle_groups_.begin(), bundle_groups_.end(),
+ [bundle_group](std::unique_ptr<cricket::ContentGroup>& group) {
+ return bundle_group == group.get();
+ });
+ RTC_DCHECK(bundle_group_it != bundle_groups_.end());
+ (*bundle_group_it)->RemoveContentName(mid);
+ established_bundle_groups_by_mid_.erase(
+ established_bundle_groups_by_mid_.find(mid));
+}
+
+void BundleManager::DeleteGroup(const cricket::ContentGroup* bundle_group) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DLOG(LS_VERBOSE) << "Deleting bundle group " << bundle_group->ToString();
+
+ auto bundle_group_it = std::find_if(
+ bundle_groups_.begin(), bundle_groups_.end(),
+ [bundle_group](std::unique_ptr<cricket::ContentGroup>& group) {
+ return bundle_group == group.get();
+ });
+ RTC_DCHECK(bundle_group_it != bundle_groups_.end());
+ auto mid_list = (*bundle_group_it)->content_names();
+ for (const auto& content_name : mid_list) {
+ DeleteMid(bundle_group, content_name);
+ }
+ bundle_groups_.erase(bundle_group_it);
+}
+
+void JsepTransportCollection::RegisterTransport(
+ const std::string& mid,
+ std::unique_ptr<cricket::JsepTransport> transport) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ SetTransportForMid(mid, transport.get());
+ jsep_transports_by_name_[mid] = std::move(transport);
+ RTC_DCHECK(IsConsistent());
+}
+
+std::vector<cricket::JsepTransport*> JsepTransportCollection::Transports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ std::vector<cricket::JsepTransport*> result;
+ for (auto& kv : jsep_transports_by_name_) {
+ result.push_back(kv.second.get());
+ }
+ return result;
+}
+
+void JsepTransportCollection::DestroyAllTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (const auto& jsep_transport : jsep_transports_by_name_) {
+ map_change_callback_(jsep_transport.first, nullptr);
+ }
+ jsep_transports_by_name_.clear();
+ RTC_DCHECK(IsConsistent());
+}
+
+const cricket::JsepTransport* JsepTransportCollection::GetTransportByName(
+ const std::string& transport_name) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = jsep_transports_by_name_.find(transport_name);
+ return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
+}
+
+cricket::JsepTransport* JsepTransportCollection::GetTransportByName(
+ const std::string& transport_name) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = jsep_transports_by_name_.find(transport_name);
+ return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
+}
+
+cricket::JsepTransport* JsepTransportCollection::GetTransportForMid(
+ const std::string& mid) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = mid_to_transport_.find(mid);
+ return it == mid_to_transport_.end() ? nullptr : it->second;
+}
+
+const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid(
+ const std::string& mid) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = mid_to_transport_.find(mid);
+ return it == mid_to_transport_.end() ? nullptr : it->second;
+}
+
+bool JsepTransportCollection::SetTransportForMid(
+ const std::string& mid,
+ cricket::JsepTransport* jsep_transport) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(jsep_transport);
+
+ auto it = mid_to_transport_.find(mid);
+ if (it != mid_to_transport_.end() && it->second == jsep_transport)
+ return true;
+
+ pending_mids_.push_back(mid);
+
+ // The map_change_callback must be called before destroying the
+ // transport, because it removes references to the transport
+ // in the RTP demuxer.
+ bool result = map_change_callback_(mid, jsep_transport);
+
+ if (it == mid_to_transport_.end()) {
+ mid_to_transport_.insert(std::make_pair(mid, jsep_transport));
+ } else {
+ auto old_transport = it->second;
+ it->second = jsep_transport;
+ MaybeDestroyJsepTransport(old_transport);
+ }
+ RTC_DCHECK(IsConsistent());
+ return result;
+}
+
+void JsepTransportCollection::RemoveTransportForMid(const std::string& mid) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(IsConsistent());
+ bool ret = map_change_callback_(mid, nullptr);
+ // Calling OnTransportChanged with nullptr should always succeed, since it is
+ // only expected to fail when adding media to a transport (not removing).
+ RTC_DCHECK(ret);
+
+ auto old_transport = GetTransportForMid(mid);
+ if (old_transport) {
+ mid_to_transport_.erase(mid);
+ MaybeDestroyJsepTransport(old_transport);
+ }
+ RTC_DCHECK(IsConsistent());
+}
+
+void JsepTransportCollection::RollbackTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (auto&& mid : pending_mids_) {
+ RemoveTransportForMid(mid);
+ }
+ pending_mids_.clear();
+}
+
+void JsepTransportCollection::CommitTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ pending_mids_.clear();
+}
+
+bool JsepTransportCollection::TransportInUse(
+ cricket::JsepTransport* jsep_transport) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (const auto& kv : mid_to_transport_) {
+ if (kv.second == jsep_transport) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void JsepTransportCollection::MaybeDestroyJsepTransport(
+ cricket::JsepTransport* transport) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ // Don't destroy the JsepTransport if there are still media sections referring
+ // to it.
+ if (TransportInUse(transport)) {
+ return;
+ }
+ for (const auto& it : jsep_transports_by_name_) {
+ if (it.second.get() == transport) {
+ jsep_transports_by_name_.erase(it.first);
+ state_change_callback_();
+ break;
+ }
+ }
+ RTC_DCHECK(IsConsistent());
+}
+
+bool JsepTransportCollection::IsConsistent() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (const auto& it : jsep_transports_by_name_) {
+ if (!TransportInUse(it.second.get())) {
+ RTC_LOG(LS_ERROR) << "Transport registered with mid " << it.first
+ << " is not in use, transport " << it.second.get();
+ return false;
+ }
+ const auto& lookup = mid_to_transport_.find(it.first);
+ if (lookup->second != it.second.get()) {
+ // Not an error, but unusual.
+ RTC_DLOG(LS_INFO) << "Note: Mid " << it.first << " was registered to "
+ << it.second.get() << " but currently maps to "
+ << lookup->second;
+ }
+ }
+ return true;
+}
+
+} // namespace webrtc
diff --git a/pc/jsep_transport_collection.h b/pc/jsep_transport_collection.h
new file mode 100644
index 0000000000..0dd528d348
--- /dev/null
+++ b/pc/jsep_transport_collection.h
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_JSEP_TRANSPORT_COLLECTION_H_
+#define PC_JSEP_TRANSPORT_COLLECTION_H_
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/sequence_checker.h"
+#include "pc/jsep_transport.h"
+#include "pc/session_description.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// This class manages information about RFC 8843 BUNDLE bundles
+// in SDP descriptions.
+
+// This is a work-in-progress. Planned steps:
+// 1) Move all Bundle-related data structures from JsepTransport
+// into this class.
+// 2) Move all Bundle-related functions into this class.
+// 3) Move remaining Bundle-related logic into this class.
+// Make data members private.
+// 4) Refine interface to have comprehensible semantics.
+// 5) Add unit tests.
+// 6) Change the logic to do what's right.
+class BundleManager {
+ public:
+ BundleManager() {
+ // Allow constructor to be called on a different thread.
+ sequence_checker_.Detach();
+ }
+ const std::vector<std::unique_ptr<cricket::ContentGroup>>& bundle_groups()
+ const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return bundle_groups_;
+ }
+ // Lookup a bundle group by a member mid name.
+ const cricket::ContentGroup* LookupGroupByMid(const std::string& mid) const;
+ cricket::ContentGroup* LookupGroupByMid(const std::string& mid);
+ // Returns true if the MID is the first item of a group, or if
+ // the MID is not a member of a group.
+ bool IsFirstMidInGroup(const std::string& mid) const;
+ // Update the groups description. This completely replaces the group
+ // description with the one from the SessionDescription.
+ void Update(const cricket::SessionDescription* description);
+ // Delete a MID from the group that contains it.
+ void DeleteMid(const cricket::ContentGroup* bundle_group,
+ const std::string& mid);
+ // Delete a group.
+ void DeleteGroup(const cricket::ContentGroup* bundle_group);
+
+ private:
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ std::vector<std::unique_ptr<cricket::ContentGroup>> bundle_groups_
+ RTC_GUARDED_BY(sequence_checker_);
+ std::map<std::string, cricket::ContentGroup*>
+ established_bundle_groups_by_mid_;
+};
+
+// This class keeps the mapping of MIDs to transports.
+// It is pulled out here because a lot of the code that deals with
+// bundles ends up modifying this map, and the two need to be consistent;
+// the managers may merge.
+class JsepTransportCollection {
+ public:
+ JsepTransportCollection(std::function<bool(const std::string& mid,
+ cricket::JsepTransport* transport)>
+ map_change_callback,
+ std::function<void()> state_change_callback)
+ : map_change_callback_(map_change_callback),
+ state_change_callback_(state_change_callback) {
+ // Allow constructor to be called on a different thread.
+ sequence_checker_.Detach();
+ }
+
+ void RegisterTransport(const std::string& mid,
+ std::unique_ptr<cricket::JsepTransport> transport);
+ std::vector<cricket::JsepTransport*> Transports();
+ void DestroyAllTransports();
+ // Lookup a JsepTransport by the MID that was used to register it.
+ cricket::JsepTransport* GetTransportByName(const std::string& mid);
+ const cricket::JsepTransport* GetTransportByName(
+ const std::string& mid) const;
+ // Lookup a JsepTransport by any MID that refers to it.
+ cricket::JsepTransport* GetTransportForMid(const std::string& mid);
+ const cricket::JsepTransport* GetTransportForMid(
+ const std::string& mid) const;
+ // Set transport for a MID. This may destroy a transport if it is no
+ // longer in use.
+ bool SetTransportForMid(const std::string& mid,
+ cricket::JsepTransport* jsep_transport);
+ // Remove a transport for a MID. This may destroy a transport if it is
+ // no longer in use.
+ void RemoveTransportForMid(const std::string& mid);
+ // Roll back pending mid-to-transport mappings.
+ void RollbackTransports();
+ // Commit pending mid-transport mappings (rollback is no longer possible).
+ void CommitTransports();
+ // Returns true if any mid currently maps to this transport.
+ bool TransportInUse(cricket::JsepTransport* jsep_transport) const;
+
+ private:
+ // Destroy a transport if it's no longer in use.
+ void MaybeDestroyJsepTransport(cricket::JsepTransport* transport);
+
+ bool IsConsistent(); // For testing only: Verify internal structure.
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ // This member owns the JSEP transports.
+ std::map<std::string, std::unique_ptr<cricket::JsepTransport>>
+ jsep_transports_by_name_ RTC_GUARDED_BY(sequence_checker_);
+
+ // This keeps track of the mapping between media section
+ // (BaseChannel/SctpTransport) and the JsepTransport underneath.
+ std::map<std::string, cricket::JsepTransport*> mid_to_transport_
+ RTC_GUARDED_BY(sequence_checker_);
+ // Keep track of mids that have been mapped to transports. Used for rollback.
+ std::vector<std::string> pending_mids_ RTC_GUARDED_BY(sequence_checker_);
+ // Callback used to inform subscribers of altered transports.
+ const std::function<bool(const std::string& mid,
+ cricket::JsepTransport* transport)>
+ map_change_callback_;
+ // Callback used to inform subscribers of possibly altered state.
+ const std::function<void()> state_change_callback_;
+};
+
+} // namespace webrtc
+
+#endif // PC_JSEP_TRANSPORT_COLLECTION_H_
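For orientation, the collection is wired up by its owner roughly as sketched
below (compare the JsepTransportController constructor in the next file); the
lambda bodies here are placeholders, not the real callbacks.

```cpp
#include <string>

#include "pc/jsep_transport_collection.h"

void Example() {
  webrtc::JsepTransportCollection transports(
      /*map_change_callback=*/[](const std::string& mid,
                                 cricket::JsepTransport* transport) {
        // Re-route demuxing for |mid| to |transport| (nullptr means removal).
        return true;  // false only when adding media to a transport fails.
      },
      /*state_change_callback=*/[]() {
        // Recompute aggregate state derived from the current set of transports.
      });
  // |transports| would normally live as a member of its owning controller.
}
```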
diff --git a/pc/jsep_transport_controller.cc b/pc/jsep_transport_controller.cc
index 312b1280b1..95cf21587d 100644
--- a/pc/jsep_transport_controller.cc
+++ b/pc/jsep_transport_controller.cc
@@ -13,10 +13,13 @@
#include <stddef.h>
#include <algorithm>
+#include <functional>
#include <memory>
+#include <type_traits>
#include <utility>
#include "absl/algorithm/container.h"
+#include "api/dtls_transport_interface.h"
#include "api/rtp_parameters.h"
#include "api/sequence_checker.h"
#include "api/transport/enums.h"
@@ -28,9 +31,8 @@
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
-#include "rtc_base/net_helper.h"
-#include "rtc_base/socket_address.h"
#include "rtc_base/thread.h"
+#include "rtc_base/trace_event.h"
using webrtc::SdpType;
@@ -44,6 +46,14 @@ JsepTransportController::JsepTransportController(
: network_thread_(network_thread),
port_allocator_(port_allocator),
async_dns_resolver_factory_(async_dns_resolver_factory),
+ transports_(
+ [this](const std::string& mid, cricket::JsepTransport* transport) {
+ return OnTransportChanged(mid, transport);
+ },
+ [this]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ UpdateAggregateStates_n();
+ }),
config_(config),
active_reset_srtp_params_(config.active_reset_srtp_params) {
// The |transport_observer| is assumed to be non-null.
@@ -63,6 +73,7 @@ JsepTransportController::~JsepTransportController() {
RTCError JsepTransportController::SetLocalDescription(
SdpType type,
const cricket::SessionDescription* description) {
+ TRACE_EVENT0("webrtc", "JsepTransportController::SetLocalDescription");
if (!network_thread_->IsCurrent()) {
return network_thread_->Invoke<RTCError>(
RTC_FROM_HERE, [=] { return SetLocalDescription(type, description); });
@@ -83,6 +94,7 @@ RTCError JsepTransportController::SetLocalDescription(
RTCError JsepTransportController::SetRemoteDescription(
SdpType type,
const cricket::SessionDescription* description) {
+ TRACE_EVENT0("webrtc", "JsepTransportController::SetRemoteDescription");
if (!network_thread_->IsCurrent()) {
return network_thread_->Invoke<RTCError>(
RTC_FROM_HERE, [=] { return SetRemoteDescription(type, description); });
@@ -162,8 +174,8 @@ void JsepTransportController::SetIceConfig(const cricket::IceConfig& config) {
void JsepTransportController::SetNeedsIceRestartFlag() {
RTC_DCHECK_RUN_ON(network_thread_);
- for (auto& kv : jsep_transports_by_name_) {
- kv.second->SetNeedsIceRestartFlag();
+ for (auto& transport : transports_.Transports()) {
+ transport->SetNeedsIceRestartFlag();
}
}
@@ -216,8 +228,8 @@ bool JsepTransportController::SetLocalCertificate(
// Set certificate for JsepTransport, which verifies it matches the
// fingerprint in SDP, and DTLS transport.
// Fallback from DTLS to SDES is not supported.
- for (auto& kv : jsep_transports_by_name_) {
- kv.second->SetLocalCertificate(certificate_);
+ for (auto& transport : transports_.Transports()) {
+ transport->SetLocalCertificate(certificate_);
}
for (auto& dtls : GetDtlsTransports()) {
bool set_cert_success = dtls->SetLocalCertificate(certificate_);
@@ -357,8 +369,8 @@ void JsepTransportController::SetActiveResetSrtpParams(
<< "Updating the active_reset_srtp_params for JsepTransportController: "
<< active_reset_srtp_params;
active_reset_srtp_params_ = active_reset_srtp_params;
- for (auto& kv : jsep_transports_by_name_) {
- kv.second->SetActiveResetSrtpParams(active_reset_srtp_params);
+ for (auto& transport : transports_.Transports()) {
+ transport->SetActiveResetSrtpParams(active_reset_srtp_params);
}
}
@@ -368,13 +380,7 @@ void JsepTransportController::RollbackTransports() {
return;
}
RTC_DCHECK_RUN_ON(network_thread_);
- for (auto&& mid : pending_mids_) {
- RemoveTransportForMid(mid);
- }
- for (auto&& mid : pending_mids_) {
- MaybeDestroyJsepTransport(mid);
- }
- pending_mids_.clear();
+ transports_.RollbackTransports();
}
rtc::scoped_refptr<webrtc::IceTransportInterface>
@@ -506,9 +512,7 @@ std::vector<cricket::DtlsTransportInternal*>
JsepTransportController::GetDtlsTransports() {
RTC_DCHECK_RUN_ON(network_thread_);
std::vector<cricket::DtlsTransportInternal*> dtls_transports;
- for (auto it = jsep_transports_by_name_.begin();
- it != jsep_transports_by_name_.end(); ++it) {
- auto jsep_transport = it->second.get();
+ for (auto jsep_transport : transports_.Transports()) {
RTC_DCHECK(jsep_transport);
if (jsep_transport->rtp_dtls_transport()) {
dtls_transports.push_back(jsep_transport->rtp_dtls_transport());
@@ -525,6 +529,7 @@ RTCError JsepTransportController::ApplyDescription_n(
bool local,
SdpType type,
const cricket::SessionDescription* description) {
+ TRACE_EVENT0("webrtc", "JsepTransportController::ApplyDescription_n");
RTC_DCHECK(description);
if (local) {
@@ -534,21 +539,33 @@ RTCError JsepTransportController::ApplyDescription_n(
}
RTCError error;
- error = ValidateAndMaybeUpdateBundleGroup(local, type, description);
+ error = ValidateAndMaybeUpdateBundleGroups(local, type, description);
if (!error.ok()) {
return error;
}
- std::vector<int> merged_encrypted_extension_ids;
- if (bundle_group_) {
- merged_encrypted_extension_ids =
- MergeEncryptedHeaderExtensionIdsForBundle(description);
+ std::map<const cricket::ContentGroup*, std::vector<int>>
+ merged_encrypted_extension_ids_by_bundle;
+ if (!bundles_.bundle_groups().empty()) {
+ merged_encrypted_extension_ids_by_bundle =
+ MergeEncryptedHeaderExtensionIdsForBundles(description);
+ }
+
+ // Because the creation of transports depends on whether
+ // certain mids are present, we have to process rejection
+ // before we try to create transports.
+ for (size_t i = 0; i < description->contents().size(); ++i) {
+ const cricket::ContentInfo& content_info = description->contents()[i];
+ if (content_info.rejected) {
+ // This may cause groups to be removed from |bundles_.bundle_groups()|.
+ HandleRejectedContent(content_info);
+ }
}
for (const cricket::ContentInfo& content_info : description->contents()) {
- // Don't create transports for rejected m-lines and bundled m-lines."
+ // Don't create transports for rejected m-lines and bundled m-lines.
if (content_info.rejected ||
- (IsBundled(content_info.name) && content_info.name != *bundled_mid())) {
+ !bundles_.IsFirstMidInGroup(content_info.name)) {
continue;
}
error = MaybeCreateJsepTransport(local, content_info, *description);
@@ -563,15 +580,22 @@ RTCError JsepTransportController::ApplyDescription_n(
const cricket::ContentInfo& content_info = description->contents()[i];
const cricket::TransportInfo& transport_info =
description->transport_infos()[i];
+
if (content_info.rejected) {
- HandleRejectedContent(content_info, description);
continue;
}
- if (IsBundled(content_info.name) && content_info.name != *bundled_mid()) {
- if (!HandleBundledContent(content_info)) {
+ const cricket::ContentGroup* established_bundle_group =
+ bundles_.LookupGroupByMid(content_info.name);
+
+ // For bundle members that are not BUNDLE-tagged (not first in the group),
+ // configure their transport to be the same as the BUNDLE-tagged transport.
+ if (established_bundle_group &&
+ content_info.name != *established_bundle_group->FirstContentName()) {
+ if (!HandleBundledContent(content_info, *established_bundle_group)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "Failed to process the bundled m= section with mid='" +
+ "Failed to process the bundled m= section with "
+ "mid='" +
content_info.name + "'.");
}
continue;
@@ -583,8 +607,13 @@ RTCError JsepTransportController::ApplyDescription_n(
}
std::vector<int> extension_ids;
- if (bundled_mid() && content_info.name == *bundled_mid()) {
- extension_ids = merged_encrypted_extension_ids;
+ // Is BUNDLE-tagged (first in the group)?
+ if (established_bundle_group &&
+ content_info.name == *established_bundle_group->FirstContentName()) {
+ auto it = merged_encrypted_extension_ids_by_bundle.find(
+ established_bundle_group);
+ RTC_DCHECK(it != merged_encrypted_extension_ids_by_bundle.end());
+ extension_ids = it->second;
} else {
extension_ids = GetEncryptedHeaderExtensionIds(content_info);
}
@@ -617,56 +646,103 @@ RTCError JsepTransportController::ApplyDescription_n(
}
}
if (type == SdpType::kAnswer) {
- pending_mids_.clear();
+ transports_.CommitTransports();
}
return RTCError::OK();
}
-RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
+RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups(
bool local,
SdpType type,
const cricket::SessionDescription* description) {
RTC_DCHECK(description);
- const cricket::ContentGroup* new_bundle_group =
- description->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
- // The BUNDLE group containing a MID that no m= section has is invalid.
- if (new_bundle_group) {
+ std::vector<const cricket::ContentGroup*> new_bundle_groups =
+ description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ // Verify |new_bundle_groups|.
+ std::map<std::string, const cricket::ContentGroup*> new_bundle_groups_by_mid;
+ for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) {
for (const std::string& content_name : new_bundle_group->content_names()) {
+ // A MID must not appear in more than one BUNDLE group, and a BUNDLE group
+ // must not list the same MID more than once.
+ if (new_bundle_groups_by_mid.find(content_name) !=
+ new_bundle_groups_by_mid.end()) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "A BUNDLE group contains a MID='" + content_name +
+ "' that is already in a BUNDLE group.");
+ }
+ new_bundle_groups_by_mid.insert(
+ std::make_pair(content_name, new_bundle_group));
+ // The BUNDLE group must not contain a MID that no m= section has.
if (!description->GetContentByName(content_name)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group contains MID='" + content_name +
+ "A BUNDLE group contains a MID='" + content_name +
"' matching no m= section.");
}
}
}
if (type == SdpType::kAnswer) {
- const cricket::ContentGroup* offered_bundle_group =
- local ? remote_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE)
- : local_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ std::vector<const cricket::ContentGroup*> offered_bundle_groups =
+ local ? remote_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)
+ : local_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+
+ std::map<std::string, const cricket::ContentGroup*>
+ offered_bundle_groups_by_mid;
+ for (const cricket::ContentGroup* offered_bundle_group :
+ offered_bundle_groups) {
+ for (const std::string& content_name :
+ offered_bundle_group->content_names()) {
+ offered_bundle_groups_by_mid[content_name] = offered_bundle_group;
+ }
+ }
- if (new_bundle_group) {
- // The BUNDLE group in answer should be a subset of offered group.
+ std::map<const cricket::ContentGroup*, const cricket::ContentGroup*>
+ new_bundle_groups_by_offered_bundle_groups;
+ for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) {
+ if (!new_bundle_group->FirstContentName()) {
+ // Empty groups could be a subset of any group.
+ continue;
+ }
+ // The group in the answer (new_bundle_group) must have a corresponding
+ // group in the offer (offered_bundle_group), because the answer groups
+ // may only be subsets of the offer groups.
+ auto it = offered_bundle_groups_by_mid.find(
+ *new_bundle_group->FirstContentName());
+ if (it == offered_bundle_groups_by_mid.end()) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "A BUNDLE group was added in the answer that did not "
+ "exist in the offer.");
+ }
+ const cricket::ContentGroup* offered_bundle_group = it->second;
+ if (new_bundle_groups_by_offered_bundle_groups.find(
+ offered_bundle_group) !=
+ new_bundle_groups_by_offered_bundle_groups.end()) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "A MID in the answer has changed group.");
+ }
+ new_bundle_groups_by_offered_bundle_groups.insert(
+ std::make_pair(offered_bundle_group, new_bundle_group));
for (const std::string& content_name :
new_bundle_group->content_names()) {
- if (!offered_bundle_group ||
- !offered_bundle_group->HasContentName(content_name)) {
+ it = offered_bundle_groups_by_mid.find(content_name);
+ // The BUNDLE group in answer should be a subset of offered group.
+ if (it == offered_bundle_groups_by_mid.end() ||
+ it->second != offered_bundle_group) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group in answer contains a MID='" +
+ "A BUNDLE group in answer contains a MID='" +
content_name +
- "' that was "
- "not in the offered group.");
+ "' that was not in the offered group.");
}
}
}
- if (bundle_group_) {
- for (const std::string& content_name : bundle_group_->content_names()) {
+ for (const auto& bundle_group : bundles_.bundle_groups()) {
+ for (const std::string& content_name : bundle_group->content_names()) {
// An answer that removes m= sections from pre-negotiated BUNDLE group
// without rejecting it, is invalid.
- if (!new_bundle_group ||
- !new_bundle_group->HasContentName(content_name)) {
+ auto it = new_bundle_groups_by_mid.find(content_name);
+ if (it == new_bundle_groups_by_mid.end()) {
auto* content_info = description->GetContentByName(content_name);
if (!content_info || !content_info->rejected) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
@@ -687,33 +763,35 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
}
if (ShouldUpdateBundleGroup(type, description)) {
- bundle_group_ = *new_bundle_group;
+ bundles_.Update(description);
}
- if (!bundled_mid()) {
- return RTCError::OK();
- }
+ for (const auto& bundle_group : bundles_.bundle_groups()) {
+ if (!bundle_group->FirstContentName())
+ continue;
- auto bundled_content = description->GetContentByName(*bundled_mid());
- if (!bundled_content) {
- return RTCError(
- RTCErrorType::INVALID_PARAMETER,
- "An m= section associated with the BUNDLE-tag doesn't exist.");
- }
+ // The first MID in a BUNDLE group is BUNDLE-tagged.
+ auto bundled_content =
+ description->GetContentByName(*bundle_group->FirstContentName());
+ if (!bundled_content) {
+ return RTCError(
+ RTCErrorType::INVALID_PARAMETER,
+ "An m= section associated with the BUNDLE-tag doesn't exist.");
+ }
- // If the |bundled_content| is rejected, other contents in the bundle group
- // should be rejected.
- if (bundled_content->rejected) {
- for (const auto& content_name : bundle_group_->content_names()) {
- auto other_content = description->GetContentByName(content_name);
- if (!other_content->rejected) {
- return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The m= section with mid='" + content_name +
- "' should be rejected.");
+ // If the |bundled_content| is rejected, other contents in the bundle group
+ // must also be rejected.
+ if (bundled_content->rejected) {
+ for (const auto& content_name : bundle_group->content_names()) {
+ auto other_content = description->GetContentByName(content_name);
+ if (!other_content->rejected) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "The m= section with mid='" + content_name +
+ "' should be rejected.");
+ }
}
}
}
-
return RTCError::OK();
}
@@ -732,78 +810,46 @@ RTCError JsepTransportController::ValidateContent(
}
void JsepTransportController::HandleRejectedContent(
- const cricket::ContentInfo& content_info,
- const cricket::SessionDescription* description) {
+ const cricket::ContentInfo& content_info) {
// If the content is rejected, let the
// BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first,
// then destroy the cricket::JsepTransport.
- RemoveTransportForMid(content_info.name);
- if (content_info.name == bundled_mid()) {
- for (const auto& content_name : bundle_group_->content_names()) {
- RemoveTransportForMid(content_name);
+ cricket::ContentGroup* bundle_group =
+ bundles_.LookupGroupByMid(content_info.name);
+ if (bundle_group && !bundle_group->content_names().empty() &&
+ content_info.name == *bundle_group->FirstContentName()) {
+ // Rejecting a BUNDLE group's first mid means we are rejecting the entire
+ // group.
+ for (const auto& content_name : bundle_group->content_names()) {
+ transports_.RemoveTransportForMid(content_name);
}
- bundle_group_.reset();
- } else if (IsBundled(content_info.name)) {
- // Remove the rejected content from the |bundle_group_|.
- bundle_group_->RemoveContentName(content_info.name);
- // Reset the bundle group if nothing left.
- if (!bundle_group_->FirstContentName()) {
- bundle_group_.reset();
+ // Delete the BUNDLE group.
+ bundles_.DeleteGroup(bundle_group);
+ } else {
+ transports_.RemoveTransportForMid(content_info.name);
+ if (bundle_group) {
+ // Remove the rejected content from the |bundle_group|.
+ bundles_.DeleteMid(bundle_group, content_info.name);
}
}
- MaybeDestroyJsepTransport(content_info.name);
}
bool JsepTransportController::HandleBundledContent(
- const cricket::ContentInfo& content_info) {
- auto jsep_transport = GetJsepTransportByName(*bundled_mid());
+ const cricket::ContentInfo& content_info,
+ const cricket::ContentGroup& bundle_group) {
+ TRACE_EVENT0("webrtc", "JsepTransportController::HandleBundledContent");
+ RTC_DCHECK(bundle_group.FirstContentName());
+ auto jsep_transport =
+ GetJsepTransportByName(*bundle_group.FirstContentName());
RTC_DCHECK(jsep_transport);
// If the content is bundled, let the
// BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first,
// then destroy the cricket::JsepTransport.
- if (SetTransportForMid(content_info.name, jsep_transport)) {
- // TODO(bugs.webrtc.org/9719) For media transport this is far from ideal,
- // because it means that we first create media transport and start
- // connecting it, and then we destroy it. We will need to address it before
- // video path is enabled.
- MaybeDestroyJsepTransport(content_info.name);
- return true;
- }
- return false;
-}
-
-bool JsepTransportController::SetTransportForMid(
- const std::string& mid,
- cricket::JsepTransport* jsep_transport) {
- RTC_DCHECK_RUN_ON(network_thread_);
- RTC_DCHECK(jsep_transport);
-
- auto it = mid_to_transport_.find(mid);
- if (it != mid_to_transport_.end() && it->second == jsep_transport)
- return true;
-
- pending_mids_.push_back(mid);
-
- if (it == mid_to_transport_.end()) {
- mid_to_transport_.insert(std::make_pair(mid, jsep_transport));
- } else {
- it->second = jsep_transport;
- }
-
- return config_.transport_observer->OnTransportChanged(
- mid, jsep_transport->rtp_transport(), jsep_transport->RtpDtlsTransport(),
- jsep_transport->data_channel_transport());
-}
-
-void JsepTransportController::RemoveTransportForMid(const std::string& mid) {
- RTC_DCHECK_RUN_ON(network_thread_);
- bool ret = config_.transport_observer->OnTransportChanged(mid, nullptr,
- nullptr, nullptr);
- // Calling OnTransportChanged with nullptr should always succeed, since it is
- // only expected to fail when adding media to a transport (not removing).
- RTC_DCHECK(ret);
-
- mid_to_transport_.erase(mid);
+ // TODO(bugs.webrtc.org/9719) For media transport this is far from ideal,
+ // because it means that we first create media transport and start
+ // connecting it, and then we destroy it. We will need to address it before
+ // video path is enabled.
+ return transports_.SetTransportForMid(content_info.name, jsep_transport);
}
cricket::JsepTransportDescription
@@ -812,6 +858,8 @@ JsepTransportController::CreateJsepTransportDescription(
const cricket::TransportInfo& transport_info,
const std::vector<int>& encrypted_extension_ids,
int rtp_abs_sendtime_extn_id) {
+ TRACE_EVENT0("webrtc",
+ "JsepTransportController::CreateJsepTransportDescription");
const cricket::MediaContentDescription* content_desc =
content_info.media_description();
RTC_DCHECK(content_desc);
@@ -837,11 +885,11 @@ bool JsepTransportController::ShouldUpdateBundleGroup(
}
RTC_DCHECK(local_desc_ && remote_desc_);
- const cricket::ContentGroup* local_bundle =
- local_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
- const cricket::ContentGroup* remote_bundle =
- remote_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
- return local_bundle && remote_bundle;
+ std::vector<const cricket::ContentGroup*> local_bundles =
+ local_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ std::vector<const cricket::ContentGroup*> remote_bundles =
+ remote_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ return !local_bundles.empty() && !remote_bundles.empty();
}
std::vector<int> JsepTransportController::GetEncryptedHeaderExtensionIds(
@@ -865,26 +913,31 @@ std::vector<int> JsepTransportController::GetEncryptedHeaderExtensionIds(
return encrypted_header_extension_ids;
}
-std::vector<int>
-JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundle(
+std::map<const cricket::ContentGroup*, std::vector<int>>
+JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundles(
const cricket::SessionDescription* description) {
RTC_DCHECK(description);
- RTC_DCHECK(bundle_group_);
-
- std::vector<int> merged_ids;
+ RTC_DCHECK(!bundles_.bundle_groups().empty());
+ std::map<const cricket::ContentGroup*, std::vector<int>>
+ merged_encrypted_extension_ids_by_bundle;
// Union the encrypted header IDs in the group when bundle is enabled.
for (const cricket::ContentInfo& content_info : description->contents()) {
- if (bundle_group_->HasContentName(content_info.name)) {
- std::vector<int> extension_ids =
- GetEncryptedHeaderExtensionIds(content_info);
- for (int id : extension_ids) {
- if (!absl::c_linear_search(merged_ids, id)) {
- merged_ids.push_back(id);
- }
+ auto group = bundles_.LookupGroupByMid(content_info.name);
+ if (!group)
+ continue;
+ // Get or create list of IDs for the BUNDLE group.
+ std::vector<int>& merged_ids =
+ merged_encrypted_extension_ids_by_bundle[group];
+ // Add IDs not already in the list.
+ std::vector<int> extension_ids =
+ GetEncryptedHeaderExtensionIds(content_info);
+ for (int id : extension_ids) {
+ if (!absl::c_linear_search(merged_ids, id)) {
+ merged_ids.push_back(id);
}
}
}
- return merged_ids;
+ return merged_encrypted_extension_ids_by_bundle;
}
int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId(
@@ -899,32 +952,31 @@ int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId(
const webrtc::RtpExtension* send_time_extension =
webrtc::RtpExtension::FindHeaderExtensionByUri(
content_desc->rtp_header_extensions(),
- webrtc::RtpExtension::kAbsSendTimeUri);
+ webrtc::RtpExtension::kAbsSendTimeUri,
+ config_.crypto_options.srtp.enable_encrypted_rtp_header_extensions
+ ? webrtc::RtpExtension::kPreferEncryptedExtension
+ : webrtc::RtpExtension::kDiscardEncryptedExtension);
return send_time_extension ? send_time_extension->id : -1;
}
const cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid(
const std::string& mid) const {
- auto it = mid_to_transport_.find(mid);
- return it == mid_to_transport_.end() ? nullptr : it->second;
+ return transports_.GetTransportForMid(mid);
}
cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid(
const std::string& mid) {
- auto it = mid_to_transport_.find(mid);
- return it == mid_to_transport_.end() ? nullptr : it->second;
+ return transports_.GetTransportForMid(mid);
}
const cricket::JsepTransport* JsepTransportController::GetJsepTransportByName(
const std::string& transport_name) const {
- auto it = jsep_transports_by_name_.find(transport_name);
- return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
+ return transports_.GetTransportByName(transport_name);
}
cricket::JsepTransport* JsepTransportController::GetJsepTransportByName(
const std::string& transport_name) {
- auto it = jsep_transports_by_name_.find(transport_name);
- return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
+ return transports_.GetTransportByName(transport_name);
}
RTCError JsepTransportController::MaybeCreateJsepTransport(
@@ -935,7 +987,21 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
if (transport) {
return RTCError::OK();
}
-
+ // If we have agreed to a bundle, the new mid will be added to the bundle
+ // according to JSEP, and the responder can't move it out of the group
+ // according to BUNDLE. So don't create a transport.
+ // The MID will be added to the bundle elsewhere in the code.
+ if (bundles_.bundle_groups().size() > 0) {
+ const auto& default_bundle_group = bundles_.bundle_groups()[0];
+ if (default_bundle_group->content_names().size() > 0) {
+ auto bundle_transport =
+ GetJsepTransportByName(default_bundle_group->content_names()[0]);
+ if (bundle_transport) {
+ transports_.SetTransportForMid(content_info.name, bundle_transport);
+ return RTCError::OK();
+ }
+ }
+ }
const cricket::MediaContentDescription* content_desc =
content_info.media_description();
if (certificate_ && !content_desc->cryptos().empty()) {
@@ -997,39 +1063,13 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
jsep_transport->SignalRtcpMuxActive.connect(
this, &JsepTransportController::UpdateAggregateStates_n);
- SetTransportForMid(content_info.name, jsep_transport.get());
-
- jsep_transports_by_name_[content_info.name] = std::move(jsep_transport);
+ transports_.RegisterTransport(content_info.name, std::move(jsep_transport));
UpdateAggregateStates_n();
return RTCError::OK();
}
-void JsepTransportController::MaybeDestroyJsepTransport(
- const std::string& mid) {
- auto jsep_transport = GetJsepTransportByName(mid);
- if (!jsep_transport) {
- return;
- }
-
- // Don't destroy the JsepTransport if there are still media sections referring
- // to it.
- for (const auto& kv : mid_to_transport_) {
- if (kv.second == jsep_transport) {
- return;
- }
- }
-
- jsep_transports_by_name_.erase(mid);
- UpdateAggregateStates_n();
-}
-
void JsepTransportController::DestroyAllJsepTransports_n() {
- for (const auto& jsep_transport : jsep_transports_by_name_) {
- config_.transport_observer->OnTransportChanged(jsep_transport.first,
- nullptr, nullptr, nullptr);
- }
-
- jsep_transports_by_name_.clear();
+ transports_.DestroyAllTransports();
}
void JsepTransportController::SetIceRole_n(cricket::IceRole ice_role) {
@@ -1159,6 +1199,7 @@ void JsepTransportController::OnTransportStateChanged_n(
}
void JsepTransportController::UpdateAggregateStates_n() {
+ TRACE_EVENT0("webrtc", "JsepTransportController::UpdateAggregateStates_n");
auto dtls_transports = GetDtlsTransports();
cricket::IceConnectionState new_connection_state =
cricket::kIceConnectionConnecting;
@@ -1174,7 +1215,7 @@ void JsepTransportController::UpdateAggregateStates_n() {
bool all_done_gathering = !dtls_transports.empty();
std::map<IceTransportState, int> ice_state_counts;
- std::map<cricket::DtlsTransportState, int> dtls_state_counts;
+ std::map<DtlsTransportState, int> dtls_state_counts;
for (const auto& dtls : dtls_transports) {
any_failed = any_failed || dtls->ice_transport()->GetState() ==
@@ -1276,16 +1317,15 @@ void JsepTransportController::UpdateAggregateStates_n() {
// Note that "connecting" is only a valid state for DTLS transports while
// "checking", "completed" and "disconnected" are only valid for ICE
// transports.
- int total_connected = total_ice_connected +
- dtls_state_counts[cricket::DTLS_TRANSPORT_CONNECTED];
+ int total_connected =
+ total_ice_connected + dtls_state_counts[DtlsTransportState::kConnected];
int total_dtls_connecting =
- dtls_state_counts[cricket::DTLS_TRANSPORT_CONNECTING];
+ dtls_state_counts[DtlsTransportState::kConnecting];
int total_failed =
- total_ice_failed + dtls_state_counts[cricket::DTLS_TRANSPORT_FAILED];
+ total_ice_failed + dtls_state_counts[DtlsTransportState::kFailed];
int total_closed =
- total_ice_closed + dtls_state_counts[cricket::DTLS_TRANSPORT_CLOSED];
- int total_new =
- total_ice_new + dtls_state_counts[cricket::DTLS_TRANSPORT_NEW];
+ total_ice_closed + dtls_state_counts[DtlsTransportState::kClosed];
+ int total_new = total_ice_new + dtls_state_counts[DtlsTransportState::kNew];
int total_transports = total_ice * 2;
if (total_failed > 0) {
@@ -1347,4 +1387,21 @@ void JsepTransportController::OnDtlsHandshakeError(
config_.on_dtls_handshake_error_(error);
}
+bool JsepTransportController::OnTransportChanged(
+ const std::string& mid,
+ cricket::JsepTransport* jsep_transport) {
+ if (config_.transport_observer) {
+ if (jsep_transport) {
+ return config_.transport_observer->OnTransportChanged(
+ mid, jsep_transport->rtp_transport(),
+ jsep_transport->RtpDtlsTransport(),
+ jsep_transport->data_channel_transport());
+ } else {
+ return config_.transport_observer->OnTransportChanged(mid, nullptr,
+ nullptr, nullptr);
+ }
+ }
+ return false;
+}
+
} // namespace webrtc
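
With BUNDLE handling generalized from a single bundle_group_ to a BundleManager, the encrypted header extension IDs are now merged per BUNDLE group rather than into one flat list. The standalone sketch below illustrates that union step with simplified stand-in types (Group and Content are illustrative, not the cricket:: classes): each m= section's IDs are appended to its group's list only if not already present, mirroring MergeEncryptedHeaderExtensionIdsForBundles above.

#include <algorithm>
#include <map>
#include <string>
#include <vector>

// Simplified stand-ins for cricket::ContentGroup / cricket::ContentInfo.
struct Group {
  std::vector<std::string> mids;
};
struct Content {
  std::string mid;
  std::vector<int> encrypted_extension_ids;
};

// Union the encrypted header extension IDs of all contents in each group,
// keyed by the group they belong to (one list per BUNDLE group).
std::map<const Group*, std::vector<int>> MergeIdsPerGroup(
    const std::vector<Group>& groups, const std::vector<Content>& contents) {
  std::map<const Group*, std::vector<int>> merged;
  for (const Content& content : contents) {
    const Group* group = nullptr;
    for (const Group& g : groups) {
      if (std::find(g.mids.begin(), g.mids.end(), content.mid) !=
          g.mids.end()) {
        group = &g;
        break;
      }
    }
    if (!group)
      continue;  // Unbundled m= section: its IDs are handled separately.
    std::vector<int>& ids = merged[group];
    for (int id : content.encrypted_extension_ids) {
      if (std::find(ids.begin(), ids.end(), id) == ids.end())
        ids.push_back(id);
    }
  }
  return merged;
}

Deduplication matters because the m= sections of one group typically negotiate the same encrypted extensions, and the merged list is applied once, to the group's BUNDLE-tagged transport.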
diff --git a/pc/jsep_transport_controller.h b/pc/jsep_transport_controller.h
index 568058571f..71b01bffb2 100644
--- a/pc/jsep_transport_controller.h
+++ b/pc/jsep_transport_controller.h
@@ -31,6 +31,7 @@
#include "api/rtc_error.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
#include "api/transport/data_channel_transport_interface.h"
#include "api/transport/sctp_transport_factory_interface.h"
#include "media/sctp/sctp_transport_internal.h"
@@ -48,6 +49,7 @@
#include "pc/dtls_srtp_transport.h"
#include "pc/dtls_transport.h"
#include "pc/jsep_transport.h"
+#include "pc/jsep_transport_collection.h"
#include "pc/rtp_transport.h"
#include "pc/rtp_transport_internal.h"
#include "pc/sctp_transport.h"
@@ -55,6 +57,7 @@
#include "pc/srtp_transport.h"
#include "pc/transport_stats.h"
#include "rtc_base/callback_list.h"
+#include "rtc_base/checks.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/helpers.h"
@@ -321,44 +324,29 @@ class JsepTransportController : public sigslot::has_slots<> {
SdpType type,
const cricket::SessionDescription* description)
RTC_RUN_ON(network_thread_);
- RTCError ValidateAndMaybeUpdateBundleGroup(
+ RTCError ValidateAndMaybeUpdateBundleGroups(
bool local,
SdpType type,
const cricket::SessionDescription* description);
RTCError ValidateContent(const cricket::ContentInfo& content_info);
- void HandleRejectedContent(const cricket::ContentInfo& content_info,
- const cricket::SessionDescription* description)
+ void HandleRejectedContent(const cricket::ContentInfo& content_info)
RTC_RUN_ON(network_thread_);
- bool HandleBundledContent(const cricket::ContentInfo& content_info)
+ bool HandleBundledContent(const cricket::ContentInfo& content_info,
+ const cricket::ContentGroup& bundle_group)
RTC_RUN_ON(network_thread_);
- bool SetTransportForMid(const std::string& mid,
- cricket::JsepTransport* jsep_transport);
- void RemoveTransportForMid(const std::string& mid);
-
cricket::JsepTransportDescription CreateJsepTransportDescription(
const cricket::ContentInfo& content_info,
const cricket::TransportInfo& transport_info,
const std::vector<int>& encrypted_extension_ids,
int rtp_abs_sendtime_extn_id);
- absl::optional<std::string> bundled_mid() const {
- absl::optional<std::string> bundled_mid;
- if (bundle_group_ && bundle_group_->FirstContentName()) {
- bundled_mid = *(bundle_group_->FirstContentName());
- }
- return bundled_mid;
- }
-
- bool IsBundled(const std::string& mid) const {
- return bundle_group_ && bundle_group_->HasContentName(mid);
- }
-
bool ShouldUpdateBundleGroup(SdpType type,
const cricket::SessionDescription* description);
- std::vector<int> MergeEncryptedHeaderExtensionIdsForBundle(
+ std::map<const cricket::ContentGroup*, std::vector<int>>
+ MergeEncryptedHeaderExtensionIdsForBundles(
const cricket::SessionDescription* description);
std::vector<int> GetEncryptedHeaderExtensionIds(
const cricket::ContentInfo& content_info);
@@ -392,8 +380,6 @@ class JsepTransportController : public sigslot::has_slots<> {
const cricket::SessionDescription& description)
RTC_RUN_ON(network_thread_);
- void MaybeDestroyJsepTransport(const std::string& mid)
- RTC_RUN_ON(network_thread_);
void DestroyAllJsepTransports_n() RTC_RUN_ON(network_thread_);
void SetIceRole_n(cricket::IceRole ice_role) RTC_RUN_ON(network_thread_);
@@ -460,18 +446,14 @@ class JsepTransportController : public sigslot::has_slots<> {
void OnDtlsHandshakeError(rtc::SSLHandshakeError error);
+ bool OnTransportChanged(const std::string& mid,
+ cricket::JsepTransport* transport);
+
rtc::Thread* const network_thread_ = nullptr;
cricket::PortAllocator* const port_allocator_ = nullptr;
AsyncDnsResolverFactoryInterface* const async_dns_resolver_factory_ = nullptr;
- std::map<std::string, std::unique_ptr<cricket::JsepTransport>>
- jsep_transports_by_name_ RTC_GUARDED_BY(network_thread_);
- // This keeps track of the mapping between media section
- // (BaseChannel/SctpTransport) and the JsepTransport underneath.
- std::map<std::string, cricket::JsepTransport*> mid_to_transport_
- RTC_GUARDED_BY(network_thread_);
- // Keep track of mids that have been mapped to transports. Used for rollback.
- std::vector<std::string> pending_mids_ RTC_GUARDED_BY(network_thread_);
+ JsepTransportCollection transports_ RTC_GUARDED_BY(network_thread_);
// Aggregate states for Transports.
// standardized_ice_connection_state_ is intended to replace
// ice_connection_state, see bugs.webrtc.org/9308
@@ -491,13 +473,13 @@ class JsepTransportController : public sigslot::has_slots<> {
const cricket::SessionDescription* remote_desc_ = nullptr;
absl::optional<bool> initial_offerer_;
- absl::optional<cricket::ContentGroup> bundle_group_;
-
cricket::IceConfig ice_config_;
cricket::IceRole ice_role_ = cricket::ICEROLE_CONTROLLING;
uint64_t ice_tiebreaker_ = rtc::CreateRandomId64();
rtc::scoped_refptr<rtc::RTCCertificate> certificate_;
+ BundleManager bundles_;
+
RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransportController);
};
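
The mid-to-transport bookkeeping that previously lived in jsep_transports_by_name_, mid_to_transport_ and pending_mids_ now sits behind the JsepTransportCollection member, which the controller constructs with two callbacks (see the constructor change above): one that forwards to OnTransportChanged() when a mid is mapped or unmapped, and one that refreshes aggregate states on the network thread. The sketch below is a hypothetical, much-simplified model of that callback pattern; the real class is declared in pc/jsep_transport_collection.h and also handles ownership, rollback and commit (RegisterTransport, RollbackTransports, CommitTransports).

#include <functional>
#include <map>
#include <string>
#include <utility>

// Hypothetical, simplified model of a mid -> transport mapping that reports
// changes through callbacks supplied by its owner (the controller).
template <typename Transport>
class MidTransportMap {
 public:
  using ChangedCallback =
      std::function<bool(const std::string& mid, Transport* transport)>;
  using StateCallback = std::function<void()>;

  MidTransportMap(ChangedCallback on_changed, StateCallback on_state)
      : on_changed_(std::move(on_changed)), on_state_(std::move(on_state)) {}

  // Maps |mid| to |transport|, notifies the owner, and returns the observer's
  // verdict.
  bool SetTransportForMid(const std::string& mid, Transport* transport) {
    auto it = by_mid_.find(mid);
    if (it != by_mid_.end() && it->second == transport)
      return true;  // No change, nothing to report.
    by_mid_[mid] = transport;
    bool ok = on_changed_(mid, transport);
    on_state_();
    return ok;
  }

  // Unmaps |mid|; a null transport signals removal to the owner.
  void RemoveTransportForMid(const std::string& mid) {
    by_mid_.erase(mid);
    on_changed_(mid, nullptr);
    on_state_();
  }

 private:
  ChangedCallback on_changed_;
  StateCallback on_state_;
  std::map<std::string, Transport*> by_mid_;
};

In this model the owner passes lambdas equivalent to the ones in the constructor initializer list above, so mapping changes and observer notification stay in one place instead of being scattered across SetTransportForMid/RemoveTransportForMid in the controller.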
diff --git a/pc/jsep_transport_controller_unittest.cc b/pc/jsep_transport_controller_unittest.cc
index 674ac227f9..a06f5804e4 100644
--- a/pc/jsep_transport_controller_unittest.cc
+++ b/pc/jsep_transport_controller_unittest.cc
@@ -13,6 +13,7 @@
#include <map>
#include <memory>
+#include "api/dtls_transport_interface.h"
#include "p2p/base/dtls_transport_factory.h"
#include "p2p/base/fake_dtls_transport.h"
#include "p2p/base/fake_ice_transport.h"
@@ -33,6 +34,8 @@ static const char kIceUfrag2[] = "u0002";
static const char kIcePwd2[] = "TESTICEPWD00000000000002";
static const char kIceUfrag3[] = "u0003";
static const char kIcePwd3[] = "TESTICEPWD00000000000003";
+static const char kIceUfrag4[] = "u0004";
+static const char kIcePwd4[] = "TESTICEPWD00000000000004";
static const char kAudioMid1[] = "audio1";
static const char kAudioMid2[] = "audio2";
static const char kVideoMid1[] = "video1";
@@ -48,7 +51,7 @@ class FakeIceTransportFactory : public webrtc::IceTransportFactory {
const std::string& transport_name,
int component,
IceTransportInit init) override {
- return new rtc::RefCountedObject<cricket::FakeIceTransportWrapper>(
+ return rtc::make_ref_counted<cricket::FakeIceTransportWrapper>(
std::make_unique<cricket::FakeIceTransport>(transport_name, component));
}
};
@@ -691,8 +694,8 @@ TEST_F(JsepTransportControllerTest,
combined_connection_state_, kTimeout);
EXPECT_EQ(2, combined_connection_state_signal_count_);
- fake_audio_dtls->SetDtlsState(cricket::DTLS_TRANSPORT_CONNECTED);
- fake_video_dtls->SetDtlsState(cricket::DTLS_TRANSPORT_CONNECTED);
+ fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected);
+ fake_video_dtls->SetDtlsState(DtlsTransportState::kConnected);
// Set the connection count to be 2 and the cricket::FakeIceTransport will set
// the transport state to be STATE_CONNECTING.
fake_video_dtls->fake_ice_transport()->SetConnectionCount(2);
@@ -748,8 +751,8 @@ TEST_F(JsepTransportControllerTest, SignalConnectionStateComplete) {
combined_connection_state_, kTimeout);
EXPECT_EQ(2, combined_connection_state_signal_count_);
- fake_audio_dtls->SetDtlsState(cricket::DTLS_TRANSPORT_CONNECTED);
- fake_video_dtls->SetDtlsState(cricket::DTLS_TRANSPORT_CONNECTED);
+ fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected);
+ fake_video_dtls->SetDtlsState(DtlsTransportState::kConnected);
// Set the connection count to be 1 and the cricket::FakeIceTransport will set
// the transport state to be STATE_COMPLETED.
fake_video_dtls->fake_ice_transport()->SetTransportState(
@@ -837,7 +840,7 @@ TEST_F(JsepTransportControllerTest,
fake_audio_dtls->SetWritable(true);
fake_audio_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
fake_audio_dtls->fake_ice_transport()->SetConnectionCount(1);
- fake_audio_dtls->SetDtlsState(cricket::DTLS_TRANSPORT_CONNECTED);
+ fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected);
EXPECT_EQ(1, gathering_state_signal_count_);
// Set the remote description and enable the bundle.
@@ -1099,6 +1102,512 @@ TEST_F(JsepTransportControllerTest, MultipleMediaSectionsOfSameTypeWithBundle) {
ASSERT_TRUE(it2 != changed_dtls_transport_by_mid_.end());
}
+TEST_F(JsepTransportControllerTest, MultipleBundleGroups) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName(kMid1Audio);
+ bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName(kMid3Audio);
+ bundle_group2.AddContentName(kMid4Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(bundle_group1);
+ local_offer->AddGroup(bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(bundle_group1);
+ remote_answer->AddGroup(bundle_group2);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ // Verify that (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video) form two
+ // distinct bundled groups.
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Video);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+ EXPECT_EQ(mid1_transport, mid2_transport);
+ EXPECT_EQ(mid3_transport, mid4_transport);
+ EXPECT_NE(mid1_transport, mid3_transport);
+
+ auto it = changed_rtp_transport_by_mid_.find(kMid1Audio);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid1_transport);
+
+ it = changed_rtp_transport_by_mid_.find(kMid2Video);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid2_transport);
+
+ it = changed_rtp_transport_by_mid_.find(kMid3Audio);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid3_transport);
+
+ it = changed_rtp_transport_by_mid_.find(kMid4Video);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid4_transport);
+}
+
+TEST_F(JsepTransportControllerTest,
+ MultipleBundleGroupsInOfferButOnlyASingleGroupInAnswer) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName(kMid1Audio);
+ bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName(kMid3Audio);
+ bundle_group2.AddContentName(kMid4Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ // The offer has both groups.
+ local_offer->AddGroup(bundle_group1);
+ local_offer->AddGroup(bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ // The answer only has a single group! This is what happens when talking to an
+ // endpoint that does not have support for multiple BUNDLE groups.
+ remote_answer->AddGroup(bundle_group1);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ // Verify that (kMid1Audio,kMid2Video) form a bundle group, but that
+ // kMid3Audio and kMid4Video are unbundled.
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Video);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+ EXPECT_EQ(mid1_transport, mid2_transport);
+ EXPECT_NE(mid3_transport, mid4_transport);
+ EXPECT_NE(mid1_transport, mid3_transport);
+ EXPECT_NE(mid1_transport, mid4_transport);
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsIllegallyChangeGroup) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ // Offer groups (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video).
+ cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group1.AddContentName(kMid1Audio);
+ offer_bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group2.AddContentName(kMid3Audio);
+ offer_bundle_group2.AddContentName(kMid4Video);
+ // Answer groups (kMid1Audio,kMid4Video) and (kMid3Audio,kMid2Video), i.e. the
+ // second group members have switched places. This should get rejected.
+ cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group1.AddContentName(kMid1Audio);
+ answer_bundle_group1.AddContentName(kMid4Video);
+ cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group2.AddContentName(kMid3Audio);
+ answer_bundle_group2.AddContentName(kMid2Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(offer_bundle_group1);
+ local_offer->AddGroup(offer_bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(answer_bundle_group1);
+ remote_answer->AddGroup(answer_bundle_group2);
+
+ // Accept offer.
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ // Reject answer!
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsInvalidSubsets) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ // Offer groups (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video).
+ cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group1.AddContentName(kMid1Audio);
+ offer_bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group2.AddContentName(kMid3Audio);
+ offer_bundle_group2.AddContentName(kMid4Video);
+ // Answer groups (kMid1Audio) and (kMid2Video), i.e. kMid2Video was moved out
+ // of the first group into a group of its own. This should get rejected.
+ cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group1.AddContentName(kMid1Audio);
+ cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group2.AddContentName(kMid2Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(offer_bundle_group1);
+ local_offer->AddGroup(offer_bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(answer_bundle_group1);
+ remote_answer->AddGroup(answer_bundle_group2);
+
+ // Accept offer.
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ // Reject answer!
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsInvalidOverlap) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ // Offer groups (kMid1Audio,kMid3Audio) and (kMid2Video,kMid3Audio), i.e.
+ // kMid3Audio is in both groups - this is illegal.
+ cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group1.AddContentName(kMid1Audio);
+ offer_bundle_group1.AddContentName(kMid3Audio);
+ cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group2.AddContentName(kMid2Video);
+ offer_bundle_group2.AddContentName(kMid3Audio);
+
+ auto offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ offer->AddGroup(offer_bundle_group1);
+ offer->AddGroup(offer_bundle_group2);
+
+ // Reject offer, both if set as local or remote.
+ EXPECT_FALSE(
+ transport_controller_->SetLocalDescription(SdpType::kOffer, offer.get())
+ .ok());
+ EXPECT_FALSE(
+ transport_controller_->SetRemoteDescription(SdpType::kOffer, offer.get())
+ .ok());
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsUnbundleFirstMid) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Audio[] = "2_audio";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+ static const char kMid5Video[] = "5_video";
+ static const char kMid6Video[] = "6_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ // Offer groups (kMid1Audio,kMid2Audio,kMid3Audio) and
+ // (kMid4Video,kMid5Video,kMid6Video).
+ cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group1.AddContentName(kMid1Audio);
+ offer_bundle_group1.AddContentName(kMid2Audio);
+ offer_bundle_group1.AddContentName(kMid3Audio);
+ cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group2.AddContentName(kMid4Video);
+ offer_bundle_group2.AddContentName(kMid5Video);
+ offer_bundle_group2.AddContentName(kMid6Video);
+ // Answer groups (kMid2Audio,kMid3Audio) and (kMid5Video,kMid6Video), i.e.
+ // we've moved the first MIDs out of the groups.
+ cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group1.AddContentName(kMid2Audio);
+ answer_bundle_group1.AddContentName(kMid3Audio);
+ cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group2.AddContentName(kMid5Video);
+ answer_bundle_group2.AddContentName(kMid6Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid6Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(offer_bundle_group1);
+ local_offer->AddGroup(offer_bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid6Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(answer_bundle_group1);
+ remote_answer->AddGroup(answer_bundle_group2);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+ auto mid5_transport = transport_controller_->GetRtpTransport(kMid5Video);
+ auto mid6_transport = transport_controller_->GetRtpTransport(kMid6Video);
+ EXPECT_NE(mid1_transport, mid2_transport);
+ EXPECT_EQ(mid2_transport, mid3_transport);
+ EXPECT_NE(mid4_transport, mid5_transport);
+ EXPECT_EQ(mid5_transport, mid6_transport);
+ EXPECT_NE(mid1_transport, mid4_transport);
+ EXPECT_NE(mid2_transport, mid5_transport);
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsChangeFirstMid) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Audio[] = "2_audio";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+ static const char kMid5Video[] = "5_video";
+ static const char kMid6Video[] = "6_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ // Offer groups (kMid1Audio,kMid2Audio,kMid3Audio) and
+ // (kMid4Video,kMid5Video,kMid6Video).
+ cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group1.AddContentName(kMid1Audio);
+ offer_bundle_group1.AddContentName(kMid2Audio);
+ offer_bundle_group1.AddContentName(kMid3Audio);
+ cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group2.AddContentName(kMid4Video);
+ offer_bundle_group2.AddContentName(kMid5Video);
+ offer_bundle_group2.AddContentName(kMid6Video);
+ // Answer groups (kMid2Audio,kMid1Audio,kMid3Audio) and
+ // (kMid5Video,kMid6Video,kMid4Video), i.e. we've changed which MID is first
+ // but accept the whole group.
+ cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group1.AddContentName(kMid2Audio);
+ answer_bundle_group1.AddContentName(kMid1Audio);
+ answer_bundle_group1.AddContentName(kMid3Audio);
+ cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group2.AddContentName(kMid5Video);
+ answer_bundle_group2.AddContentName(kMid6Video);
+ answer_bundle_group2.AddContentName(kMid4Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid6Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(offer_bundle_group1);
+ local_offer->AddGroup(offer_bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid6Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(answer_bundle_group1);
+ remote_answer->AddGroup(answer_bundle_group2);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+
+ // The fact that we accept this answer is actually a bug. If we accept the
+ // first MID to be in the group, we should also accept that it is the tagged
+ // one.
+ // TODO(https://crbug.com/webrtc/12699): When this issue is fixed, change this
+ // to EXPECT_FALSE and remove the below expectations about transports.
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+ auto mid5_transport = transport_controller_->GetRtpTransport(kMid5Video);
+ auto mid6_transport = transport_controller_->GetRtpTransport(kMid6Video);
+ EXPECT_NE(mid1_transport, mid4_transport);
+ EXPECT_EQ(mid1_transport, mid2_transport);
+ EXPECT_EQ(mid2_transport, mid3_transport);
+ EXPECT_EQ(mid4_transport, mid5_transport);
+ EXPECT_EQ(mid5_transport, mid6_transport);
+}
+
// Tests that only a subset of all the m= sections are bundled.
TEST_F(JsepTransportControllerTest, BundleSubsetOfMediaSections) {
CreateJsepTransportController(JsepTransportController::Config());
@@ -1542,7 +2051,6 @@ TEST_F(JsepTransportControllerTest, ChangeTaggedMediaSectionMaxBundle) {
EXPECT_TRUE(transport_controller_
->SetLocalDescription(SdpType::kOffer, local_reoffer.get())
.ok());
-
std::unique_ptr<cricket::SessionDescription> remote_reanswer(
local_reoffer->Clone());
EXPECT_TRUE(
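
The aggregate-state computation and the fakes in these tests now use the webrtc::DtlsTransportState scoped enum (kNew, kConnecting, kConnected, kClosed, kFailed) rather than the old cricket::DTLS_TRANSPORT_* constants. The self-contained sketch below shows the same counting pattern UpdateAggregateStates_n() applies with the new enum; the enum is redeclared locally only so the snippet compiles on its own.

#include <map>
#include <vector>

// Local copy of the scoped enum, mirroring api/dtls_transport_interface.h,
// so this sketch is self-contained.
enum class DtlsTransportState { kNew, kConnecting, kConnected, kClosed, kFailed };

struct Totals {
  int connected = 0;
  int connecting = 0;
  int failed = 0;
  int closed = 0;
};

// Tally per-state counts the way the aggregate-state update does: bucket each
// transport's state in a map, then read out the buckets of interest.
Totals Tally(const std::vector<DtlsTransportState>& states) {
  std::map<DtlsTransportState, int> counts;
  for (DtlsTransportState state : states)
    ++counts[state];
  Totals totals;
  totals.connected = counts[DtlsTransportState::kConnected];
  totals.connecting = counts[DtlsTransportState::kConnecting];
  totals.failed = counts[DtlsTransportState::kFailed];
  totals.closed = counts[DtlsTransportState::kClosed];
  return totals;
}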
diff --git a/pc/jsep_transport_unittest.cc b/pc/jsep_transport_unittest.cc
index 4dd2b5ce67..5f4334068a 100644
--- a/pc/jsep_transport_unittest.cc
+++ b/pc/jsep_transport_unittest.cc
@@ -48,8 +48,7 @@ rtc::scoped_refptr<webrtc::IceTransportInterface> CreateIceTransport(
return nullptr;
}
- return new rtc::RefCountedObject<FakeIceTransportWrapper>(
- std::move(internal));
+ return rtc::make_ref_counted<FakeIceTransportWrapper>(std::move(internal));
}
class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> {
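
The test factories in this change also move from explicit new rtc::RefCountedObject<T>(...) to rtc::make_ref_counted<T>(...), which hides the RefCountedObject wrapper and yields an rtc::scoped_refptr<T>. The fragment below restates the before/after shape of that migration; it is not standalone-compilable outside the WebRTC tree, since it assumes the rtc_base ref-counting headers and the cricket::FakeIceTransport(Wrapper) test types used above.

// Before: manual wrapping in RefCountedObject (the pattern being removed).
rtc::scoped_refptr<cricket::FakeIceTransportWrapper> CreateOldStyle(
    std::unique_ptr<cricket::FakeIceTransport> internal) {
  return new rtc::RefCountedObject<cricket::FakeIceTransportWrapper>(
      std::move(internal));
}

// After: make_ref_counted deduces the wrapper and the scoped_refptr type.
rtc::scoped_refptr<cricket::FakeIceTransportWrapper> CreateNewStyle(
    std::unique_ptr<cricket::FakeIceTransport> internal) {
  return rtc::make_ref_counted<cricket::FakeIceTransportWrapper>(
      std::move(internal));
}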
diff --git a/pc/local_audio_source.cc b/pc/local_audio_source.cc
index 22ab1c39c3..3fcad50a1d 100644
--- a/pc/local_audio_source.cc
+++ b/pc/local_audio_source.cc
@@ -18,8 +18,7 @@ namespace webrtc {
rtc::scoped_refptr<LocalAudioSource> LocalAudioSource::Create(
const cricket::AudioOptions* audio_options) {
- rtc::scoped_refptr<LocalAudioSource> source(
- new rtc::RefCountedObject<LocalAudioSource>());
+ auto source = rtc::make_ref_counted<LocalAudioSource>();
source->Initialize(audio_options);
return source;
}
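
Both hunks above are instances of a tree-wide cleanup that replaces explicit new rtc::RefCountedObject<T>(...) allocations with the rtc::make_ref_counted<T>(...) helper, which allocates the wrapper and hands back an rtc::scoped_refptr<T> in one step. A minimal sketch of the pattern, using a hypothetical FakeSource class that is not part of this patch:

    // Sketch only. FakeSource is hypothetical; it just needs to be usable with
    // RefCountedObject, e.g. by deriving from rtc::RefCountInterface.
    #include "api/scoped_refptr.h"
    #include "rtc_base/ref_count.h"
    #include "rtc_base/ref_counted_object.h"

    class FakeSource : public rtc::RefCountInterface {
     public:
      explicit FakeSource(int sample_rate_hz) : sample_rate_hz_(sample_rate_hz) {}
      int sample_rate_hz() const { return sample_rate_hz_; }

     private:
      const int sample_rate_hz_;
    };

    rtc::scoped_refptr<FakeSource> CreateFakeSource(int sample_rate_hz) {
      // Before: rtc::scoped_refptr<FakeSource> source(
      //             new rtc::RefCountedObject<FakeSource>(sample_rate_hz));
      // After: the helper allocates the RefCountedObject wrapper and returns
      // a scoped_refptr directly.
      return rtc::make_ref_counted<FakeSource>(sample_rate_hz);
    }
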
diff --git a/pc/media_session.cc b/pc/media_session.cc
index 2e779bd7b1..3c73ddf535 100644
--- a/pc/media_session.cc
+++ b/pc/media_session.cc
@@ -989,68 +989,6 @@ static Codecs MatchCodecPreference(
return filtered_codecs;
}
-static bool FindByUriAndEncryption(const RtpHeaderExtensions& extensions,
- const webrtc::RtpExtension& ext_to_match,
- webrtc::RtpExtension* found_extension) {
- auto it = absl::c_find_if(
- extensions, [&ext_to_match](const webrtc::RtpExtension& extension) {
- // We assume that all URIs are given in a canonical
- // format.
- return extension.uri == ext_to_match.uri &&
- extension.encrypt == ext_to_match.encrypt;
- });
- if (it == extensions.end()) {
- return false;
- }
- if (found_extension) {
- *found_extension = *it;
- }
- return true;
-}
-
-static bool FindByUri(const RtpHeaderExtensions& extensions,
- const webrtc::RtpExtension& ext_to_match,
- webrtc::RtpExtension* found_extension) {
- // We assume that all URIs are given in a canonical format.
- const webrtc::RtpExtension* found =
- webrtc::RtpExtension::FindHeaderExtensionByUri(extensions,
- ext_to_match.uri);
- if (!found) {
- return false;
- }
- if (found_extension) {
- *found_extension = *found;
- }
- return true;
-}
-
-static bool FindByUriWithEncryptionPreference(
- const RtpHeaderExtensions& extensions,
- absl::string_view uri_to_match,
- bool encryption_preference,
- webrtc::RtpExtension* found_extension) {
- const webrtc::RtpExtension* unencrypted_extension = nullptr;
- for (const webrtc::RtpExtension& extension : extensions) {
- // We assume that all URIs are given in a canonical format.
- if (extension.uri == uri_to_match) {
- if (!encryption_preference || extension.encrypt) {
- if (found_extension) {
- *found_extension = extension;
- }
- return true;
- }
- unencrypted_extension = &extension;
- }
- }
- if (unencrypted_extension) {
- if (found_extension) {
- *found_extension = *unencrypted_extension;
- }
- return true;
- }
- return false;
-}
-
// Adds all extensions from |reference_extensions| to |offered_extensions| that
// don't already exist in |offered_extensions| and ensures the IDs don't
// collide. If an extension is added, it's also added to |regular_extensions| or
@@ -1065,22 +1003,28 @@ static void MergeRtpHdrExts(const RtpHeaderExtensions& reference_extensions,
RtpHeaderExtensions* encrypted_extensions,
UsedRtpHeaderExtensionIds* used_ids) {
for (auto reference_extension : reference_extensions) {
- if (!FindByUriAndEncryption(*offered_extensions, reference_extension,
- nullptr)) {
- webrtc::RtpExtension existing;
+ if (!webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ *offered_extensions, reference_extension.uri,
+ reference_extension.encrypt)) {
if (reference_extension.encrypt) {
- if (FindByUriAndEncryption(*encrypted_extensions, reference_extension,
- &existing)) {
- offered_extensions->push_back(existing);
+ const webrtc::RtpExtension* existing =
+ webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ *encrypted_extensions, reference_extension.uri,
+ reference_extension.encrypt);
+ if (existing) {
+ offered_extensions->push_back(*existing);
} else {
used_ids->FindAndSetIdUsed(&reference_extension);
encrypted_extensions->push_back(reference_extension);
offered_extensions->push_back(reference_extension);
}
} else {
- if (FindByUriAndEncryption(*regular_extensions, reference_extension,
- &existing)) {
- offered_extensions->push_back(existing);
+ const webrtc::RtpExtension* existing =
+ webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ *regular_extensions, reference_extension.uri,
+ reference_extension.encrypt);
+ if (existing) {
+ offered_extensions->push_back(*existing);
} else {
used_ids->FindAndSetIdUsed(&reference_extension);
regular_extensions->push_back(reference_extension);
@@ -1091,41 +1035,86 @@ static void MergeRtpHdrExts(const RtpHeaderExtensions& reference_extensions,
}
}
-static void AddEncryptedVersionsOfHdrExts(RtpHeaderExtensions* extensions,
- RtpHeaderExtensions* all_extensions,
- UsedRtpHeaderExtensionIds* used_ids) {
- RtpHeaderExtensions encrypted_extensions;
- for (const webrtc::RtpExtension& extension : *extensions) {
- webrtc::RtpExtension existing;
- // Don't add encrypted extensions again that were already included in a
- // previous offer or regular extensions that are also included as encrypted
- // extensions.
- if (extension.encrypt ||
- !webrtc::RtpExtension::IsEncryptionSupported(extension.uri) ||
- (FindByUriWithEncryptionPreference(*extensions, extension.uri, true,
- &existing) &&
- existing.encrypt)) {
+static void AddEncryptedVersionsOfHdrExts(
+ RtpHeaderExtensions* offered_extensions,
+ RtpHeaderExtensions* encrypted_extensions,
+ UsedRtpHeaderExtensionIds* used_ids) {
+ RtpHeaderExtensions encrypted_extensions_to_add;
+ for (const auto& extension : *offered_extensions) {
+ // Skip extensions that are already offered in encrypted form.
+ if (extension.encrypt) {
continue;
}
- if (FindByUri(*all_extensions, extension, &existing)) {
- encrypted_extensions.push_back(existing);
- } else {
- webrtc::RtpExtension encrypted(extension);
- encrypted.encrypt = true;
- used_ids->FindAndSetIdUsed(&encrypted);
- all_extensions->push_back(encrypted);
- encrypted_extensions.push_back(encrypted);
+ // Skip if we cannot encrypt the extension
+ if (!webrtc::RtpExtension::IsEncryptionSupported(extension.uri)) {
+ continue;
}
+
+ // Skip if an encrypted extension with that URI already exists in the
+ // offered extensions.
+ const bool have_encrypted_extension =
+ webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ *offered_extensions, extension.uri, true);
+ if (have_encrypted_extension) {
+ continue;
+ }
+
+ // Determine if a shared encrypted extension with that URI already exists.
+ const webrtc::RtpExtension* shared_encrypted_extension =
+ webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+ *encrypted_extensions, extension.uri, true);
+ if (shared_encrypted_extension) {
+ // Re-use the shared encrypted extension
+ encrypted_extensions_to_add.push_back(*shared_encrypted_extension);
+ continue;
+ }
+
+ // None exists. Create a new shared encrypted extension from the
+ // non-encrypted one.
+ webrtc::RtpExtension new_encrypted_extension(extension);
+ new_encrypted_extension.encrypt = true;
+ used_ids->FindAndSetIdUsed(&new_encrypted_extension);
+ encrypted_extensions->push_back(new_encrypted_extension);
+ encrypted_extensions_to_add.push_back(new_encrypted_extension);
}
- extensions->insert(extensions->end(), encrypted_extensions.begin(),
- encrypted_extensions.end());
+
+ // Append the additional encrypted extensions to be offered
+ offered_extensions->insert(offered_extensions->end(),
+ encrypted_extensions_to_add.begin(),
+ encrypted_extensions_to_add.end());
+}
+
+// Mostly identical to RtpExtension::FindHeaderExtensionByUri but discards any
+// encrypted extensions that this implementation cannot encrypt.
+static const webrtc::RtpExtension* FindHeaderExtensionByUriDiscardUnsupported(
+ const std::vector<webrtc::RtpExtension>& extensions,
+ absl::string_view uri,
+ webrtc::RtpExtension::Filter filter) {
+ // Note: While it's technically possible to decrypt extensions that we don't
+ // encrypt, the symmetric API of libsrtp does not allow us to supply
+ // different IDs for encryption/decryption of header extensions depending on
+ // whether the packet is inbound or outbound. Therefore, we are limited to
+ // what we can send in encrypted form.
+ if (!webrtc::RtpExtension::IsEncryptionSupported(uri)) {
+ // If there's no encryption support and we only want encrypted extensions,
+ // there's no point in continuing the search here.
+ if (filter == webrtc::RtpExtension::kRequireEncryptedExtension) {
+ return nullptr;
+ }
+
+ // Restrict the lookup to non-encrypted extensions.
+ filter = webrtc::RtpExtension::Filter::kDiscardEncryptedExtension;
+ }
+
+ return webrtc::RtpExtension::FindHeaderExtensionByUri(extensions, uri,
+ filter);
}
static void NegotiateRtpHeaderExtensions(
const RtpHeaderExtensions& local_extensions,
const RtpHeaderExtensions& offered_extensions,
- bool enable_encrypted_rtp_header_extensions,
+ webrtc::RtpExtension::Filter filter,
RtpHeaderExtensions* negotiated_extensions) {
// TransportSequenceNumberV2 is not offered by default. The special logic for
// the TransportSequenceNumber extensions works as follows:
@@ -1134,9 +1123,9 @@ static void NegotiateRtpHeaderExtensions(
// V1 and V2   V2 regardless of local_extensions.
// V2          V2 regardless of local_extensions.
const webrtc::RtpExtension* transport_sequence_number_v2_offer =
- webrtc::RtpExtension::FindHeaderExtensionByUri(
+ FindHeaderExtensionByUriDiscardUnsupported(
offered_extensions,
- webrtc::RtpExtension::kTransportSequenceNumberV2Uri);
+ webrtc::RtpExtension::kTransportSequenceNumberV2Uri, filter);
bool frame_descriptor_in_local = false;
bool dependency_descriptor_in_local = false;
@@ -1149,10 +1138,10 @@ static void NegotiateRtpHeaderExtensions(
dependency_descriptor_in_local = true;
else if (ours.uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri)
abs_capture_time_in_local = true;
- webrtc::RtpExtension theirs;
- if (FindByUriWithEncryptionPreference(
- offered_extensions, ours.uri,
- enable_encrypted_rtp_header_extensions, &theirs)) {
+ const webrtc::RtpExtension* theirs =
+ FindHeaderExtensionByUriDiscardUnsupported(offered_extensions, ours.uri,
+ filter);
+ if (theirs) {
if (transport_sequence_number_v2_offer &&
ours.uri == webrtc::RtpExtension::kTransportSequenceNumberUri) {
// Don't respond to
@@ -1162,7 +1151,7 @@ static void NegotiateRtpHeaderExtensions(
continue;
} else {
// We respond with their RTP header extension id.
- negotiated_extensions->push_back(theirs);
+ negotiated_extensions->push_back(*theirs);
}
}
}
@@ -1174,28 +1163,35 @@ static void NegotiateRtpHeaderExtensions(
// Frame descriptors support. If the extension is not present locally, but is
// in the offer, we add it to the list.
- webrtc::RtpExtension theirs;
- if (!dependency_descriptor_in_local &&
- FindByUriWithEncryptionPreference(
- offered_extensions, webrtc::RtpExtension::kDependencyDescriptorUri,
- enable_encrypted_rtp_header_extensions, &theirs)) {
- negotiated_extensions->push_back(theirs);
- }
- if (!frame_descriptor_in_local &&
- FindByUriWithEncryptionPreference(
- offered_extensions,
- webrtc::RtpExtension::kGenericFrameDescriptorUri00,
- enable_encrypted_rtp_header_extensions, &theirs)) {
- negotiated_extensions->push_back(theirs);
+ if (!dependency_descriptor_in_local) {
+ const webrtc::RtpExtension* theirs =
+ FindHeaderExtensionByUriDiscardUnsupported(
+ offered_extensions, webrtc::RtpExtension::kDependencyDescriptorUri,
+ filter);
+ if (theirs) {
+ negotiated_extensions->push_back(*theirs);
+ }
+ }
+ if (!frame_descriptor_in_local) {
+ const webrtc::RtpExtension* theirs =
+ FindHeaderExtensionByUriDiscardUnsupported(
+ offered_extensions,
+ webrtc::RtpExtension::kGenericFrameDescriptorUri00, filter);
+ if (theirs) {
+ negotiated_extensions->push_back(*theirs);
+ }
}
// Absolute capture time support. If the extension is not present locally, but
// is in the offer, we add it to the list.
- if (!abs_capture_time_in_local &&
- FindByUriWithEncryptionPreference(
- offered_extensions, webrtc::RtpExtension::kAbsoluteCaptureTimeUri,
- enable_encrypted_rtp_header_extensions, &theirs)) {
- negotiated_extensions->push_back(theirs);
+ if (!abs_capture_time_in_local) {
+ const webrtc::RtpExtension* theirs =
+ FindHeaderExtensionByUriDiscardUnsupported(
+ offered_extensions, webrtc::RtpExtension::kAbsoluteCaptureTimeUri,
+ filter);
+ if (theirs) {
+ negotiated_extensions->push_back(*theirs);
+ }
}
}
@@ -1250,10 +1246,14 @@ static bool CreateMediaContentAnswer(
bool bundle_enabled,
MediaContentDescription* answer) {
answer->set_extmap_allow_mixed_enum(offer->extmap_allow_mixed_enum());
+ const webrtc::RtpExtension::Filter extensions_filter =
+ enable_encrypted_rtp_header_extensions
+ ? webrtc::RtpExtension::Filter::kPreferEncryptedExtension
+ : webrtc::RtpExtension::Filter::kDiscardEncryptedExtension;
RtpHeaderExtensions negotiated_rtp_extensions;
- NegotiateRtpHeaderExtensions(
- local_rtp_extensions, offer->rtp_header_extensions(),
- enable_encrypted_rtp_header_extensions, &negotiated_rtp_extensions);
+ NegotiateRtpHeaderExtensions(local_rtp_extensions,
+ offer->rtp_header_extensions(),
+ extensions_filter, &negotiated_rtp_extensions);
answer->set_rtp_header_extensions(negotiated_rtp_extensions);
answer->set_rtcp_mux(session_options.rtcp_mux_enabled && offer->rtcp_mux());
@@ -1515,10 +1515,6 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
VideoCodecs offer_video_codecs;
GetCodecsForOffer(current_active_contents, &offer_audio_codecs,
&offer_video_codecs);
- if (!session_options.vad_enabled) {
- // If application doesn't want CN codecs in offer.
- StripCNCodecs(&offer_audio_codecs);
- }
AudioVideoRtpHeaderExtensions extensions_with_ids =
GetOfferedRtpHeaderExtensionsWithIds(
current_active_contents, session_options.offer_extmap_allow_mixed,
@@ -1664,19 +1660,23 @@ MediaSessionDescriptionFactory::CreateAnswer(
GetCodecsForAnswer(current_active_contents, *offer, &answer_audio_codecs,
&answer_video_codecs);
- if (!session_options.vad_enabled) {
- // If application doesn't want CN codecs in answer.
- StripCNCodecs(&answer_audio_codecs);
- }
-
auto answer = std::make_unique<SessionDescription>();
// If the offer supports BUNDLE, and we want to use it too, create a BUNDLE
// group in the answer with the appropriate content names.
- const ContentGroup* offer_bundle = offer->GetGroupByName(GROUP_TYPE_BUNDLE);
- ContentGroup answer_bundle(GROUP_TYPE_BUNDLE);
- // Transport info shared by the bundle group.
- std::unique_ptr<TransportInfo> bundle_transport;
+ std::vector<const ContentGroup*> offer_bundles =
+ offer->GetGroupsByName(GROUP_TYPE_BUNDLE);
+ // There are as many answer BUNDLE groups as offer BUNDLE groups (even if
+ // rejected, we respond with an empty group). |offer_bundles|,
+ // |answer_bundles| and |bundle_transports| share the same size and indices.
+ std::vector<ContentGroup> answer_bundles;
+ std::vector<std::unique_ptr<TransportInfo>> bundle_transports;
+ answer_bundles.reserve(offer_bundles.size());
+ bundle_transports.reserve(offer_bundles.size());
+ for (size_t i = 0; i < offer_bundles.size(); ++i) {
+ answer_bundles.emplace_back(GROUP_TYPE_BUNDLE);
+ bundle_transports.emplace_back(nullptr);
+ }
answer->set_extmap_allow_mixed(offer->extmap_allow_mixed());
@@ -1691,6 +1691,18 @@ MediaSessionDescriptionFactory::CreateAnswer(
RTC_DCHECK(
IsMediaContentOfType(offer_content, media_description_options.type));
RTC_DCHECK(media_description_options.mid == offer_content->name);
+ // Get the index of the BUNDLE group that this MID belongs to, if any.
+ absl::optional<size_t> bundle_index;
+ for (size_t i = 0; i < offer_bundles.size(); ++i) {
+ if (offer_bundles[i]->HasContentName(media_description_options.mid)) {
+ bundle_index = i;
+ break;
+ }
+ }
+ TransportInfo* bundle_transport =
+ bundle_index.has_value() ? bundle_transports[bundle_index.value()].get()
+ : nullptr;
+
const ContentInfo* current_content = nullptr;
if (current_description &&
msection_index < current_description->contents().size()) {
@@ -1703,35 +1715,34 @@ MediaSessionDescriptionFactory::CreateAnswer(
case MEDIA_TYPE_AUDIO:
if (!AddAudioContentForAnswer(
media_description_options, session_options, offer_content,
- offer, current_content, current_description,
- bundle_transport.get(), answer_audio_codecs, header_extensions,
- &current_streams, answer.get(), &ice_credentials)) {
+ offer, current_content, current_description, bundle_transport,
+ answer_audio_codecs, header_extensions, &current_streams,
+ answer.get(), &ice_credentials)) {
return nullptr;
}
break;
case MEDIA_TYPE_VIDEO:
if (!AddVideoContentForAnswer(
media_description_options, session_options, offer_content,
- offer, current_content, current_description,
- bundle_transport.get(), answer_video_codecs, header_extensions,
- &current_streams, answer.get(), &ice_credentials)) {
+ offer, current_content, current_description, bundle_transport,
+ answer_video_codecs, header_extensions, &current_streams,
+ answer.get(), &ice_credentials)) {
return nullptr;
}
break;
case MEDIA_TYPE_DATA:
- if (!AddDataContentForAnswer(media_description_options, session_options,
- offer_content, offer, current_content,
- current_description,
- bundle_transport.get(), &current_streams,
- answer.get(), &ice_credentials)) {
+ if (!AddDataContentForAnswer(
+ media_description_options, session_options, offer_content,
+ offer, current_content, current_description, bundle_transport,
+ &current_streams, answer.get(), &ice_credentials)) {
return nullptr;
}
break;
case MEDIA_TYPE_UNSUPPORTED:
if (!AddUnsupportedContentForAnswer(
media_description_options, session_options, offer_content,
- offer, current_content, current_description,
- bundle_transport.get(), answer.get(), &ice_credentials)) {
+ offer, current_content, current_description, bundle_transport,
+ answer.get(), &ice_credentials)) {
return nullptr;
}
break;
@@ -1742,37 +1753,41 @@ MediaSessionDescriptionFactory::CreateAnswer(
// See if we can add the newly generated m= section to the BUNDLE group in
// the answer.
ContentInfo& added = answer->contents().back();
- if (!added.rejected && session_options.bundle_enabled && offer_bundle &&
- offer_bundle->HasContentName(added.name)) {
- answer_bundle.AddContentName(added.name);
- bundle_transport.reset(
+ if (!added.rejected && session_options.bundle_enabled &&
+ bundle_index.has_value()) {
+ // The |bundle_index| is for |media_description_options.mid|.
+ RTC_DCHECK_EQ(media_description_options.mid, added.name);
+ answer_bundles[bundle_index.value()].AddContentName(added.name);
+ bundle_transports[bundle_index.value()].reset(
new TransportInfo(*answer->GetTransportInfoByName(added.name)));
}
}
- // If a BUNDLE group was offered, put a BUNDLE group in the answer even if
- // it's empty. RFC5888 says:
+ // If BUNDLE group(s) were offered, put the same number of BUNDLE groups in
+ // the answer even if they're empty. RFC5888 says:
//
// A SIP entity that receives an offer that contains an "a=group" line
// with semantics that are understood MUST return an answer that
// contains an "a=group" line with the same semantics.
- if (offer_bundle) {
- answer->AddGroup(answer_bundle);
- }
-
- if (answer_bundle.FirstContentName()) {
- // Share the same ICE credentials and crypto params across all contents,
- // as BUNDLE requires.
- if (!UpdateTransportInfoForBundle(answer_bundle, answer.get())) {
- RTC_LOG(LS_ERROR)
- << "CreateAnswer failed to UpdateTransportInfoForBundle.";
- return NULL;
- }
+ if (!offer_bundles.empty()) {
+ for (const ContentGroup& answer_bundle : answer_bundles) {
+ answer->AddGroup(answer_bundle);
+
+ if (answer_bundle.FirstContentName()) {
+ // Share the same ICE credentials and crypto params across all contents,
+ // as BUNDLE requires.
+ if (!UpdateTransportInfoForBundle(answer_bundle, answer.get())) {
+ RTC_LOG(LS_ERROR)
+ << "CreateAnswer failed to UpdateTransportInfoForBundle.";
+ return NULL;
+ }
- if (!UpdateCryptoParamsForBundle(answer_bundle, answer.get())) {
- RTC_LOG(LS_ERROR)
- << "CreateAnswer failed to UpdateCryptoParamsForBundle.";
- return NULL;
+ if (!UpdateCryptoParamsForBundle(answer_bundle, answer.get())) {
+ RTC_LOG(LS_ERROR)
+ << "CreateAnswer failed to UpdateCryptoParamsForBundle.";
+ return NULL;
+ }
+ }
}
}
@@ -2159,6 +2174,10 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer(
}
}
}
+ if (!session_options.vad_enabled) {
+ // If application doesn't want CN codecs in offer.
+ StripCNCodecs(&filtered_codecs);
+ }
cricket::SecurePolicy sdes_policy =
IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED
@@ -2434,6 +2453,10 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer(
}
}
}
+ if (!session_options.vad_enabled) {
+ // If application doesn't want CN codecs in answer.
+ StripCNCodecs(&filtered_codecs);
+ }
bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) &&
session_options.bundle_enabled;
diff --git a/pc/media_session_unittest.cc b/pc/media_session_unittest.cc
index 6d914f9b81..c7c07fc527 100644
--- a/pc/media_session_unittest.cc
+++ b/pc/media_session_unittest.cc
@@ -139,6 +139,7 @@ static const RtpExtension kAudioRtpExtensionEncrypted1[] = {
RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8),
RtpExtension("http://google.com/testing/audio_something", 10),
RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 12, true),
+ RtpExtension("http://google.com/testing/audio_something", 11, true),
};
static const RtpExtension kAudioRtpExtension2[] = {
@@ -161,7 +162,15 @@ static const RtpExtension kAudioRtpExtension3ForEncryption[] = {
static const RtpExtension kAudioRtpExtension3ForEncryptionOffer[] = {
RtpExtension("http://google.com/testing/audio_something", 2),
RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3),
- RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14, true),
+ RtpExtension("http://google.com/testing/audio_something", 14, true),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 13, true),
+};
+
+static const RtpExtension kVideoRtpExtension3ForEncryptionOffer[] = {
+ RtpExtension("http://google.com/testing/video_something", 4),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3),
+ RtpExtension("http://google.com/testing/video_something", 12, true),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 13, true),
};
static const RtpExtension kAudioRtpExtensionAnswer[] = {
@@ -180,7 +189,8 @@ static const RtpExtension kVideoRtpExtension1[] = {
static const RtpExtension kVideoRtpExtensionEncrypted1[] = {
RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14),
RtpExtension("http://google.com/testing/video_something", 13),
- RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 11, true),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 9, true),
+ RtpExtension("http://google.com/testing/video_something", 7, true),
};
static const RtpExtension kVideoRtpExtension2[] = {
@@ -205,7 +215,7 @@ static const RtpExtension kVideoRtpExtensionAnswer[] = {
};
static const RtpExtension kVideoRtpExtensionEncryptedAnswer[] = {
- RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 11, true),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 9, true),
};
static const RtpExtension kRtpExtensionTransportSequenceNumber01[] = {
@@ -1036,6 +1046,66 @@ TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerChangedBundleOffererTagged) {
EXPECT_TRUE(bundle_group->HasContentName("video"));
}
+TEST_F(MediaSessionDescriptionFactoryTest,
+ CreateAnswerForOfferWithMultipleBundleGroups) {
+ // Create an offer with 4 m= sections, initially without BUNDLE groups.
+ MediaSessionOptions opts;
+ opts.bundle_enabled = false;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "2",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "3",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "4",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ ASSERT_TRUE(offer->groups().empty());
+
+ // Munge the offer to have two groups. Offers like these cannot be generated
+ // without munging, but it is valid to receive such offers from remote
+ // endpoints.
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName("1");
+ bundle_group1.AddContentName("2");
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName("3");
+ bundle_group2.AddContentName("4");
+ offer->AddGroup(bundle_group1);
+ offer->AddGroup(bundle_group2);
+
+ // If BUNDLE is enabled, the answer to this offer should accept both BUNDLE
+ // groups.
+ opts.bundle_enabled = true;
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswer(offer.get(), opts, nullptr);
+
+ std::vector<const cricket::ContentGroup*> answer_groups =
+ answer->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ ASSERT_EQ(answer_groups.size(), 2u);
+ EXPECT_EQ(answer_groups[0]->content_names().size(), 2u);
+ EXPECT_TRUE(answer_groups[0]->HasContentName("1"));
+ EXPECT_TRUE(answer_groups[0]->HasContentName("2"));
+ EXPECT_EQ(answer_groups[1]->content_names().size(), 2u);
+ EXPECT_TRUE(answer_groups[1]->HasContentName("3"));
+ EXPECT_TRUE(answer_groups[1]->HasContentName("4"));
+
+ // If BUNDLE is disabled, the answer to this offer should reject both BUNDLE
+ // groups.
+ opts.bundle_enabled = false;
+ answer = f2_.CreateAnswer(offer.get(), opts, nullptr);
+
+ answer_groups = answer->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ // Rejected groups are still listed, but they are empty.
+ ASSERT_EQ(answer_groups.size(), 2u);
+ EXPECT_TRUE(answer_groups[0]->content_names().empty());
+ EXPECT_TRUE(answer_groups[1]->content_names().empty());
+}
+
// Test that if the BUNDLE offerer-tagged media section is changed in a reoffer
// and there is still a non-rejected media section that was in the initial
// offer, then the ICE credentials do not change in the reoffer offerer-tagged
@@ -3371,19 +3441,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReusedEncrypted) {
MAKE_VECTOR(kVideoRtpExtension3ForEncryption), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
- // The extensions that are shared between audio and video should use the same
- // id.
- const RtpExtension kExpectedVideoRtpExtension[] = {
- kVideoRtpExtension3ForEncryption[0],
- kAudioRtpExtension3ForEncryptionOffer[1],
- kAudioRtpExtension3ForEncryptionOffer[2],
- };
-
EXPECT_EQ(
MAKE_VECTOR(kAudioRtpExtension3ForEncryptionOffer),
GetFirstAudioContentDescription(offer.get())->rtp_header_extensions());
EXPECT_EQ(
- MAKE_VECTOR(kExpectedVideoRtpExtension),
+ MAKE_VECTOR(kVideoRtpExtension3ForEncryptionOffer),
GetFirstVideoContentDescription(offer.get())->rtp_header_extensions());
// Nothing should change when creating a new offer
@@ -3393,7 +3455,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReusedEncrypted) {
EXPECT_EQ(MAKE_VECTOR(kAudioRtpExtension3ForEncryptionOffer),
GetFirstAudioContentDescription(updated_offer.get())
->rtp_header_extensions());
- EXPECT_EQ(MAKE_VECTOR(kExpectedVideoRtpExtension),
+ EXPECT_EQ(MAKE_VECTOR(kVideoRtpExtension3ForEncryptionOffer),
GetFirstVideoContentDescription(updated_offer.get())
->rtp_header_extensions());
}
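
For orientation, the updated expectations above exercise the filter-aware header-extension lookups that media_session.cc now uses in place of the removed FindByUri* helpers. A minimal sketch of those lookups, based only on the signatures visible in this patch (URIs and IDs are arbitrary example values):

    #include <vector>
    #include "api/rtp_parameters.h"  // webrtc::RtpExtension

    void ExampleHeaderExtensionLookups() {
      std::vector<webrtc::RtpExtension> extensions = {
          webrtc::RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3),
          webrtc::RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 13,
                               /*encrypt=*/true),
      };
      // Filter-based lookup: prefer the encrypted variant when both exist.
      const webrtc::RtpExtension* preferred =
          webrtc::RtpExtension::FindHeaderExtensionByUri(
              extensions, "urn:ietf:params:rtp-hdrext:toffset",
              webrtc::RtpExtension::Filter::kPreferEncryptedExtension);
      // Exact (uri, encrypt) lookup, as used by MergeRtpHdrExts above.
      const webrtc::RtpExtension* encrypted_only =
          webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
              extensions, "urn:ietf:params:rtp-hdrext:toffset",
              /*encrypt=*/true);
      // Both lookups return nullptr when no matching extension is found.
      (void)preferred;
      (void)encrypted_only;
    }
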
diff --git a/pc/media_stream.cc b/pc/media_stream.cc
index 00f491b3cb..08a2a723d0 100644
--- a/pc/media_stream.cc
+++ b/pc/media_stream.cc
@@ -31,9 +31,7 @@ static typename V::iterator FindTrack(V* vector, const std::string& track_id) {
}
rtc::scoped_refptr<MediaStream> MediaStream::Create(const std::string& id) {
- rtc::RefCountedObject<MediaStream>* stream =
- new rtc::RefCountedObject<MediaStream>(id);
- return stream;
+ return rtc::make_ref_counted<MediaStream>(id);
}
MediaStream::MediaStream(const std::string& id) : id_(id) {}
diff --git a/api/media_stream_proxy.h b/pc/media_stream_proxy.h
index 773c5d8b14..36069a4369 100644
--- a/api/media_stream_proxy.h
+++ b/pc/media_stream_proxy.h
@@ -8,18 +8,18 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef API_MEDIA_STREAM_PROXY_H_
-#define API_MEDIA_STREAM_PROXY_H_
+#ifndef PC_MEDIA_STREAM_PROXY_H_
+#define PC_MEDIA_STREAM_PROXY_H_
#include <string>
#include "api/media_stream_interface.h"
-#include "api/proxy.h"
+#include "pc/proxy.h"
namespace webrtc {
-// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
-// are called on is an implementation detail.
+// TODO(deadbeef): Move this to a .cc file. What threads methods are called on
+// is an implementation detail.
BEGIN_PRIMARY_PROXY_MAP(MediaStream)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
BYPASS_PROXY_CONSTMETHOD0(std::string, id)
@@ -37,8 +37,8 @@ PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*)
PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
-END_PROXY_MAP()
+END_PROXY_MAP(MediaStream)
} // namespace webrtc
-#endif // API_MEDIA_STREAM_PROXY_H_
+#endif // PC_MEDIA_STREAM_PROXY_H_
diff --git a/api/media_stream_track_proxy.h b/pc/media_stream_track_proxy.h
index a0fe676d58..f563137c77 100644
--- a/api/media_stream_track_proxy.h
+++ b/pc/media_stream_track_proxy.h
@@ -11,26 +11,25 @@
// This file includes proxy classes for tracks. The purpose is
// to make sure tracks are only accessed from the signaling thread.
-#ifndef API_MEDIA_STREAM_TRACK_PROXY_H_
-#define API_MEDIA_STREAM_TRACK_PROXY_H_
+#ifndef PC_MEDIA_STREAM_TRACK_PROXY_H_
+#define PC_MEDIA_STREAM_TRACK_PROXY_H_
#include <string>
#include "api/media_stream_interface.h"
-#include "api/proxy.h"
+#include "pc/proxy.h"
namespace webrtc {
-// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
-// are called on is an implementation detail.
-
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
BEGIN_PRIMARY_PROXY_MAP(AudioTrack)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
BYPASS_PROXY_CONSTMETHOD0(std::string, kind)
BYPASS_PROXY_CONSTMETHOD0(std::string, id)
PROXY_CONSTMETHOD0(TrackState, state)
PROXY_CONSTMETHOD0(bool, enabled)
-PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource)
+BYPASS_PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource)
PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*)
PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*)
PROXY_METHOD1(bool, GetSignalLevel, int*)
@@ -38,28 +37,28 @@ PROXY_METHOD0(rtc::scoped_refptr<AudioProcessorInterface>, GetAudioProcessor)
PROXY_METHOD1(bool, set_enabled, bool)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
-END_PROXY_MAP()
+END_PROXY_MAP(AudioTrack)
BEGIN_PROXY_MAP(VideoTrack)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
BYPASS_PROXY_CONSTMETHOD0(std::string, kind)
BYPASS_PROXY_CONSTMETHOD0(std::string, id)
-PROXY_CONSTMETHOD0(TrackState, state)
-PROXY_CONSTMETHOD0(bool, enabled)
-PROXY_METHOD1(bool, set_enabled, bool)
-PROXY_CONSTMETHOD0(ContentHint, content_hint)
-PROXY_METHOD1(void, set_content_hint, ContentHint)
+PROXY_SECONDARY_CONSTMETHOD0(TrackState, state)
+PROXY_SECONDARY_CONSTMETHOD0(bool, enabled)
+PROXY_SECONDARY_METHOD1(bool, set_enabled, bool)
+PROXY_SECONDARY_CONSTMETHOD0(ContentHint, content_hint)
+PROXY_SECONDARY_METHOD1(void, set_content_hint, ContentHint)
PROXY_SECONDARY_METHOD2(void,
AddOrUpdateSink,
rtc::VideoSinkInterface<VideoFrame>*,
const rtc::VideoSinkWants&)
PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
-PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource)
+BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
-END_PROXY_MAP()
+END_PROXY_MAP(VideoTrack)
} // namespace webrtc
-#endif // API_MEDIA_STREAM_TRACK_PROXY_H_
+#endif // PC_MEDIA_STREAM_TRACK_PROXY_H_
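
The proxy headers above also reflect two mechanical changes: END_PROXY_MAP() now takes the proxied type's name, and several VideoTrack accessors moved to the BYPASS_ and PROXY_SECONDARY_ variants. A rough sketch of the resulting macro shape for a hypothetical FooInterface (assumed to be declared elsewhere with matching methods); the threading notes in the comments follow from the macro names:

    #include <string>
    #include "pc/proxy.h"

    namespace webrtc {

    // Hypothetical: FooInterface must provide id(), enabled() and
    // set_enabled() for these macros to expand into a valid proxy.
    BEGIN_PRIMARY_PROXY_MAP(Foo)
    PROXY_PRIMARY_THREAD_DESTRUCTOR()
    // Invoked directly on the calling thread.
    BYPASS_PROXY_CONSTMETHOD0(std::string, id)
    // Marshalled to the primary (signaling) thread.
    PROXY_CONSTMETHOD0(bool, enabled)
    PROXY_METHOD1(bool, set_enabled, bool)
    END_PROXY_MAP(Foo)

    }  // namespace webrtc
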
diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc
index 95abb108b9..276af1787d 100644
--- a/pc/peer_connection.cc
+++ b/pc/peer_connection.cc
@@ -465,10 +465,9 @@ RTCErrorOr<rtc::scoped_refptr<PeerConnection>> PeerConnection::Create(
}
// The PeerConnection constructor consumes some, but not all, dependencies.
- rtc::scoped_refptr<PeerConnection> pc(
- new rtc::RefCountedObject<PeerConnection>(
- context, options, is_unified_plan, std::move(event_log),
- std::move(call), dependencies, dtls_enabled));
+ auto pc = rtc::make_ref_counted<PeerConnection>(
+ context, options, is_unified_plan, std::move(event_log), std::move(call),
+ dependencies, dtls_enabled);
RTCError init_error = pc->Initialize(configuration, std::move(dependencies));
if (!init_error.ok()) {
RTC_LOG(LS_ERROR) << "PeerConnection initialization failed";
@@ -632,10 +631,12 @@ RTCError PeerConnection::Initialize(
if (!IsUnifiedPlan()) {
rtp_manager()->transceivers()->Add(
RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
- signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO)));
+ signaling_thread(),
+ new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO, channel_manager())));
rtp_manager()->transceivers()->Add(
RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
- signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO)));
+ signaling_thread(),
+ new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO, channel_manager())));
}
int delay_ms = configuration.report_usage_pattern_delay_ms
@@ -996,9 +997,11 @@ PeerConnection::AddTransceiver(
parameters.encodings = init.send_encodings;
// Encodings are dropped from the tail if too many are provided.
- if (parameters.encodings.size() > kMaxSimulcastStreams) {
+ size_t max_simulcast_streams =
+ media_type == cricket::MEDIA_TYPE_VIDEO ? kMaxSimulcastStreams : 1u;
+ if (parameters.encodings.size() > max_simulcast_streams) {
parameters.encodings.erase(
- parameters.encodings.begin() + kMaxSimulcastStreams,
+ parameters.encodings.begin() + max_simulcast_streams,
parameters.encodings.end());
}
@@ -1276,9 +1279,9 @@ absl::optional<bool> PeerConnection::can_trickle_ice_candidates() {
"trickle");
}
-rtc::scoped_refptr<DataChannelInterface> PeerConnection::CreateDataChannel(
- const std::string& label,
- const DataChannelInit* config) {
+RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>
+PeerConnection::CreateDataChannelOrError(const std::string& label,
+ const DataChannelInit* config) {
RTC_DCHECK_RUN_ON(signaling_thread());
TRACE_EVENT0("webrtc", "PeerConnection::CreateDataChannel");
@@ -1288,14 +1291,16 @@ rtc::scoped_refptr<DataChannelInterface> PeerConnection::CreateDataChannel(
if (config) {
internal_config.reset(new InternalDataChannelInit(*config));
}
+ // TODO(bugs.webrtc.org/12796): Return a more specific error.
rtc::scoped_refptr<DataChannelInterface> channel(
data_channel_controller_.InternalCreateDataChannelWithProxy(
label, internal_config.get()));
if (!channel.get()) {
- return nullptr;
+ return RTCError(RTCErrorType::INTERNAL_ERROR,
+ "Data channel creation failed");
}
- // Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or
+ // Trigger the onRenegotiationNeeded event for
// the first SCTP DataChannel.
if (first_datachannel) {
sdp_handler_->UpdateNegotiationNeeded();
@@ -1878,6 +1883,16 @@ void PeerConnection::SetConnectionState(
configuration_.ice_candidate_pool_size, 0, 255, 256);
break;
}
+
+ // Record whether there was a local or remote provisional answer.
+ ProvisionalAnswerUsage pranswer = kProvisionalAnswerNotUsed;
+ if (local_description()->GetType() == SdpType::kPrAnswer) {
+ pranswer = kProvisionalAnswerLocal;
+ } else if (remote_description()->GetType() == SdpType::kPrAnswer) {
+ pranswer = kProvisionalAnswerRemote;
+ }
+ RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.ProvisionalAnswer",
+ pranswer, kProvisionalAnswerMax);
}
}
@@ -2183,6 +2198,7 @@ cricket::CandidateStatsList PeerConnection::GetPooledCandidateStats() const {
std::map<std::string, cricket::TransportStats>
PeerConnection::GetTransportStatsByNames(
const std::set<std::string>& transport_names) {
+ TRACE_EVENT0("webrtc", "PeerConnection::GetTransportStatsByNames");
RTC_DCHECK_RUN_ON(network_thread());
if (!network_thread_safety_->alive())
return {};
@@ -2413,21 +2429,20 @@ void PeerConnection::TeardownDataChannelTransport_n() {
}
// Returns false if bundle is enabled and rtcp_mux is disabled.
-bool PeerConnection::ValidateBundleSettings(const SessionDescription* desc) {
- bool bundle_enabled = desc->HasGroup(cricket::GROUP_TYPE_BUNDLE);
- if (!bundle_enabled)
+bool PeerConnection::ValidateBundleSettings(
+ const SessionDescription* desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ if (bundle_groups_by_mid.empty())
return true;
- const cricket::ContentGroup* bundle_group =
- desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
- RTC_DCHECK(bundle_group != NULL);
-
const cricket::ContentInfos& contents = desc->contents();
for (cricket::ContentInfos::const_iterator citer = contents.begin();
citer != contents.end(); ++citer) {
const cricket::ContentInfo* content = (&*citer);
RTC_DCHECK(content != NULL);
- if (bundle_group->HasContentName(content->name) && !content->rejected &&
+ auto it = bundle_groups_by_mid.find(content->name);
+ if (it != bundle_groups_by_mid.end() && !content->rejected &&
content->type == MediaProtocolType::kRtp) {
if (!HasRtcpMuxEnabled(content))
return false;
@@ -2632,6 +2647,7 @@ void PeerConnection::OnTransportControllerGatheringState(
// Runs on network_thread().
void PeerConnection::ReportTransportStats() {
+ TRACE_EVENT0("webrtc", "PeerConnection::ReportTransportStats");
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
std::map<std::string, std::set<cricket::MediaType>>
media_types_by_transport_name;
@@ -2865,18 +2881,10 @@ std::function<void(const rtc::CopyOnWriteBuffer& packet,
int64_t packet_time_us)>
PeerConnection::InitializeRtcpCallback() {
RTC_DCHECK_RUN_ON(network_thread());
- return [this, flag = worker_thread_safety_](
- const rtc::CopyOnWriteBuffer& packet, int64_t packet_time_us) {
+ return [this](const rtc::CopyOnWriteBuffer& packet, int64_t packet_time_us) {
RTC_DCHECK_RUN_ON(network_thread());
- // TODO(bugs.webrtc.org/11993): We should actually be delivering this call
- // directly to the Call class somehow directly on the network thread and not
- // incur this hop here. The DeliverPacket() method will eventually just have
- // to hop back over to the network thread.
- worker_thread()->PostTask(ToQueuedTask(flag, [this, packet,
- packet_time_us] {
- RTC_DCHECK_RUN_ON(worker_thread());
- call_->Receiver()->DeliverPacket(MediaType::ANY, packet, packet_time_us);
- }));
+ call_ptr_->Receiver()->DeliverPacket(MediaType::ANY, packet,
+ packet_time_us);
};
}
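
Since CreateDataChannel() is replaced here by an RTCErrorOr-returning CreateDataChannelOrError(), callers switch to the ok()/MoveValue() handling used elsewhere in this patch. A minimal sketch, assuming the new method is exposed on PeerConnectionInterface as the override above suggests:

    #include <string>
    #include "api/peer_connection_interface.h"
    #include "rtc_base/logging.h"

    rtc::scoped_refptr<webrtc::DataChannelInterface> CreateChannelOrNull(
        webrtc::PeerConnectionInterface* pc, const std::string& label) {
      webrtc::DataChannelInit config;
      config.ordered = true;
      auto result = pc->CreateDataChannelOrError(label, &config);
      if (!result.ok()) {
        // The patch currently returns INTERNAL_ERROR for all failures; see the
        // TODO(bugs.webrtc.org/12796) above.
        RTC_LOG(LS_ERROR) << "CreateDataChannelOrError failed: "
                          << result.error().message();
        return nullptr;
      }
      return result.MoveValue();
    }
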
diff --git a/pc/peer_connection.h b/pc/peer_connection.h
index d321fd5667..4476c5d8e1 100644
--- a/pc/peer_connection.h
+++ b/pc/peer_connection.h
@@ -167,7 +167,7 @@ class PeerConnection : public PeerConnectionInternal,
std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> GetTransceivers()
const override;
- rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>> CreateDataChannelOrError(
const std::string& label,
const DataChannelInit* config) override;
// WARNING: LEGACY. See peerconnectioninterface.h
@@ -389,7 +389,10 @@ class PeerConnection : public PeerConnectionInternal,
RTC_DCHECK_RUN_ON(signaling_thread());
return is_unified_plan_;
}
- bool ValidateBundleSettings(const cricket::SessionDescription* desc);
+ bool ValidateBundleSettings(
+ const cricket::SessionDescription* desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
// Returns the MID for the data section associated with the
// SCTP data channel, if it has been set. If no data
diff --git a/pc/peer_connection_adaptation_integrationtest.cc b/pc/peer_connection_adaptation_integrationtest.cc
index 71d054eb90..dfb12971b4 100644
--- a/pc/peer_connection_adaptation_integrationtest.cc
+++ b/pc/peer_connection_adaptation_integrationtest.cc
@@ -50,7 +50,7 @@ TrackWithPeriodicSource CreateTrackWithPeriodicSource(
periodic_track_source_config.frame_interval_ms = 100;
periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis();
rtc::scoped_refptr<FakePeriodicVideoTrackSource> periodic_track_source =
- new rtc::RefCountedObject<FakePeriodicVideoTrackSource>(
+ rtc::make_ref_counted<FakePeriodicVideoTrackSource>(
periodic_track_source_config, /* remote */ false);
TrackWithPeriodicSource track_with_source;
track_with_source.track =
@@ -83,7 +83,7 @@ class PeerConnectionAdaptationIntegrationTest : public ::testing::Test {
rtc::scoped_refptr<PeerConnectionTestWrapper> CreatePcWrapper(
const char* name) {
rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper =
- new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ rtc::make_ref_counted<PeerConnectionTestWrapper>(
name, network_thread_.get(), worker_thread_.get());
PeerConnectionInterface::RTCConfiguration config;
config.sdp_semantics = SdpSemantics::kUnifiedPlan;
diff --git a/pc/peer_connection_bundle_unittest.cc b/pc/peer_connection_bundle_unittest.cc
index a219fa33e4..08754c6820 100644
--- a/pc/peer_connection_bundle_unittest.cc
+++ b/pc/peer_connection_bundle_unittest.cc
@@ -13,7 +13,6 @@
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
-#include "api/peer_connection_proxy.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "p2p/base/fake_port_allocator.h"
@@ -21,6 +20,7 @@
#include "p2p/client/basic_port_allocator.h"
#include "pc/media_session.h"
#include "pc/peer_connection.h"
+#include "pc/peer_connection_proxy.h"
#include "pc/peer_connection_wrapper.h"
#include "pc/sdp_utils.h"
#ifdef WEBRTC_ANDROID
@@ -886,4 +886,56 @@ TEST_F(PeerConnectionBundleTestUnifiedPlan,
EXPECT_TRUE(bundle_group->content_names().empty());
}
+TEST_F(PeerConnectionBundleTestUnifiedPlan, MultipleBundleGroups) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("0_audio");
+ caller->AddAudioTrack("1_audio");
+ caller->AddVideoTrack("2_audio");
+ caller->AddVideoTrack("3_audio");
+ auto callee = CreatePeerConnection();
+
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+ // Modify the GROUP to have two BUNDLE groups. We know that the MIDs will be
+ // 0,1,2,3 because our implementation generates predictable MIDs.
+ offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName("0");
+ bundle_group1.AddContentName("1");
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName("2");
+ bundle_group2.AddContentName("3");
+ offer->description()->AddGroup(bundle_group1);
+ offer->description()->AddGroup(bundle_group2);
+
+ EXPECT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ callee->SetRemoteDescription(std::move(offer));
+ auto answer = callee->CreateAnswer();
+ EXPECT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+ caller->SetRemoteDescription(std::move(answer));
+
+ // Verify bundling on sender side.
+ auto senders = caller->pc()->GetSenders();
+ ASSERT_EQ(senders.size(), 4u);
+ auto sender0_transport = senders[0]->dtls_transport();
+ auto sender1_transport = senders[1]->dtls_transport();
+ auto sender2_transport = senders[2]->dtls_transport();
+ auto sender3_transport = senders[3]->dtls_transport();
+ EXPECT_EQ(sender0_transport, sender1_transport);
+ EXPECT_EQ(sender2_transport, sender3_transport);
+ EXPECT_NE(sender0_transport, sender2_transport);
+
+ // Verify bundling on receiver side.
+ auto receivers = callee->pc()->GetReceivers();
+ ASSERT_EQ(receivers.size(), 4u);
+ auto receiver0_transport = receivers[0]->dtls_transport();
+ auto receiver1_transport = receivers[1]->dtls_transport();
+ auto receiver2_transport = receivers[2]->dtls_transport();
+ auto receiver3_transport = receivers[3]->dtls_transport();
+ EXPECT_EQ(receiver0_transport, receiver1_transport);
+ EXPECT_EQ(receiver2_transport, receiver3_transport);
+ EXPECT_NE(receiver0_transport, receiver2_transport);
+}
+
} // namespace webrtc
diff --git a/pc/peer_connection_crypto_unittest.cc b/pc/peer_connection_crypto_unittest.cc
index 32e8cbd74c..394203cb02 100644
--- a/pc/peer_connection_crypto_unittest.cc
+++ b/pc/peer_connection_crypto_unittest.cc
@@ -631,7 +631,7 @@ TEST_P(PeerConnectionCryptoDtlsCertGenTest, TestCertificateGeneration) {
observers;
for (size_t i = 0; i < concurrent_calls_; i++) {
rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer =
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>();
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
observers.push_back(observer);
if (sdp_type_ == SdpType::kOffer) {
pc->pc()->CreateOffer(observer,
diff --git a/pc/peer_connection_data_channel_unittest.cc b/pc/peer_connection_data_channel_unittest.cc
index 157dcd25c8..2544473536 100644
--- a/pc/peer_connection_data_channel_unittest.cc
+++ b/pc/peer_connection_data_channel_unittest.cc
@@ -19,7 +19,6 @@
#include "api/jsep.h"
#include "api/media_types.h"
#include "api/peer_connection_interface.h"
-#include "api/peer_connection_proxy.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "media/base/codec.h"
@@ -32,6 +31,7 @@
#include "pc/media_session.h"
#include "pc/peer_connection.h"
#include "pc/peer_connection_factory.h"
+#include "pc/peer_connection_proxy.h"
#include "pc/peer_connection_wrapper.h"
#include "pc/sdp_utils.h"
#include "pc/session_description.h"
diff --git a/pc/peer_connection_factory.cc b/pc/peer_connection_factory.cc
index c911871f8d..50755a38c7 100644
--- a/pc/peer_connection_factory.cc
+++ b/pc/peer_connection_factory.cc
@@ -18,26 +18,28 @@
#include "api/call/call_factory_interface.h"
#include "api/fec_controller.h"
#include "api/ice_transport_interface.h"
-#include "api/media_stream_proxy.h"
-#include "api/media_stream_track_proxy.h"
#include "api/network_state_predictor.h"
#include "api/packet_socket_factory.h"
-#include "api/peer_connection_factory_proxy.h"
-#include "api/peer_connection_proxy.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/sequence_checker.h"
#include "api/transport/bitrate_settings.h"
#include "api/units/data_rate.h"
#include "call/audio_state.h"
+#include "call/rtp_transport_controller_send_factory.h"
#include "media/base/media_engine.h"
#include "p2p/base/basic_async_resolver_factory.h"
#include "p2p/base/basic_packet_socket_factory.h"
#include "p2p/base/default_ice_transport_factory.h"
+#include "p2p/base/port_allocator.h"
#include "p2p/client/basic_port_allocator.h"
#include "pc/audio_track.h"
#include "pc/local_audio_source.h"
#include "pc/media_stream.h"
+#include "pc/media_stream_proxy.h"
+#include "pc/media_stream_track_proxy.h"
#include "pc/peer_connection.h"
+#include "pc/peer_connection_factory_proxy.h"
+#include "pc/peer_connection_proxy.h"
#include "pc/rtp_parameters_conversion.h"
#include "pc/session_description.h"
#include "pc/video_track.h"
@@ -48,6 +50,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/rtc_certificate_generator.h"
#include "rtc_base/system/file_wrapper.h"
namespace webrtc {
@@ -84,8 +87,7 @@ rtc::scoped_refptr<PeerConnectionFactory> PeerConnectionFactory::Create(
if (!context) {
return nullptr;
}
- return new rtc::RefCountedObject<PeerConnectionFactory>(context,
- &dependencies);
+ return rtc::make_ref_counted<PeerConnectionFactory>(context, &dependencies);
}
PeerConnectionFactory::PeerConnectionFactory(
@@ -99,7 +101,11 @@ PeerConnectionFactory::PeerConnectionFactory(
std::move(dependencies->network_state_predictor_factory)),
injected_network_controller_factory_(
std::move(dependencies->network_controller_factory)),
- neteq_factory_(std::move(dependencies->neteq_factory)) {}
+ neteq_factory_(std::move(dependencies->neteq_factory)),
+ transport_controller_send_factory_(
+ (dependencies->transport_controller_send_factory)
+ ? std::move(dependencies->transport_controller_send_factory)
+ : std::make_unique<RtpTransportControllerSendFactory>()) {}
PeerConnectionFactory::PeerConnectionFactory(
PeerConnectionFactoryDependencies dependencies)
@@ -138,6 +144,7 @@ RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities(
case cricket::MEDIA_TYPE_UNSUPPORTED:
return RtpCapabilities();
}
+ RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind;
RTC_CHECK_NOTREACHED();
}
@@ -164,6 +171,7 @@ RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities(
case cricket::MEDIA_TYPE_UNSUPPORTED:
return RtpCapabilities();
}
+ RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind;
RTC_CHECK_NOTREACHED();
}
@@ -185,33 +193,6 @@ void PeerConnectionFactory::StopAecDump() {
channel_manager()->StopAecDump();
}
-rtc::scoped_refptr<PeerConnectionInterface>
-PeerConnectionFactory::CreatePeerConnection(
- const PeerConnectionInterface::RTCConfiguration& configuration,
- std::unique_ptr<cricket::PortAllocator> allocator,
- std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
- PeerConnectionObserver* observer) {
- // Convert the legacy API into the new dependency structure.
- PeerConnectionDependencies dependencies(observer);
- dependencies.allocator = std::move(allocator);
- dependencies.cert_generator = std::move(cert_generator);
- // Pass that into the new API.
- return CreatePeerConnection(configuration, std::move(dependencies));
-}
-
-rtc::scoped_refptr<PeerConnectionInterface>
-PeerConnectionFactory::CreatePeerConnection(
- const PeerConnectionInterface::RTCConfiguration& configuration,
- PeerConnectionDependencies dependencies) {
- auto result =
- CreatePeerConnectionOrError(configuration, std::move(dependencies));
- if (result.ok()) {
- return result.MoveValue();
- } else {
- return nullptr;
- }
-}
-
RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>
PeerConnectionFactory::CreatePeerConnectionOrError(
const PeerConnectionInterface::RTCConfiguration& configuration,
@@ -358,7 +339,8 @@ std::unique_ptr<Call> PeerConnectionFactory::CreateCall_w(
}
call_config.trials = &trials();
-
+ call_config.rtp_transport_controller_send_factory =
+ transport_controller_send_factory_.get();
return std::unique_ptr<Call>(
context_->call_factory()->CreateCall(call_config));
}
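
The new transport_controller_send_factory_ member is filled in from PeerConnectionFactoryDependencies, falling back to the default RtpTransportControllerSendFactory when nothing is injected, and is then passed into every Call via call_config. A minimal sketch of the wiring from the application side, assuming the dependency field carries the same name as the member access above:

    #include <memory>
    #include "api/peer_connection_interface.h"
    #include "call/rtp_transport_controller_send_factory.h"

    webrtc::PeerConnectionFactoryDependencies MakeDependenciesWithSendFactory() {
      webrtc::PeerConnectionFactoryDependencies deps;
      // Threads, codec factories, call factory, etc. omitted in this sketch.
      // Injecting the default explicitly; a custom implementation of
      // RtpTransportControllerSendFactoryInterface could be supplied instead.
      deps.transport_controller_send_factory =
          std::make_unique<webrtc::RtpTransportControllerSendFactory>();
      return deps;
    }
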
diff --git a/pc/peer_connection_factory.h b/pc/peer_connection_factory.h
index d2bac7a85f..4946ec6ea2 100644
--- a/pc/peer_connection_factory.h
+++ b/pc/peer_connection_factory.h
@@ -37,6 +37,7 @@
#include "api/transport/sctp_transport_factory_interface.h"
#include "api/transport/webrtc_key_value_config.h"
#include "call/call.h"
+#include "call/rtp_transport_controller_send_factory_interface.h"
#include "p2p/base/port_allocator.h"
#include "pc/channel_manager.h"
#include "pc/connection_context.h"
@@ -66,16 +67,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
void SetOptions(const Options& options) override;
- rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
- const PeerConnectionInterface::RTCConfiguration& configuration,
- std::unique_ptr<cricket::PortAllocator> allocator,
- std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
- PeerConnectionObserver* observer) override;
-
- rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
- const PeerConnectionInterface::RTCConfiguration& configuration,
- PeerConnectionDependencies dependencies) override;
-
RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>
CreatePeerConnectionOrError(
const PeerConnectionInterface::RTCConfiguration& configuration,
@@ -159,6 +150,8 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
std::unique_ptr<NetworkControllerFactoryInterface>
injected_network_controller_factory_;
std::unique_ptr<NetEqFactory> neteq_factory_;
+ const std::unique_ptr<RtpTransportControllerSendFactoryInterface>
+ transport_controller_send_factory_;
};
} // namespace webrtc
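
With the legacy CreatePeerConnection() overloads removed from the factory here (and from its proxy below), callers go through CreatePeerConnectionOrError() directly. A minimal sketch mirroring the ok()/MoveValue() handling of the deleted wrapper:

    #include <utility>
    #include "api/peer_connection_interface.h"

    rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnectionOrNull(
        webrtc::PeerConnectionFactoryInterface* factory,
        const webrtc::PeerConnectionInterface::RTCConfiguration& config,
        webrtc::PeerConnectionDependencies dependencies) {
      auto result =
          factory->CreatePeerConnectionOrError(config, std::move(dependencies));
      if (!result.ok()) {
        return nullptr;
      }
      return result.MoveValue();
    }
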
diff --git a/api/peer_connection_factory_proxy.h b/pc/peer_connection_factory_proxy.h
index 0eb2b391f4..59e373db7b 100644
--- a/api/peer_connection_factory_proxy.h
+++ b/pc/peer_connection_factory_proxy.h
@@ -8,33 +8,23 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef API_PEER_CONNECTION_FACTORY_PROXY_H_
-#define API_PEER_CONNECTION_FACTORY_PROXY_H_
+#ifndef PC_PEER_CONNECTION_FACTORY_PROXY_H_
+#define PC_PEER_CONNECTION_FACTORY_PROXY_H_
#include <memory>
#include <string>
#include <utility>
#include "api/peer_connection_interface.h"
-#include "api/proxy.h"
+#include "pc/proxy.h"
namespace webrtc {
-// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
-// are called on is an implementation detail.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
BEGIN_PROXY_MAP(PeerConnectionFactory)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
PROXY_METHOD1(void, SetOptions, const Options&)
-PROXY_METHOD4(rtc::scoped_refptr<PeerConnectionInterface>,
- CreatePeerConnection,
- const PeerConnectionInterface::RTCConfiguration&,
- std::unique_ptr<cricket::PortAllocator>,
- std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
- PeerConnectionObserver*)
-PROXY_METHOD2(rtc::scoped_refptr<PeerConnectionInterface>,
- CreatePeerConnection,
- const PeerConnectionInterface::RTCConfiguration&,
- PeerConnectionDependencies)
PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>,
CreatePeerConnectionOrError,
const PeerConnectionInterface::RTCConfiguration&,
@@ -61,8 +51,8 @@ PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
AudioSourceInterface*)
PROXY_SECONDARY_METHOD2(bool, StartAecDump, FILE*, int64_t)
PROXY_SECONDARY_METHOD0(void, StopAecDump)
-END_PROXY_MAP()
+END_PROXY_MAP(PeerConnectionFactory)
} // namespace webrtc
-#endif // API_PEER_CONNECTION_FACTORY_PROXY_H_
+#endif // PC_PEER_CONNECTION_FACTORY_PROXY_H_
diff --git a/pc/peer_connection_histogram_unittest.cc b/pc/peer_connection_histogram_unittest.cc
index 97fbde2f86..fa46ce9802 100644
--- a/pc/peer_connection_histogram_unittest.cc
+++ b/pc/peer_connection_histogram_unittest.cc
@@ -19,7 +19,6 @@
#include "api/jsep.h"
#include "api/jsep_session_description.h"
#include "api/peer_connection_interface.h"
-#include "api/peer_connection_proxy.h"
#include "api/rtc_error.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/default_task_queue_factory.h"
@@ -29,6 +28,7 @@
#include "p2p/client/basic_port_allocator.h"
#include "pc/peer_connection.h"
#include "pc/peer_connection_factory.h"
+#include "pc/peer_connection_proxy.h"
#include "pc/peer_connection_wrapper.h"
#include "pc/sdp_utils.h"
#include "pc/test/mock_peer_connection_observers.h"
diff --git a/pc/peer_connection_ice_unittest.cc b/pc/peer_connection_ice_unittest.cc
index b9b8966447..7971547ffa 100644
--- a/pc/peer_connection_ice_unittest.cc
+++ b/pc/peer_connection_ice_unittest.cc
@@ -23,10 +23,10 @@
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
-#include "api/peer_connection_proxy.h"
#include "api/uma_metrics.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
+#include "pc/peer_connection_proxy.h"
#include "pc/test/fake_audio_capture_module.h"
#include "pc/test/mock_peer_connection_observers.h"
#include "rtc_base/fake_network.h"
@@ -768,8 +768,8 @@ TEST_P(PeerConnectionIceTest,
ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
// Chain an operation that will block AddIceCandidate() from executing.
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> answer_observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto answer_observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
callee->pc()->CreateAnswer(answer_observer, RTCOfferAnswerOptions());
auto jsep_candidate =
@@ -816,8 +816,8 @@ TEST_P(PeerConnectionIceTest,
ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
// Chain an operation that will block AddIceCandidate() from executing.
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> answer_observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto answer_observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
callee->pc()->CreateAnswer(answer_observer, RTCOfferAnswerOptions());
auto jsep_candidate =
diff --git a/pc/peer_connection_integrationtest.cc b/pc/peer_connection_integrationtest.cc
index b2401022c4..53b674d851 100644
--- a/pc/peer_connection_integrationtest.cc
+++ b/pc/peer_connection_integrationtest.cc
@@ -2833,8 +2833,7 @@ TEST_P(PeerConnectionIntegrationTest, IceTransportFactoryUsedForConnections) {
/*reset_decoder_factory=*/false);
ASSERT_TRUE(wrapper);
wrapper->CreateDataChannel();
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
wrapper->pc()->SetLocalDescription(observer,
wrapper->CreateOfferAndWait().release());
}
@@ -3319,8 +3318,7 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
SetSignalIceCandidates(false); // Workaround candidate outrace sdp.
caller()->AddVideoTrack();
callee()->AddVideoTrack();
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
callee()->pc()->SetLocalDescription(observer,
callee()->CreateOfferAndWait().release());
EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
@@ -3337,15 +3335,15 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> sld_observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto sld_observer =
+ rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
callee()->pc()->SetLocalDescription(sld_observer,
callee()->CreateOfferAndWait().release());
EXPECT_TRUE_WAIT(sld_observer->called(), kDefaultTimeout);
EXPECT_EQ(sld_observer->error(), "");
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> srd_observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto srd_observer =
+ rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
callee()->pc()->SetRemoteDescription(
srd_observer, caller()->CreateOfferAndWait().release());
EXPECT_TRUE_WAIT(srd_observer->called(), kDefaultTimeout);
@@ -3641,6 +3639,20 @@ TEST_P(PeerConnectionIntegrationInteropTest,
ASSERT_TRUE(ExpectNewFrames(media_expectations));
}
+TEST_P(PeerConnectionIntegrationTest, NewTracksDoNotCauseNewCandidates) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+ caller()->ExpectCandidates(0);
+ callee()->ExpectCandidates(0);
+ caller()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+}
+
INSTANTIATE_TEST_SUITE_P(
PeerConnectionIntegrationTest,
PeerConnectionIntegrationInteropTest,
diff --git a/pc/peer_connection_interface_unittest.cc b/pc/peer_connection_interface_unittest.cc
index d454dd2a05..fcea842b22 100644
--- a/pc/peer_connection_interface_unittest.cc
+++ b/pc/peer_connection_interface_unittest.cc
@@ -661,7 +661,7 @@ class PeerConnectionFactoryForTest : public webrtc::PeerConnectionFactory {
dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>(
dependencies.task_queue_factory.get());
- return new rtc::RefCountedObject<PeerConnectionFactoryForTest>(
+ return rtc::make_ref_counted<PeerConnectionFactoryForTest>(
std::move(dependencies));
}
@@ -879,8 +879,8 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test {
bool DoCreateOfferAnswer(std::unique_ptr<SessionDescriptionInterface>* desc,
const RTCOfferAnswerOptions* options,
bool offer) {
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
if (offer) {
pc_->CreateOffer(observer, options ? *options : RTCOfferAnswerOptions());
} else {
@@ -904,8 +904,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test {
bool DoSetSessionDescription(
std::unique_ptr<SessionDescriptionInterface> desc,
bool local) {
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
if (local) {
pc_->SetLocalDescription(observer, desc.release());
} else {
@@ -931,8 +930,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test {
// It does not verify the values in the StatReports since a RTCP packet might
// be required.
bool DoGetStats(MediaStreamTrackInterface* track) {
- rtc::scoped_refptr<MockStatsObserver> observer(
- new rtc::RefCountedObject<MockStatsObserver>());
+ auto observer = rtc::make_ref_counted<MockStatsObserver>();
if (!pc_->GetStats(observer, track,
PeerConnectionInterface::kStatsOutputLevelStandard))
return false;
@@ -942,8 +940,8 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test {
// Call the standards-compliant GetStats function.
bool DoGetRTCStats() {
- rtc::scoped_refptr<webrtc::MockRTCStatsCollectorCallback> callback(
- new rtc::RefCountedObject<webrtc::MockRTCStatsCollectorCallback>());
+ auto callback =
+ rtc::make_ref_counted<webrtc::MockRTCStatsCollectorCallback>();
pc_->GetStats(callback);
EXPECT_TRUE_WAIT(callback->called(), kTimeout);
return callback->called();
@@ -1198,8 +1196,8 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test {
std::unique_ptr<SessionDescriptionInterface> CreateOfferWithOptions(
const RTCOfferAnswerOptions& offer_answer_options) {
RTC_DCHECK(pc_);
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
pc_->CreateOffer(observer, offer_answer_options);
EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
return observer->MoveDescription();
diff --git a/pc/peer_connection_media_unittest.cc b/pc/peer_connection_media_unittest.cc
index f078144d4f..d5d0b926b7 100644
--- a/pc/peer_connection_media_unittest.cc
+++ b/pc/peer_connection_media_unittest.cc
@@ -848,8 +848,9 @@ bool HasAnyComfortNoiseCodecs(const cricket::SessionDescription* desc) {
TEST_P(PeerConnectionMediaTest,
CreateOfferWithNoVoiceActivityDetectionIncludesNoComfortNoiseCodecs) {
- auto caller = CreatePeerConnectionWithAudioVideo();
- AddComfortNoiseCodecsToSend(caller->media_engine());
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ AddComfortNoiseCodecsToSend(fake_engine.get());
+ auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine));
RTCOfferAnswerOptions options;
options.voice_activity_detection = false;
@@ -859,11 +860,47 @@ TEST_P(PeerConnectionMediaTest,
}
TEST_P(PeerConnectionMediaTest,
- CreateAnswerWithNoVoiceActivityDetectionIncludesNoComfortNoiseCodecs) {
+ CreateOfferWithVoiceActivityDetectionIncludesComfortNoiseCodecs) {
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ AddComfortNoiseCodecsToSend(fake_engine.get());
+ auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine));
+
+ RTCOfferAnswerOptions options;
+ options.voice_activity_detection = true;
+ auto offer = caller->CreateOffer(options);
+
+ EXPECT_TRUE(HasAnyComfortNoiseCodecs(offer->description()));
+}
+
+TEST_P(PeerConnectionMediaTest,
+ CreateAnswerWithVoiceActivityDetectionIncludesNoComfortNoiseCodecs) {
auto caller = CreatePeerConnectionWithAudioVideo();
- AddComfortNoiseCodecsToSend(caller->media_engine());
- auto callee = CreatePeerConnectionWithAudioVideo();
- AddComfortNoiseCodecsToSend(callee->media_engine());
+
+ auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
+ AddComfortNoiseCodecsToSend(callee_fake_engine.get());
+ auto callee =
+ CreatePeerConnectionWithAudioVideo(std::move(callee_fake_engine));
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ RTCOfferAnswerOptions options;
+ options.voice_activity_detection = true;
+ auto answer = callee->CreateAnswer(options);
+
+ EXPECT_FALSE(HasAnyComfortNoiseCodecs(answer->description()));
+}
+
+TEST_P(PeerConnectionMediaTest,
+ CreateAnswerWithNoVoiceActivityDetectionIncludesNoComfortNoiseCodecs) {
+ auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
+ AddComfortNoiseCodecsToSend(caller_fake_engine.get());
+ auto caller =
+ CreatePeerConnectionWithAudioVideo(std::move(caller_fake_engine));
+
+ auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
+ AddComfortNoiseCodecsToSend(callee_fake_engine.get());
+ auto callee =
+ CreatePeerConnectionWithAudioVideo(std::move(callee_fake_engine));
ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
@@ -1736,6 +1773,26 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan,
EXPECT_TRUE(CompareCodecs(video_codecs_vpx_reverse, recv_codecs));
}
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesVoiceActivityDetection) {
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ AddComfortNoiseCodecsToSend(fake_engine.get());
+ auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine));
+
+ RTCOfferAnswerOptions options;
+ auto offer = caller->CreateOffer(options);
+ EXPECT_TRUE(HasAnyComfortNoiseCodecs(offer->description()));
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ options.voice_activity_detection = false;
+ offer = caller->CreateOffer(options);
+ EXPECT_FALSE(HasAnyComfortNoiseCodecs(offer->description()));
+}
+
INSTANTIATE_TEST_SUITE_P(PeerConnectionMediaTest,
PeerConnectionMediaTest,
Values(SdpSemantics::kPlanB,
diff --git a/api/peer_connection_proxy.h b/pc/peer_connection_proxy.h
index cc9df10eed..7601c9d053 100644
--- a/api/peer_connection_proxy.h
+++ b/pc/peer_connection_proxy.h
@@ -8,23 +8,23 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef API_PEER_CONNECTION_PROXY_H_
-#define API_PEER_CONNECTION_PROXY_H_
+#ifndef PC_PEER_CONNECTION_PROXY_H_
+#define PC_PEER_CONNECTION_PROXY_H_
#include <memory>
#include <string>
#include <vector>
#include "api/peer_connection_interface.h"
-#include "api/proxy.h"
+#include "pc/proxy.h"
namespace webrtc {
// PeerConnection proxy objects will be constructed with two thread pointers,
// signaling and network. The proxy macros don't have 'network' specific macros
// and support for a secondary thread is provided via 'SECONDARY' macros.
-// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
-// are called on is an implementation detail.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
BEGIN_PROXY_MAP(PeerConnection)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, local_streams)
@@ -76,8 +76,8 @@ PROXY_METHOD2(void,
rtc::scoped_refptr<RtpReceiverInterface>,
rtc::scoped_refptr<RTCStatsCollectorCallback>)
PROXY_METHOD0(void, ClearStatsCache)
-PROXY_METHOD2(rtc::scoped_refptr<DataChannelInterface>,
- CreateDataChannel,
+PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>,
+ CreateDataChannelOrError,
const std::string&,
const DataChannelInit*)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description)
@@ -157,8 +157,8 @@ PROXY_METHOD1(bool, StartRtcEventLog, std::unique_ptr<RtcEventLogOutput>)
PROXY_METHOD0(void, StopRtcEventLog)
PROXY_METHOD0(void, Close)
BYPASS_PROXY_CONSTMETHOD0(rtc::Thread*, signaling_thread)
-END_PROXY_MAP()
+END_PROXY_MAP(PeerConnection)
} // namespace webrtc
-#endif // API_PEER_CONNECTION_PROXY_H_
+#endif // PC_PEER_CONNECTION_PROXY_H_
diff --git a/pc/peer_connection_rampup_tests.cc b/pc/peer_connection_rampup_tests.cc
index cf3b0a27f5..d50d488125 100644
--- a/pc/peer_connection_rampup_tests.cc
+++ b/pc/peer_connection_rampup_tests.cc
@@ -120,7 +120,7 @@ class PeerConnectionWrapperForRampUpTest : public PeerConnectionWrapper {
FrameGeneratorCapturerVideoTrackSource::Config config,
Clock* clock) {
video_track_sources_.emplace_back(
- new rtc::RefCountedObject<FrameGeneratorCapturerVideoTrackSource>(
+ rtc::make_ref_counted<FrameGeneratorCapturerVideoTrackSource>(
config, clock, /*is_screencast=*/false));
video_track_sources_.back()->Start();
return rtc::scoped_refptr<VideoTrackInterface>(
@@ -192,14 +192,14 @@ class PeerConnectionRampUpTest : public ::testing::Test {
dependencies.tls_cert_verifier =
std::make_unique<rtc::TestCertificateVerifier>();
- auto pc =
- pc_factory_->CreatePeerConnection(config, std::move(dependencies));
- if (!pc) {
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ config, std::move(dependencies));
+ if (!result.ok()) {
return nullptr;
}
return std::make_unique<PeerConnectionWrapperForRampUpTest>(
- pc_factory_, pc, std::move(observer));
+ pc_factory_, result.MoveValue(), std::move(observer));
}
void SetupOneWayCall() {
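CreatePeerConnectionOrError() (and, further down, CreateDataChannelOrError()) reports failure through an RTCErrorOr<T> rather than a null pointer. A sketch of the consuming pattern, assuming the usual api/rtc_error.h and rtc_base/logging.h includes and the webrtc namespace:

rtc::scoped_refptr<PeerConnectionInterface> CreateOrNull(
    PeerConnectionFactoryInterface* factory,
    const PeerConnectionInterface::RTCConfiguration& config,
    PeerConnectionDependencies dependencies) {
  RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>> result =
      factory->CreatePeerConnectionOrError(config, std::move(dependencies));
  if (!result.ok()) {
    // error() carries an RTCErrorType plus a human-readable message.
    RTC_LOG(LS_ERROR) << "CreatePeerConnectionOrError failed: "
                      << ToString(result.error().type()) << " "
                      << result.error().message();
    return nullptr;
  }
  // MoveValue() releases the wrapped scoped_refptr exactly once.
  return result.MoveValue();
}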
diff --git a/pc/peer_connection_rtp_unittest.cc b/pc/peer_connection_rtp_unittest.cc
index 54287e7b3b..2822854a2d 100644
--- a/pc/peer_connection_rtp_unittest.cc
+++ b/pc/peer_connection_rtp_unittest.cc
@@ -919,7 +919,7 @@ TEST_P(PeerConnectionRtpTest,
auto callee = CreatePeerConnection();
rtc::scoped_refptr<webrtc::MockSetSessionDescriptionObserver> observer =
- new rtc::RefCountedObject<webrtc::MockSetSessionDescriptionObserver>();
+ rtc::make_ref_counted<webrtc::MockSetSessionDescriptionObserver>();
auto offer = caller->CreateOfferAndSetAsLocal();
callee->pc()->SetRemoteDescription(observer, offer.release());
diff --git a/pc/peer_connection_signaling_unittest.cc b/pc/peer_connection_signaling_unittest.cc
index 605a1338c6..1c94570ec7 100644
--- a/pc/peer_connection_signaling_unittest.cc
+++ b/pc/peer_connection_signaling_unittest.cc
@@ -11,6 +11,7 @@
// This file contains tests that check the PeerConnection's signaling state
// machine, as well as tests that check basic, media-agnostic aspects of SDP.
+#include <algorithm>
#include <memory>
#include <tuple>
@@ -18,10 +19,10 @@
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
#include "api/jsep_session_description.h"
-#include "api/peer_connection_proxy.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "pc/peer_connection.h"
+#include "pc/peer_connection_proxy.h"
#include "pc/peer_connection_wrapper.h"
#include "pc/sdp_utils.h"
#include "pc/webrtc_sdp.h"
@@ -537,8 +538,7 @@ TEST_P(PeerConnectionSignalingTest, CreateOffersAndShutdown) {
rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observers[100];
for (auto& observer : observers) {
- observer =
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>();
+ observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
caller->pc()->CreateOffer(observer, options);
}
@@ -559,8 +559,7 @@ TEST_P(PeerConnectionSignalingTest, CreateOffersAndShutdown) {
// the WebRtcSessionDescriptionFactory is responsible for it.
TEST_P(PeerConnectionSignalingTest, CloseCreateOfferAndShutdown) {
auto caller = CreatePeerConnection();
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer =
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>();
+ auto observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
caller->pc()->Close();
caller->pc()->CreateOffer(observer, RTCOfferAnswerOptions());
caller.reset(nullptr);
@@ -687,8 +686,8 @@ TEST_P(PeerConnectionSignalingTest, CreateOfferBlocksSetRemoteDescription) {
auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
EXPECT_EQ(0u, callee->pc()->GetReceivers().size());
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> offer_observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto offer_observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
// Synchronously invoke CreateOffer() and SetRemoteDescription(). The
// SetRemoteDescription() operation should be chained to be executed
// asynchronously, when CreateOffer() completes.
@@ -901,6 +900,137 @@ TEST_P(PeerConnectionSignalingTest, UnsupportedContentType) {
EXPECT_TRUE(caller->SetLocalDescription(std::move(offer)));
}
+TEST_P(PeerConnectionSignalingTest, ReceiveFlexFec) {
+ auto caller = CreatePeerConnection();
+
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 8403615332048243445 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 102 122\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:IZeV\r\n"
+ "a=ice-pwd:uaZhQD4rYM/Tta2qWBT1Bbt4\r\n"
+ "a=ice-options:trickle\r\n"
+ "a=fingerprint:sha-256 "
+ "D8:6C:3D:FA:23:E2:2C:63:11:2D:D0:86:BE:C4:D0:65:F9:42:F7:1C:06:04:27:E6:"
+ "1C:2C:74:01:8D:50:67:23\r\n"
+ "a=setup:actpass\r\n"
+ "a=mid:0\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:stream track\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:102 VP8/90000\r\n"
+ "a=rtcp-fb:102 goog-remb\r\n"
+ "a=rtcp-fb:102 transport-cc\r\n"
+ "a=rtcp-fb:102 ccm fir\r\n"
+ "a=rtcp-fb:102 nack\r\n"
+ "a=rtcp-fb:102 nack pli\r\n"
+ "a=rtpmap:122 flexfec-03/90000\r\n"
+ "a=fmtp:122 repair-window=10000000\r\n"
+ "a=ssrc-group:FEC-FR 1224551896 1953032773\r\n"
+ "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n"
+ "a=ssrc:1953032773 cname:/exJcmhSLpyu9FgV\r\n";
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
+
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description)));
+
+ auto answer = caller->CreateAnswer();
+ ASSERT_EQ(answer->description()->contents().size(), 1u);
+ ASSERT_NE(
+ answer->description()->contents()[0].media_description()->as_video(),
+ nullptr);
+ auto codecs = answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ ASSERT_EQ(codecs.size(), 2u);
+ EXPECT_EQ(codecs[1].name, "flexfec-03");
+
+ EXPECT_TRUE(caller->SetLocalDescription(std::move(answer)));
+}
+
+TEST_P(PeerConnectionSignalingTest, ReceiveFlexFecReoffer) {
+ auto caller = CreatePeerConnection();
+
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 8403615332048243445 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 102 35\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:IZeV\r\n"
+ "a=ice-pwd:uaZhQD4rYM/Tta2qWBT1Bbt4\r\n"
+ "a=ice-options:trickle\r\n"
+ "a=fingerprint:sha-256 "
+ "D8:6C:3D:FA:23:E2:2C:63:11:2D:D0:86:BE:C4:D0:65:F9:42:F7:1C:06:04:27:E6:"
+ "1C:2C:74:01:8D:50:67:23\r\n"
+ "a=setup:actpass\r\n"
+ "a=mid:0\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:stream track\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:102 VP8/90000\r\n"
+ "a=rtcp-fb:102 goog-remb\r\n"
+ "a=rtcp-fb:102 transport-cc\r\n"
+ "a=rtcp-fb:102 ccm fir\r\n"
+ "a=rtcp-fb:102 nack\r\n"
+ "a=rtcp-fb:102 nack pli\r\n"
+ "a=rtpmap:35 flexfec-03/90000\r\n"
+ "a=fmtp:35 repair-window=10000000\r\n"
+ "a=ssrc-group:FEC-FR 1224551896 1953032773\r\n"
+ "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n"
+ "a=ssrc:1953032773 cname:/exJcmhSLpyu9FgV\r\n";
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
+
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description)));
+
+ auto answer = caller->CreateAnswer();
+ ASSERT_EQ(answer->description()->contents().size(), 1u);
+ ASSERT_NE(
+ answer->description()->contents()[0].media_description()->as_video(),
+ nullptr);
+ auto codecs = answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ ASSERT_EQ(codecs.size(), 2u);
+ EXPECT_EQ(codecs[1].name, "flexfec-03");
+ EXPECT_EQ(codecs[1].id, 35);
+
+ EXPECT_TRUE(caller->SetLocalDescription(std::move(answer)));
+
+ // This generates a collision for AV1 which needs to be remapped.
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+ auto offer_codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ auto flexfec_it = std::find_if(
+ offer_codecs.begin(), offer_codecs.end(),
+ [](const cricket::Codec& codec) { return codec.name == "flexfec-03"; });
+ ASSERT_EQ(flexfec_it->id, 35);
+ auto av1_it = std::find_if(
+ offer_codecs.begin(), offer_codecs.end(),
+ [](const cricket::Codec& codec) { return codec.name == "AV1X"; });
+ if (av1_it != offer_codecs.end()) {
+ ASSERT_NE(av1_it->id, 35);
+ }
+}
+
INSTANTIATE_TEST_SUITE_P(PeerConnectionSignalingTest,
PeerConnectionSignalingTest,
Values(SdpSemantics::kPlanB,
@@ -929,7 +1059,7 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest,
// waiting for it would not ensure synchronicity.
RTC_DCHECK(!caller->pc()->GetTransceivers()[0]->mid().has_value());
caller->pc()->SetLocalDescription(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>(),
+ rtc::make_ref_counted<MockSetSessionDescriptionObserver>(),
offer.release());
EXPECT_TRUE(caller->pc()->GetTransceivers()[0]->mid().has_value());
}
@@ -957,9 +1087,8 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest,
// This offer will cause transceiver mids to get assigned.
auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
- rtc::scoped_refptr<ExecuteFunctionOnCreateSessionDescriptionObserver>
- offer_observer(new rtc::RefCountedObject<
- ExecuteFunctionOnCreateSessionDescriptionObserver>(
+ auto offer_observer =
+ rtc::make_ref_counted<ExecuteFunctionOnCreateSessionDescriptionObserver>(
[pc = caller->pc()](SessionDescriptionInterface* desc) {
// By not waiting for the observer's callback we can verify that the
// operation executed immediately.
@@ -968,7 +1097,7 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest,
new rtc::RefCountedObject<MockSetSessionDescriptionObserver>(),
desc);
EXPECT_TRUE(pc->GetTransceivers()[0]->mid().has_value());
- }));
+ });
caller->pc()->CreateOffer(offer_observer, RTCOfferAnswerOptions());
EXPECT_TRUE_WAIT(offer_observer->was_called(), kWaitTimeout);
}
@@ -1055,8 +1184,7 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest,
caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit());
EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer =
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>();
+ auto observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
caller->pc()->CreateOffer(observer, RTCOfferAnswerOptions());
// For this test to work, the operation has to be pending, i.e. the observer
// has not yet been invoked.
diff --git a/pc/peer_connection_simulcast_unittest.cc b/pc/peer_connection_simulcast_unittest.cc
index 8822a980f7..31385754b7 100644
--- a/pc/peer_connection_simulcast_unittest.cc
+++ b/pc/peer_connection_simulcast_unittest.cc
@@ -157,9 +157,10 @@ class PeerConnectionSimulcastTests : public ::testing::Test {
rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
PeerConnectionWrapper* pc,
- const std::vector<SimulcastLayer>& layers) {
+ const std::vector<SimulcastLayer>& layers,
+ cricket::MediaType media_type = cricket::MEDIA_TYPE_VIDEO) {
auto init = CreateTransceiverInit(layers);
- return pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+ return pc->AddTransceiver(media_type, init);
}
SimulcastDescription RemoveSimulcast(SessionDescriptionInterface* sd) {
@@ -556,6 +557,25 @@ TEST_F(PeerConnectionSimulcastTests, NegotiationDoesNotHaveRidExtension) {
ValidateTransceiverParameters(transceiver, expected_layers);
}
+TEST_F(PeerConnectionSimulcastTests, SimulcastAudioRejected) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3", "4"}, true);
+ auto transceiver =
+ AddTransceiver(local.get(), layers, cricket::MEDIA_TYPE_AUDIO);
+ // Should only have the first layer.
+ auto parameters = transceiver->sender()->GetParameters();
+ EXPECT_EQ(1u, parameters.encodings.size());
+ EXPECT_THAT(parameters.encodings,
+ ElementsAre(Field("rid", &RtpEncodingParameters::rid, Eq(""))));
+ ExchangeOfferAnswer(local.get(), remote.get(), {});
+ // Should still have a single layer after negotiation.
+ parameters = transceiver->sender()->GetParameters();
+ EXPECT_EQ(1u, parameters.encodings.size());
+ EXPECT_THAT(parameters.encodings,
+ ElementsAre(Field("rid", &RtpEncodingParameters::rid, Eq(""))));
+}
+
#if RTC_METRICS_ENABLED
//
// Checks the logged metrics when simulcast is not used.
diff --git a/pc/peer_connection_wrapper.cc b/pc/peer_connection_wrapper.cc
index 65384ee447..3b4d28f0d9 100644
--- a/pc/peer_connection_wrapper.cc
+++ b/pc/peer_connection_wrapper.cc
@@ -136,8 +136,7 @@ PeerConnectionWrapper::CreateRollback() {
std::unique_ptr<SessionDescriptionInterface> PeerConnectionWrapper::CreateSdp(
rtc::FunctionView<void(CreateSessionDescriptionObserver*)> fn,
std::string* error_out) {
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
fn(observer);
EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout);
if (error_out && !observer->result()) {
@@ -182,8 +181,7 @@ bool PeerConnectionWrapper::SetRemoteDescription(
bool PeerConnectionWrapper::SetSdp(
rtc::FunctionView<void(SetSessionDescriptionObserver*)> fn,
std::string* error_out) {
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
fn(observer);
EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout);
if (error_out && !observer->result()) {
@@ -308,7 +306,14 @@ rtc::scoped_refptr<RtpSenderInterface> PeerConnectionWrapper::AddVideoTrack(
rtc::scoped_refptr<DataChannelInterface>
PeerConnectionWrapper::CreateDataChannel(const std::string& label) {
- return pc()->CreateDataChannel(label, nullptr);
+ auto result = pc()->CreateDataChannelOrError(label, nullptr);
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "CreateDataChannel failed: "
+ << ToString(result.error().type()) << " "
+ << result.error().message();
+ return nullptr;
+ }
+ return result.MoveValue();
}
PeerConnectionInterface::SignalingState
@@ -326,8 +331,7 @@ bool PeerConnectionWrapper::IsIceConnected() {
rtc::scoped_refptr<const webrtc::RTCStatsReport>
PeerConnectionWrapper::GetStats() {
- rtc::scoped_refptr<webrtc::MockRTCStatsCollectorCallback> callback(
- new rtc::RefCountedObject<webrtc::MockRTCStatsCollectorCallback>());
+ auto callback = rtc::make_ref_counted<MockRTCStatsCollectorCallback>();
pc()->GetStats(callback);
EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout);
return callback->report();
diff --git a/pc/proxy.cc b/pc/proxy.cc
new file mode 100644
index 0000000000..5f4e0b8832
--- /dev/null
+++ b/pc/proxy.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/proxy.h"
+
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+namespace proxy_internal {
+ScopedTrace::ScopedTrace(const char* class_and_method_name)
+ : class_and_method_name_(class_and_method_name) {
+ TRACE_EVENT_BEGIN0("webrtc", class_and_method_name_);
+}
+ScopedTrace::~ScopedTrace() {
+ TRACE_EVENT_END0("webrtc", class_and_method_name_);
+}
+} // namespace proxy_internal
+} // namespace webrtc
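ScopedTrace is a plain RAII guard: the constructor opens a "webrtc" trace slice named after the proxied method and the destructor closes it, so every return path ends the event. A hypothetical hand-written use (in practice the guard is only emitted by the TRACE_BOILERPLATE macro in pc/proxy.h below):

void SomeProxyMethod() {
  // Emits TRACE_EVENT_BEGIN0("webrtc", "FooProxy::Bar") on entry...
  webrtc::proxy_internal::ScopedTrace scoped_trace("FooProxy::Bar");
  // ... proxied work happens here ...
}  // ...and the destructor emits the matching TRACE_EVENT_END0 on any return.

Note that the constructor only stores the char pointer, so the name must outlive the guard; that is why the macro builds it as a static compile-time string.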
diff --git a/api/proxy.h b/pc/proxy.h
index 3be9f93764..565ae80175 100644
--- a/api/proxy.h
+++ b/pc/proxy.h
@@ -56,8 +56,8 @@
// The variant defined with BEGIN_OWNED_PROXY_MAP does not use
// refcounting, and instead just takes ownership of the object being proxied.
-#ifndef API_PROXY_H_
-#define API_PROXY_H_
+#ifndef PC_PROXY_H_
+#define PC_PROXY_H_
#include <memory>
#include <string>
@@ -71,14 +71,31 @@
#include "rtc_base/event.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/string_utils.h"
#include "rtc_base/system/rtc_export.h"
#include "rtc_base/thread.h"
+#if !defined(RTC_DISABLE_PROXY_TRACE_EVENTS) && !defined(WEBRTC_CHROMIUM_BUILD)
+#define RTC_DISABLE_PROXY_TRACE_EVENTS
+#endif
+
namespace rtc {
class Location;
}
namespace webrtc {
+namespace proxy_internal {
+
+// Class for tracing the lifetime of MethodCall::Marshal.
+class ScopedTrace {
+ public:
+ explicit ScopedTrace(const char* class_and_method_name);
+ ~ScopedTrace();
+
+ private:
+ const char* const class_and_method_name_;
+};
+} // namespace proxy_internal
template <typename R>
class ReturnType {
@@ -181,6 +198,9 @@ class ConstMethodCall : public QueuedTask {
rtc::Event event_;
};
+#define PROXY_STRINGIZE_IMPL(x) #x
+#define PROXY_STRINGIZE(x) PROXY_STRINGIZE_IMPL(x)
+
// Helper macros to reduce code duplication.
#define PROXY_MAP_BOILERPLATE(c) \
template <class INTERNAL_CLASS> \
@@ -189,6 +209,7 @@ class ConstMethodCall : public QueuedTask {
template <class INTERNAL_CLASS> \
class c##ProxyWithInternal : public c##Interface { \
protected: \
+ static constexpr char proxy_name_[] = #c "Proxy"; \
typedef c##Interface C; \
\
public: \
@@ -198,8 +219,10 @@ class ConstMethodCall : public QueuedTask {
// clang-format off
// clang-format would put the semicolon alone,
// leading to a presubmit error (cpplint.py)
-#define END_PROXY_MAP() \
- };
+#define END_PROXY_MAP(c) \
+ }; \
+ template <class INTERNAL_CLASS> \
+ constexpr char c##ProxyWithInternal<INTERNAL_CLASS>::proxy_name_[];
// clang-format on
#define PRIMARY_PROXY_MAP_BOILERPLATE(c) \
@@ -253,26 +276,26 @@ class ConstMethodCall : public QueuedTask {
void DestroyInternal() { delete c_; } \
INTERNAL_CLASS* c_;
-#define BEGIN_PRIMARY_PROXY_MAP(c) \
- PROXY_MAP_BOILERPLATE(c) \
- PRIMARY_PROXY_MAP_BOILERPLATE(c) \
- REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \
- public: \
- static rtc::scoped_refptr<c##ProxyWithInternal> Create( \
- rtc::Thread* primary_thread, INTERNAL_CLASS* c) { \
- return new rtc::RefCountedObject<c##ProxyWithInternal>(primary_thread, c); \
+#define BEGIN_PRIMARY_PROXY_MAP(c) \
+ PROXY_MAP_BOILERPLATE(c) \
+ PRIMARY_PROXY_MAP_BOILERPLATE(c) \
+ REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \
+ public: \
+ static rtc::scoped_refptr<c##ProxyWithInternal> Create( \
+ rtc::Thread* primary_thread, INTERNAL_CLASS* c) { \
+ return rtc::make_ref_counted<c##ProxyWithInternal>(primary_thread, c); \
}
-#define BEGIN_PROXY_MAP(c) \
- PROXY_MAP_BOILERPLATE(c) \
- SECONDARY_PROXY_MAP_BOILERPLATE(c) \
- REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \
- public: \
- static rtc::scoped_refptr<c##ProxyWithInternal> Create( \
- rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \
- INTERNAL_CLASS* c) { \
- return new rtc::RefCountedObject<c##ProxyWithInternal>( \
- primary_thread, secondary_thread, c); \
+#define BEGIN_PROXY_MAP(c) \
+ PROXY_MAP_BOILERPLATE(c) \
+ SECONDARY_PROXY_MAP_BOILERPLATE(c) \
+ REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \
+ public: \
+ static rtc::scoped_refptr<c##ProxyWithInternal> Create( \
+ rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \
+ INTERNAL_CLASS* c) { \
+ return rtc::make_ref_counted<c##ProxyWithInternal>(primary_thread, \
+ secondary_thread, c); \
}
#define BEGIN_OWNED_PROXY_MAP(c) \
@@ -299,32 +322,51 @@ class ConstMethodCall : public QueuedTask {
\
public: // NOLINTNEXTLINE
+#if defined(RTC_DISABLE_PROXY_TRACE_EVENTS)
+#define TRACE_BOILERPLATE(method) \
+ do { \
+ } while (0)
+#else // if defined(RTC_DISABLE_PROXY_TRACE_EVENTS)
+#define TRACE_BOILERPLATE(method) \
+ static constexpr auto class_and_method_name = \
+ rtc::MakeCompileTimeString(proxy_name_) \
+ .Concat(rtc::MakeCompileTimeString("::")) \
+ .Concat(rtc::MakeCompileTimeString(#method)); \
+ proxy_internal::ScopedTrace scoped_trace(class_and_method_name.string)
+
+#endif // if defined(RTC_DISABLE_PROXY_TRACE_EVENTS)
+
#define PROXY_METHOD0(r, method) \
r method() override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r> call(c_, &C::method); \
return call.Marshal(RTC_FROM_HERE, primary_thread_); \
}
#define PROXY_CONSTMETHOD0(r, method) \
r method() const override { \
+ TRACE_BOILERPLATE(method); \
ConstMethodCall<C, r> call(c_, &C::method); \
return call.Marshal(RTC_FROM_HERE, primary_thread_); \
}
#define PROXY_METHOD1(r, method, t1) \
r method(t1 a1) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1> call(c_, &C::method, std::move(a1)); \
return call.Marshal(RTC_FROM_HERE, primary_thread_); \
}
#define PROXY_CONSTMETHOD1(r, method, t1) \
r method(t1 a1) const override { \
+ TRACE_BOILERPLATE(method); \
ConstMethodCall<C, r, t1> call(c_, &C::method, std::move(a1)); \
return call.Marshal(RTC_FROM_HERE, primary_thread_); \
}
#define PROXY_METHOD2(r, method, t1, t2) \
r method(t1 a1, t2 a2) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1, t2> call(c_, &C::method, std::move(a1), \
std::move(a2)); \
return call.Marshal(RTC_FROM_HERE, primary_thread_); \
@@ -332,6 +374,7 @@ class ConstMethodCall : public QueuedTask {
#define PROXY_METHOD3(r, method, t1, t2, t3) \
r method(t1 a1, t2 a2, t3 a3) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1, t2, t3> call(c_, &C::method, std::move(a1), \
std::move(a2), std::move(a3)); \
return call.Marshal(RTC_FROM_HERE, primary_thread_); \
@@ -339,6 +382,7 @@ class ConstMethodCall : public QueuedTask {
#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \
r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1, t2, t3, t4> call(c_, &C::method, std::move(a1), \
std::move(a2), std::move(a3), \
std::move(a4)); \
@@ -347,6 +391,7 @@ class ConstMethodCall : public QueuedTask {
#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \
r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1, t2, t3, t4, t5> call(c_, &C::method, std::move(a1), \
std::move(a2), std::move(a3), \
std::move(a4), std::move(a5)); \
@@ -356,30 +401,35 @@ class ConstMethodCall : public QueuedTask {
// Define methods which should be invoked on the secondary thread.
#define PROXY_SECONDARY_METHOD0(r, method) \
r method() override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r> call(c_, &C::method); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
}
#define PROXY_SECONDARY_CONSTMETHOD0(r, method) \
r method() const override { \
+ TRACE_BOILERPLATE(method); \
ConstMethodCall<C, r> call(c_, &C::method); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
}
#define PROXY_SECONDARY_METHOD1(r, method, t1) \
r method(t1 a1) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1> call(c_, &C::method, std::move(a1)); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
}
#define PROXY_SECONDARY_CONSTMETHOD1(r, method, t1) \
r method(t1 a1) const override { \
+ TRACE_BOILERPLATE(method); \
ConstMethodCall<C, r, t1> call(c_, &C::method, std::move(a1)); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
}
#define PROXY_SECONDARY_METHOD2(r, method, t1, t2) \
r method(t1 a1, t2 a2) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1, t2> call(c_, &C::method, std::move(a1), \
std::move(a2)); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
@@ -387,6 +437,7 @@ class ConstMethodCall : public QueuedTask {
#define PROXY_SECONDARY_CONSTMETHOD2(r, method, t1, t2) \
r method(t1 a1, t2 a2) const override { \
+ TRACE_BOILERPLATE(method); \
ConstMethodCall<C, r, t1, t2> call(c_, &C::method, std::move(a1), \
std::move(a2)); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
@@ -394,6 +445,7 @@ class ConstMethodCall : public QueuedTask {
#define PROXY_SECONDARY_METHOD3(r, method, t1, t2, t3) \
r method(t1 a1, t2 a2, t3 a3) override { \
+ TRACE_BOILERPLATE(method); \
MethodCall<C, r, t1, t2, t3> call(c_, &C::method, std::move(a1), \
std::move(a2), std::move(a3)); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
@@ -401,6 +453,7 @@ class ConstMethodCall : public QueuedTask {
#define PROXY_SECONDARY_CONSTMETHOD3(r, method, t1, t2) \
r method(t1 a1, t2 a2, t3 a3) const override { \
+ TRACE_BOILERPLATE(method); \
ConstMethodCall<C, r, t1, t2, t3> call(c_, &C::method, std::move(a1), \
std::move(a2), std::move(a3)); \
return call.Marshal(RTC_FROM_HERE, secondary_thread_); \
@@ -409,15 +462,12 @@ class ConstMethodCall : public QueuedTask {
// For use when returning purely const state (set during construction).
// Use with caution. This method should only be used when the return value will
// always be the same.
-#define BYPASS_PROXY_CONSTMETHOD0(r, method) \
- r method() const override { \
- static_assert( \
- std::is_same<r, rtc::Thread*>::value || !std::is_pointer<r>::value, \
- "Type is a pointer"); \
- static_assert(!std::is_reference<r>::value, "Type is a reference"); \
- return c_->method(); \
+#define BYPASS_PROXY_CONSTMETHOD0(r, method) \
+ r method() const override { \
+ TRACE_BOILERPLATE(method); \
+ return c_->method(); \
}
} // namespace webrtc
-#endif // API_PROXY_H_
+#endif // PC_PROXY_H_
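Combined, a PROXY_METHOD0 entry inside a BEGIN_PROXY_MAP(Foo)/END_PROXY_MAP(Foo) block now expands to roughly the following (a hand-written approximation with tracing enabled, not the literal preprocessor output):

// Approximate expansion of PROXY_METHOD0(std::string, Method0) in FooProxy.
std::string Method0() override {
  // TRACE_BOILERPLATE(Method0): build "FooProxy::Method0" at compile time
  // from proxy_name_ and open an RAII trace event for the call.
  static constexpr auto class_and_method_name =
      rtc::MakeCompileTimeString("FooProxy")
          .Concat(rtc::MakeCompileTimeString("::"))
          .Concat(rtc::MakeCompileTimeString("Method0"));
  proxy_internal::ScopedTrace scoped_trace(class_and_method_name.string);
  // Marshal the call to the proxy's primary thread and block until it has run.
  MethodCall<FooInterface, std::string> call(c_, &FooInterface::Method0);
  return call.Marshal(RTC_FROM_HERE, primary_thread_);
}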
diff --git a/pc/proxy_unittest.cc b/pc/proxy_unittest.cc
index fdc7dc3e70..ef3d97eddc 100644
--- a/pc/proxy_unittest.cc
+++ b/pc/proxy_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "api/proxy.h"
+#include "pc/proxy.h"
#include <memory>
#include <string>
@@ -43,7 +43,7 @@ class FakeInterface : public rtc::RefCountInterface {
class Fake : public FakeInterface {
public:
static rtc::scoped_refptr<Fake> Create() {
- return new rtc::RefCountedObject<Fake>();
+ return rtc::make_ref_counted<Fake>();
}
// Used to verify destructor is called on the correct thread.
MOCK_METHOD(void, Destroy, ());
@@ -71,7 +71,7 @@ PROXY_CONSTMETHOD0(std::string, ConstMethod0)
PROXY_SECONDARY_METHOD1(std::string, Method1, std::string)
PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
PROXY_SECONDARY_METHOD2(std::string, Method2, std::string, std::string)
-END_PROXY_MAP()
+END_PROXY_MAP(Fake)
// Preprocessor hack to get a proxy class a name different than FakeProxy.
#define FakeProxy FakeSignalingProxy
@@ -84,7 +84,7 @@ PROXY_CONSTMETHOD0(std::string, ConstMethod0)
PROXY_METHOD1(std::string, Method1, std::string)
PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
PROXY_METHOD2(std::string, Method2, std::string, std::string)
-END_PROXY_MAP()
+END_PROXY_MAP(Fake)
#undef FakeProxy
class SignalingProxyTest : public ::testing::Test {
@@ -272,7 +272,7 @@ class Foo : public FooInterface {
BEGIN_OWNED_PROXY_MAP(Foo)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
PROXY_METHOD0(void, Bar)
-END_PROXY_MAP()
+END_PROXY_MAP(Foo)
class OwnedProxyTest : public ::testing::Test {
public:
diff --git a/pc/remote_audio_source.cc b/pc/remote_audio_source.cc
index 9e65f6781c..dc890e737c 100644
--- a/pc/remote_audio_source.cc
+++ b/pc/remote_audio_source.cc
@@ -61,7 +61,7 @@ RemoteAudioSource::RemoteAudioSource(
}
RemoteAudioSource::~RemoteAudioSource() {
- RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK_RUN_ON(main_thread_);
RTC_DCHECK(audio_observers_.empty());
if (!sinks_.empty()) {
RTC_LOG(LS_WARNING)
@@ -71,32 +71,28 @@ RemoteAudioSource::~RemoteAudioSource() {
void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel,
absl::optional<uint32_t> ssrc) {
- RTC_DCHECK_RUN_ON(main_thread_);
- RTC_DCHECK(media_channel);
+ RTC_DCHECK_RUN_ON(worker_thread_);
// Register for callbacks immediately before AddSink so that we always get
// notified when a channel goes out of scope (signaled when "AudioDataProxy"
// is destroyed).
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
- ssrc ? media_channel->SetRawAudioSink(
- *ssrc, std::make_unique<AudioDataProxy>(this))
- : media_channel->SetDefaultRawAudioSink(
- std::make_unique<AudioDataProxy>(this));
- });
+ RTC_DCHECK(media_channel);
+ ssrc ? media_channel->SetRawAudioSink(*ssrc,
+ std::make_unique<AudioDataProxy>(this))
+ : media_channel->SetDefaultRawAudioSink(
+ std::make_unique<AudioDataProxy>(this));
}
void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel,
absl::optional<uint32_t> ssrc) {
- RTC_DCHECK_RUN_ON(main_thread_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(media_channel);
-
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
- ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr)
- : media_channel->SetDefaultRawAudioSink(nullptr);
- });
+ ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr)
+ : media_channel->SetDefaultRawAudioSink(nullptr);
}
void RemoteAudioSource::SetState(SourceState new_state) {
+ RTC_DCHECK_RUN_ON(main_thread_);
if (state_ != new_state) {
state_ = new_state;
FireOnChanged();
@@ -104,12 +100,12 @@ void RemoteAudioSource::SetState(SourceState new_state) {
}
MediaSourceInterface::SourceState RemoteAudioSource::state() const {
- RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK_RUN_ON(main_thread_);
return state_;
}
bool RemoteAudioSource::remote() const {
- RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK_RUN_ON(main_thread_);
return true;
}
@@ -135,7 +131,7 @@ void RemoteAudioSource::UnregisterAudioObserver(AudioObserver* observer) {
}
void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) {
- RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK_RUN_ON(main_thread_);
RTC_DCHECK(sink);
if (state_ != MediaSourceInterface::kLive) {
@@ -149,7 +145,7 @@ void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) {
}
void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) {
- RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK_RUN_ON(main_thread_);
RTC_DCHECK(sink);
MutexLock lock(&sink_lock_);
@@ -184,7 +180,7 @@ void RemoteAudioSource::OnAudioChannelGone() {
}
void RemoteAudioSource::OnMessage(rtc::Message* msg) {
- RTC_DCHECK(main_thread_->IsCurrent());
+ RTC_DCHECK_RUN_ON(main_thread_);
sinks_.clear();
SetState(MediaSourceInterface::kEnded);
// Will possibly delete this RemoteAudioSource since it is reference counted
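The thread checks above migrate from RTC_DCHECK(main_thread_->IsCurrent()) to RTC_DCHECK_RUN_ON(main_thread_), which asserts at runtime and also feeds Clang's thread-safety analysis when members are annotated with RTC_GUARDED_BY. A minimal sketch of the pattern with a hypothetical class (header locations assumed: api/sequence_checker.h for RTC_DCHECK_RUN_ON, rtc_base/thread_annotations.h for RTC_GUARDED_BY):

#include "api/sequence_checker.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"

class Counter {
 public:
  explicit Counter(rtc::Thread* main_thread) : main_thread_(main_thread) {}

  void Increment() {
    // Runtime DCHECK that we are on main_thread_, plus an annotation the
    // analyzer uses: touching value_ without it becomes a compile-time warning.
    RTC_DCHECK_RUN_ON(main_thread_);
    ++value_;
  }

 private:
  rtc::Thread* const main_thread_;
  int value_ RTC_GUARDED_BY(main_thread_) = 0;
};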
diff --git a/pc/rtc_stats_collector.cc b/pc/rtc_stats_collector.cc
index 93aa6af7a8..4b38abc073 100644
--- a/pc/rtc_stats_collector.cc
+++ b/pc/rtc_stats_collector.cc
@@ -209,20 +209,20 @@ const char* IceCandidatePairStateToRTCStatsIceCandidatePairState(
}
const char* DtlsTransportStateToRTCDtlsTransportState(
- cricket::DtlsTransportState state) {
+ DtlsTransportState state) {
switch (state) {
- case cricket::DTLS_TRANSPORT_NEW:
+ case DtlsTransportState::kNew:
return RTCDtlsTransportState::kNew;
- case cricket::DTLS_TRANSPORT_CONNECTING:
+ case DtlsTransportState::kConnecting:
return RTCDtlsTransportState::kConnecting;
- case cricket::DTLS_TRANSPORT_CONNECTED:
+ case DtlsTransportState::kConnected:
return RTCDtlsTransportState::kConnected;
- case cricket::DTLS_TRANSPORT_CLOSED:
+ case DtlsTransportState::kClosed:
return RTCDtlsTransportState::kClosed;
- case cricket::DTLS_TRANSPORT_FAILED:
+ case DtlsTransportState::kFailed:
return RTCDtlsTransportState::kFailed;
default:
- RTC_NOTREACHED();
+ RTC_CHECK_NOTREACHED();
return nullptr;
}
}
@@ -265,6 +265,17 @@ const char* QualityLimitationReasonToRTCQualityLimitationReason(
RTC_CHECK_NOTREACHED();
}
+std::map<std::string, double>
+QualityLimitationDurationToRTCQualityLimitationDuration(
+ std::map<webrtc::QualityLimitationReason, int64_t> durations_ms) {
+ std::map<std::string, double> result;
+ for (const auto& elem : durations_ms) {
+ result[QualityLimitationReasonToRTCQualityLimitationReason(elem.first)] =
+ elem.second;
+ }
+ return result;
+}
+
double DoubleAudioLevelFromIntAudioLevel(int audio_level) {
RTC_DCHECK_GE(audio_level, 0);
RTC_DCHECK_LE(audio_level, 32767);
@@ -568,6 +579,9 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo(
outbound_video->quality_limitation_reason =
QualityLimitationReasonToRTCQualityLimitationReason(
video_sender_info.quality_limitation_reason);
+ outbound_video->quality_limitation_durations =
+ QualityLimitationDurationToRTCQualityLimitationDuration(
+ video_sender_info.quality_limitation_durations_ms);
outbound_video->quality_limitation_resolution_changes =
video_sender_info.quality_limitation_resolution_changes;
// TODO(https://crbug.com/webrtc/10529): When info's |content_info| is
@@ -1097,8 +1111,7 @@ RTCStatsCollector::RequestInfo::RequestInfo(
rtc::scoped_refptr<RTCStatsCollector> RTCStatsCollector::Create(
PeerConnectionInternal* pc,
int64_t cache_lifetime_us) {
- return rtc::scoped_refptr<RTCStatsCollector>(
- new rtc::RefCountedObject<RTCStatsCollector>(pc, cache_lifetime_us));
+ return rtc::make_ref_counted<RTCStatsCollector>(pc, cache_lifetime_us);
}
RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc,
@@ -1260,6 +1273,8 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl(
void RTCStatsCollector::ProducePartialResultsOnNetworkThread(
int64_t timestamp_us,
absl::optional<std::string> sctp_transport_name) {
+ TRACE_EVENT0("webrtc",
+ "RTCStatsCollector::ProducePartialResultsOnNetworkThread");
RTC_DCHECK_RUN_ON(network_thread_);
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
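The new QualityLimitationDurationToRTCQualityLimitationDuration() helper only re-keys the per-reason duration map by the standardized string names and widens the values to double, still in milliseconds. A self-contained sketch of the same conversion with a hypothetical stand-in enum:

#include <cstdint>
#include <map>
#include <string>

// Hypothetical stand-in for webrtc::QualityLimitationReason and its string
// mapping ("none"/"cpu"/"bandwidth"/"other" per the stats spec).
enum class Reason { kNone, kCpu, kBandwidth, kOther };

const char* ReasonName(Reason r) {
  switch (r) {
    case Reason::kNone: return "none";
    case Reason::kCpu: return "cpu";
    case Reason::kBandwidth: return "bandwidth";
    case Reason::kOther: return "other";
  }
  return "other";
}

std::map<std::string, double> ToRTCDurations(
    const std::map<Reason, int64_t>& durations_ms) {
  std::map<std::string, double> result;
  for (const auto& elem : durations_ms) {
    result[ReasonName(elem.first)] = static_cast<double>(elem.second);
  }
  return result;
}

// ToRTCDurations({{Reason::kBandwidth, 300}}) yields {{"bandwidth", 300.0}},
// matching what the unit test below expects.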
diff --git a/pc/rtc_stats_collector.h b/pc/rtc_stats_collector.h
index b5b8c8c900..5f13f54d26 100644
--- a/pc/rtc_stats_collector.h
+++ b/pc/rtc_stats_collector.h
@@ -53,7 +53,7 @@ class RtpReceiverInternal;
// Stats are gathered on the signaling, worker and network threads
// asynchronously. The callback is invoked on the signaling thread. Resulting
// reports are cached for |cache_lifetime_| ms.
-class RTCStatsCollector : public virtual rtc::RefCountInterface,
+class RTCStatsCollector : public rtc::RefCountInterface,
public sigslot::has_slots<> {
public:
static rtc::scoped_refptr<RTCStatsCollector> Create(
diff --git a/pc/rtc_stats_collector_unittest.cc b/pc/rtc_stats_collector_unittest.cc
index 3ccdde0056..92f73770d8 100644
--- a/pc/rtc_stats_collector_unittest.cc
+++ b/pc/rtc_stats_collector_unittest.cc
@@ -22,6 +22,7 @@
#include "absl/memory/memory.h"
#include "absl/strings/str_replace.h"
+#include "api/dtls_transport_interface.h"
#include "api/media_stream_track.h"
#include "api/rtp_parameters.h"
#include "api/stats/rtc_stats_report.h"
@@ -2198,6 +2199,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) {
video_media_info.senders[0].total_packet_send_delay_ms = 10000;
video_media_info.senders[0].quality_limitation_reason =
QualityLimitationReason::kBandwidth;
+ video_media_info.senders[0].quality_limitation_durations_ms
+ [webrtc::QualityLimitationReason::kBandwidth] = 300;
video_media_info.senders[0].quality_limitation_resolution_changes = 56u;
video_media_info.senders[0].qp_sum = absl::nullopt;
video_media_info.senders[0].content_type = VideoContentType::UNSPECIFIED;
@@ -2253,6 +2256,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) {
expected_video.total_encoded_bytes_target = 1234;
expected_video.total_packet_send_delay = 10.0;
expected_video.quality_limitation_reason = "bandwidth";
+ expected_video.quality_limitation_durations = std::map<std::string, double>{
+ std::pair<std::string, double>{"bandwidth", 300.0},
+ };
expected_video.quality_limitation_resolution_changes = 56u;
expected_video.frame_width = 200u;
expected_video.frame_height = 100u;
@@ -2322,7 +2328,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {
rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
rtp_connection_info);
- rtp_transport_channel_stats.dtls_state = cricket::DTLS_TRANSPORT_NEW;
+ rtp_transport_channel_stats.dtls_state = DtlsTransportState::kNew;
rtp_transport_channel_stats.ice_transport_stats
.selected_candidate_pair_changes = 1;
pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats});
@@ -2360,7 +2366,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {
cricket::ICE_CANDIDATE_COMPONENT_RTCP;
rtcp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
rtcp_connection_info);
- rtcp_transport_channel_stats.dtls_state = cricket::DTLS_TRANSPORT_CONNECTING;
+ rtcp_transport_channel_stats.dtls_state = DtlsTransportState::kConnecting;
pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats,
rtcp_transport_channel_stats});
@@ -2476,7 +2482,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) {
rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
rtp_connection_info);
// The state must be connected in order for crypto parameters to show up.
- rtp_transport_channel_stats.dtls_state = cricket::DTLS_TRANSPORT_CONNECTED;
+ rtp_transport_channel_stats.dtls_state = DtlsTransportState::kConnected;
rtp_transport_channel_stats.ice_transport_stats
.selected_candidate_pair_changes = 1;
rtp_transport_channel_stats.ssl_version_bytes = 0x0203;
@@ -2963,11 +2969,11 @@ TEST_P(RTCStatsCollectorTestWithParamKind,
cricket::TransportChannelStats rtp_transport_channel_stats;
rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
- rtp_transport_channel_stats.dtls_state = cricket::DTLS_TRANSPORT_NEW;
+ rtp_transport_channel_stats.dtls_state = DtlsTransportState::kNew;
cricket::TransportChannelStats rtcp_transport_channel_stats;
rtcp_transport_channel_stats.component =
cricket::ICE_CANDIDATE_COMPONENT_RTCP;
- rtcp_transport_channel_stats.dtls_state = cricket::DTLS_TRANSPORT_NEW;
+ rtcp_transport_channel_stats.dtls_state = DtlsTransportState::kNew;
pc_->SetTransportStats("TransportName", {rtp_transport_channel_stats,
rtcp_transport_channel_stats});
AddSenderInfoAndMediaChannel("TransportName", {report_block_data},
@@ -3215,11 +3221,20 @@ class FakeRTCStatsCollector : public RTCStatsCollector,
static rtc::scoped_refptr<FakeRTCStatsCollector> Create(
PeerConnectionInternal* pc,
int64_t cache_lifetime_us) {
- return rtc::scoped_refptr<FakeRTCStatsCollector>(
- new rtc::RefCountedObject<FakeRTCStatsCollector>(pc,
- cache_lifetime_us));
+ return new rtc::RefCountedObject<FakeRTCStatsCollector>(pc,
+ cache_lifetime_us);
}
+ // Since FakeRTCStatsCollector inherits twice from RefCountInterface, once via
+ // RTCStatsCollector and once via RTCStatsCollectorCallback, scoped_refptr
+ // will get confused about which AddRef()/Release() methods to call.
+ // So to remove all doubt, we declare them here again in the class that we
+ // give to scoped_refptr.
+ // The implementation of these methods, and their association with a single
+ // reference counter, is provided by RefCountedObject.
+ virtual void AddRef() const = 0;
+ virtual rtc::RefCountReleaseStatus Release() const = 0;
+
// RTCStatsCollectorCallback implementation.
void OnStatsDelivered(
const rtc::scoped_refptr<const RTCStatsReport>& report) override {
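As the comment in the hunk above explains, FakeRTCStatsCollector inherits RefCountInterface twice (once via RTCStatsCollector, once via RTCStatsCollectorCallback), so the most-derived class re-declares AddRef()/Release() to give scoped_refptr an unambiguous pair to call. A stripped-down sketch of the same trick with hypothetical interfaces:

#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counted_object.h"

// Two independent ref-counted interfaces (hypothetical).
class ProducerInterface : public rtc::RefCountInterface {
 public:
  virtual void Produce() = 0;
};

class ConsumerInterface : public rtc::RefCountInterface {
 public:
  virtual void Consume() = 0;
};

// Inherits RefCountInterface twice, once through each base.
class Both : public ProducerInterface, public ConsumerInterface {
 public:
  void Produce() override {}
  void Consume() override {}

  // Re-declared here so scoped_refptr<Both> sees a single AddRef()/Release();
  // these override the versions in both bases, and RefCountedObject<Both>
  // implements them against one shared counter.
  virtual void AddRef() const = 0;
  virtual rtc::RefCountReleaseStatus Release() const = 0;
};

void Demo() {
  rtc::scoped_refptr<Both> both(new rtc::RefCountedObject<Both>());
  both->Produce();
}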
diff --git a/pc/rtc_stats_integrationtest.cc b/pc/rtc_stats_integrationtest.cc
index 2bf026eabb..032cbe9592 100644
--- a/pc/rtc_stats_integrationtest.cc
+++ b/pc/rtc_stats_integrationtest.cc
@@ -114,9 +114,9 @@ class RTCStatsIntegrationTest : public ::testing::Test {
RTC_CHECK(network_thread_->Start());
RTC_CHECK(worker_thread_->Start());
- caller_ = new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ caller_ = rtc::make_ref_counted<PeerConnectionTestWrapper>(
"caller", network_thread_.get(), worker_thread_.get());
- callee_ = new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ callee_ = rtc::make_ref_counted<PeerConnectionTestWrapper>(
"callee", network_thread_.get(), worker_thread_.get());
}
@@ -973,6 +973,8 @@ class RTCStatsReportVerifier {
verifier.TestMemberIsNonNegative<double>(
outbound_stream.total_packet_send_delay);
verifier.TestMemberIsDefined(outbound_stream.quality_limitation_reason);
+ verifier.TestMemberIsDefined(
+ outbound_stream.quality_limitation_durations);
verifier.TestMemberIsNonNegative<uint32_t>(
outbound_stream.quality_limitation_resolution_changes);
// The integration test is not set up to test screen share; don't require
@@ -1006,6 +1008,8 @@ class RTCStatsReportVerifier {
verifier.TestMemberIsUndefined(outbound_stream.total_packet_send_delay);
verifier.TestMemberIsUndefined(outbound_stream.quality_limitation_reason);
verifier.TestMemberIsUndefined(
+ outbound_stream.quality_limitation_durations);
+ verifier.TestMemberIsUndefined(
outbound_stream.quality_limitation_resolution_changes);
verifier.TestMemberIsUndefined(outbound_stream.content_type);
// TODO(hbos): Implement for audio as well.
diff --git a/pc/rtcp_mux_filter.cc b/pc/rtcp_mux_filter.cc
index a8cf717b28..62adea2243 100644
--- a/pc/rtcp_mux_filter.cc
+++ b/pc/rtcp_mux_filter.cc
@@ -91,7 +91,8 @@ bool RtcpMuxFilter::SetAnswer(bool answer_enable, ContentSource src) {
}
if (!ExpectAnswer(src)) {
- RTC_LOG(LS_ERROR) << "Invalid state for RTCP mux answer";
+ RTC_LOG(LS_ERROR) << "Invalid state for RTCP mux answer, state is "
+ << state_ << ", source is " << src;
return false;
}
diff --git a/pc/rtp_receiver.cc b/pc/rtp_receiver.cc
index 694a8215a6..2444c9b60d 100644
--- a/pc/rtp_receiver.cc
+++ b/pc/rtp_receiver.cc
@@ -15,8 +15,8 @@
#include <utility>
#include <vector>
-#include "api/media_stream_proxy.h"
#include "pc/media_stream.h"
+#include "pc/media_stream_proxy.h"
#include "rtc_base/location.h"
namespace webrtc {
@@ -39,20 +39,4 @@ RtpReceiverInternal::CreateStreamsFromIds(std::vector<std::string> stream_ids) {
return streams;
}
-// Attempt to attach the frame decryptor to the current media channel on the
-// correct worker thread only if both the media channel exists and a ssrc has
-// been allocated to the stream.
-void RtpReceiverInternal::MaybeAttachFrameDecryptorToMediaChannel(
- const absl::optional<uint32_t>& ssrc,
- rtc::Thread* worker_thread,
- rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor,
- cricket::MediaChannel* media_channel,
- bool stopped) {
- if (media_channel && frame_decryptor && ssrc.has_value() && !stopped) {
- worker_thread->Invoke<void>(RTC_FROM_HERE, [&] {
- media_channel->SetFrameDecryptor(*ssrc, frame_decryptor);
- });
- }
-}
-
} // namespace webrtc
diff --git a/pc/rtp_receiver.h b/pc/rtp_receiver.h
index 22fa75360f..73fc5b9858 100644
--- a/pc/rtp_receiver.h
+++ b/pc/rtp_receiver.h
@@ -92,13 +92,6 @@ class RtpReceiverInternal : public RtpReceiverInterface {
static std::vector<rtc::scoped_refptr<MediaStreamInterface>>
CreateStreamsFromIds(std::vector<std::string> stream_ids);
-
- static void MaybeAttachFrameDecryptorToMediaChannel(
- const absl::optional<uint32_t>& ssrc,
- rtc::Thread* worker_thread,
- rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor,
- cricket::MediaChannel* media_channel,
- bool stopped);
};
} // namespace webrtc
diff --git a/pc/rtp_receiver_proxy.h b/pc/rtp_receiver_proxy.h
new file mode 100644
index 0000000000..d4114e0f0b
--- /dev/null
+++ b/pc/rtp_receiver_proxy.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_RECEIVER_PROXY_H_
+#define PC_RTP_RECEIVER_PROXY_H_
+
+#include <string>
+#include <vector>
+
+#include "api/rtp_receiver_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// Define proxy for RtpReceiverInterface.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+BEGIN_PROXY_MAP(RtpReceiver)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+BYPASS_PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtlsTransportInterface>, dtls_transport)
+PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids)
+PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
+ streams)
+BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
+BYPASS_PROXY_CONSTMETHOD0(std::string, id)
+PROXY_SECONDARY_CONSTMETHOD0(RtpParameters, GetParameters)
+PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*)
+PROXY_SECONDARY_METHOD1(void,
+ SetJitterBufferMinimumDelay,
+ absl::optional<double>)
+PROXY_SECONDARY_CONSTMETHOD0(std::vector<RtpSource>, GetSources)
+// TODO(bugs.webrtc.org/12772): Remove.
+PROXY_SECONDARY_METHOD1(void,
+ SetFrameDecryptor,
+ rtc::scoped_refptr<FrameDecryptorInterface>)
+// TODO(bugs.webrtc.org/12772): Remove.
+PROXY_SECONDARY_CONSTMETHOD0(rtc::scoped_refptr<FrameDecryptorInterface>,
+ GetFrameDecryptor)
+PROXY_SECONDARY_METHOD1(void,
+ SetDepacketizerToDecoderFrameTransformer,
+ rtc::scoped_refptr<FrameTransformerInterface>)
+END_PROXY_MAP(RtpReceiver)
+
+} // namespace webrtc
+
+#endif // PC_RTP_RECEIVER_PROXY_H_
diff --git a/pc/rtp_sender.cc b/pc/rtp_sender.cc
index 7026dd9db7..aa268cef45 100644
--- a/pc/rtp_sender.cc
+++ b/pc/rtp_sender.cc
@@ -424,9 +424,8 @@ rtc::scoped_refptr<AudioRtpSender> AudioRtpSender::Create(
const std::string& id,
StatsCollectorInterface* stats,
SetStreamsObserver* set_streams_observer) {
- return rtc::scoped_refptr<AudioRtpSender>(
- new rtc::RefCountedObject<AudioRtpSender>(worker_thread, id, stats,
- set_streams_observer));
+ return rtc::make_ref_counted<AudioRtpSender>(worker_thread, id, stats,
+ set_streams_observer);
}
AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread,
@@ -571,9 +570,8 @@ rtc::scoped_refptr<VideoRtpSender> VideoRtpSender::Create(
rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer) {
- return rtc::scoped_refptr<VideoRtpSender>(
- new rtc::RefCountedObject<VideoRtpSender>(worker_thread, id,
- set_streams_observer));
+ return rtc::make_ref_counted<VideoRtpSender>(worker_thread, id,
+ set_streams_observer);
}
VideoRtpSender::VideoRtpSender(rtc::Thread* worker_thread,
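
The hunks above replace explicit new rtc::RefCountedObject<T>(...) with rtc::make_ref_counted<T>(...). A self-contained sketch of the pattern follows, with std::shared_ptr and a local MakeRefCounted helper standing in for the WebRTC types; the point is that call sites no longer spell out the ref-counting wrapper.

#include <memory>
#include <string>
#include <utility>

// shared_ptr stands in for rtc::scoped_refptr so the sketch is self-contained.
template <typename T>
using RefPtr = std::shared_ptr<T>;

// Analogue of rtc::make_ref_counted: hides the ref-counting wrapper and hands
// back a smart pointer.
template <typename T, typename... Args>
RefPtr<T> MakeRefCounted(Args&&... args) {
  return std::make_shared<T>(std::forward<Args>(args)...);
}

struct SenderLike {
  SenderLike(std::string id, bool video) : id(std::move(id)), video(video) {}
  std::string id;
  bool video;
};

int main() {
  // Before: RefPtr<SenderLike>(new RefCountedObject<SenderLike>("a", false));
  // After:
  RefPtr<SenderLike> sender = MakeRefCounted<SenderLike>("a", false);
  return sender->video ? 1 : 0;
}
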
diff --git a/pc/rtp_sender_proxy.h b/pc/rtp_sender_proxy.h
new file mode 100644
index 0000000000..2f8fe2c0bf
--- /dev/null
+++ b/pc/rtp_sender_proxy.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_SENDER_PROXY_H_
+#define PC_RTP_SENDER_PROXY_H_
+
+#include <string>
+#include <vector>
+
+#include "api/rtp_sender_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// Define proxy for RtpSenderInterface.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+BEGIN_PRIMARY_PROXY_MAP(RtpSender)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtlsTransportInterface>, dtls_transport)
+PROXY_CONSTMETHOD0(uint32_t, ssrc)
+BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
+BYPASS_PROXY_CONSTMETHOD0(std::string, id)
+PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids)
+PROXY_CONSTMETHOD0(std::vector<RtpEncodingParameters>, init_send_encodings)
+PROXY_CONSTMETHOD0(RtpParameters, GetParameters)
+PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtmfSenderInterface>, GetDtmfSender)
+PROXY_METHOD1(void,
+ SetFrameEncryptor,
+ rtc::scoped_refptr<FrameEncryptorInterface>)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<FrameEncryptorInterface>,
+ GetFrameEncryptor)
+PROXY_METHOD1(void, SetStreams, const std::vector<std::string>&)
+PROXY_METHOD1(void,
+ SetEncoderToPacketizerFrameTransformer,
+ rtc::scoped_refptr<FrameTransformerInterface>)
+END_PROXY_MAP(RtpSender)
+
+} // namespace webrtc
+
+#endif // PC_RTP_SENDER_PROXY_H_
diff --git a/pc/rtp_sender_receiver_unittest.cc b/pc/rtp_sender_receiver_unittest.cc
index 625f29b34b..10dc894518 100644
--- a/pc/rtp_sender_receiver_unittest.cc
+++ b/pc/rtp_sender_receiver_unittest.cc
@@ -63,6 +63,7 @@
#include "rtc_base/thread.h"
#include "test/gmock.h"
#include "test/gtest.h"
+#include "test/run_loop.h"
using ::testing::_;
using ::testing::ContainerEq;
@@ -299,9 +300,9 @@ class RtpSenderReceiverTest
void CreateAudioRtpReceiver(
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
- audio_rtp_receiver_ =
- new AudioRtpReceiver(rtc::Thread::Current(), kAudioTrackId, streams,
- /*is_unified_plan=*/true);
+ audio_rtp_receiver_ = rtc::make_ref_counted<AudioRtpReceiver>(
+ rtc::Thread::Current(), kAudioTrackId, streams,
+ /*is_unified_plan=*/true);
audio_rtp_receiver_->SetMediaChannel(voice_media_channel_);
audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc);
audio_track_ = audio_rtp_receiver_->audio_track();
@@ -310,8 +311,8 @@ class RtpSenderReceiverTest
void CreateVideoRtpReceiver(
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
- video_rtp_receiver_ =
- new VideoRtpReceiver(rtc::Thread::Current(), kVideoTrackId, streams);
+ video_rtp_receiver_ = rtc::make_ref_counted<VideoRtpReceiver>(
+ rtc::Thread::Current(), kVideoTrackId, streams);
video_rtp_receiver_->SetMediaChannel(video_media_channel_);
video_rtp_receiver_->SetupMediaChannel(kVideoSsrc);
video_track_ = video_rtp_receiver_->video_track();
@@ -330,19 +331,25 @@ class RtpSenderReceiverTest
video_media_channel_->AddRecvStream(stream_params);
uint32_t primary_ssrc = stream_params.first_ssrc();
- video_rtp_receiver_ =
- new VideoRtpReceiver(rtc::Thread::Current(), kVideoTrackId, streams);
+ video_rtp_receiver_ = rtc::make_ref_counted<VideoRtpReceiver>(
+ rtc::Thread::Current(), kVideoTrackId, streams);
video_rtp_receiver_->SetMediaChannel(video_media_channel_);
video_rtp_receiver_->SetupMediaChannel(primary_ssrc);
video_track_ = video_rtp_receiver_->video_track();
}
void DestroyAudioRtpReceiver() {
+ if (!audio_rtp_receiver_)
+ return;
+ audio_rtp_receiver_->Stop();
audio_rtp_receiver_ = nullptr;
VerifyVoiceChannelNoOutput();
}
void DestroyVideoRtpReceiver() {
+ if (!video_rtp_receiver_)
+ return;
+ video_rtp_receiver_->Stop();
video_rtp_receiver_ = nullptr;
VerifyVideoChannelNoOutput();
}
@@ -498,6 +505,7 @@ class RtpSenderReceiverTest
}
protected:
+ test::RunLoop run_loop_;
rtc::Thread* const network_thread_;
rtc::Thread* const worker_thread_;
webrtc::RtcEventLogNull event_log_;
@@ -599,11 +607,15 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackDisable) {
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(1, volume);
+ // Handling of enable/disable is applied asynchronously.
audio_track_->set_enabled(false);
+ run_loop_.Flush();
+
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0, volume);
audio_track_->set_enabled(true);
+ run_loop_.Flush();
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(1, volume);
@@ -636,6 +648,7 @@ TEST_F(RtpSenderReceiverTest, RemoteVideoTrackState) {
EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, video_track_->state());
EXPECT_EQ(webrtc::MediaSourceInterface::kEnded,
video_track_->GetSource()->state());
+ DestroyVideoRtpReceiver();
}
// Currently no action is taken when a remote video track is disabled or
@@ -657,22 +670,27 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
double volume;
audio_track_->GetSource()->SetVolume(0.5);
+ run_loop_.Flush();
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0.5, volume);
// Disable the audio track, this should prevent setting the volume.
audio_track_->set_enabled(false);
+ RTC_DCHECK_EQ(worker_thread_, run_loop_.task_queue());
+ run_loop_.Flush();
audio_track_->GetSource()->SetVolume(0.8);
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0, volume);
// When the track is enabled, the previously set volume should take effect.
audio_track_->set_enabled(true);
+ run_loop_.Flush();
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0.8, volume);
// Try changing volume one more time.
audio_track_->GetSource()->SetVolume(0.9);
+ run_loop_.Flush();
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0.9, volume);
@@ -683,12 +701,14 @@ TEST_F(RtpSenderReceiverTest, AudioRtpReceiverDelay) {
CreateAudioRtpReceiver();
VerifyRtpReceiverDelayBehaviour(voice_media_channel_,
audio_rtp_receiver_.get(), kAudioSsrc);
+ DestroyAudioRtpReceiver();
}
TEST_F(RtpSenderReceiverTest, VideoRtpReceiverDelay) {
CreateVideoRtpReceiver();
VerifyRtpReceiverDelayBehaviour(video_media_channel_,
video_rtp_receiver_.get(), kVideoSsrc);
+ DestroyVideoRtpReceiver();
}
// Test that the media channel isn't enabled for sending if the audio sender
@@ -1582,6 +1602,7 @@ TEST_F(RtpSenderReceiverTest, AudioReceiverCanSetFrameDecryptor) {
audio_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
EXPECT_EQ(fake_frame_decryptor.get(),
audio_rtp_receiver_->GetFrameDecryptor().get());
+ DestroyAudioRtpReceiver();
}
// Validate that the default FrameEncryptor setting is nullptr.
@@ -1593,6 +1614,7 @@ TEST_F(RtpSenderReceiverTest, AudioReceiverCannotSetFrameDecryptorAfterStop) {
audio_rtp_receiver_->Stop();
audio_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
// TODO(webrtc:9926) - Validate media channel not set once fakes updated.
+ DestroyAudioRtpReceiver();
}
// Validate that the default FrameEncryptor setting is nullptr.
@@ -1627,6 +1649,7 @@ TEST_F(RtpSenderReceiverTest, VideoReceiverCanSetFrameDecryptor) {
video_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
EXPECT_EQ(fake_frame_decryptor.get(),
video_rtp_receiver_->GetFrameDecryptor().get());
+ DestroyVideoRtpReceiver();
}
// Validate that the default FrameEncryptor setting is nullptr.
@@ -1638,6 +1661,7 @@ TEST_F(RtpSenderReceiverTest, VideoReceiverCannotSetFrameDecryptorAfterStop) {
video_rtp_receiver_->Stop();
video_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
// TODO(webrtc:9926) - Validate media channel not set once fakes updated.
+ DestroyVideoRtpReceiver();
}
// Checks that calling the internal methods for get/set parameters do not
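
The test fixture above gains a test::RunLoop because volume and enable/disable changes are now applied via posted tasks, so assertions must drain the queue first. A toy illustration of that flush-before-assert pattern; ToyRunLoop is a stand-in, not the real test::RunLoop.

#include <cassert>
#include <functional>
#include <queue>
#include <utility>

// Toy single-threaded task queue standing in for test::RunLoop.
class ToyRunLoop {
 public:
  void PostTask(std::function<void()> task) { tasks_.push(std::move(task)); }
  // Drain everything queued so far, like run_loop_.Flush() in the tests.
  void Flush() {
    while (!tasks_.empty()) {
      std::function<void()> task = std::move(tasks_.front());
      tasks_.pop();
      task();
    }
  }

 private:
  std::queue<std::function<void()>> tasks_;
};

int main() {
  ToyRunLoop loop;
  double volume = 1.0;
  // set_enabled(false) now takes effect via a posted task rather than inline.
  loop.PostTask([&volume] { volume = 0.0; });
  // Without the flush, an assertion here would still observe the old value.
  loop.Flush();
  assert(volume == 0.0);
  return 0;
}
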
diff --git a/pc/rtp_transceiver.cc b/pc/rtp_transceiver.cc
index d2d05bcedd..a78b9d6be6 100644
--- a/pc/rtp_transceiver.cc
+++ b/pc/rtp_transceiver.cc
@@ -25,6 +25,7 @@
#include "pc/session_description.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"
namespace webrtc {
@@ -112,12 +113,16 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() {
} // namespace
-RtpTransceiver::RtpTransceiver(cricket::MediaType media_type)
+RtpTransceiver::RtpTransceiver(
+ cricket::MediaType media_type,
+ cricket::ChannelManager* channel_manager /* = nullptr*/)
: thread_(GetCurrentTaskQueueOrThread()),
unified_plan_(false),
- media_type_(media_type) {
+ media_type_(media_type),
+ channel_manager_(channel_manager) {
RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
media_type == cricket::MEDIA_TYPE_VIDEO);
+ RTC_DCHECK(channel_manager_);
}
RtpTransceiver::RtpTransceiver(
@@ -136,11 +141,15 @@ RtpTransceiver::RtpTransceiver(
RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO ||
media_type_ == cricket::MEDIA_TYPE_VIDEO);
RTC_DCHECK_EQ(sender->media_type(), receiver->media_type());
+ RTC_DCHECK(channel_manager_);
senders_.push_back(sender);
receivers_.push_back(receiver);
}
RtpTransceiver::~RtpTransceiver() {
+ // TODO(tommi): On Android, when running PeerConnectionClientTest (e.g.
+ // PeerConnectionClientTest#testCameraSwitch), the instance doesn't get
+ // deleted on `thread_`. See if we can fix that.
if (!stopped_) {
RTC_DCHECK_RUN_ON(thread_);
StopInternal();
@@ -148,52 +157,70 @@ RtpTransceiver::~RtpTransceiver() {
}
void RtpTransceiver::SetChannel(cricket::ChannelInterface* channel) {
+ RTC_DCHECK_RUN_ON(thread_);
// Cannot set a non-null channel on a stopped transceiver.
if (stopped_ && channel) {
return;
}
+ RTC_DCHECK(channel || channel_);
+
RTC_LOG_THREAD_BLOCK_COUNT();
+ if (channel_) {
+ signaling_thread_safety_->SetNotAlive();
+ signaling_thread_safety_ = nullptr;
+ }
+
if (channel) {
RTC_DCHECK_EQ(media_type(), channel->media_type());
+ signaling_thread_safety_ = PendingTaskSafetyFlag::Create();
}
- if (channel_) {
- channel_->SignalFirstPacketReceived().disconnect(this);
- }
+  // An alternative would be to require SetChannel to be called on the
+  // network thread. The channel object operates mostly on the network
+  // thread and must be initialized there, so constructing it with the
+  // channel already set (without thread hopping) might be more efficient
+  // than how SetChannel works today.
+  // Similarly, limiting the channel() accessor to the network thread would
+  // help keep the channel's threading requirements met and avoid
+  // synchronization for accessing the pointer or network-related state.
+ channel_manager_->network_thread()->Invoke<void>(RTC_FROM_HERE, [&]() {
+ if (channel_) {
+ channel_->SetFirstPacketReceivedCallback(nullptr);
+ }
- channel_ = channel;
+ channel_ = channel;
- if (channel_) {
- channel_->SignalFirstPacketReceived().connect(
- this, &RtpTransceiver::OnFirstPacketReceived);
- }
+ if (channel_) {
+ channel_->SetFirstPacketReceivedCallback(
+ [thread = thread_, flag = signaling_thread_safety_, this]() mutable {
+ thread->PostTask(ToQueuedTask(
+ std::move(flag), [this]() { OnFirstPacketReceived(); }));
+ });
+ }
+ });
for (const auto& sender : senders_) {
sender->internal()->SetMediaChannel(channel_ ? channel_->media_channel()
: nullptr);
}
- RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
+ RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1);
for (const auto& receiver : receivers_) {
if (!channel_) {
- // TODO(tommi): This can internally block and hop to the worker thread.
- // It's likely that SetMediaChannel also does that, so perhaps we should
- // require SetMediaChannel(nullptr) to also Stop() and skip this call.
receiver->internal()->Stop();
+ } else {
+ receiver->internal()->SetMediaChannel(channel_->media_channel());
}
-
- receiver->internal()->SetMediaChannel(channel_ ? channel_->media_channel()
- : nullptr);
}
-
- RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(receivers_.size() * 2);
}
void RtpTransceiver::AddSender(
rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender) {
+ RTC_DCHECK_RUN_ON(thread_);
RTC_DCHECK(!stopped_);
RTC_DCHECK(!unified_plan_);
RTC_DCHECK(sender);
@@ -219,6 +246,7 @@ bool RtpTransceiver::RemoveSender(RtpSenderInterface* sender) {
void RtpTransceiver::AddReceiver(
rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
receiver) {
+ RTC_DCHECK_RUN_ON(thread_);
RTC_DCHECK(!stopped_);
RTC_DCHECK(!unified_plan_);
RTC_DCHECK(receiver);
@@ -236,12 +264,8 @@ bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) {
if (it == receivers_.end()) {
return false;
}
+ // `Stop()` will clear the internally cached pointer to the media channel.
(*it)->internal()->Stop();
- // After the receiver has been removed, there's no guarantee that the
- // contained media channel isn't deleted shortly after this. To make sure that
- // the receiver doesn't spontaneously try to use it's (potentially stale)
- // media channel reference, we clear it out.
- (*it)->internal()->SetMediaChannel(nullptr);
receivers_.erase(it);
return true;
}
@@ -267,7 +291,7 @@ absl::optional<std::string> RtpTransceiver::mid() const {
return mid_;
}
-void RtpTransceiver::OnFirstPacketReceived(cricket::ChannelInterface*) {
+void RtpTransceiver::OnFirstPacketReceived() {
for (const auto& receiver : receivers_) {
receiver->internal()->NotifyFirstPacketReceived();
}
@@ -304,6 +328,7 @@ void RtpTransceiver::set_fired_direction(RtpTransceiverDirection direction) {
}
bool RtpTransceiver::stopped() const {
+ RTC_DCHECK_RUN_ON(thread_);
return stopped_;
}
@@ -476,10 +501,9 @@ RtpTransceiver::HeaderExtensionsToOffer() const {
std::vector<RtpHeaderExtensionCapability>
RtpTransceiver::HeaderExtensionsNegotiated() const {
- if (!channel_)
- return {};
+ RTC_DCHECK_RUN_ON(thread_);
std::vector<RtpHeaderExtensionCapability> result;
- for (const auto& ext : channel_->GetNegotiatedRtpHeaderExtensions()) {
+ for (const auto& ext : negotiated_header_extensions_) {
result.emplace_back(ext.uri, ext.id, RtpTransceiverDirection::kSendRecv);
}
return result;
@@ -529,6 +553,15 @@ RTCError RtpTransceiver::SetOfferedRtpHeaderExtensions(
return RTCError::OK();
}
+void RtpTransceiver::OnNegotiationUpdate(
+ SdpType sdp_type,
+ const cricket::MediaContentDescription* content) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_DCHECK(content);
+ if (sdp_type == SdpType::kAnswer)
+ negotiated_header_extensions_ = content->rtp_header_extensions();
+}
+
void RtpTransceiver::SetPeerConnectionClosed() {
is_pc_closed_ = true;
}
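
SetChannel above registers a first-packet callback that posts back to the signaling thread, guarded by a PendingTaskSafetyFlag so the task becomes a no-op if the channel is cleared first. A simplified sketch of that cancellation pattern; ToySafetyFlag and ToyTaskQueue are toy types, not the real WebRTC ones.

#include <functional>
#include <memory>
#include <queue>
#include <utility>

struct ToySafetyFlag {
  bool alive = true;
  void SetNotAlive() { alive = false; }
};

struct ToyTaskQueue {
  std::queue<std::function<void()>> tasks;
  void PostTask(std::function<void()> t) { tasks.push(std::move(t)); }
  void RunPending() {
    while (!tasks.empty()) {
      std::function<void()> t = std::move(tasks.front());
      tasks.pop();
      t();
    }
  }
};

int main() {
  ToyTaskQueue signaling_thread;
  auto flag = std::make_shared<ToySafetyFlag>();
  bool first_packet_seen = false;

  // What the channel's first-packet callback does: post to the signaling
  // thread, guarded by the shared flag.
  auto on_first_packet = [&signaling_thread, flag, &first_packet_seen]() {
    signaling_thread.PostTask([flag, &first_packet_seen]() {
      if (flag->alive)  // dropped once the owner has called SetNotAlive()
        first_packet_seen = true;
    });
  };

  on_first_packet();
  flag->SetNotAlive();  // e.g. SetChannel(nullptr) ran before the task
  signaling_thread.RunPending();
  return first_packet_seen ? 1 : 0;  // stays false: the pending task was a no-op
}
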
diff --git a/pc/rtp_transceiver.h b/pc/rtp_transceiver.h
index 8d2d72857d..6b1307b1db 100644
--- a/pc/rtp_transceiver.h
+++ b/pc/rtp_transceiver.h
@@ -21,20 +21,21 @@
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/media_types.h"
-#include "api/proxy.h"
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
-#include "api/rtp_receiver_interface.h"
-#include "api/rtp_sender_interface.h"
#include "api/rtp_transceiver_direction.h"
#include "api/rtp_transceiver_interface.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/task_queue_base.h"
#include "pc/channel_interface.h"
#include "pc/channel_manager.h"
+#include "pc/proxy.h"
#include "pc/rtp_receiver.h"
+#include "pc/rtp_receiver_proxy.h"
#include "pc/rtp_sender.h"
+#include "pc/rtp_sender_proxy.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread_annotations.h"
@@ -78,7 +79,8 @@ class RtpTransceiver final
// channel set.
// |media_type| specifies the type of RtpTransceiver (and, by transitivity,
   // the type of senders, receivers, and channel). Can be either audio or video.
- explicit RtpTransceiver(cricket::MediaType media_type);
+ RtpTransceiver(cricket::MediaType media_type,
+ cricket::ChannelManager* channel_manager);
// Construct a Unified Plan-style RtpTransceiver with the given sender and
// receiver. The media type will be derived from the media types of the sender
// and receiver. The sender and receiver should have the same media type.
@@ -231,21 +233,32 @@ class RtpTransceiver final
rtc::ArrayView<const RtpHeaderExtensionCapability>
header_extensions_to_offer) override;
+  // Called on the signaling thread when the local or remote content description
+  // is updated. Used to update the negotiated header extensions.
+  // TODO(tommi): The implementation of this method is currently very simple and
+  // only updates the negotiated header extensions. However, we're planning to
+  // move all the updates the transceiver performs on the channel into this
+  // method, which will happen once ownership of the channel object moves into
+  // the transceiver.
+ void OnNegotiationUpdate(SdpType sdp_type,
+ const cricket::MediaContentDescription* content);
+
private:
- void OnFirstPacketReceived(cricket::ChannelInterface* channel);
+ void OnFirstPacketReceived();
void StopSendingAndReceiving();
// Enforce that this object is created, used and destroyed on one thread.
- const TaskQueueBase* thread_;
+ TaskQueueBase* const thread_;
const bool unified_plan_;
const cricket::MediaType media_type_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_thread_safety_;
std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>
senders_;
std::vector<
rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>>
receivers_;
- bool stopped_ = false;
+ bool stopped_ RTC_GUARDED_BY(thread_) = false;
bool stopping_ RTC_GUARDED_BY(thread_) = false;
bool is_pc_closed_ = false;
RtpTransceiverDirection direction_ = RtpTransceiverDirection::kInactive;
@@ -261,6 +274,13 @@ class RtpTransceiver final
cricket::ChannelManager* channel_manager_ = nullptr;
std::vector<RtpCodecCapability> codec_preferences_;
std::vector<RtpHeaderExtensionCapability> header_extensions_to_offer_;
+
+  // |negotiated_header_extensions_| is read and written on the signaling
+  // thread from the SdpOfferAnswerHandler class (e.g.
+  // PushdownMediaDescription()).
+ cricket::RtpHeaderExtensions negotiated_header_extensions_
+ RTC_GUARDED_BY(thread_);
+
const std::function<void()> on_negotiation_needed_;
};
@@ -290,7 +310,7 @@ PROXY_CONSTMETHOD0(std::vector<RtpHeaderExtensionCapability>,
PROXY_METHOD1(webrtc::RTCError,
SetOfferedRtpHeaderExtensions,
rtc::ArrayView<const RtpHeaderExtensionCapability>)
-END_PROXY_MAP()
+END_PROXY_MAP(RtpTransceiver)
} // namespace webrtc
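
With the change above, HeaderExtensionsNegotiated() reads a cache that OnNegotiationUpdate() fills in when an answer is applied, instead of querying the channel. A minimal sketch of that caching shape, assuming toy types (ToyRtpExtension, TransceiverSketch) rather than the real classes.

#include <string>
#include <vector>

struct ToyRtpExtension {
  std::string uri;
  int id;
};

class TransceiverSketch {
 public:
  // Mirrors OnNegotiationUpdate(): cache the answer's extensions on the
  // signaling thread.
  void OnAnswerApplied(const std::vector<ToyRtpExtension>& extensions) {
    negotiated_ = extensions;
  }
  // Mirrors HeaderExtensionsNegotiated(): read the cache, no channel access.
  std::vector<ToyRtpExtension> Negotiated() const { return negotiated_; }

 private:
  std::vector<ToyRtpExtension> negotiated_;
};

int main() {
  TransceiverSketch t;
  t.OnAnswerApplied({{"urn:example:uri1", 1}, {"urn:example:uri2", 2}});
  return static_cast<int>(t.Negotiated().size()) == 2 ? 0 : 1;
}
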
diff --git a/pc/rtp_transceiver_unittest.cc b/pc/rtp_transceiver_unittest.cc
index c518aae0c7..0128e912e3 100644
--- a/pc/rtp_transceiver_unittest.cc
+++ b/pc/rtp_transceiver_unittest.cc
@@ -23,6 +23,7 @@
#include "test/gmock.h"
#include "test/gtest.h"
+using ::testing::_;
using ::testing::ElementsAre;
using ::testing::Optional;
using ::testing::Property;
@@ -33,13 +34,13 @@ namespace webrtc {
// Checks that a channel cannot be set on a stopped |RtpTransceiver|.
TEST(RtpTransceiverTest, CannotSetChannelOnStoppedTransceiver) {
- RtpTransceiver transceiver(cricket::MediaType::MEDIA_TYPE_AUDIO);
+ auto cm = cricket::ChannelManager::Create(
+ nullptr, true, rtc::Thread::Current(), rtc::Thread::Current());
+ RtpTransceiver transceiver(cricket::MediaType::MEDIA_TYPE_AUDIO, cm.get());
cricket::MockChannelInterface channel1;
- sigslot::signal1<cricket::ChannelInterface*> signal;
EXPECT_CALL(channel1, media_type())
.WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
- EXPECT_CALL(channel1, SignalFirstPacketReceived())
- .WillRepeatedly(ReturnRef(signal));
+ EXPECT_CALL(channel1, SetFirstPacketReceivedCallback(_));
transceiver.SetChannel(&channel1);
EXPECT_EQ(&channel1, transceiver.channel());
@@ -59,13 +60,14 @@ TEST(RtpTransceiverTest, CannotSetChannelOnStoppedTransceiver) {
// Checks that a channel can be unset on a stopped |RtpTransceiver|
TEST(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) {
- RtpTransceiver transceiver(cricket::MediaType::MEDIA_TYPE_VIDEO);
+ auto cm = cricket::ChannelManager::Create(
+ nullptr, true, rtc::Thread::Current(), rtc::Thread::Current());
+ RtpTransceiver transceiver(cricket::MediaType::MEDIA_TYPE_VIDEO, cm.get());
cricket::MockChannelInterface channel;
- sigslot::signal1<cricket::ChannelInterface*> signal;
EXPECT_CALL(channel, media_type())
.WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_VIDEO));
- EXPECT_CALL(channel, SignalFirstPacketReceived())
- .WillRepeatedly(ReturnRef(signal));
+ EXPECT_CALL(channel, SetFirstPacketReceivedCallback(_))
+ .WillRepeatedly(testing::Return());
transceiver.SetChannel(&channel);
EXPECT_EQ(&channel, transceiver.channel());
@@ -89,20 +91,41 @@ class RtpTransceiverUnifiedPlanTest : public ::testing::Test {
rtc::Thread::Current())),
transceiver_(RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpSenderInternal>()),
+ sender_),
RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpReceiverInternal>()),
+ rtc::Thread::Current(),
+ receiver_),
channel_manager_.get(),
channel_manager_->GetSupportedAudioRtpHeaderExtensions(),
/* on_negotiation_needed= */ [] {}) {}
+ static rtc::scoped_refptr<MockRtpReceiverInternal> MockReceiver() {
+ auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
+ EXPECT_CALL(*receiver.get(), media_type())
+ .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+ return receiver;
+ }
+
+ static rtc::scoped_refptr<MockRtpSenderInternal> MockSender() {
+ auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
+ EXPECT_CALL(*sender.get(), media_type())
+ .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+ return sender;
+ }
+
+ rtc::scoped_refptr<MockRtpReceiverInternal> receiver_ = MockReceiver();
+ rtc::scoped_refptr<MockRtpSenderInternal> sender_ = MockSender();
std::unique_ptr<cricket::ChannelManager> channel_manager_;
RtpTransceiver transceiver_;
};
// Basic tests for Stop()
TEST_F(RtpTransceiverUnifiedPlanTest, StopSetsDirection) {
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
EXPECT_EQ(RtpTransceiverDirection::kInactive, transceiver_.direction());
EXPECT_FALSE(transceiver_.current_direction());
transceiver_.StopStandard();
@@ -138,24 +161,50 @@ class RtpTransceiverTestForHeaderExtensions : public ::testing::Test {
RtpTransceiverDirection::kSendRecv)}),
transceiver_(RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpSenderInternal>()),
+ sender_),
RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpReceiverInternal>()),
+ rtc::Thread::Current(),
+ receiver_),
channel_manager_.get(),
extensions_,
/* on_negotiation_needed= */ [] {}) {}
+ static rtc::scoped_refptr<MockRtpReceiverInternal> MockReceiver() {
+ auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
+ EXPECT_CALL(*receiver.get(), media_type())
+ .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+ return receiver;
+ }
+
+ static rtc::scoped_refptr<MockRtpSenderInternal> MockSender() {
+ auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
+ EXPECT_CALL(*sender.get(), media_type())
+ .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+ return sender;
+ }
+
+ rtc::scoped_refptr<MockRtpReceiverInternal> receiver_ = MockReceiver();
+ rtc::scoped_refptr<MockRtpSenderInternal> sender_ = MockSender();
+
std::unique_ptr<cricket::ChannelManager> channel_manager_;
std::vector<RtpHeaderExtensionCapability> extensions_;
RtpTransceiver transceiver_;
};
TEST_F(RtpTransceiverTestForHeaderExtensions, OffersChannelManagerList) {
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
}
TEST_F(RtpTransceiverTestForHeaderExtensions, ModifiesDirection) {
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
auto modified_extensions = extensions_;
modified_extensions[0].direction = RtpTransceiverDirection::kSendOnly;
EXPECT_TRUE(
@@ -176,6 +225,10 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, ModifiesDirection) {
}
TEST_F(RtpTransceiverTestForHeaderExtensions, AcceptsStoppedExtension) {
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
auto modified_extensions = extensions_;
modified_extensions[0].direction = RtpTransceiverDirection::kStopped;
EXPECT_TRUE(
@@ -184,6 +237,10 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, AcceptsStoppedExtension) {
}
TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsUnsupportedExtension) {
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
std::vector<RtpHeaderExtensionCapability> modified_extensions(
{RtpHeaderExtensionCapability("uri3", 1,
RtpTransceiverDirection::kSendRecv)});
@@ -194,6 +251,10 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsUnsupportedExtension) {
TEST_F(RtpTransceiverTestForHeaderExtensions,
RejectsStoppedMandatoryExtensions) {
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
std::vector<RtpHeaderExtensionCapability> modified_extensions = extensions_;
// Attempting to stop the mandatory MID extension.
modified_extensions[2].direction = RtpTransceiverDirection::kStopped;
@@ -210,28 +271,47 @@ TEST_F(RtpTransceiverTestForHeaderExtensions,
TEST_F(RtpTransceiverTestForHeaderExtensions,
NoNegotiatedHdrExtsWithoutChannel) {
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
EXPECT_THAT(transceiver_.HeaderExtensionsNegotiated(), ElementsAre());
}
TEST_F(RtpTransceiverTestForHeaderExtensions,
NoNegotiatedHdrExtsWithChannelWithoutNegotiation) {
+ EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetMediaChannel(_));
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
cricket::MockChannelInterface mock_channel;
- sigslot::signal1<cricket::ChannelInterface*> signal;
- ON_CALL(mock_channel, SignalFirstPacketReceived)
- .WillByDefault(ReturnRef(signal));
+ EXPECT_CALL(mock_channel, SetFirstPacketReceivedCallback(_));
+ EXPECT_CALL(mock_channel, media_type())
+ .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+ EXPECT_CALL(mock_channel, media_channel()).WillRepeatedly(Return(nullptr));
transceiver_.SetChannel(&mock_channel);
EXPECT_THAT(transceiver_.HeaderExtensionsNegotiated(), ElementsAre());
}
TEST_F(RtpTransceiverTestForHeaderExtensions, ReturnsNegotiatedHdrExts) {
+ EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetMediaChannel(_));
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
cricket::MockChannelInterface mock_channel;
- sigslot::signal1<cricket::ChannelInterface*> signal;
- ON_CALL(mock_channel, SignalFirstPacketReceived)
- .WillByDefault(ReturnRef(signal));
+ EXPECT_CALL(mock_channel, SetFirstPacketReceivedCallback(_));
+ EXPECT_CALL(mock_channel, media_type())
+ .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+ EXPECT_CALL(mock_channel, media_channel()).WillRepeatedly(Return(nullptr));
+
cricket::RtpHeaderExtensions extensions = {webrtc::RtpExtension("uri1", 1),
webrtc::RtpExtension("uri2", 2)};
- EXPECT_CALL(mock_channel, GetNegotiatedRtpHeaderExtensions)
- .WillOnce(Return(extensions));
+ cricket::AudioContentDescription description;
+ description.set_rtp_header_extensions(extensions);
+ transceiver_.OnNegotiationUpdate(SdpType::kAnswer, &description);
+
transceiver_.SetChannel(&mock_channel);
EXPECT_THAT(transceiver_.HeaderExtensionsNegotiated(),
ElementsAre(RtpHeaderExtensionCapability(
@@ -242,23 +322,27 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, ReturnsNegotiatedHdrExts) {
TEST_F(RtpTransceiverTestForHeaderExtensions,
ReturnsNegotiatedHdrExtsSecondTime) {
- cricket::MockChannelInterface mock_channel;
- sigslot::signal1<cricket::ChannelInterface*> signal;
- ON_CALL(mock_channel, SignalFirstPacketReceived)
- .WillByDefault(ReturnRef(signal));
+ EXPECT_CALL(*receiver_.get(), StopAndEndTrack());
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
cricket::RtpHeaderExtensions extensions = {webrtc::RtpExtension("uri1", 1),
webrtc::RtpExtension("uri2", 2)};
+ cricket::AudioContentDescription description;
+ description.set_rtp_header_extensions(extensions);
+ transceiver_.OnNegotiationUpdate(SdpType::kAnswer, &description);
- EXPECT_CALL(mock_channel, GetNegotiatedRtpHeaderExtensions)
- .WillOnce(Return(extensions));
- transceiver_.SetChannel(&mock_channel);
- transceiver_.HeaderExtensionsNegotiated();
- testing::Mock::VerifyAndClearExpectations(&mock_channel);
+ EXPECT_THAT(transceiver_.HeaderExtensionsNegotiated(),
+ ElementsAre(RtpHeaderExtensionCapability(
+ "uri1", 1, RtpTransceiverDirection::kSendRecv),
+ RtpHeaderExtensionCapability(
+ "uri2", 2, RtpTransceiverDirection::kSendRecv)));
extensions = {webrtc::RtpExtension("uri3", 4),
webrtc::RtpExtension("uri5", 6)};
- EXPECT_CALL(mock_channel, GetNegotiatedRtpHeaderExtensions)
- .WillOnce(Return(extensions));
+ description.set_rtp_header_extensions(extensions);
+ transceiver_.OnNegotiationUpdate(SdpType::kAnswer, &description);
+
EXPECT_THAT(transceiver_.HeaderExtensionsNegotiated(),
ElementsAre(RtpHeaderExtensionCapability(
"uri3", 4, RtpTransceiverDirection::kSendRecv),
diff --git a/pc/rtp_transmission_manager.cc b/pc/rtp_transmission_manager.cc
index eaf29b889f..9040a69699 100644
--- a/pc/rtp_transmission_manager.cc
+++ b/pc/rtp_transmission_manager.cc
@@ -11,6 +11,7 @@
#include "pc/rtp_transmission_manager.h"
#include <algorithm>
+#include <utility>
#include "absl/types/optional.h"
#include "api/peer_connection_interface.h"
@@ -240,15 +241,17 @@ RtpTransmissionManager::CreateReceiver(cricket::MediaType media_type,
receiver;
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
- signaling_thread(),
- new AudioRtpReceiver(worker_thread(), receiver_id,
- std::vector<std::string>({}), IsUnifiedPlan()));
+ signaling_thread(), worker_thread(),
+ rtc::make_ref_counted<AudioRtpReceiver>(worker_thread(), receiver_id,
+ std::vector<std::string>({}),
+ IsUnifiedPlan()));
NoteUsageEvent(UsageEvent::AUDIO_ADDED);
} else {
RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO);
receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
- signaling_thread(), new VideoRtpReceiver(worker_thread(), receiver_id,
- std::vector<std::string>({})));
+ signaling_thread(), worker_thread(),
+ rtc::make_ref_counted<VideoRtpReceiver>(worker_thread(), receiver_id,
+ std::vector<std::string>({})));
NoteUsageEvent(UsageEvent::VIDEO_ADDED);
}
return receiver;
@@ -453,7 +456,7 @@ void RtpTransmissionManager::CreateAudioReceiver(
streams.push_back(rtc::scoped_refptr<MediaStreamInterface>(stream));
// TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use
// the constructor taking stream IDs instead.
- auto* audio_receiver = new AudioRtpReceiver(
+ auto audio_receiver = rtc::make_ref_counted<AudioRtpReceiver>(
worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan());
audio_receiver->SetMediaChannel(voice_media_channel());
if (remote_sender_info.sender_id == kDefaultAudioSenderId) {
@@ -462,7 +465,7 @@ void RtpTransmissionManager::CreateAudioReceiver(
audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc);
}
auto receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
- signaling_thread(), audio_receiver);
+ signaling_thread(), worker_thread(), std::move(audio_receiver));
GetAudioTransceiver()->internal()->AddReceiver(receiver);
Observer()->OnAddTrack(receiver, streams);
NoteUsageEvent(UsageEvent::AUDIO_ADDED);
@@ -476,7 +479,7 @@ void RtpTransmissionManager::CreateVideoReceiver(
streams.push_back(rtc::scoped_refptr<MediaStreamInterface>(stream));
// TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use
// the constructor taking stream IDs instead.
- auto* video_receiver = new VideoRtpReceiver(
+ auto video_receiver = rtc::make_ref_counted<VideoRtpReceiver>(
worker_thread(), remote_sender_info.sender_id, streams);
video_receiver->SetMediaChannel(video_media_channel());
if (remote_sender_info.sender_id == kDefaultVideoSenderId) {
@@ -485,7 +488,7 @@ void RtpTransmissionManager::CreateVideoReceiver(
video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc);
}
auto receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
- signaling_thread(), video_receiver);
+ signaling_thread(), worker_thread(), std::move(video_receiver));
GetVideoTransceiver()->internal()->AddReceiver(receiver);
Observer()->OnAddTrack(receiver, streams);
NoteUsageEvent(UsageEvent::VIDEO_ADDED);
diff --git a/pc/rtp_transport.cc b/pc/rtp_transport.cc
index 1f2082ef28..d4edb9501c 100644
--- a/pc/rtp_transport.cc
+++ b/pc/rtp_transport.cc
@@ -181,16 +181,16 @@ bool RtpTransport::UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) {
void RtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
- webrtc::RtpPacketReceived parsed_packet(&header_extension_map_);
+ webrtc::RtpPacketReceived parsed_packet(
+ &header_extension_map_, packet_time_us == -1
+ ? Timestamp::MinusInfinity()
+ : Timestamp::Micros(packet_time_us));
if (!parsed_packet.Parse(std::move(packet))) {
RTC_LOG(LS_ERROR)
<< "Failed to parse the incoming RTP packet before demuxing. Drop it.";
return;
}
- if (packet_time_us != -1) {
- parsed_packet.set_arrival_time_ms((packet_time_us + 500) / 1000);
- }
if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) {
RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: "
<< RtpDemuxer::DescribePacket(parsed_packet);
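
In the hunk above, the arrival time goes into the RtpPacketReceived constructor as a Timestamp rather than being rounded to milliseconds after parsing. A sketch of the same conversion, with ToyTimestamp standing in for webrtc::Timestamp.

#include <cstdint>
#include <limits>

// Toy stand-in for webrtc::Timestamp, just enough to show the conversion.
struct ToyTimestamp {
  int64_t us;
  static ToyTimestamp MinusInfinity() {
    return {std::numeric_limits<int64_t>::min()};
  }
  static ToyTimestamp Micros(int64_t value) { return {value}; }
};

// -1 has meant "arrival time unknown"; it now becomes MinusInfinity instead
// of being rounded to milliseconds after parsing.
ToyTimestamp ArrivalTime(int64_t packet_time_us) {
  return packet_time_us == -1 ? ToyTimestamp::MinusInfinity()
                              : ToyTimestamp::Micros(packet_time_us);
}

int main() {
  return ArrivalTime(-1).us == std::numeric_limits<int64_t>::min() ? 0 : 1;
}
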
diff --git a/pc/scenario_tests/goog_cc_test.cc b/pc/scenario_tests/goog_cc_test.cc
index 4a996b8684..d9e27e2edf 100644
--- a/pc/scenario_tests/goog_cc_test.cc
+++ b/pc/scenario_tests/goog_cc_test.cc
@@ -73,8 +73,8 @@ TEST(GoogCcPeerScenarioTest, MAYBE_NoBweChangeFromVideoUnmute) {
ASSERT_EQ(num_video_streams, 1); // Exactly 1 video stream.
auto get_bwe = [&] {
- rtc::scoped_refptr<webrtc::MockRTCStatsCollectorCallback> callback(
- new rtc::RefCountedObject<webrtc::MockRTCStatsCollectorCallback>());
+ auto callback =
+ rtc::make_ref_counted<webrtc::MockRTCStatsCollectorCallback>();
caller->pc()->GetStats(callback);
s.net()->time_controller()->Wait([&] { return callback->called(); });
auto stats =
diff --git a/pc/sctp_data_channel.cc b/pc/sctp_data_channel.cc
index f16eb8a521..359cc798c8 100644
--- a/pc/sctp_data_channel.cc
+++ b/pc/sctp_data_channel.cc
@@ -10,12 +10,13 @@
#include "pc/sctp_data_channel.h"
+#include <limits>
#include <memory>
#include <string>
#include <utility>
-#include "api/proxy.h"
#include "media/sctp/sctp_transport_internal.h"
+#include "pc/proxy.h"
#include "pc/sctp_utils.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
@@ -64,7 +65,7 @@ PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
PROXY_METHOD0(void, Close)
// TODO(bugs.webrtc.org/11547): Change to run on the network thread.
PROXY_METHOD1(bool, Send, const DataBuffer&)
-END_PROXY_MAP()
+END_PROXY_MAP(DataChannel)
} // namespace
@@ -78,17 +79,27 @@ InternalDataChannelInit::InternalDataChannelInit(const DataChannelInit& base)
// Specified in createDataChannel, WebRTC spec section 6.1 bullet 13.
id = -1;
}
- // Backwards compatibility: If base.maxRetransmits or base.maxRetransmitTime
- // have been set to -1, unset them.
- if (maxRetransmits && *maxRetransmits == -1) {
- RTC_LOG(LS_ERROR)
- << "Accepting maxRetransmits = -1 for backwards compatibility";
- maxRetransmits = absl::nullopt;
+  // Backwards compatibility: if maxRetransmits or maxRetransmitTime is
+  // negative, the feature is not enabled.
+  // Values are clamped to the 16-bit range.
+ if (maxRetransmits) {
+ if (*maxRetransmits < 0) {
+ RTC_LOG(LS_ERROR)
+ << "Accepting maxRetransmits < 0 for backwards compatibility";
+ maxRetransmits = absl::nullopt;
+ } else if (*maxRetransmits > std::numeric_limits<uint16_t>::max()) {
+ maxRetransmits = std::numeric_limits<uint16_t>::max();
+ }
}
- if (maxRetransmitTime && *maxRetransmitTime == -1) {
- RTC_LOG(LS_ERROR)
- << "Accepting maxRetransmitTime = -1 for backwards compatibility";
- maxRetransmitTime = absl::nullopt;
+
+ if (maxRetransmitTime) {
+ if (*maxRetransmitTime < 0) {
+ RTC_LOG(LS_ERROR)
+ << "Accepting maxRetransmitTime < 0 for backwards compatibility";
+ maxRetransmitTime = absl::nullopt;
+ } else if (*maxRetransmitTime > std::numeric_limits<uint16_t>::max()) {
+ maxRetransmitTime = std::numeric_limits<uint16_t>::max();
+ }
}
}
@@ -135,9 +146,8 @@ rtc::scoped_refptr<SctpDataChannel> SctpDataChannel::Create(
const InternalDataChannelInit& config,
rtc::Thread* signaling_thread,
rtc::Thread* network_thread) {
- rtc::scoped_refptr<SctpDataChannel> channel(
- new rtc::RefCountedObject<SctpDataChannel>(
- config, provider, label, signaling_thread, network_thread));
+ auto channel = rtc::make_ref_counted<SctpDataChannel>(
+ config, provider, label, signaling_thread, network_thread);
if (!channel->Init()) {
return nullptr;
}
@@ -294,13 +304,6 @@ bool SctpDataChannel::Send(const DataBuffer& buffer) {
return false;
}
- // TODO(jiayl): the spec is unclear about if the remote side should get the
- // onmessage event. We need to figure out the expected behavior and change the
- // code accordingly.
- if (buffer.size() == 0) {
- return true;
- }
-
buffered_amount_ += buffer.size();
// If the queue is non-empty, we're waiting for SignalReadyToSend,
@@ -403,7 +406,7 @@ void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
return;
}
- if (params.type == cricket::DMT_CONTROL) {
+ if (params.type == DataMessageType::kControl) {
if (handshake_state_ != kHandshakeWaitingForAck) {
// Ignore it if we are not expecting an ACK message.
RTC_LOG(LS_WARNING)
@@ -424,8 +427,8 @@ void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
return;
}
- RTC_DCHECK(params.type == cricket::DMT_BINARY ||
- params.type == cricket::DMT_TEXT);
+ RTC_DCHECK(params.type == DataMessageType::kBinary ||
+ params.type == DataMessageType::kText);
RTC_LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = "
<< params.sid;
@@ -436,7 +439,7 @@ void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
handshake_state_ = kHandshakeReady;
}
- bool binary = (params.type == cricket::DMT_BINARY);
+ bool binary = (params.type == webrtc::DataMessageType::kBinary);
auto buffer = std::make_unique<DataBuffer>(payload, binary);
if (state_ == kOpen && observer_) {
++messages_received_;
@@ -617,7 +620,7 @@ void SctpDataChannel::SendQueuedDataMessages() {
bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer,
bool queue_if_blocked) {
RTC_DCHECK_RUN_ON(signaling_thread_);
- cricket::SendDataParams send_params;
+ SendDataParams send_params;
send_params.ordered = config_.ordered;
// Send as ordered if it is still going through OPEN/ACK signaling.
@@ -628,15 +631,14 @@ bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer,
"because the OPEN_ACK message has not been received.";
}
- send_params.max_rtx_count =
- config_.maxRetransmits ? *config_.maxRetransmits : -1;
- send_params.max_rtx_ms =
- config_.maxRetransmitTime ? *config_.maxRetransmitTime : -1;
- send_params.sid = config_.id;
- send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT;
+ send_params.max_rtx_count = config_.maxRetransmits;
+ send_params.max_rtx_ms = config_.maxRetransmitTime;
+ send_params.type =
+ buffer.binary ? DataMessageType::kBinary : DataMessageType::kText;
cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
- bool success = provider_->SendData(send_params, buffer.data, &send_result);
+ bool success =
+ provider_->SendData(config_.id, send_params, buffer.data, &send_result);
if (success) {
++messages_sent_;
@@ -702,16 +704,16 @@ bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
RTC_DCHECK(!is_open_message || !config_.negotiated);
- cricket::SendDataParams send_params;
- send_params.sid = config_.id;
+ SendDataParams send_params;
// Send data as ordered before we receive any message from the remote peer to
// make sure the remote peer will not receive any data before it receives the
// OPEN message.
send_params.ordered = config_.ordered || is_open_message;
- send_params.type = cricket::DMT_CONTROL;
+ send_params.type = DataMessageType::kControl;
cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
- bool retval = provider_->SendData(send_params, buffer, &send_result);
+ bool retval =
+ provider_->SendData(config_.id, send_params, buffer, &send_result);
if (retval) {
RTC_LOG(LS_VERBOSE) << "Sent CONTROL message on channel " << config_.id;
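
The new InternalDataChannelInit logic above treats negative maxRetransmits/maxRetransmitTime as unset and clamps larger values to the 16-bit range. The same rule, sketched as a standalone helper; std::optional stands in for the absl::optional used in the diff, and ClampRetransmitSetting is a name chosen for illustration.

#include <cstdint>
#include <limits>
#include <optional>

std::optional<int> ClampRetransmitSetting(std::optional<int> value) {
  if (!value)
    return std::nullopt;
  if (*value < 0)
    return std::nullopt;  // backwards compatibility: negative means "unset"
  if (*value > std::numeric_limits<uint16_t>::max())
    return std::numeric_limits<uint16_t>::max();
  return value;
}

int main() {
  return (ClampRetransmitSetting(-1) == std::nullopt &&
          *ClampRetransmitSetting(100000) == 65535)
             ? 0
             : 1;
}
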
diff --git a/pc/sctp_data_channel.h b/pc/sctp_data_channel.h
index ddb8565ff7..1d7a3c73f4 100644
--- a/pc/sctp_data_channel.h
+++ b/pc/sctp_data_channel.h
@@ -40,7 +40,8 @@ class SctpDataChannel;
class SctpDataChannelProviderInterface {
public:
// Sends the data to the transport.
- virtual bool SendData(const cricket::SendDataParams& params,
+ virtual bool SendData(int sid,
+ const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) = 0;
// Connects to the transport signals.
diff --git a/pc/sctp_data_channel_transport.cc b/pc/sctp_data_channel_transport.cc
index 135c14424a..bb81156a23 100644
--- a/pc/sctp_data_channel_transport.cc
+++ b/pc/sctp_data_channel_transport.cc
@@ -39,18 +39,8 @@ RTCError SctpDataChannelTransport::SendData(
int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
- // Map webrtc::SendDataParams to cricket::SendDataParams.
- // TODO(mellem): See about unifying these structs.
- cricket::SendDataParams sd_params;
- sd_params.sid = channel_id;
- sd_params.type = ToCricketDataMessageType(params.type);
- sd_params.ordered = params.ordered;
- sd_params.reliable = !(params.max_rtx_count || params.max_rtx_ms);
- sd_params.max_rtx_count = params.max_rtx_count.value_or(-1);
- sd_params.max_rtx_ms = params.max_rtx_ms.value_or(-1);
-
cricket::SendDataResult result;
- sctp_transport_->SendData(sd_params, buffer, &result);
+ sctp_transport_->SendData(channel_id, params, buffer, &result);
// TODO(mellem): See about changing the interfaces to not require mapping
// SendDataResult to RTCError and back again.
@@ -95,8 +85,7 @@ void SctpDataChannelTransport::OnDataReceived(
const cricket::ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
if (sink_) {
- sink_->OnDataReceived(params.sid, ToWebrtcDataMessageType(params.type),
- buffer);
+ sink_->OnDataReceived(params.sid, params.type, buffer);
}
}
diff --git a/pc/sctp_transport.cc b/pc/sctp_transport.cc
index 14a09d77e0..7d4e4551f1 100644
--- a/pc/sctp_transport.cc
+++ b/pc/sctp_transport.cc
@@ -14,6 +14,7 @@
#include <utility>
#include "absl/types/optional.h"
+#include "api/dtls_transport_interface.h"
#include "api/sequence_checker.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
@@ -95,9 +96,9 @@ void SctpTransport::SetDtlsTransport(
if (transport) {
internal_sctp_transport_->SetDtlsTransport(transport->internal());
- transport->internal()->SubscribeDtlsState(
+ transport->internal()->SubscribeDtlsTransportState(
[this](cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state) {
+ DtlsTransportState state) {
OnDtlsStateChange(transport, state);
});
if (info_.state() == SctpTransportState::kNew) {
@@ -159,11 +160,11 @@ void SctpTransport::OnAssociationChangeCommunicationUp() {
}
void SctpTransport::OnDtlsStateChange(cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state) {
+ DtlsTransportState state) {
RTC_DCHECK_RUN_ON(owner_thread_);
RTC_CHECK(transport == dtls_transport_->internal());
- if (state == cricket::DTLS_TRANSPORT_CLOSED ||
- state == cricket::DTLS_TRANSPORT_FAILED) {
+ if (state == DtlsTransportState::kClosed ||
+ state == DtlsTransportState::kFailed) {
UpdateInformation(SctpTransportState::kClosed);
// TODO(http://bugs.webrtc.org/11090): Close all the data channels
}
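
SctpTransport now subscribes to DTLS state changes with a callback and the DtlsTransportState enum, closing itself on kClosed or kFailed. A toy sketch of that subscription shape; ToyDtlsTransport and ToyDtlsState are stand-ins for the real interfaces.

#include <functional>
#include <utility>

enum class ToyDtlsState { kNew, kConnected, kClosed, kFailed };

// Toy DTLS transport that owns a single state-change callback, mirroring
// SubscribeDtlsTransportState() above.
class ToyDtlsTransport {
 public:
  void SubscribeState(std::function<void(ToyDtlsState)> callback) {
    callback_ = std::move(callback);
  }
  void SetState(ToyDtlsState state) {
    if (callback_) callback_(state);
  }

 private:
  std::function<void(ToyDtlsState)> callback_;
};

int main() {
  ToyDtlsTransport dtls;
  bool sctp_closed = false;
  dtls.SubscribeState([&sctp_closed](ToyDtlsState state) {
    if (state == ToyDtlsState::kClosed || state == ToyDtlsState::kFailed)
      sctp_closed = true;  // mirrors UpdateInformation(SctpTransportState::kClosed)
  });
  dtls.SetState(ToyDtlsState::kClosed);
  return sctp_closed ? 0 : 1;
}
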
diff --git a/pc/sctp_transport.h b/pc/sctp_transport.h
index a8bc45b770..87fde53d97 100644
--- a/pc/sctp_transport.h
+++ b/pc/sctp_transport.h
@@ -71,7 +71,7 @@ class SctpTransport : public SctpTransportInterface,
void OnInternalClosingProcedureStartedRemotely(int sid);
void OnInternalClosingProcedureComplete(int sid);
void OnDtlsStateChange(cricket::DtlsTransportInternal* transport,
- cricket::DtlsTransportState state);
+ DtlsTransportState state);
// NOTE: |owner_thread_| is the thread that the SctpTransport object is
// constructed on. In the context of PeerConnection, it's the network thread.
diff --git a/pc/sctp_transport_unittest.cc b/pc/sctp_transport_unittest.cc
index f3070cd9a7..679b481f4c 100644
--- a/pc/sctp_transport_unittest.cc
+++ b/pc/sctp_transport_unittest.cc
@@ -14,6 +14,7 @@
#include <vector>
#include "absl/memory/memory.h"
+#include "api/dtls_transport_interface.h"
#include "p2p/base/fake_dtls_transport.h"
#include "pc/dtls_transport.h"
#include "rtc_base/gunit.h"
@@ -38,7 +39,8 @@ class FakeCricketSctpTransport : public cricket::SctpTransportInternal {
}
bool OpenStream(int sid) override { return true; }
bool ResetStream(int sid) override { return true; }
- bool SendData(const cricket::SendDataParams& params,
+ bool SendData(int sid,
+ const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result = nullptr) override {
return true;
@@ -112,8 +114,8 @@ class SctpTransportTest : public ::testing::Test {
void CreateTransport() {
auto cricket_sctp_transport =
absl::WrapUnique(new FakeCricketSctpTransport());
- transport_ = new rtc::RefCountedObject<SctpTransport>(
- std::move(cricket_sctp_transport));
+ transport_ =
+ rtc::make_ref_counted<SctpTransport>(std::move(cricket_sctp_transport));
}
void AddDtlsTransport() {
@@ -121,7 +123,7 @@ class SctpTransportTest : public ::testing::Test {
std::make_unique<FakeDtlsTransport>(
"audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
dtls_transport_ =
- new rtc::RefCountedObject<DtlsTransport>(std::move(cricket_transport));
+ rtc::make_ref_counted<DtlsTransport>(std::move(cricket_transport));
transport_->SetDtlsTransport(dtls_transport_);
}
@@ -147,7 +149,7 @@ TEST(SctpTransportSimpleTest, CreateClearDelete) {
std::unique_ptr<cricket::SctpTransportInternal> fake_cricket_sctp_transport =
absl::WrapUnique(new FakeCricketSctpTransport());
rtc::scoped_refptr<SctpTransport> sctp_transport =
- new rtc::RefCountedObject<SctpTransport>(
+ rtc::make_ref_counted<SctpTransport>(
std::move(fake_cricket_sctp_transport));
ASSERT_TRUE(sctp_transport->internal());
ASSERT_EQ(SctpTransportState::kNew, sctp_transport->Information().state());
@@ -203,7 +205,7 @@ TEST_F(SctpTransportTest, CloseWhenTransportCloses) {
ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(),
kDefaultTimeout);
static_cast<cricket::FakeDtlsTransport*>(dtls_transport_->internal())
- ->SetDtlsState(cricket::DTLS_TRANSPORT_CLOSED);
+ ->SetDtlsState(DtlsTransportState::kClosed);
ASSERT_EQ_WAIT(SctpTransportState::kClosed, observer_.State(),
kDefaultTimeout);
}
diff --git a/pc/sctp_utils.cc b/pc/sctp_utils.cc
index 9d46cc4319..f7458405ea 100644
--- a/pc/sctp_utils.cc
+++ b/pc/sctp_utils.cc
@@ -230,33 +230,4 @@ void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload) {
payload->SetData(&data, sizeof(data));
}
-cricket::DataMessageType ToCricketDataMessageType(DataMessageType type) {
- switch (type) {
- case DataMessageType::kText:
- return cricket::DMT_TEXT;
- case DataMessageType::kBinary:
- return cricket::DMT_BINARY;
- case DataMessageType::kControl:
- return cricket::DMT_CONTROL;
- default:
- return cricket::DMT_NONE;
- }
- return cricket::DMT_NONE;
-}
-
-DataMessageType ToWebrtcDataMessageType(cricket::DataMessageType type) {
- switch (type) {
- case cricket::DMT_TEXT:
- return DataMessageType::kText;
- case cricket::DMT_BINARY:
- return DataMessageType::kBinary;
- case cricket::DMT_CONTROL:
- return DataMessageType::kControl;
- case cricket::DMT_NONE:
- default:
- RTC_NOTREACHED();
- }
- return DataMessageType::kControl;
-}
-
} // namespace webrtc
diff --git a/pc/sctp_utils.h b/pc/sctp_utils.h
index 44225cfe3e..da854458f4 100644
--- a/pc/sctp_utils.h
+++ b/pc/sctp_utils.h
@@ -40,10 +40,6 @@ bool WriteDataChannelOpenMessage(const std::string& label,
void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload);
-cricket::DataMessageType ToCricketDataMessageType(DataMessageType type);
-
-DataMessageType ToWebrtcDataMessageType(cricket::DataMessageType type);
-
} // namespace webrtc
#endif // PC_SCTP_UTILS_H_
diff --git a/pc/sdp_offer_answer.cc b/pc/sdp_offer_answer.cc
index c9ee82495b..2bfb61a15f 100644
--- a/pc/sdp_offer_answer.cc
+++ b/pc/sdp_offer_answer.cc
@@ -23,7 +23,6 @@
#include "api/array_view.h"
#include "api/crypto/crypto_options.h"
#include "api/dtls_transport_interface.h"
-#include "api/media_stream_proxy.h"
#include "api/rtp_parameters.h"
#include "api/rtp_receiver_interface.h"
#include "api/rtp_sender_interface.h"
@@ -41,6 +40,7 @@
#include "pc/data_channel_utils.h"
#include "pc/dtls_transport.h"
#include "pc/media_stream.h"
+#include "pc/media_stream_proxy.h"
#include "pc/peer_connection.h"
#include "pc/peer_connection_message_handler.h"
#include "pc/rtp_media_utils.h"
@@ -60,6 +60,7 @@
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/trace_event.h"
+#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
using cricket::ContentInfo;
@@ -87,6 +88,9 @@ namespace {
typedef webrtc::PeerConnectionInterface::RTCOfferAnswerOptions
RTCOfferAnswerOptions;
+constexpr const char* kAlwaysAllowPayloadTypeDemuxingFieldTrialName =
+ "WebRTC-AlwaysAllowPayloadTypeDemuxing";
+
// Error messages
const char kInvalidSdp[] = "Invalid session description.";
const char kInvalidCandidates[] = "Description contains invalid candidates.";
@@ -164,6 +168,19 @@ void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type,
}
}
+std::map<std::string, const cricket::ContentGroup*> GetBundleGroupsByMid(
+ const SessionDescription* desc) {
+ std::vector<const cricket::ContentGroup*> bundle_groups =
+ desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ std::map<std::string, const cricket::ContentGroup*> bundle_groups_by_mid;
+ for (const cricket::ContentGroup* bundle_group : bundle_groups) {
+ for (const std::string& content_name : bundle_group->content_names()) {
+ bundle_groups_by_mid[content_name] = bundle_group;
+ }
+ }
+ return bundle_groups_by_mid;
+}
+
// Returns true if |new_desc| requests an ICE restart (i.e., new ufrag/pwd).
bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc,
const SessionDescriptionInterface* new_desc,
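
GetBundleGroupsByMid() above flattens every BUNDLE group into a mid-to-group map so the checks that follow can look up each content's group directly, which also accommodates descriptions with more than one BUNDLE group. A standalone sketch of the same lookup; ToyContentGroup is a stand-in for cricket::ContentGroup.

#include <map>
#include <string>
#include <vector>

struct ToyContentGroup {
  std::vector<std::string> content_names;
};

// Same shape as GetBundleGroupsByMid(): flatten every BUNDLE group into a
// mid -> group lookup.
std::map<std::string, const ToyContentGroup*> GroupsByMid(
    const std::vector<ToyContentGroup>& bundle_groups) {
  std::map<std::string, const ToyContentGroup*> by_mid;
  for (const ToyContentGroup& group : bundle_groups) {
    for (const std::string& mid : group.content_names) {
      by_mid[mid] = &group;
    }
  }
  return by_mid;
}

int main() {
  std::vector<ToyContentGroup> groups = {{{"audio", "video"}}, {{"data"}}};
  auto by_mid = GroupsByMid(groups);
  return by_mid["video"] == &groups[0] && by_mid["data"] == &groups[1] ? 0 : 1;
}
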
@@ -334,9 +351,10 @@ bool MediaSectionsHaveSameCount(const SessionDescription& desc1,
// needs a ufrag and pwd. Mismatches, such as replying with a DTLS fingerprint
// to SDES keys, will be caught in JsepTransport negotiation, and backstopped
// by Channel's |srtp_required| check.
-RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) {
- const cricket::ContentGroup* bundle =
- desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+RTCError VerifyCrypto(const SessionDescription* desc,
+ bool dtls_enabled,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
for (const cricket::ContentInfo& content_info : desc->contents()) {
if (content_info.rejected) {
continue;
@@ -346,8 +364,10 @@ RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) {
: webrtc::kEnumCounterKeyProtocolSdes,
content_info.media_description()->type());
const std::string& mid = content_info.name;
- if (bundle && bundle->HasContentName(mid) &&
- mid != *(bundle->FirstContentName())) {
+ auto it = bundle_groups_by_mid.find(mid);
+ const cricket::ContentGroup* bundle =
+ it != bundle_groups_by_mid.end() ? it->second : nullptr;
+ if (bundle && mid != *(bundle->FirstContentName())) {
// This isn't the first media section in the BUNDLE group, so it's not
// required to have crypto attributes, since only the crypto attributes
// from the first section actually get used.
@@ -384,16 +404,19 @@ RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) {
// Checks that each non-rejected content has ice-ufrag and ice-pwd set, unless
// it's in a BUNDLE group, in which case only the BUNDLE-tag section (first
// media section/description in the BUNDLE group) needs a ufrag and pwd.
-bool VerifyIceUfragPwdPresent(const SessionDescription* desc) {
- const cricket::ContentGroup* bundle =
- desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+bool VerifyIceUfragPwdPresent(
+ const SessionDescription* desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
for (const cricket::ContentInfo& content_info : desc->contents()) {
if (content_info.rejected) {
continue;
}
const std::string& mid = content_info.name;
- if (bundle && bundle->HasContentName(mid) &&
- mid != *(bundle->FirstContentName())) {
+ auto it = bundle_groups_by_mid.find(mid);
+ const cricket::ContentGroup* bundle =
+ it != bundle_groups_by_mid.end() ? it->second : nullptr;
+ if (bundle && mid != *(bundle->FirstContentName())) {
// This isn't the first media section in the BUNDLE group, so it's not
// required to have ufrag/password, since only the ufrag/password from
// the first section actually get used.
@@ -525,13 +548,17 @@ RTCError UpdateSimulcastLayerStatusInSender(
}
bool SimulcastIsRejected(const ContentInfo* local_content,
- const MediaContentDescription& answer_media_desc) {
+ const MediaContentDescription& answer_media_desc,
+ bool enable_encrypted_rtp_header_extensions) {
bool simulcast_offered = local_content &&
local_content->media_description() &&
local_content->media_description()->HasSimulcast();
bool simulcast_answered = answer_media_desc.HasSimulcast();
bool rids_supported = RtpExtension::FindHeaderExtensionByUri(
- answer_media_desc.rtp_header_extensions(), RtpExtension::kRidUri);
+ answer_media_desc.rtp_header_extensions(), RtpExtension::kRidUri,
+ enable_encrypted_rtp_header_extensions
+ ? RtpExtension::Filter::kPreferEncryptedExtension
+ : RtpExtension::Filter::kDiscardEncryptedExtension);
return simulcast_offered && (!simulcast_answered || !rids_supported);
}
@@ -715,6 +742,17 @@ rtc::scoped_refptr<webrtc::DtlsTransport> LookupDtlsTransportByMid(
[controller, &mid] { return controller->LookupDtlsTransportByMid(mid); });
}
+bool ContentHasHeaderExtension(const cricket::ContentInfo& content_info,
+ absl::string_view header_extension_uri) {
+ for (const RtpExtension& rtp_header_extension :
+ content_info.media_description()->rtp_header_extensions()) {
+ if (rtp_header_extension.uri == header_extension_uri) {
+ return true;
+ }
+ }
+ return false;
+}
+
} // namespace
// Used by parameterless SetLocalDescription() to create an offer or answer.
@@ -1225,7 +1263,10 @@ void SdpOfferAnswerHandler::SetLocalDescription(
}
RTCError SdpOfferAnswerHandler::ApplyLocalDescription(
- std::unique_ptr<SessionDescriptionInterface> desc) {
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyLocalDescription");
RTC_DCHECK_RUN_ON(signaling_thread());
RTC_DCHECK(desc);
@@ -1279,7 +1320,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription(
if (IsUnifiedPlan()) {
RTCError error = UpdateTransceiversAndDataChannels(
cricket::CS_LOCAL, *local_description(), old_local_description,
- remote_description());
+ remote_description(), bundle_groups_by_mid);
if (!error.ok()) {
return error;
}
@@ -1349,7 +1390,8 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription(
}
error = UpdateSessionState(type, cricket::CS_LOCAL,
- local_description()->description());
+ local_description()->description(),
+ bundle_groups_by_mid);
if (!error.ok()) {
return error;
}
@@ -1511,7 +1553,10 @@ void SdpOfferAnswerHandler::SetRemoteDescription(
}
RTCError SdpOfferAnswerHandler::ApplyRemoteDescription(
- std::unique_ptr<SessionDescriptionInterface> desc) {
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyRemoteDescription");
RTC_DCHECK_RUN_ON(signaling_thread());
RTC_DCHECK(desc);
@@ -1555,7 +1600,7 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription(
if (IsUnifiedPlan()) {
RTCError error = UpdateTransceiversAndDataChannels(
cricket::CS_REMOTE, *remote_description(), local_description(),
- old_remote_description);
+ old_remote_description, bundle_groups_by_mid);
if (!error.ok()) {
return error;
}
@@ -1577,7 +1622,8 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription(
// NOTE: Candidates allocation will be initiated only when
// SetLocalDescription is called.
error = UpdateSessionState(type, cricket::CS_REMOTE,
- remote_description()->description());
+ remote_description()->description(),
+ bundle_groups_by_mid);
if (!error.ok()) {
return error;
}
@@ -1870,7 +1916,10 @@ void SdpOfferAnswerHandler::DoSetLocalDescription(
return;
}
- RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL);
+ std::map<std::string, const cricket::ContentGroup*> bundle_groups_by_mid =
+ GetBundleGroupsByMid(desc->description());
+ RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL,
+ bundle_groups_by_mid);
if (!error.ok()) {
std::string error_message = GetSetDescriptionErrorMessage(
cricket::CS_LOCAL, desc->GetType(), error);
@@ -1884,7 +1933,7 @@ void SdpOfferAnswerHandler::DoSetLocalDescription(
// which may destroy it before returning.
const SdpType type = desc->GetType();
- error = ApplyLocalDescription(std::move(desc));
+ error = ApplyLocalDescription(std::move(desc), bundle_groups_by_mid);
// |desc| may be destroyed at this point.
if (!error.ok()) {
@@ -1993,6 +2042,7 @@ void SdpOfferAnswerHandler::DoCreateOffer(
void SdpOfferAnswerHandler::CreateAnswer(
CreateSessionDescriptionObserver* observer,
const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateAnswer");
RTC_DCHECK_RUN_ON(signaling_thread());
// Chain this operation. If asynchronous operations are pending on the chain,
// this operation will be queued to be invoked, otherwise the contents of the
@@ -2130,7 +2180,10 @@ void SdpOfferAnswerHandler::DoSetRemoteDescription(
// points.
FillInMissingRemoteMids(desc->description());
- RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE);
+ std::map<std::string, const cricket::ContentGroup*> bundle_groups_by_mid =
+ GetBundleGroupsByMid(desc->description());
+ RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE,
+ bundle_groups_by_mid);
if (!error.ok()) {
std::string error_message = GetSetDescriptionErrorMessage(
cricket::CS_REMOTE, desc->GetType(), error);
@@ -2144,7 +2197,7 @@ void SdpOfferAnswerHandler::DoSetRemoteDescription(
// ApplyRemoteDescription, which may destroy it before returning.
const SdpType type = desc->GetType();
- error = ApplyRemoteDescription(std::move(desc));
+ error = ApplyRemoteDescription(std::move(desc), bundle_groups_by_mid);
// |desc| may be destroyed at this point.
if (!error.ok()) {
@@ -2290,6 +2343,7 @@ AddIceCandidateResult SdpOfferAnswerHandler::AddIceCandidateInternal(
void SdpOfferAnswerHandler::AddIceCandidate(
std::unique_ptr<IceCandidateInterface> candidate,
std::function<void(RTCError)> callback) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate");
RTC_DCHECK_RUN_ON(signaling_thread());
// Chain this operation. If asynchronous operations are pending on the chain,
// this operation will be queued to be invoked, otherwise the contents of the
@@ -2421,6 +2475,7 @@ PeerConnectionInterface::SignalingState SdpOfferAnswerHandler::signaling_state()
void SdpOfferAnswerHandler::ChangeSignalingState(
PeerConnectionInterface::SignalingState signaling_state) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ChangeSignalingState");
RTC_DCHECK_RUN_ON(signaling_thread());
if (signaling_state_ == signaling_state) {
return;
@@ -2436,7 +2491,9 @@ void SdpOfferAnswerHandler::ChangeSignalingState(
RTCError SdpOfferAnswerHandler::UpdateSessionState(
SdpType type,
cricket::ContentSource source,
- const cricket::SessionDescription* description) {
+ const cricket::SessionDescription* description,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
RTC_DCHECK_RUN_ON(signaling_thread());
// If there's already a pending error then no state transition should happen.
@@ -2466,12 +2523,7 @@ RTCError SdpOfferAnswerHandler::UpdateSessionState(
// Update internal objects according to the session description's media
// descriptions.
- RTCError error = PushdownMediaDescription(type, source);
- if (!error.ok()) {
- return error;
- }
-
- return RTCError::OK();
+ return PushdownMediaDescription(type, source, bundle_groups_by_mid);
}
bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent(
@@ -2628,6 +2680,7 @@ void SdpOfferAnswerHandler::OnVideoTrackRemoved(VideoTrackInterface* track,
}
RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::Rollback");
auto state = signaling_state();
if (state != PeerConnectionInterface::kHaveLocalOffer &&
state != PeerConnectionInterface::kHaveRemoteOffer) {
@@ -2969,7 +3022,9 @@ void SdpOfferAnswerHandler::GenerateNegotiationNeededEvent() {
RTCError SdpOfferAnswerHandler::ValidateSessionDescription(
const SessionDescriptionInterface* sdesc,
- cricket::ContentSource source) {
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
if (session_error() != SessionError::kNone) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg());
}
@@ -2995,20 +3050,21 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription(
std::string crypto_error;
if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED ||
pc_->dtls_enabled()) {
- RTCError crypto_error =
- VerifyCrypto(sdesc->description(), pc_->dtls_enabled());
+ RTCError crypto_error = VerifyCrypto(
+ sdesc->description(), pc_->dtls_enabled(), bundle_groups_by_mid);
if (!crypto_error.ok()) {
return crypto_error;
}
}
// Verify ice-ufrag and ice-pwd.
- if (!VerifyIceUfragPwdPresent(sdesc->description())) {
+ if (!VerifyIceUfragPwdPresent(sdesc->description(), bundle_groups_by_mid)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
kSdpWithoutIceUfragPwd);
}
- if (!pc_->ValidateBundleSettings(sdesc->description())) {
+ if (!pc_->ValidateBundleSettings(sdesc->description(),
+ bundle_groups_by_mid)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
kBundleWithoutRtcpMux);
}
@@ -3081,18 +3137,25 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels(
cricket::ContentSource source,
const SessionDescriptionInterface& new_session,
const SessionDescriptionInterface* old_local_description,
- const SessionDescriptionInterface* old_remote_description) {
+ const SessionDescriptionInterface* old_remote_description,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ TRACE_EVENT0("webrtc",
+ "SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels");
RTC_DCHECK_RUN_ON(signaling_thread());
RTC_DCHECK(IsUnifiedPlan());
- const cricket::ContentGroup* bundle_group = nullptr;
if (new_session.GetType() == SdpType::kOffer) {
- auto bundle_group_or_error =
- GetEarlyBundleGroup(*new_session.description());
- if (!bundle_group_or_error.ok()) {
- return bundle_group_or_error.MoveError();
+ // If the BUNDLE policy is max-bundle, then we know for sure that all
+ // transports will be bundled from the start. Return an error if max-bundle
+ // is specified but the session description does not have a BUNDLE group.
+ if (pc_->configuration()->bundle_policy ==
+ PeerConnectionInterface::kBundlePolicyMaxBundle &&
+ bundle_groups_by_mid.empty()) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "max-bundle configured but session description "
+ "has no BUNDLE group");
}
- bundle_group = bundle_group_or_error.MoveValue();
}
const ContentInfos& new_contents = new_session.description()->contents();
@@ -3100,6 +3163,9 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels(
const cricket::ContentInfo& new_content = new_contents[i];
cricket::MediaType media_type = new_content.media_description()->type();
mid_generator_.AddKnownId(new_content.name);
+ auto it = bundle_groups_by_mid.find(new_content.name);
+ const cricket::ContentGroup* bundle_group =
+ it != bundle_groups_by_mid.end() ? it->second : nullptr;
if (media_type == cricket::MEDIA_TYPE_AUDIO ||
media_type == cricket::MEDIA_TYPE_VIDEO) {
const cricket::ContentInfo* old_local_content = nullptr;
@@ -3162,6 +3228,7 @@ SdpOfferAnswerHandler::AssociateTransceiver(
const ContentInfo& content,
const ContentInfo* old_local_content,
const ContentInfo* old_remote_content) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AssociateTransceiver");
RTC_DCHECK(IsUnifiedPlan());
#if RTC_DCHECK_IS_ON
// If this is an offer then the m= section might be recycled. If the m=
@@ -3238,7 +3305,9 @@ SdpOfferAnswerHandler::AssociateTransceiver(
// Check if the offer indicated simulcast but the answer rejected it.
// This can happen when simulcast is not supported on the remote party.
- if (SimulcastIsRejected(old_local_content, *media_desc)) {
+ if (SimulcastIsRejected(old_local_content, *media_desc,
+ pc_->GetCryptoOptions()
+ .srtp.enable_encrypted_rtp_header_extensions)) {
RTC_HISTOGRAM_BOOLEAN(kSimulcastDisabled, true);
RTCError error =
DisableSimulcastInSender(transceiver->internal()->sender_internal());
@@ -3288,27 +3357,12 @@ SdpOfferAnswerHandler::AssociateTransceiver(
return std::move(transceiver);
}
-RTCErrorOr<const cricket::ContentGroup*>
-SdpOfferAnswerHandler::GetEarlyBundleGroup(
- const SessionDescription& desc) const {
- const cricket::ContentGroup* bundle_group = nullptr;
- if (pc_->configuration()->bundle_policy ==
- PeerConnectionInterface::kBundlePolicyMaxBundle) {
- bundle_group = desc.GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
- if (!bundle_group) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
- "max-bundle configured but session description "
- "has no BUNDLE group");
- }
- }
- return bundle_group;
-}
-
RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel(
rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
transceiver,
const cricket::ContentInfo& content,
const cricket::ContentGroup* bundle_group) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateTransceiverChannel");
RTC_DCHECK(IsUnifiedPlan());
RTC_DCHECK(transceiver);
cricket::ChannelInterface* channel = transceiver->internal()->channel();
@@ -3993,6 +4047,7 @@ void SdpOfferAnswerHandler::RemoveSenders(cricket::MediaType media_type) {
void SdpOfferAnswerHandler::UpdateLocalSenders(
const std::vector<cricket::StreamParams>& streams,
cricket::MediaType media_type) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateLocalSenders");
RTC_DCHECK_RUN_ON(signaling_thread());
std::vector<RtpSenderInfo>* current_senders =
rtp_manager()->GetLocalSenderInfos(media_type);
@@ -4035,6 +4090,7 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList(
bool default_sender_needed,
cricket::MediaType media_type,
StreamCollection* new_streams) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateRemoteSendersList");
RTC_DCHECK_RUN_ON(signaling_thread());
RTC_DCHECK(!IsUnifiedPlan());
@@ -4134,10 +4190,11 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList(
}
void SdpOfferAnswerHandler::EnableSending() {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::EnableSending");
RTC_DCHECK_RUN_ON(signaling_thread());
for (const auto& transceiver : transceivers()->ListInternal()) {
cricket::ChannelInterface* channel = transceiver->channel();
- if (channel && !channel->enabled()) {
+ if (channel) {
channel->Enable(true);
}
}
@@ -4145,14 +4202,17 @@ void SdpOfferAnswerHandler::EnableSending() {
RTCError SdpOfferAnswerHandler::PushdownMediaDescription(
SdpType type,
- cricket::ContentSource source) {
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::PushdownMediaDescription");
const SessionDescriptionInterface* sdesc =
(source == cricket::CS_LOCAL ? local_description()
: remote_description());
RTC_DCHECK_RUN_ON(signaling_thread());
RTC_DCHECK(sdesc);
- if (!UpdatePayloadTypeDemuxingState(source)) {
+ if (!UpdatePayloadTypeDemuxingState(source, bundle_groups_by_mid)) {
// Note that this is never expected to fail, since RtpDemuxer doesn't return
// an error when changing payload type demux criteria, which is all this
// does.
@@ -4161,7 +4221,11 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription(
}
// Push down the new SDP media section for each audio/video transceiver.
- for (const auto& transceiver : transceivers()->ListInternal()) {
+ auto rtp_transceivers = transceivers()->ListInternal();
+ std::vector<
+ std::pair<cricket::ChannelInterface*, const MediaContentDescription*>>
+ channels;
+ for (const auto& transceiver : rtp_transceivers) {
const ContentInfo* content_info =
FindMediaSectionForTransceiver(transceiver, sdesc);
cricket::ChannelInterface* channel = transceiver->channel();
@@ -4173,12 +4237,35 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription(
if (!content_desc) {
continue;
}
- std::string error;
- bool success = (source == cricket::CS_LOCAL)
- ? channel->SetLocalContent(content_desc, type, &error)
- : channel->SetRemoteContent(content_desc, type, &error);
- if (!success) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error);
+
+ transceiver->OnNegotiationUpdate(type, content_desc);
+ channels.push_back(std::make_pair(channel, content_desc));
+ }
+
+  // Doing these invokes one channel at a time helps avoid audio impairment
+  // during re-negotiations. One of the causes is that downstream decoder
+  // creation is currently synchronous, and a decoder is created for each codec
+  // listed in the SDP.
+ //
+ // TODO(bugs.webrtc.org/12840): consider merging the invokes again after
+ // these projects have shipped:
+ // - bugs.webrtc.org/12462
+ // - crbug.com/1157227
+ // - crbug.com/1187289
+ for (const auto& entry : channels) {
+ RTCError error =
+ pc_->worker_thread()->Invoke<RTCError>(RTC_FROM_HERE, [&]() {
+ std::string error;
+ bool success =
+ (source == cricket::CS_LOCAL)
+ ? entry.first->SetLocalContent(entry.second, type, &error)
+ : entry.first->SetRemoteContent(entry.second, type, &error);
+ if (!success) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error);
+ }
+ return RTCError::OK();
+ });
+ if (!error.ok()) {
+ return error;
}
}
@@ -4212,6 +4299,7 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription(
RTCError SdpOfferAnswerHandler::PushdownTransportDescription(
cricket::ContentSource source,
SdpType type) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::PushdownTransportDescription");
RTC_DCHECK_RUN_ON(signaling_thread());
if (source == cricket::CS_LOCAL) {
@@ -4228,6 +4316,7 @@ RTCError SdpOfferAnswerHandler::PushdownTransportDescription(
}
void SdpOfferAnswerHandler::RemoveStoppedTransceivers() {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::RemoveStoppedTransceivers");
RTC_DCHECK_RUN_ON(signaling_thread());
// 3.2.10.1: For each transceiver in the connection's set of transceivers
// run the following steps:
@@ -4447,6 +4536,7 @@ RTCErrorOr<const cricket::ContentInfo*> SdpOfferAnswerHandler::FindContentInfo(
}
RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateChannels");
// Creating the media channels. Transports should already have been created
// at this point.
RTC_DCHECK_RUN_ON(signaling_thread());
@@ -4487,6 +4577,7 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) {
// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver.
cricket::VoiceChannel* SdpOfferAnswerHandler::CreateVoiceChannel(
const std::string& mid) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateVoiceChannel");
RTC_DCHECK_RUN_ON(signaling_thread());
if (!channel_manager()->media_engine())
return nullptr;
@@ -4505,6 +4596,7 @@ cricket::VoiceChannel* SdpOfferAnswerHandler::CreateVoiceChannel(
// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver.
cricket::VideoChannel* SdpOfferAnswerHandler::CreateVideoChannel(
const std::string& mid) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateVideoChannel");
RTC_DCHECK_RUN_ON(signaling_thread());
if (!channel_manager()->media_engine())
return nullptr;
@@ -4542,6 +4634,7 @@ bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) {
void SdpOfferAnswerHandler::DestroyTransceiverChannel(
rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
transceiver) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DestroyTransceiverChannel");
RTC_DCHECK(transceiver);
RTC_LOG_THREAD_BLOCK_COUNT();
@@ -4556,14 +4649,18 @@ void SdpOfferAnswerHandler::DestroyTransceiverChannel(
RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
if (channel) {
// TODO(tommi): VideoRtpReceiver::SetMediaChannel blocks and jumps to the
- // worker thread. When being set to nullptrpus, there are additional
+ // worker thread. When being set to nullptr, there are additional
// blocking calls to e.g. ClearRecordableEncodedFrameCallback which triggers
// another blocking call or Stop() for video channels.
+ // The channel object also needs to be de-initialized on the network thread
+ // so if ownership of the channel object lies with the transceiver, we could
+ // un-set the channel pointer and uninitialize/destruct the channel object
+ // at the same time, rather than in separate steps.
transceiver->internal()->SetChannel(nullptr);
- RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2);
// TODO(tommi): All channel objects end up getting deleted on the
- // worker thread. Can DestroyTransceiverChannel be purely posted to the
- // worker?
+    // worker thread (ideally this should happen on the network thread, but the
+    // MediaChannel objects are tied to the worker). Can the teardown be done
+    // asynchronously across the threads rather than blocking?
DestroyChannelInterface(channel);
}
}
@@ -4586,6 +4683,7 @@ void SdpOfferAnswerHandler::DestroyDataChannelTransport() {
void SdpOfferAnswerHandler::DestroyChannelInterface(
cricket::ChannelInterface* channel) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DestroyChannelInterface");
RTC_DCHECK_RUN_ON(signaling_thread());
RTC_DCHECK(channel_manager()->media_engine());
RTC_DCHECK(channel);
@@ -4737,7 +4835,11 @@ SdpOfferAnswerHandler::GetMediaDescriptionOptionsForRejectedData(
}
bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState(
- cricket::ContentSource source) {
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ TRACE_EVENT0("webrtc",
+ "SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState");
RTC_DCHECK_RUN_ON(signaling_thread());
// We may need to delete any created default streams and disable creation of
// new ones on the basis of payload type. This is needed to avoid SSRC
@@ -4750,19 +4852,27 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState(
const SessionDescriptionInterface* sdesc =
(source == cricket::CS_LOCAL ? local_description()
: remote_description());
- const cricket::ContentGroup* bundle_group =
- sdesc->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
- std::set<int> audio_payload_types;
- std::set<int> video_payload_types;
- bool pt_demuxing_enabled_audio = true;
- bool pt_demuxing_enabled_video = true;
+ struct PayloadTypes {
+ std::set<int> audio_payload_types;
+ std::set<int> video_payload_types;
+ bool pt_demuxing_possible_audio = true;
+ bool pt_demuxing_possible_video = true;
+ };
+ std::map<const cricket::ContentGroup*, PayloadTypes> payload_types_by_bundle;
+ // If the MID is missing from *any* receiving m= section, this is set to true.
+ bool mid_header_extension_missing_audio = false;
+ bool mid_header_extension_missing_video = false;
for (auto& content_info : sdesc->description()->contents()) {
+ auto it = bundle_groups_by_mid.find(content_info.name);
+ const cricket::ContentGroup* bundle_group =
+ it != bundle_groups_by_mid.end() ? it->second : nullptr;
// If this m= section isn't bundled, it's safe to demux by payload type
// since other m= sections using the same payload type will also be using
// different transports.
- if (!bundle_group || !bundle_group->HasContentName(content_info.name)) {
+ if (!bundle_group) {
continue;
}
+ PayloadTypes* payload_types = &payload_types_by_bundle[bundle_group];
if (content_info.rejected ||
(source == cricket::ContentSource::CS_LOCAL &&
!RtpTransceiverDirectionHasRecv(
@@ -4775,28 +4885,36 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState(
}
switch (content_info.media_description()->type()) {
case cricket::MediaType::MEDIA_TYPE_AUDIO: {
+ if (!mid_header_extension_missing_audio) {
+ mid_header_extension_missing_audio =
+ !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri);
+ }
const cricket::AudioContentDescription* audio_desc =
content_info.media_description()->as_audio();
for (const cricket::AudioCodec& audio : audio_desc->codecs()) {
- if (audio_payload_types.count(audio.id)) {
+ if (payload_types->audio_payload_types.count(audio.id)) {
// Two m= sections are using the same payload type, thus demuxing
// by payload type is not possible.
- pt_demuxing_enabled_audio = false;
+ payload_types->pt_demuxing_possible_audio = false;
}
- audio_payload_types.insert(audio.id);
+ payload_types->audio_payload_types.insert(audio.id);
}
break;
}
case cricket::MediaType::MEDIA_TYPE_VIDEO: {
+ if (!mid_header_extension_missing_video) {
+ mid_header_extension_missing_video =
+ !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri);
+ }
const cricket::VideoContentDescription* video_desc =
content_info.media_description()->as_video();
for (const cricket::VideoCodec& video : video_desc->codecs()) {
- if (video_payload_types.count(video.id)) {
+ if (payload_types->video_payload_types.count(video.id)) {
// Two m= sections are using the same payload type, thus demuxing
// by payload type is not possible.
- pt_demuxing_enabled_video = false;
+ payload_types->pt_demuxing_possible_video = false;
}
- video_payload_types.insert(video.id);
+ payload_types->video_payload_types.insert(video.id);
}
break;
}
@@ -4828,25 +4946,69 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState(
if (channels_to_update.empty()) {
return true;
}
+
+ // In Unified Plan, payload type demuxing is useful for legacy endpoints that
+  // don't support the MID header extension, but it can also cause incorrect
+ // forwarding of packets when going from one m= section to multiple m=
+ // sections in the same BUNDLE. This only happens if media arrives prior to
+ // negotiation, but this can cause missing video and unsignalled ssrc bugs
+ // severe enough to warrant disabling PT demuxing in such cases. Therefore, if
+ // a MID header extension is present on all m= sections for a given kind
+ // (audio/video) then we use that as an OK to disable payload type demuxing in
+  // BUNDLEs of that kind. However, if PT demuxing was ever turned on (e.g. MID
+  // was ever removed on ANY m= section of that kind), we continue to allow PT
+  // demuxing in order to avoid disabling it in follow-up O/A exchanges and to
+  // keep allowing early media by PT.
+ bool bundled_pt_demux_allowed_audio = !IsUnifiedPlan() ||
+ mid_header_extension_missing_audio ||
+ pt_demuxing_has_been_used_audio_;
+ bool bundled_pt_demux_allowed_video = !IsUnifiedPlan() ||
+ mid_header_extension_missing_video ||
+ pt_demuxing_has_been_used_video_;
+ // Kill switch for the above change.
+ if (field_trial::IsEnabled(kAlwaysAllowPayloadTypeDemuxingFieldTrialName)) {
+ // TODO(https://crbug.com/webrtc/12814): If disabling PT-based demux does
+ // not trigger regressions, remove this kill switch.
+ bundled_pt_demux_allowed_audio = true;
+ bundled_pt_demux_allowed_video = true;
+ }
+
return pc_->worker_thread()->Invoke<bool>(
- RTC_FROM_HERE, [&channels_to_update, bundle_group,
- pt_demuxing_enabled_audio, pt_demuxing_enabled_video]() {
+ RTC_FROM_HERE,
+ [&channels_to_update, &bundle_groups_by_mid, &payload_types_by_bundle,
+ bundled_pt_demux_allowed_audio, bundled_pt_demux_allowed_video,
+ pt_demuxing_has_been_used_audio = &pt_demuxing_has_been_used_audio_,
+ pt_demuxing_has_been_used_video = &pt_demuxing_has_been_used_video_]() {
for (const auto& it : channels_to_update) {
RtpTransceiverDirection local_direction = it.first;
cricket::ChannelInterface* channel = it.second;
cricket::MediaType media_type = channel->media_type();
- bool in_bundle_group = (bundle_group && bundle_group->HasContentName(
- channel->content_name()));
+ auto bundle_it = bundle_groups_by_mid.find(channel->content_name());
+ const cricket::ContentGroup* bundle_group =
+ bundle_it != bundle_groups_by_mid.end() ? bundle_it->second
+ : nullptr;
if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) {
- if (!channel->SetPayloadTypeDemuxingEnabled(
- (!in_bundle_group || pt_demuxing_enabled_audio) &&
- RtpTransceiverDirectionHasRecv(local_direction))) {
+ bool pt_demux_enabled =
+ RtpTransceiverDirectionHasRecv(local_direction) &&
+ (!bundle_group || (bundled_pt_demux_allowed_audio &&
+ payload_types_by_bundle[bundle_group]
+ .pt_demuxing_possible_audio));
+ if (pt_demux_enabled) {
+ *pt_demuxing_has_been_used_audio = true;
+ }
+ if (!channel->SetPayloadTypeDemuxingEnabled(pt_demux_enabled)) {
return false;
}
} else if (media_type == cricket::MediaType::MEDIA_TYPE_VIDEO) {
- if (!channel->SetPayloadTypeDemuxingEnabled(
- (!in_bundle_group || pt_demuxing_enabled_video) &&
- RtpTransceiverDirectionHasRecv(local_direction))) {
+ bool pt_demux_enabled =
+ RtpTransceiverDirectionHasRecv(local_direction) &&
+ (!bundle_group || (bundled_pt_demux_allowed_video &&
+ payload_types_by_bundle[bundle_group]
+ .pt_demuxing_possible_video));
+ if (pt_demux_enabled) {
+ *pt_demuxing_has_been_used_video = true;
+ }
+ if (!channel->SetPayloadTypeDemuxingEnabled(pt_demux_enabled)) {
return false;
}
}
diff --git a/pc/sdp_offer_answer.h b/pc/sdp_offer_answer.h
index 0608c38ce5..e5b39b83e7 100644
--- a/pc/sdp_offer_answer.h
+++ b/pc/sdp_offer_answer.h
@@ -227,9 +227,13 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
// Synchronous implementations of SetLocalDescription/SetRemoteDescription
// that return an RTCError instead of invoking a callback.
RTCError ApplyLocalDescription(
- std::unique_ptr<SessionDescriptionInterface> desc);
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
RTCError ApplyRemoteDescription(
- std::unique_ptr<SessionDescriptionInterface> desc);
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
// Implementation of the offer/answer exchange operations. These are chained
// onto the |operations_chain_| when the public CreateOffer(), CreateAnswer(),
@@ -251,9 +255,12 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
void ChangeSignalingState(
PeerConnectionInterface::SignalingState signaling_state);
- RTCError UpdateSessionState(SdpType type,
- cricket::ContentSource source,
- const cricket::SessionDescription* description);
+ RTCError UpdateSessionState(
+ SdpType type,
+ cricket::ContentSource source,
+ const cricket::SessionDescription* description,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread());
@@ -286,9 +293,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
bool CheckIfNegotiationIsNeeded();
void GenerateNegotiationNeededEvent();
// Helper method which verifies SDP.
- RTCError ValidateSessionDescription(const SessionDescriptionInterface* sdesc,
- cricket::ContentSource source)
- RTC_RUN_ON(signaling_thread());
+ RTCError ValidateSessionDescription(
+ const SessionDescriptionInterface* sdesc,
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) RTC_RUN_ON(signaling_thread());
// Updates the local RtpTransceivers according to the JSEP rules. Called as
// part of setting the local/remote description.
@@ -296,7 +305,9 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
cricket::ContentSource source,
const SessionDescriptionInterface& new_session,
const SessionDescriptionInterface* old_local_description,
- const SessionDescriptionInterface* old_remote_description);
+ const SessionDescriptionInterface* old_remote_description,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
// Associate the given transceiver according to the JSEP rules.
RTCErrorOr<
@@ -317,15 +328,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
const RtpTransceiver* transceiver,
const SessionDescriptionInterface* sdesc) const;
- // If the BUNDLE policy is max-bundle, then we know for sure that all
- // transports will be bundled from the start. This method returns the BUNDLE
- // group if that's the case, or null if BUNDLE will be negotiated later. An
- // error is returned if max-bundle is specified but the session description
- // does not have a BUNDLE group.
- RTCErrorOr<const cricket::ContentGroup*> GetEarlyBundleGroup(
- const cricket::SessionDescription& desc) const
- RTC_RUN_ON(signaling_thread());
-
// Either creates or destroys the transceiver's BaseChannel according to the
// given media section.
RTCError UpdateTransceiverChannel(
@@ -452,13 +454,15 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
StreamCollection* new_streams);
// Enables media channels to allow sending of media.
- // This enables media to flow on all configured audio/video channels and the
- // RtpDataChannel.
+ // This enables media to flow on all configured audio/video channels.
void EnableSending();
// Push the media parts of the local or remote session description
// down to all of the channels.
- RTCError PushdownMediaDescription(SdpType type,
- cricket::ContentSource source);
+ RTCError PushdownMediaDescription(
+ SdpType type,
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
RTCError PushdownTransportDescription(cricket::ContentSource source,
SdpType type);
@@ -545,7 +549,10 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
  // Based on the number of transceivers per media type, enable or disable
  // payload type based demuxing in the affected channels.
- bool UpdatePayloadTypeDemuxingState(cricket::ContentSource source);
+ bool UpdatePayloadTypeDemuxingState(
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
// ==================================================================
// Access to pc_ variables
@@ -622,6 +629,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
uint32_t negotiation_needed_event_id_ = 0;
bool update_negotiation_needed_on_empty_chain_
RTC_GUARDED_BY(signaling_thread()) = false;
+  // If PT demuxing is successfully negotiated once, we will allow PT
+ // demuxing for the rest of the session so that PT-based apps default to PT
+ // demuxing in follow-up O/A exchanges.
+ bool pt_demuxing_has_been_used_audio_ = false;
+ bool pt_demuxing_has_been_used_video_ = false;
// In Unified Plan, if we encounter remote SDP that does not contain an a=msid
// line we create and use a stream with a random ID for our receivers. This is
diff --git a/pc/session_description.cc b/pc/session_description.cc
index 3cb2b6d231..7b878cbf7b 100644
--- a/pc/session_description.cc
+++ b/pc/session_description.cc
@@ -85,6 +85,18 @@ bool ContentGroup::RemoveContentName(const std::string& content_name) {
return true;
}
+std::string ContentGroup::ToString() const {
+ rtc::StringBuilder acc;
+ acc << semantics_ << "(";
+ if (!content_names_.empty()) {
+ for (const auto& name : content_names_) {
+ acc << name << " ";
+ }
+ }
+ acc << ")";
+ return acc.Release();
+}
+
SessionDescription::SessionDescription() = default;
SessionDescription::SessionDescription(const SessionDescription&) = default;
@@ -259,6 +271,17 @@ const ContentGroup* SessionDescription::GetGroupByName(
return NULL;
}
+std::vector<const ContentGroup*> SessionDescription::GetGroupsByName(
+ const std::string& name) const {
+ std::vector<const ContentGroup*> content_groups;
+ for (const ContentGroup& content_group : content_groups_) {
+ if (content_group.semantics() == name) {
+ content_groups.push_back(&content_group);
+ }
+ }
+ return content_groups;
+}
+
ContentInfo::~ContentInfo() {
}
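
GetGroupsByName() is what makes the bundle_groups_by_mid maps threaded through sdp_offer_answer.cc possible: a description can now carry several BUNDLE groups, so a per-MID index replaces the old single GetGroupByName() lookup. The GetBundleGroupsByMid() helper itself is not shown in this hunk; the following is only an assumed sketch of how such a map could be built, and it assumes ContentGroup exposes its content_names() list:

    // Sketch (assumed): map every BUNDLEd MID to its group so later checks are
    // a single map lookup rather than a scan over all groups.
    std::map<std::string, const cricket::ContentGroup*> BuildBundleGroupsByMid(
        const cricket::SessionDescription* desc) {
      std::map<std::string, const cricket::ContentGroup*> bundle_groups_by_mid;
      for (const cricket::ContentGroup* bundle_group :
           desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)) {
        for (const std::string& content_name : bundle_group->content_names()) {
          bundle_groups_by_mid[content_name] = bundle_group;
        }
      }
      return bundle_groups_by_mid;
    }

Callers such as DoSetLocalDescription() and DoSetRemoteDescription() build this map once per description and pass it down, so every helper that needs a section's BUNDLE group does a map find instead of re-walking the description.
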
diff --git a/pc/session_description.h b/pc/session_description.h
index 0d462eb2b2..a20caf624a 100644
--- a/pc/session_description.h
+++ b/pc/session_description.h
@@ -44,7 +44,6 @@ namespace cricket {
typedef std::vector<AudioCodec> AudioCodecs;
typedef std::vector<VideoCodec> VideoCodecs;
-typedef std::vector<RtpDataCodec> RtpDataCodecs;
typedef std::vector<CryptoParams> CryptoParamsVec;
typedef std::vector<webrtc::RtpExtension> RtpHeaderExtensions;
@@ -144,6 +143,11 @@ class MediaContentDescription {
cryptos_ = cryptos;
}
+ // List of RTP header extensions. URIs are **NOT** guaranteed to be unique
+ // as they can appear twice when both encrypted and non-encrypted extensions
+ // are present.
+ // Use RtpExtension::FindHeaderExtensionByUri for finding and
+ // RtpExtension::DeduplicateHeaderExtensions for filtering.
virtual const RtpHeaderExtensions& rtp_header_extensions() const {
return rtp_header_extensions_;
}
@@ -484,6 +488,8 @@ class ContentGroup {
bool HasContentName(const std::string& content_name) const;
void AddContentName(const std::string& content_name);
bool RemoveContentName(const std::string& content_name);
+ // for debugging
+ std::string ToString() const;
private:
std::string semantics_;
@@ -568,6 +574,8 @@ class SessionDescription {
// Group accessors.
const ContentGroups& groups() const { return content_groups_; }
const ContentGroup* GetGroupByName(const std::string& name) const;
+ std::vector<const ContentGroup*> GetGroupsByName(
+ const std::string& name) const;
bool HasGroup(const std::string& name) const;
// Group mutators.
diff --git a/pc/srtp_transport.cc b/pc/srtp_transport.cc
index ee073497e7..c90b3fa227 100644
--- a/pc/srtp_transport.cc
+++ b/pc/srtp_transport.cc
@@ -201,12 +201,12 @@ bool SrtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
void SrtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
+ TRACE_EVENT0("webrtc", "SrtpTransport::OnRtpPacketReceived");
if (!IsSrtpActive()) {
RTC_LOG(LS_WARNING)
<< "Inactive SRTP transport received an RTP packet. Drop it.";
return;
}
- TRACE_EVENT0("webrtc", "SRTP Decode");
char* data = packet.MutableData<char>();
int len = rtc::checked_cast<int>(packet.size());
if (!UnprotectRtp(data, len, &len)) {
@@ -233,12 +233,12 @@ void SrtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
void SrtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
+ TRACE_EVENT0("webrtc", "SrtpTransport::OnRtcpPacketReceived");
if (!IsSrtpActive()) {
RTC_LOG(LS_WARNING)
<< "Inactive SRTP transport received an RTCP packet. Drop it.";
return;
}
- TRACE_EVENT0("webrtc", "SRTP Decode");
char* data = packet.MutableData<char>();
int len = rtc::checked_cast<int>(packet.size());
if (!UnprotectRtcp(data, len, &len)) {
diff --git a/pc/stats_collector.cc b/pc/stats_collector.cc
index 8955729192..7376e24c8b 100644
--- a/pc/stats_collector.cc
+++ b/pc/stats_collector.cc
@@ -50,6 +50,7 @@
#include "rtc_base/string_encode.h"
#include "rtc_base/thread.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
namespace webrtc {
@@ -810,7 +811,7 @@ StatsReport* StatsCollector::AddConnectionInfoReport(
StatsReport* StatsCollector::AddCandidateReport(
const cricket::CandidateStats& candidate_stats,
bool local) {
- const auto& candidate = candidate_stats.candidate;
+ const auto& candidate = candidate_stats.candidate();
StatsReport::Id id(StatsReport::NewCandidateId(local, candidate.id()));
StatsReport* report = reports_.Find(id);
if (!report) {
@@ -833,8 +834,8 @@ StatsReport* StatsCollector::AddCandidateReport(
}
report->set_timestamp(stats_gathering_started_);
- if (local && candidate_stats.stun_stats.has_value()) {
- const auto& stun_stats = candidate_stats.stun_stats.value();
+ if (local && candidate_stats.stun_stats().has_value()) {
+ const auto& stun_stats = candidate_stats.stun_stats().value();
report->AddInt64(StatsReport::kStatsValueNameSentStunKeepaliveRequests,
stun_stats.stun_binding_requests_sent);
report->AddInt64(StatsReport::kStatsValueNameRecvStunKeepaliveResponses,
@@ -849,6 +850,7 @@ StatsReport* StatsCollector::AddCandidateReport(
}
std::map<std::string, std::string> StatsCollector::ExtractSessionInfo() {
+ TRACE_EVENT0("webrtc", "StatsCollector::ExtractSessionInfo");
RTC_DCHECK_RUN_ON(pc_->signaling_thread());
SessionStats stats;
@@ -870,6 +872,7 @@ StatsCollector::SessionStats StatsCollector::ExtractSessionInfo_n(
RtpTransceiverProxyWithInternal<RtpTransceiver>>>& transceivers,
absl::optional<std::string> sctp_transport_name,
absl::optional<std::string> sctp_mid) {
+ TRACE_EVENT0("webrtc", "StatsCollector::ExtractSessionInfo_n");
RTC_DCHECK_RUN_ON(pc_->network_thread());
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
SessionStats stats;
@@ -1026,16 +1029,25 @@ void StatsCollector::ExtractBweInfo() {
// Fill in target encoder bitrate, actual encoder bitrate, rtx bitrate, etc.
// TODO(holmer): Also fill this in for audio.
- for (const auto& transceiver : pc_->GetTransceiversInternal()) {
+ auto transceivers = pc_->GetTransceiversInternal();
+ std::vector<cricket::VideoChannel*> video_channels;
+ for (const auto& transceiver : transceivers) {
if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) {
continue;
}
auto* video_channel =
static_cast<cricket::VideoChannel*>(transceiver->internal()->channel());
- if (!video_channel) {
- continue;
+ if (video_channel) {
+ video_channels.push_back(video_channel);
}
- video_channel->FillBitrateInfo(&bwe_info);
+ }
+
+ if (!video_channels.empty()) {
+ pc_->worker_thread()->Invoke<void>(RTC_FROM_HERE, [&] {
+ for (const auto& channel : video_channels) {
+ channel->FillBitrateInfo(&bwe_info);
+ }
+ });
}
StatsReport::Id report_id(StatsReport::NewBandwidthEstimationId());
@@ -1154,9 +1166,10 @@ void StatsCollector::ExtractMediaInfo(
std::vector<std::unique_ptr<MediaChannelStatsGatherer>> gatherers;
+ auto transceivers = pc_->GetTransceiversInternal();
{
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
- for (const auto& transceiver : pc_->GetTransceiversInternal()) {
+ for (const auto& transceiver : transceivers) {
cricket::ChannelInterface* channel = transceiver->internal()->channel();
if (!channel) {
continue;
@@ -1167,20 +1180,37 @@ void StatsCollector::ExtractMediaInfo(
gatherer->transport_name = transport_names_by_mid.at(gatherer->mid);
for (const auto& sender : transceiver->internal()->senders()) {
- std::string track_id = (sender->track() ? sender->track()->id() : "");
+ auto track = sender->track();
+ std::string track_id = (track ? track->id() : "");
gatherer->sender_track_id_by_ssrc.insert(
std::make_pair(sender->ssrc(), track_id));
}
- for (const auto& receiver : transceiver->internal()->receivers()) {
- gatherer->receiver_track_id_by_ssrc.insert(std::make_pair(
- receiver->internal()->ssrc(), receiver->track()->id()));
- }
+
+ // Populating `receiver_track_id_by_ssrc` will be done on the worker
+ // thread as the `ssrc` property of the receiver needs to be accessed
+ // there.
+
gatherers.push_back(std::move(gatherer));
}
}
pc_->worker_thread()->Invoke<void>(RTC_FROM_HERE, [&] {
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+ // Populate `receiver_track_id_by_ssrc` for the gatherers.
+ int i = 0;
+ for (const auto& transceiver : transceivers) {
+ cricket::ChannelInterface* channel = transceiver->internal()->channel();
+ if (!channel)
+ continue;
+ MediaChannelStatsGatherer* gatherer = gatherers[i++].get();
+ RTC_DCHECK_EQ(gatherer->mid, channel->content_name());
+
+ for (const auto& receiver : transceiver->internal()->receivers()) {
+ gatherer->receiver_track_id_by_ssrc.insert(std::make_pair(
+ receiver->internal()->ssrc(), receiver->track()->id()));
+ }
+ }
+
for (auto it = gatherers.begin(); it != gatherers.end();
/* incremented manually */) {
MediaChannelStatsGatherer* gatherer = it->get();
diff --git a/pc/stats_collector_unittest.cc b/pc/stats_collector_unittest.cc
index 3767081b56..c630c3af6c 100644
--- a/pc/stats_collector_unittest.cc
+++ b/pc/stats_collector_unittest.cc
@@ -96,7 +96,7 @@ class FakeAudioTrack : public MediaStreamTrack<AudioTrackInterface> {
public:
explicit FakeAudioTrack(const std::string& id)
: MediaStreamTrack<AudioTrackInterface>(id),
- processor_(new rtc::RefCountedObject<FakeAudioProcessor>()) {}
+ processor_(rtc::make_ref_counted<FakeAudioProcessor>()) {}
std::string kind() const override { return "audio"; }
AudioSourceInterface* GetSource() const override { return NULL; }
void AddSink(AudioTrackSinkInterface* sink) override {}
@@ -134,8 +134,7 @@ class FakeAudioTrackWithInitValue
public:
explicit FakeAudioTrackWithInitValue(const std::string& id)
: MediaStreamTrack<AudioTrackInterface>(id),
- processor_(
- new rtc::RefCountedObject<FakeAudioProcessorWithInitValue>()) {}
+ processor_(rtc::make_ref_counted<FakeAudioProcessorWithInitValue>()) {}
std::string kind() const override { return "audio"; }
AudioSourceInterface* GetSource() const override { return NULL; }
void AddSink(AudioTrackSinkInterface* sink) override {}
@@ -600,7 +599,7 @@ class StatsCollectorForTest : public StatsCollector {
class StatsCollectorTest : public ::testing::Test {
protected:
rtc::scoped_refptr<FakePeerConnectionForStats> CreatePeerConnection() {
- return new rtc::RefCountedObject<FakePeerConnectionForStats>();
+ return rtc::make_ref_counted<FakePeerConnectionForStats>();
}
std::unique_ptr<StatsCollectorForTest> CreateStatsCollector(
@@ -738,8 +737,7 @@ class StatsCollectorTest : public ::testing::Test {
static rtc::scoped_refptr<MockRtpSenderInternal> CreateMockSender(
rtc::scoped_refptr<MediaStreamTrackInterface> track,
uint32_t ssrc) {
- rtc::scoped_refptr<MockRtpSenderInternal> sender(
- new rtc::RefCountedObject<MockRtpSenderInternal>());
+ auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
EXPECT_CALL(*sender, track()).WillRepeatedly(Return(track));
EXPECT_CALL(*sender, ssrc()).WillRepeatedly(Return(ssrc));
EXPECT_CALL(*sender, media_type())
@@ -753,8 +751,7 @@ static rtc::scoped_refptr<MockRtpSenderInternal> CreateMockSender(
static rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockReceiver(
rtc::scoped_refptr<MediaStreamTrackInterface> track,
uint32_t ssrc) {
- rtc::scoped_refptr<MockRtpReceiverInternal> receiver(
- new rtc::RefCountedObject<MockRtpReceiverInternal>());
+ auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
EXPECT_CALL(*receiver, track()).WillRepeatedly(Return(track));
EXPECT_CALL(*receiver, ssrc()).WillRepeatedly(Return(ssrc));
EXPECT_CALL(*receiver, media_type())
@@ -808,7 +805,7 @@ class StatsCollectorTrackTest : public StatsCollectorTest,
rtc::scoped_refptr<RtpSenderInterface> AddOutgoingAudioTrack(
FakePeerConnectionForStats* pc,
StatsCollectorForTest* stats) {
- audio_track_ = new rtc::RefCountedObject<FakeAudioTrack>(kLocalTrackId);
+ audio_track_ = rtc::make_ref_counted<FakeAudioTrack>(kLocalTrackId);
if (GetParam()) {
if (!stream_)
stream_ = MediaStream::Create("streamid");
@@ -823,7 +820,7 @@ class StatsCollectorTrackTest : public StatsCollectorTest,
  // Adds an incoming audio track with a given SSRC into the stats.
void AddIncomingAudioTrack(FakePeerConnectionForStats* pc,
StatsCollectorForTest* stats) {
- audio_track_ = new rtc::RefCountedObject<FakeAudioTrack>(kRemoteTrackId);
+ audio_track_ = rtc::make_ref_counted<FakeAudioTrack>(kRemoteTrackId);
if (GetParam()) {
if (stream_ == NULL)
stream_ = MediaStream::Create("streamid");
@@ -1483,8 +1480,8 @@ TEST_P(StatsCollectorTrackTest, FilterOutNegativeInitialValues) {
// Create a local stream with a local audio track and adds it to the stats.
stream_ = MediaStream::Create("streamid");
- rtc::scoped_refptr<FakeAudioTrackWithInitValue> local_track(
- new rtc::RefCountedObject<FakeAudioTrackWithInitValue>(kLocalTrackId));
+ auto local_track =
+ rtc::make_ref_counted<FakeAudioTrackWithInitValue>(kLocalTrackId);
stream_->AddTrack(local_track);
pc->AddSender(CreateMockSender(local_track, kSsrcOfTrack));
if (GetParam()) {
@@ -1495,8 +1492,8 @@ TEST_P(StatsCollectorTrackTest, FilterOutNegativeInitialValues) {
// Create a remote stream with a remote audio track and adds it to the stats.
rtc::scoped_refptr<MediaStream> remote_stream(
MediaStream::Create("remotestreamid"));
- rtc::scoped_refptr<FakeAudioTrackWithInitValue> remote_track(
- new rtc::RefCountedObject<FakeAudioTrackWithInitValue>(kRemoteTrackId));
+ auto remote_track =
+ rtc::make_ref_counted<FakeAudioTrackWithInitValue>(kRemoteTrackId);
remote_stream->AddTrack(remote_track);
pc->AddReceiver(CreateMockReceiver(remote_track, kSsrcOfTrack));
if (GetParam()) {
@@ -1665,8 +1662,7 @@ TEST_P(StatsCollectorTrackTest, LocalAndRemoteTracksWithSameSsrc) {
// Create a remote stream with a remote audio track and adds it to the stats.
rtc::scoped_refptr<MediaStream> remote_stream(
MediaStream::Create("remotestreamid"));
- rtc::scoped_refptr<FakeAudioTrack> remote_track(
- new rtc::RefCountedObject<FakeAudioTrack>(kRemoteTrackId));
+ auto remote_track = rtc::make_ref_counted<FakeAudioTrack>(kRemoteTrackId);
pc->AddReceiver(CreateMockReceiver(remote_track, kSsrcOfTrack));
remote_stream->AddTrack(remote_track);
stats->AddStream(remote_stream);
@@ -1755,8 +1751,7 @@ TEST_P(StatsCollectorTrackTest, TwoLocalTracksWithSameSsrc) {
// Create a new audio track and adds it to the stream and stats.
static const std::string kNewTrackId = "new_track_id";
- rtc::scoped_refptr<FakeAudioTrack> new_audio_track(
- new rtc::RefCountedObject<FakeAudioTrack>(kNewTrackId));
+ auto new_audio_track = rtc::make_ref_counted<FakeAudioTrack>(kNewTrackId);
pc->AddSender(CreateMockSender(new_audio_track, kSsrcOfTrack));
stream_->AddTrack(new_audio_track);
@@ -1785,8 +1780,8 @@ TEST_P(StatsCollectorTrackTest, TwoLocalSendersWithSameTrack) {
auto pc = CreatePeerConnection();
auto stats = CreateStatsCollector(pc);
- rtc::scoped_refptr<FakeAudioTrackWithInitValue> local_track(
- new rtc::RefCountedObject<FakeAudioTrackWithInitValue>(kLocalTrackId));
+ auto local_track =
+ rtc::make_ref_counted<FakeAudioTrackWithInitValue>(kLocalTrackId);
pc->AddSender(CreateMockSender(local_track, kFirstSsrc));
stats->AddLocalAudioTrack(local_track.get(), kFirstSsrc);
pc->AddSender(CreateMockSender(local_track, kSecondSsrc));
diff --git a/pc/stream_collection.h b/pc/stream_collection.h
index 28cd46fc5d..9bbf957efd 100644
--- a/pc/stream_collection.h
+++ b/pc/stream_collection.h
@@ -22,16 +22,12 @@ namespace webrtc {
class StreamCollection : public StreamCollectionInterface {
public:
static rtc::scoped_refptr<StreamCollection> Create() {
- rtc::RefCountedObject<StreamCollection>* implementation =
- new rtc::RefCountedObject<StreamCollection>();
- return implementation;
+ return rtc::make_ref_counted<StreamCollection>();
}
static rtc::scoped_refptr<StreamCollection> Create(
StreamCollection* streams) {
- rtc::RefCountedObject<StreamCollection>* implementation =
- new rtc::RefCountedObject<StreamCollection>(streams);
- return implementation;
+ return rtc::make_ref_counted<StreamCollection>(streams);
}
virtual size_t count() { return media_streams_.size(); }
diff --git a/pc/test/fake_audio_capture_module.cc b/pc/test/fake_audio_capture_module.cc
index a395df0409..214ed6b523 100644
--- a/pc/test/fake_audio_capture_module.cc
+++ b/pc/test/fake_audio_capture_module.cc
@@ -58,8 +58,7 @@ FakeAudioCaptureModule::~FakeAudioCaptureModule() {
}
rtc::scoped_refptr<FakeAudioCaptureModule> FakeAudioCaptureModule::Create() {
- rtc::scoped_refptr<FakeAudioCaptureModule> capture_module(
- new rtc::RefCountedObject<FakeAudioCaptureModule>());
+ auto capture_module = rtc::make_ref_counted<FakeAudioCaptureModule>();
if (!capture_module->Initialize()) {
return nullptr;
}
diff --git a/pc/test/fake_data_channel_provider.h b/pc/test/fake_data_channel_provider.h
index 7145225ca6..f9e9e91d48 100644
--- a/pc/test/fake_data_channel_provider.h
+++ b/pc/test/fake_data_channel_provider.h
@@ -26,7 +26,8 @@ class FakeDataChannelProvider
transport_error_(false) {}
virtual ~FakeDataChannelProvider() {}
- bool SendData(const cricket::SendDataParams& params,
+ bool SendData(int sid,
+ const webrtc::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) override {
RTC_CHECK(ready_to_send_);
@@ -36,11 +37,12 @@ class FakeDataChannelProvider
return false;
}
- if (transport_error_ || payload.size() == 0) {
+ if (transport_error_) {
*result = cricket::SDR_ERROR;
return false;
}
+ last_sid_ = sid;
last_send_data_params_ = params;
return true;
}
@@ -127,7 +129,8 @@ class FakeDataChannelProvider
void set_transport_error() { transport_error_ = true; }
- cricket::SendDataParams last_send_data_params() const {
+ int last_sid() const { return last_sid_; }
+ const webrtc::SendDataParams& last_send_data_params() const {
return last_send_data_params_;
}
@@ -144,7 +147,8 @@ class FakeDataChannelProvider
}
private:
- cricket::SendDataParams last_send_data_params_;
+ int last_sid_;
+ webrtc::SendDataParams last_send_data_params_;
bool send_blocked_;
bool transport_available_;
bool ready_to_send_;
diff --git a/pc/test/fake_peer_connection_base.h b/pc/test/fake_peer_connection_base.h
index 1acf86fdac..7970dd0f0f 100644
--- a/pc/test/fake_peer_connection_base.h
+++ b/pc/test/fake_peer_connection_base.h
@@ -120,10 +120,11 @@ class FakePeerConnectionBase : public PeerConnectionInternal {
return nullptr;
}
- rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>> CreateDataChannelOrError(
const std::string& label,
const DataChannelInit* config) override {
- return nullptr;
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+ "Fake function called");
}
const SessionDescriptionInterface* local_description() const override {
diff --git a/pc/test/fake_peer_connection_for_stats.h b/pc/test/fake_peer_connection_for_stats.h
index 3f3e0a9ee0..4cdbd82162 100644
--- a/pc/test/fake_peer_connection_for_stats.h
+++ b/pc/test/fake_peer_connection_for_stats.h
@@ -28,8 +28,10 @@ namespace webrtc {
// Fake VoiceMediaChannel where the result of GetStats can be configured.
class FakeVoiceMediaChannelForStats : public cricket::FakeVoiceMediaChannel {
public:
- FakeVoiceMediaChannelForStats()
- : cricket::FakeVoiceMediaChannel(nullptr, cricket::AudioOptions()) {}
+ explicit FakeVoiceMediaChannelForStats(TaskQueueBase* network_thread)
+ : cricket::FakeVoiceMediaChannel(nullptr,
+ cricket::AudioOptions(),
+ network_thread) {}
void SetStats(const cricket::VoiceMediaInfo& voice_info) {
stats_ = voice_info;
@@ -52,8 +54,10 @@ class FakeVoiceMediaChannelForStats : public cricket::FakeVoiceMediaChannel {
// Fake VideoMediaChannel where the result of GetStats can be configured.
class FakeVideoMediaChannelForStats : public cricket::FakeVideoMediaChannel {
public:
- FakeVideoMediaChannelForStats()
- : cricket::FakeVideoMediaChannel(nullptr, cricket::VideoOptions()) {}
+ explicit FakeVideoMediaChannelForStats(TaskQueueBase* network_thread)
+ : cricket::FakeVideoMediaChannel(nullptr,
+ cricket::VideoOptions(),
+ network_thread) {}
void SetStats(const cricket::VideoMediaInfo& video_info) {
stats_ = video_info;
@@ -178,7 +182,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
// TODO(steveanton): Switch tests to use RtpTransceivers directly.
auto receiver_proxy =
RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
- signaling_thread_, receiver);
+ signaling_thread_, worker_thread_, receiver);
GetOrCreateFirstTransceiverOfType(receiver->media_type())
->internal()
->AddReceiver(receiver_proxy);
@@ -196,7 +200,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
const std::string& transport_name) {
RTC_DCHECK(!voice_channel_);
auto voice_media_channel =
- std::make_unique<FakeVoiceMediaChannelForStats>();
+ std::make_unique<FakeVoiceMediaChannelForStats>(network_thread_);
auto* voice_media_channel_ptr = voice_media_channel.get();
voice_channel_ = std::make_unique<VoiceChannelForTesting>(
worker_thread_, network_thread_, signaling_thread_,
@@ -213,7 +217,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
const std::string& transport_name) {
RTC_DCHECK(!video_channel_);
auto video_media_channel =
- std::make_unique<FakeVideoMediaChannelForStats>();
+ std::make_unique<FakeVideoMediaChannelForStats>(network_thread_);
auto video_media_channel_ptr = video_media_channel.get();
video_channel_ = std::make_unique<VideoChannelForTesting>(
worker_thread_, network_thread_, signaling_thread_,
@@ -388,7 +392,8 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
}
}
auto transceiver = RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
- signaling_thread_, new RtpTransceiver(media_type));
+ signaling_thread_,
+ new RtpTransceiver(media_type, channel_manager_.get()));
transceivers_.push_back(transceiver);
return transceiver;
}
@@ -397,6 +402,12 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
rtc::Thread* const worker_thread_;
rtc::Thread* const signaling_thread_;
+ std::unique_ptr<cricket::ChannelManager> channel_manager_ =
+ cricket::ChannelManager::Create(nullptr /* MediaEngineInterface */,
+ true,
+ worker_thread_,
+ network_thread_);
+
rtc::scoped_refptr<StreamCollection> local_streams_;
rtc::scoped_refptr<StreamCollection> remote_streams_;
diff --git a/pc/test/fake_video_track_source.h b/pc/test/fake_video_track_source.h
index d6562313c5..2042c39175 100644
--- a/pc/test/fake_video_track_source.h
+++ b/pc/test/fake_video_track_source.h
@@ -22,7 +22,7 @@ namespace webrtc {
class FakeVideoTrackSource : public VideoTrackSource {
public:
static rtc::scoped_refptr<FakeVideoTrackSource> Create(bool is_screencast) {
- return new rtc::RefCountedObject<FakeVideoTrackSource>(is_screencast);
+ return rtc::make_ref_counted<FakeVideoTrackSource>(is_screencast);
}
static rtc::scoped_refptr<FakeVideoTrackSource> Create() {
diff --git a/pc/test/integration_test_helpers.h b/pc/test/integration_test_helpers.h
index 075a907200..9ec9b0e982 100644
--- a/pc/test/integration_test_helpers.h
+++ b/pc/test/integration_test_helpers.h
@@ -17,6 +17,7 @@
#include <algorithm>
#include <functional>
+#include <limits>
#include <list>
#include <map>
#include <memory>
@@ -37,7 +38,6 @@
#include "api/media_stream_interface.h"
#include "api/media_types.h"
#include "api/peer_connection_interface.h"
-#include "api/peer_connection_proxy.h"
#include "api/rtc_error.h"
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
@@ -84,6 +84,7 @@
#include "pc/media_session.h"
#include "pc/peer_connection.h"
#include "pc/peer_connection_factory.h"
+#include "pc/peer_connection_proxy.h"
#include "pc/rtp_media_utils.h"
#include "pc/session_description.h"
#include "pc/test/fake_audio_capture_module.h"
@@ -451,8 +452,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
// which can be used to access the gathered stats.
rtc::scoped_refptr<MockStatsObserver> OldGetStatsForTrack(
webrtc::MediaStreamTrackInterface* track) {
- rtc::scoped_refptr<MockStatsObserver> observer(
- new rtc::RefCountedObject<MockStatsObserver>());
+ auto observer = rtc::make_ref_counted<MockStatsObserver>();
EXPECT_TRUE(peer_connection_->GetStats(
observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
@@ -467,8 +467,8 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
// Synchronously gets stats and returns them. If it times out, fails the test
// and returns null.
rtc::scoped_refptr<const webrtc::RTCStatsReport> NewGetStats() {
- rtc::scoped_refptr<webrtc::MockRTCStatsCollectorCallback> callback(
- new rtc::RefCountedObject<webrtc::MockRTCStatsCollectorCallback>());
+ auto callback =
+ rtc::make_ref_counted<webrtc::MockRTCStatsCollectorCallback>();
peer_connection_->GetStats(callback);
EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout);
return callback->report();
@@ -605,8 +605,8 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
// Returns null on failure.
std::unique_ptr<SessionDescriptionInterface> CreateOfferAndWait() {
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
pc()->CreateOffer(observer, offer_answer_options_);
return WaitForDescriptionFromObserver(observer);
}
@@ -705,6 +705,11 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
audio_concealed_stat_ = *track_stats->concealed_samples;
}
+  // Sets the number of ICE candidates expected to be gathered.
+ void ExpectCandidates(int candidate_count) {
+ candidates_expected_ = candidate_count;
+ }
+
private:
explicit PeerConnectionIntegrationWrapper(const std::string& debug_name)
: debug_name_(debug_name) {}
@@ -828,7 +833,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
config.frame_interval_ms = 100;
video_track_sources_.emplace_back(
- new rtc::RefCountedObject<webrtc::FakePeriodicVideoTrackSource>(
+ rtc::make_ref_counted<webrtc::FakePeriodicVideoTrackSource>(
config, false /* remote */));
rtc::scoped_refptr<webrtc::VideoTrackInterface> track(
peer_connection_factory_->CreateVideoTrack(
@@ -874,8 +879,8 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
// Returns null on failure.
std::unique_ptr<SessionDescriptionInterface> CreateAnswer() {
- rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockCreateSessionDescriptionObserver>());
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
pc()->CreateAnswer(observer, offer_answer_options_);
return WaitForDescriptionFromObserver(observer);
}
@@ -900,8 +905,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
// don't outrace the description.
bool SetLocalDescriptionAndSendSdpMessage(
std::unique_ptr<SessionDescriptionInterface> desc) {
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
RTC_LOG(LS_INFO) << debug_name_ << ": SetLocalDescriptionAndSendSdpMessage";
SdpType type = desc->GetType();
std::string sdp;
@@ -917,8 +921,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
}
bool SetRemoteDescription(std::unique_ptr<SessionDescriptionInterface> desc) {
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
RTC_LOG(LS_INFO) << debug_name_ << ": SetRemoteDescription";
pc()->SetRemoteDescription(observer, desc.release());
RemoveUnusedVideoRenderers();
@@ -1092,6 +1095,9 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
}
}
+ // Check if we expected to have a candidate.
+ EXPECT_GT(candidates_expected_, 1);
+ candidates_expected_--;
std::string ice_sdp;
EXPECT_TRUE(candidate->ToString(&ice_sdp));
if (signaling_message_receiver_ == nullptr || !signal_ice_candidates_) {
@@ -1175,6 +1181,9 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
peer_connection_signaling_state_history_;
webrtc::FakeRtcEventLogFactory* event_log_factory_;
+ // Number of ICE candidates expected. The default is no limit.
+ int candidates_expected_ = std::numeric_limits<int>::max();
+
// Variables for tracking delay stats on an audio track
int audio_packets_stat_ = 0;
double audio_delay_stat_ = 0.0;
@@ -1322,8 +1331,7 @@ class MockIceTransportFactory : public IceTransportFactory {
int component,
IceTransportInit init) {
RecordIceTransportCreated();
- return new rtc::RefCountedObject<MockIceTransport>(transport_name,
- component);
+ return rtc::make_ref_counted<MockIceTransport>(transport_name, component);
}
MOCK_METHOD(void, RecordIceTransportCreated, ());
};
@@ -1334,12 +1342,17 @@ class MockIceTransportFactory : public IceTransportFactory {
// of everything else (including "PeerConnectionFactory"s).
class PeerConnectionIntegrationBaseTest : public ::testing::Test {
public:
- explicit PeerConnectionIntegrationBaseTest(SdpSemantics sdp_semantics)
+ PeerConnectionIntegrationBaseTest(
+ SdpSemantics sdp_semantics,
+ absl::optional<std::string> field_trials = absl::nullopt)
: sdp_semantics_(sdp_semantics),
ss_(new rtc::VirtualSocketServer()),
fss_(new rtc::FirewallSocketServer(ss_.get())),
network_thread_(new rtc::Thread(fss_.get())),
- worker_thread_(rtc::Thread::Create()) {
+ worker_thread_(rtc::Thread::Create()),
+ field_trials_(field_trials.has_value()
+ ? new test::ScopedFieldTrials(*field_trials)
+ : nullptr) {
network_thread_->SetName("PCNetworkThread", this);
worker_thread_->SetName("PCWorkerThread", this);
RTC_CHECK(network_thread_->Start());
@@ -1843,6 +1856,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
std::vector<std::unique_ptr<cricket::TestTurnCustomizer>> turn_customizers_;
std::unique_ptr<PeerConnectionIntegrationWrapper> caller_;
std::unique_ptr<PeerConnectionIntegrationWrapper> callee_;
+ std::unique_ptr<test::ScopedFieldTrials> field_trials_;
};
} // namespace webrtc
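
The base fixture now takes an optional field-trial string, and the wrapper's new `ExpectCandidates()` caps how many ICE candidates a test expects before `OnIceCandidate` starts failing expectations. A hypothetical derived fixture showing the new constructor argument (the fixture name and trial string are placeholders, not part of this change):

    // Illustrative subclass; the field-trial string is a placeholder value.
    class MyFieldTrialTest : public webrtc::PeerConnectionIntegrationBaseTest {
     public:
      MyFieldTrialTest()
          : PeerConnectionIntegrationBaseTest(
                webrtc::SdpSemantics::kUnifiedPlan,
                absl::optional<std::string>("WebRTC-SomeFeature/Enabled/")) {}
    };
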
diff --git a/pc/test/mock_channel_interface.h b/pc/test/mock_channel_interface.h
index 726519cf97..6faba5c8fc 100644
--- a/pc/test/mock_channel_interface.h
+++ b/pc/test/mock_channel_interface.h
@@ -28,11 +28,10 @@ class MockChannelInterface : public cricket::ChannelInterface {
MOCK_METHOD(MediaChannel*, media_channel, (), (const, override));
MOCK_METHOD(const std::string&, transport_name, (), (const, override));
MOCK_METHOD(const std::string&, content_name, (), (const, override));
- MOCK_METHOD(bool, enabled, (), (const, override));
- MOCK_METHOD(bool, Enable, (bool), (override));
- MOCK_METHOD(sigslot::signal1<ChannelInterface*>&,
- SignalFirstPacketReceived,
- (),
+ MOCK_METHOD(void, Enable, (bool), (override));
+ MOCK_METHOD(void,
+ SetFirstPacketReceivedCallback,
+ (std::function<void()>),
(override));
MOCK_METHOD(bool,
SetLocalContent,
@@ -59,10 +58,6 @@ class MockChannelInterface : public cricket::ChannelInterface {
SetRtpTransport,
(webrtc::RtpTransportInternal*),
(override));
- MOCK_METHOD(RtpHeaderExtensions,
- GetNegotiatedRtpHeaderExtensions,
- (),
- (const));
};
} // namespace cricket
diff --git a/pc/test/mock_delayable.h b/pc/test/mock_delayable.h
deleted file mode 100644
index bef07c1970..0000000000
--- a/pc/test/mock_delayable.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef PC_TEST_MOCK_DELAYABLE_H_
-#define PC_TEST_MOCK_DELAYABLE_H_
-
-#include <stdint.h>
-
-#include "absl/types/optional.h"
-#include "media/base/delayable.h"
-#include "test/gmock.h"
-
-namespace webrtc {
-
-class MockDelayable : public cricket::Delayable {
- public:
- MOCK_METHOD(bool,
- SetBaseMinimumPlayoutDelayMs,
- (uint32_t ssrc, int delay_ms),
- (override));
- MOCK_METHOD(absl::optional<int>,
- GetBaseMinimumPlayoutDelayMs,
- (uint32_t ssrc),
- (const, override));
-};
-
-} // namespace webrtc
-
-#endif // PC_TEST_MOCK_DELAYABLE_H_
diff --git a/pc/test/mock_peer_connection_observers.h b/pc/test/mock_peer_connection_observers.h
index 7766297843..413339dbf7 100644
--- a/pc/test/mock_peer_connection_observers.h
+++ b/pc/test/mock_peer_connection_observers.h
@@ -286,7 +286,7 @@ class MockSetSessionDescriptionObserver
: public webrtc::SetSessionDescriptionObserver {
public:
static rtc::scoped_refptr<MockSetSessionDescriptionObserver> Create() {
- return new rtc::RefCountedObject<MockSetSessionDescriptionObserver>();
+ return rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
}
MockSetSessionDescriptionObserver()
@@ -351,32 +351,51 @@ class FakeSetRemoteDescriptionObserver
class MockDataChannelObserver : public webrtc::DataChannelObserver {
public:
+ struct Message {
+ std::string data;
+ bool binary;
+ };
+
explicit MockDataChannelObserver(webrtc::DataChannelInterface* channel)
: channel_(channel) {
channel_->RegisterObserver(this);
- state_ = channel_->state();
+ states_.push_back(channel_->state());
}
virtual ~MockDataChannelObserver() { channel_->UnregisterObserver(); }
void OnBufferedAmountChange(uint64_t previous_amount) override {}
- void OnStateChange() override { state_ = channel_->state(); }
+ void OnStateChange() override { states_.push_back(channel_->state()); }
void OnMessage(const DataBuffer& buffer) override {
messages_.push_back(
- std::string(buffer.data.data<char>(), buffer.data.size()));
+ {std::string(buffer.data.data<char>(), buffer.data.size()),
+ buffer.binary});
}
- bool IsOpen() const { return state_ == DataChannelInterface::kOpen; }
- std::vector<std::string> messages() const { return messages_; }
+ bool IsOpen() const { return state() == DataChannelInterface::kOpen; }
+ std::vector<Message> messages() const { return messages_; }
std::string last_message() const {
- return messages_.empty() ? std::string() : messages_.back();
+ if (messages_.empty())
+ return {};
+
+ return messages_.back().data;
+ }
+ bool last_message_is_binary() const {
+ if (messages_.empty())
+ return false;
+ return messages_.back().binary;
}
size_t received_message_count() const { return messages_.size(); }
+ DataChannelInterface::DataState state() const { return states_.back(); }
+ const std::vector<DataChannelInterface::DataState>& states() const {
+ return states_;
+ }
+
private:
rtc::scoped_refptr<webrtc::DataChannelInterface> channel_;
- DataChannelInterface::DataState state_;
- std::vector<std::string> messages_;
+ std::vector<DataChannelInterface::DataState> states_;
+ std::vector<Message> messages_;
};
class MockStatsObserver : public webrtc::StatsObserver {
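
MockDataChannelObserver now records every state transition and whether each message was binary, instead of only the latest state and raw strings. A hypothetical assertion snippet built on the new accessors (assumes gtest and that `channel` is an already-negotiated data channel):

    // Sketch only; `channel` is assumed to be an open DataChannelInterface.
    void VerifyObserver(webrtc::DataChannelInterface* channel) {
      webrtc::MockDataChannelObserver observer(channel);
      // ... exchange messages over `channel` ...
      EXPECT_TRUE(observer.IsOpen());
      EXPECT_FALSE(observer.last_message_is_binary());
      // The full state history is retained, e.g. kConnecting -> kOpen.
      EXPECT_EQ(webrtc::DataChannelInterface::kOpen, observer.states().back());
    }
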
diff --git a/pc/test/peer_connection_test_wrapper.cc b/pc/test/peer_connection_test_wrapper.cc
index c3ffcb0893..8fdfb1bbb8 100644
--- a/pc/test/peer_connection_test_wrapper.cc
+++ b/pc/test/peer_connection_test_wrapper.cc
@@ -123,17 +123,31 @@ bool PeerConnectionTestWrapper::CreatePc(
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator(
new FakeRTCCertificateGenerator());
- peer_connection_ = peer_connection_factory_->CreatePeerConnection(
- config, std::move(port_allocator), std::move(cert_generator), this);
-
- return peer_connection_.get() != NULL;
+ webrtc::PeerConnectionDependencies deps(this);
+ deps.allocator = std::move(port_allocator);
+ deps.cert_generator = std::move(cert_generator);
+ auto result = peer_connection_factory_->CreatePeerConnectionOrError(
+ config, std::move(deps));
+ if (result.ok()) {
+ peer_connection_ = result.MoveValue();
+ return true;
+ } else {
+ return false;
+ }
}
rtc::scoped_refptr<webrtc::DataChannelInterface>
PeerConnectionTestWrapper::CreateDataChannel(
const std::string& label,
const webrtc::DataChannelInit& init) {
- return peer_connection_->CreateDataChannel(label, &init);
+ auto result = peer_connection_->CreateDataChannelOrError(label, &init);
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "CreateDataChannel failed: "
+ << ToString(result.error().type()) << " "
+ << result.error().message();
+ return nullptr;
+ }
+ return result.MoveValue();
}
void PeerConnectionTestWrapper::WaitForNegotiation() {
@@ -221,8 +235,7 @@ void PeerConnectionTestWrapper::SetLocalDescription(SdpType type,
<< ": SetLocalDescription " << webrtc::SdpTypeToString(type)
<< " " << sdp;
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
peer_connection_->SetLocalDescription(
observer, webrtc::CreateSessionDescription(type, sdp).release());
}
@@ -233,8 +246,7 @@ void PeerConnectionTestWrapper::SetRemoteDescription(SdpType type,
<< ": SetRemoteDescription " << webrtc::SdpTypeToString(type)
<< " " << sdp;
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
peer_connection_->SetRemoteDescription(
observer, webrtc::CreateSessionDescription(type, sdp).release());
}
@@ -331,9 +343,8 @@ PeerConnectionTestWrapper::GetUserMedia(
config.frame_interval_ms = 100;
config.timestamp_offset_ms = rtc::TimeMillis();
- rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
- new rtc::RefCountedObject<webrtc::FakePeriodicVideoTrackSource>(
- config, /* remote */ false);
+ auto source = rtc::make_ref_counted<webrtc::FakePeriodicVideoTrackSource>(
+ config, /* remote */ false);
std::string videotrack_label = stream_id + kVideoTrackLabelBase;
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
diff --git a/pc/test/rtc_stats_obtainer.h b/pc/test/rtc_stats_obtainer.h
index 335b9de307..4da23c6628 100644
--- a/pc/test/rtc_stats_obtainer.h
+++ b/pc/test/rtc_stats_obtainer.h
@@ -20,8 +20,7 @@ class RTCStatsObtainer : public RTCStatsCollectorCallback {
public:
static rtc::scoped_refptr<RTCStatsObtainer> Create(
rtc::scoped_refptr<const RTCStatsReport>* report_ptr = nullptr) {
- return rtc::scoped_refptr<RTCStatsObtainer>(
- new rtc::RefCountedObject<RTCStatsObtainer>(report_ptr));
+ return rtc::make_ref_counted<RTCStatsObtainer>(report_ptr);
}
void OnStatsDelivered(
diff --git a/pc/track_media_info_map_unittest.cc b/pc/track_media_info_map_unittest.cc
index 0cb1e0e277..1d5caacddb 100644
--- a/pc/track_media_info_map_unittest.cc
+++ b/pc/track_media_info_map_unittest.cc
@@ -31,6 +31,45 @@ namespace webrtc {
namespace {
+class MockVideoTrack : public VideoTrackInterface {
+ public:
+ // NotifierInterface
+ MOCK_METHOD(void,
+ RegisterObserver,
+ (ObserverInterface * observer),
+ (override));
+ MOCK_METHOD(void,
+ UnregisterObserver,
+ (ObserverInterface * observer),
+ (override));
+
+ // MediaStreamTrackInterface
+ MOCK_METHOD(std::string, kind, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(bool, enabled, (), (const, override));
+ MOCK_METHOD(bool, set_enabled, (bool enable), (override));
+ MOCK_METHOD(TrackState, state, (), (const, override));
+
+ // VideoSourceInterface
+ MOCK_METHOD(void,
+ AddOrUpdateSink,
+ (rtc::VideoSinkInterface<VideoFrame> * sink,
+ const rtc::VideoSinkWants& wants),
+ (override));
+ // RemoveSink must guarantee that at the time the method returns,
+  // there are no current or future calls to VideoSinkInterface::OnFrame.
+ MOCK_METHOD(void,
+ RemoveSink,
+ (rtc::VideoSinkInterface<VideoFrame> * sink),
+ (override));
+
+ // VideoTrackInterface
+ MOCK_METHOD(VideoTrackSourceInterface*, GetSource, (), (const, override));
+
+ MOCK_METHOD(ContentHint, content_hint, (), (const, override));
+ MOCK_METHOD(void, set_content_hint, (ContentHint hint), (override));
+};
+
RtpParameters CreateRtpParametersWithSsrcs(
std::initializer_list<uint32_t> ssrcs) {
RtpParameters params;
@@ -52,8 +91,7 @@ rtc::scoped_refptr<MockRtpSenderInternal> CreateMockRtpSender(
} else {
first_ssrc = 0;
}
- rtc::scoped_refptr<MockRtpSenderInternal> sender(
- new rtc::RefCountedObject<MockRtpSenderInternal>());
+ auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
EXPECT_CALL(*sender, track())
.WillRepeatedly(::testing::Return(std::move(track)));
EXPECT_CALL(*sender, ssrc()).WillRepeatedly(::testing::Return(first_ssrc));
@@ -69,8 +107,7 @@ rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockRtpReceiver(
cricket::MediaType media_type,
std::initializer_list<uint32_t> ssrcs,
rtc::scoped_refptr<MediaStreamTrackInterface> track) {
- rtc::scoped_refptr<MockRtpReceiverInternal> receiver(
- new rtc::RefCountedObject<MockRtpReceiverInternal>());
+ auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
EXPECT_CALL(*receiver, track())
.WillRepeatedly(::testing::Return(std::move(track)));
EXPECT_CALL(*receiver, media_type())
@@ -81,23 +118,35 @@ rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockRtpReceiver(
return receiver;
}
+rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
+ const std::string& id) {
+ return VideoTrack::Create(id, FakeVideoTrackSource::Create(false),
+ rtc::Thread::Current());
+}
+
+rtc::scoped_refptr<VideoTrackInterface> CreateMockVideoTrack(
+ const std::string& id) {
+ auto track = rtc::make_ref_counted<MockVideoTrack>();
+ EXPECT_CALL(*track, kind())
+ .WillRepeatedly(::testing::Return(VideoTrack::kVideoKind));
+ return track;
+}
+
class TrackMediaInfoMapTest : public ::testing::Test {
public:
TrackMediaInfoMapTest() : TrackMediaInfoMapTest(true) {}
- explicit TrackMediaInfoMapTest(bool use_current_thread)
+ explicit TrackMediaInfoMapTest(bool use_real_video_track)
: voice_media_info_(new cricket::VoiceMediaInfo()),
video_media_info_(new cricket::VideoMediaInfo()),
local_audio_track_(AudioTrack::Create("LocalAudioTrack", nullptr)),
remote_audio_track_(AudioTrack::Create("RemoteAudioTrack", nullptr)),
- local_video_track_(VideoTrack::Create(
- "LocalVideoTrack",
- FakeVideoTrackSource::Create(false),
- use_current_thread ? rtc::Thread::Current() : nullptr)),
- remote_video_track_(VideoTrack::Create(
- "RemoteVideoTrack",
- FakeVideoTrackSource::Create(false),
- use_current_thread ? rtc::Thread::Current() : nullptr)) {}
+ local_video_track_(use_real_video_track
+ ? CreateVideoTrack("LocalVideoTrack")
+ : CreateMockVideoTrack("LocalVideoTrack")),
+ remote_video_track_(use_real_video_track
+ ? CreateVideoTrack("RemoteVideoTrack")
+ : CreateMockVideoTrack("LocalVideoTrack")) {}
~TrackMediaInfoMapTest() {
// If we have a map the ownership has been passed to the map, only delete if
@@ -181,8 +230,8 @@ class TrackMediaInfoMapTest : public ::testing::Test {
std::unique_ptr<TrackMediaInfoMap> map_;
rtc::scoped_refptr<AudioTrack> local_audio_track_;
rtc::scoped_refptr<AudioTrack> remote_audio_track_;
- rtc::scoped_refptr<VideoTrack> local_video_track_;
- rtc::scoped_refptr<VideoTrack> remote_video_track_;
+ rtc::scoped_refptr<VideoTrackInterface> local_video_track_;
+ rtc::scoped_refptr<VideoTrackInterface> remote_video_track_;
};
} // namespace
diff --git a/pc/transport_stats.h b/pc/transport_stats.h
index 7cb95f4ad2..173af91fba 100644
--- a/pc/transport_stats.h
+++ b/pc/transport_stats.h
@@ -14,6 +14,7 @@
#include <string>
#include <vector>
+#include "api/dtls_transport_interface.h"
#include "p2p/base/dtls_transport_internal.h"
#include "p2p/base/ice_transport_internal.h"
#include "p2p/base/port.h"
@@ -30,7 +31,7 @@ struct TransportChannelStats {
int ssl_version_bytes = 0;
int srtp_crypto_suite = rtc::SRTP_INVALID_CRYPTO_SUITE;
int ssl_cipher_suite = rtc::TLS_NULL_WITH_NULL_NULL;
- DtlsTransportState dtls_state = DTLS_TRANSPORT_NEW;
+ webrtc::DtlsTransportState dtls_state = webrtc::DtlsTransportState::kNew;
IceTransportStats ice_transport_stats;
};
diff --git a/pc/used_ids.h b/pc/used_ids.h
index 78e64caa41..62b2faa018 100644
--- a/pc/used_ids.h
+++ b/pc/used_ids.h
@@ -60,7 +60,9 @@ class UsedIds {
}
protected:
- bool IsIdUsed(int new_id) { return id_set_.find(new_id) != id_set_.end(); }
+ virtual bool IsIdUsed(int new_id) {
+ return id_set_.find(new_id) != id_set_.end();
+ }
const int min_allowed_id_;
const int max_allowed_id_;
@@ -92,11 +94,24 @@ class UsedIds {
class UsedPayloadTypes : public UsedIds<Codec> {
public:
UsedPayloadTypes()
- : UsedIds<Codec>(kDynamicPayloadTypeMin, kDynamicPayloadTypeMax) {}
+ : UsedIds<Codec>(kFirstDynamicPayloadTypeLowerRange,
+ kLastDynamicPayloadTypeUpperRange) {}
+
+ protected:
+ bool IsIdUsed(int new_id) override {
+    // The range reserved to avoid collisions with RTCP packet types is "used".
+ if (new_id > kLastDynamicPayloadTypeLowerRange &&
+ new_id < kFirstDynamicPayloadTypeUpperRange)
+ return true;
+ return UsedIds<Codec>::IsIdUsed(new_id);
+ }
private:
- static const int kDynamicPayloadTypeMin = 96;
- static const int kDynamicPayloadTypeMax = 127;
+ static const int kFirstDynamicPayloadTypeLowerRange = 35;
+ static const int kLastDynamicPayloadTypeLowerRange = 63;
+
+ static const int kFirstDynamicPayloadTypeUpperRange = 96;
+ static const int kLastDynamicPayloadTypeUpperRange = 127;
};
// Helper class used for finding duplicate RTP Header extension ids among
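
UsedPayloadTypes now spans both dynamic payload type ranges and treats the gap between them as occupied. A standalone sketch of the resulting rule (the function name is illustrative, not part of the change):

    // Dynamic payload types may come from 35-63 or 96-127; 64-95 stays
    // reserved to avoid colliding with RTCP packet types.
    bool IsAssignableDynamicPayloadType(int pt) {
      const bool lower_range = pt >= 35 && pt <= 63;
      const bool upper_range = pt >= 96 && pt <= 127;
      return lower_range || upper_range;
    }
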
diff --git a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc
index 374770a935..8db4d9f02f 100644
--- a/pc/video_rtp_receiver.cc
+++ b/pc/video_rtp_receiver.cc
@@ -16,9 +16,7 @@
#include <vector>
#include "api/video/recordable_encoded_frame.h"
-#include "api/video_track_source_proxy.h"
-#include "pc/jitter_buffer_delay.h"
-#include "pc/jitter_buffer_delay_proxy.h"
+#include "api/video_track_source_proxy_factory.h"
#include "pc/video_track.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
@@ -39,121 +37,139 @@ VideoRtpReceiver::VideoRtpReceiver(
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams)
: worker_thread_(worker_thread),
id_(receiver_id),
- source_(new RefCountedObject<VideoRtpTrackSource>(this)),
+ source_(rtc::make_ref_counted<VideoRtpTrackSource>(&source_callback_)),
track_(VideoTrackProxyWithInternal<VideoTrack>::Create(
rtc::Thread::Current(),
worker_thread,
- VideoTrack::Create(
- receiver_id,
- VideoTrackSourceProxy::Create(rtc::Thread::Current(),
- worker_thread,
- source_),
- worker_thread))),
- attachment_id_(GenerateUniqueId()),
- delay_(JitterBufferDelayProxy::Create(
- rtc::Thread::Current(),
- worker_thread,
- new rtc::RefCountedObject<JitterBufferDelay>(worker_thread))) {
+ VideoTrack::Create(receiver_id,
+ CreateVideoTrackSourceProxy(rtc::Thread::Current(),
+ worker_thread,
+ source_),
+ worker_thread))),
+ attachment_id_(GenerateUniqueId()) {
RTC_DCHECK(worker_thread_);
SetStreams(streams);
- source_->SetState(MediaSourceInterface::kLive);
+ RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kLive);
}
VideoRtpReceiver::~VideoRtpReceiver() {
- // Since cricket::VideoRenderer is not reference counted,
- // we need to remove it from the channel before we are deleted.
- Stop();
- // Make sure we can't be called by the |source_| anymore.
- worker_thread_->Invoke<void>(RTC_FROM_HERE,
- [this] { source_->ClearCallback(); });
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK(stopped_);
+ RTC_DCHECK(!media_channel_);
}
std::vector<std::string> VideoRtpReceiver::stream_ids() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
std::vector<std::string> stream_ids(streams_.size());
for (size_t i = 0; i < streams_.size(); ++i)
stream_ids[i] = streams_[i]->id();
return stream_ids;
}
+rtc::scoped_refptr<DtlsTransportInterface> VideoRtpReceiver::dtls_transport()
+ const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return dtls_transport_;
+}
+
+std::vector<rtc::scoped_refptr<MediaStreamInterface>>
+VideoRtpReceiver::streams() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return streams_;
+}
+
RtpParameters VideoRtpReceiver::GetParameters() const {
- if (!media_channel_ || stopped_) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_)
return RtpParameters();
- }
- return worker_thread_->Invoke<RtpParameters>(RTC_FROM_HERE, [&] {
- return ssrc_ ? media_channel_->GetRtpReceiveParameters(*ssrc_)
- : media_channel_->GetDefaultRtpReceiveParameters();
- });
+ return ssrc_ ? media_channel_->GetRtpReceiveParameters(*ssrc_)
+ : media_channel_->GetDefaultRtpReceiveParameters();
}
void VideoRtpReceiver::SetFrameDecryptor(
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
frame_decryptor_ = std::move(frame_decryptor);
// Special Case: Set the frame decryptor to any value on any existing channel.
- if (media_channel_ && ssrc_.has_value() && !stopped_) {
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
- media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_);
- });
+ if (media_channel_ && ssrc_) {
+ media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_);
}
}
rtc::scoped_refptr<FrameDecryptorInterface>
VideoRtpReceiver::GetFrameDecryptor() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
return frame_decryptor_;
}
void VideoRtpReceiver::SetDepacketizerToDecoderFrameTransformer(
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
- RTC_DCHECK_RUN_ON(worker_thread_);
- frame_transformer_ = std::move(frame_transformer);
- if (media_channel_ && !stopped_) {
- media_channel_->SetDepacketizerToDecoderFrameTransformer(
- ssrc_.value_or(0), frame_transformer_);
- }
- });
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ frame_transformer_ = std::move(frame_transformer);
+ if (media_channel_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ ssrc_.value_or(0), frame_transformer_);
+ }
}
void VideoRtpReceiver::Stop() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
// TODO(deadbeef): Need to do more here to fully stop receiving packets.
- if (stopped_) {
- return;
+
+ if (!stopped_) {
+ source_->SetState(MediaSourceInterface::kEnded);
+ stopped_ = true;
}
- source_->SetState(MediaSourceInterface::kEnded);
- if (!media_channel_) {
- RTC_LOG(LS_WARNING) << "VideoRtpReceiver::Stop: No video channel exists.";
- } else {
- // Allow that SetSink fails. This is the normal case when the underlying
- // media channel has already been deleted.
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
- RTC_DCHECK_RUN_ON(worker_thread_);
+
+ worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (media_channel_) {
SetSink(nullptr);
- });
- }
- delay_->OnStop();
- stopped_ = true;
+ SetMediaChannel_w(nullptr);
+ }
+ source_->ClearCallback();
+ });
}
void VideoRtpReceiver::StopAndEndTrack() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
Stop();
track_->internal()->set_ended();
}
void VideoRtpReceiver::RestartMediaChannel(absl::optional<uint32_t> ssrc) {
- RTC_DCHECK(media_channel_);
- if (!stopped_ && ssrc_ == ssrc) {
- return;
- }
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+
+ // `stopped_` will be `true` on construction. RestartMediaChannel
+ // can in this case function like "ensure started" and flip `stopped_`
+ // to false.
+
+ // TODO(tommi): Can we restart the media channel without blocking?
+ bool ok = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&, was_stopped =
+ stopped_] {
RTC_DCHECK_RUN_ON(worker_thread_);
- if (!stopped_) {
+ if (!media_channel_) {
+ // Ignore further negotiations if we've already been stopped and don't
+ // have an associated media channel.
+ RTC_DCHECK(was_stopped);
+ return false; // Can't restart.
+ }
+
+ if (!was_stopped && ssrc_ == ssrc) {
+ // Already running with that ssrc.
+ return true;
+ }
+
+ // Disconnect from the previous ssrc.
+ if (!was_stopped) {
SetSink(nullptr);
}
+
bool encoded_sink_enabled = saved_encoded_sink_enabled_;
SetEncodedSinkEnabled(false);
- stopped_ = false;
-
- ssrc_ = ssrc;
+ // Set up the new ssrc.
+ ssrc_ = std::move(ssrc);
SetSink(source_->sink());
if (encoded_sink_enabled) {
SetEncodedSinkEnabled(true);
@@ -163,47 +179,62 @@ void VideoRtpReceiver::RestartMediaChannel(absl::optional<uint32_t> ssrc) {
media_channel_->SetDepacketizerToDecoderFrameTransformer(
ssrc_.value_or(0), frame_transformer_);
}
+
+ if (media_channel_ && ssrc_) {
+ if (frame_decryptor_) {
+ media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_);
+ }
+
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs());
+ }
+
+ return true;
});
- // Attach any existing frame decryptor to the media channel.
- MaybeAttachFrameDecryptorToMediaChannel(
- ssrc, worker_thread_, frame_decryptor_, media_channel_, stopped_);
- // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
- // value.
- delay_->OnStart(media_channel_, ssrc.value_or(0));
+ if (!ok)
+ return;
+
+ stopped_ = false;
}
+// RTC_RUN_ON(worker_thread_)
void VideoRtpReceiver::SetSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
- RTC_DCHECK(media_channel_);
if (ssrc_) {
media_channel_->SetSink(*ssrc_, sink);
- return;
+ } else {
+ media_channel_->SetDefaultSink(sink);
}
- media_channel_->SetDefaultSink(sink);
}
void VideoRtpReceiver::SetupMediaChannel(uint32_t ssrc) {
- if (!media_channel_) {
- RTC_LOG(LS_ERROR)
- << "VideoRtpReceiver::SetupMediaChannel: No video channel exists.";
- }
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
RestartMediaChannel(ssrc);
}
void VideoRtpReceiver::SetupUnsignaledMediaChannel() {
- if (!media_channel_) {
- RTC_LOG(LS_ERROR) << "VideoRtpReceiver::SetupUnsignaledMediaChannel: No "
- "video channel exists.";
- }
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
RestartMediaChannel(absl::nullopt);
}
+uint32_t VideoRtpReceiver::ssrc() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return ssrc_.value_or(0);
+}
+
void VideoRtpReceiver::set_stream_ids(std::vector<std::string> stream_ids) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
SetStreams(CreateStreamsFromIds(std::move(stream_ids)));
}
+void VideoRtpReceiver::set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ dtls_transport_ = std::move(dtls_transport);
+}
+
void VideoRtpReceiver::SetStreams(
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
// Remove remote track from any streams that are going away.
for (const auto& existing_stream : streams_) {
bool removed = true;
@@ -236,6 +267,7 @@ void VideoRtpReceiver::SetStreams(
}
void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
observer_ = observer;
// Deliver any notifications the observer may have missed by being set late.
if (received_first_packet_ && observer_) {
@@ -245,40 +277,57 @@ void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
void VideoRtpReceiver::SetJitterBufferMinimumDelay(
absl::optional<double> delay_seconds) {
- delay_->Set(delay_seconds);
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ delay_.Set(delay_seconds);
+ if (media_channel_ && ssrc_)
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs());
}
void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
RTC_DCHECK(media_channel == nullptr ||
media_channel->media_type() == media_type());
+
+ if (stopped_ && !media_channel)
+ return;
+
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
RTC_DCHECK_RUN_ON(worker_thread_);
- bool encoded_sink_enabled = saved_encoded_sink_enabled_;
- if (encoded_sink_enabled && media_channel_) {
- // Turn off the old sink, if any.
- SetEncodedSinkEnabled(false);
- }
+ SetMediaChannel_w(media_channel);
+ });
+}
- media_channel_ = static_cast<cricket::VideoMediaChannel*>(media_channel);
+// RTC_RUN_ON(worker_thread_)
+void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) {
+ if (media_channel == media_channel_)
+ return;
- if (media_channel_) {
- if (saved_generate_keyframe_) {
- // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
- media_channel_->GenerateKeyFrame(ssrc_.value_or(0));
- saved_generate_keyframe_ = false;
- }
- if (encoded_sink_enabled) {
- SetEncodedSinkEnabled(true);
- }
- if (frame_transformer_) {
- media_channel_->SetDepacketizerToDecoderFrameTransformer(
- ssrc_.value_or(0), frame_transformer_);
- }
+ bool encoded_sink_enabled = saved_encoded_sink_enabled_;
+ if (encoded_sink_enabled && media_channel_) {
+ // Turn off the old sink, if any.
+ SetEncodedSinkEnabled(false);
+ }
+
+ media_channel_ = static_cast<cricket::VideoMediaChannel*>(media_channel);
+
+ if (media_channel_) {
+ if (saved_generate_keyframe_) {
+ // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
+ media_channel_->GenerateKeyFrame(ssrc_.value_or(0));
+ saved_generate_keyframe_ = false;
}
- });
+ if (encoded_sink_enabled) {
+ SetEncodedSinkEnabled(true);
+ }
+ if (frame_transformer_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ ssrc_.value_or(0), frame_transformer_);
+ }
+ }
}
void VideoRtpReceiver::NotifyFirstPacketReceived() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
if (observer_) {
observer_->OnFirstPacketReceived(media_type());
}
@@ -286,11 +335,10 @@ void VideoRtpReceiver::NotifyFirstPacketReceived() {
}
std::vector<RtpSource> VideoRtpReceiver::GetSources() const {
- if (!media_channel_ || !ssrc_ || stopped_) {
- return {};
- }
- return worker_thread_->Invoke<std::vector<RtpSource>>(
- RTC_FROM_HERE, [&] { return media_channel_->GetSources(*ssrc_); });
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!ssrc_ || !media_channel_)
+ return std::vector<RtpSource>();
+ return media_channel_->GetSources(*ssrc_);
}
void VideoRtpReceiver::OnGenerateKeyFrame() {
@@ -316,20 +364,21 @@ void VideoRtpReceiver::OnEncodedSinkEnabled(bool enable) {
saved_encoded_sink_enabled_ = enable;
}
+// RTC_RUN_ON(worker_thread_)
void VideoRtpReceiver::SetEncodedSinkEnabled(bool enable) {
- if (media_channel_) {
- if (enable) {
- // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
- auto source = source_;
- media_channel_->SetRecordableEncodedFrameCallback(
- ssrc_.value_or(0),
- [source = std::move(source)](const RecordableEncodedFrame& frame) {
- source->BroadcastRecordableEncodedFrame(frame);
- });
- } else {
- // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
- media_channel_->ClearRecordableEncodedFrameCallback(ssrc_.value_or(0));
- }
+ if (!media_channel_)
+ return;
+
+ // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
+ const auto ssrc = ssrc_.value_or(0);
+
+ if (enable) {
+ media_channel_->SetRecordableEncodedFrameCallback(
+ ssrc, [source = source_](const RecordableEncodedFrame& frame) {
+ source->BroadcastRecordableEncodedFrame(frame);
+ });
+ } else {
+ media_channel_->ClearRecordableEncodedFrameCallback(ssrc);
}
}
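
VideoRtpReceiver now keeps signaling-thread state (`stopped_`, `streams_`, `observer_`) separate from worker-thread state (`media_channel_`, `ssrc_`, the frame decryptor/transformer), asserting the calling thread with `RTC_DCHECK_RUN_ON` and hopping with a blocking `Invoke` where needed. A stripped-down sketch of that split (the class and member names below are illustrative only):

    #include "api/sequence_checker.h"
    #include "rtc_base/location.h"
    #include "rtc_base/system/no_unique_address.h"
    #include "rtc_base/thread.h"
    #include "rtc_base/thread_annotations.h"

    // Illustrative class only; these names do not exist in the tree.
    class ThreadSplitExample {
     public:
      explicit ThreadSplitExample(rtc::Thread* worker_thread)
          : worker_thread_(worker_thread) {}

      // Signaling-thread entry point; hops to the worker thread to touch the
      // worker-owned state, mirroring VideoRtpReceiver::SetMediaChannel().
      void SetValue(int value) {
        RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
        worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
          RTC_DCHECK_RUN_ON(worker_thread_);
          value_ = value;
        });
      }

     private:
      RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_checker_;
      rtc::Thread* const worker_thread_;
      int value_ RTC_GUARDED_BY(worker_thread_) = 0;
    };
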
diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h
index 8e36af6dfa..f59db7a840 100644
--- a/pc/video_rtp_receiver.h
+++ b/pc/video_rtp_receiver.h
@@ -21,7 +21,6 @@
#include "api/dtls_transport_interface.h"
#include "api/frame_transformer_interface.h"
#include "api/media_stream_interface.h"
-#include "api/media_stream_track_proxy.h"
#include "api/media_types.h"
#include "api/rtp_parameters.h"
#include "api/rtp_receiver_interface.h"
@@ -32,18 +31,19 @@
#include "api/video/video_sink_interface.h"
#include "api/video/video_source_interface.h"
#include "media/base/media_channel.h"
-#include "pc/jitter_buffer_delay_interface.h"
+#include "pc/jitter_buffer_delay.h"
+#include "pc/media_stream_track_proxy.h"
#include "pc/rtp_receiver.h"
#include "pc/video_rtp_track_source.h"
#include "pc/video_track.h"
#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
-class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
- public VideoRtpTrackSource::Callback {
+class VideoRtpReceiver : public RtpReceiverInternal {
public:
// An SSRC of 0 will create a receiver that will match the first SSRC it
// sees. Must be called on signaling thread.
@@ -59,23 +59,16 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
virtual ~VideoRtpReceiver();
- rtc::scoped_refptr<VideoTrackInterface> video_track() const {
- return track_.get();
- }
+ rtc::scoped_refptr<VideoTrackInterface> video_track() const { return track_; }
// RtpReceiverInterface implementation
rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
- return track_.get();
- }
- rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override {
- return dtls_transport_;
+ return track_;
}
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override;
std::vector<std::string> stream_ids() const override;
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams()
- const override {
- return streams_;
- }
-
+ const override;
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_VIDEO;
}
@@ -98,13 +91,11 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
void StopAndEndTrack() override;
void SetupMediaChannel(uint32_t ssrc) override;
void SetupUnsignaledMediaChannel() override;
- uint32_t ssrc() const override { return ssrc_.value_or(0); }
+ uint32_t ssrc() const override;
void NotifyFirstPacketReceived() override;
void set_stream_ids(std::vector<std::string> stream_ids) override;
void set_transport(
- rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override {
- dtls_transport_ = dtls_transport;
- }
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override;
void SetStreams(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
streams) override;
@@ -123,33 +114,68 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
void RestartMediaChannel(absl::optional<uint32_t> ssrc);
void SetSink(rtc::VideoSinkInterface<VideoFrame>* sink)
RTC_RUN_ON(worker_thread_);
+ void SetMediaChannel_w(cricket::MediaChannel* media_channel)
+ RTC_RUN_ON(worker_thread_);
// VideoRtpTrackSource::Callback
- void OnGenerateKeyFrame() override;
- void OnEncodedSinkEnabled(bool enable) override;
+ void OnGenerateKeyFrame();
+ void OnEncodedSinkEnabled(bool enable);
+
void SetEncodedSinkEnabled(bool enable) RTC_RUN_ON(worker_thread_);
+ class SourceCallback : public VideoRtpTrackSource::Callback {
+ public:
+ explicit SourceCallback(VideoRtpReceiver* receiver) : receiver_(receiver) {}
+ ~SourceCallback() override = default;
+
+ private:
+ void OnGenerateKeyFrame() override { receiver_->OnGenerateKeyFrame(); }
+ void OnEncodedSinkEnabled(bool enable) override {
+ receiver_->OnEncodedSinkEnabled(enable);
+ }
+
+ VideoRtpReceiver* const receiver_;
+ } source_callback_{this};
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_;
rtc::Thread* const worker_thread_;
const std::string id_;
- cricket::VideoMediaChannel* media_channel_ = nullptr;
- absl::optional<uint32_t> ssrc_;
+ // See documentation for `stopped_` below for when a valid media channel
+ // has been assigned and when this pointer will be null.
+ cricket::VideoMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) =
+ nullptr;
+ absl::optional<uint32_t> ssrc_ RTC_GUARDED_BY(worker_thread_);
// |source_| is held here to be able to change the state of the source when
// the VideoRtpReceiver is stopped.
- rtc::scoped_refptr<VideoRtpTrackSource> source_;
- rtc::scoped_refptr<VideoTrackProxyWithInternal<VideoTrack>> track_;
- std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_;
- bool stopped_ = true;
- RtpReceiverObserverInterface* observer_ = nullptr;
- bool received_first_packet_ = false;
- int attachment_id_ = 0;
- rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_;
- rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_;
+ const rtc::scoped_refptr<VideoRtpTrackSource> source_;
+ const rtc::scoped_refptr<VideoTrackProxyWithInternal<VideoTrack>> track_;
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
+  // `stopped_` is state that's used on the signaling thread to indicate whether
+ // a valid `media_channel_` has been assigned and configured. When an instance
+ // of VideoRtpReceiver is initially created, `stopped_` is true and will
+ // remain true until either `SetupMediaChannel` or
+ // `SetupUnsignaledMediaChannel` is called after assigning a media channel.
+ // After that, `stopped_` will remain false until `Stop()` is called.
+  // Note: to check the state of the class on the worker thread,
+ // check `media_channel_` instead, as that's the main worker thread state.
+ bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true;
+ RtpReceiverObserverInterface* observer_
+ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr;
+ bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) =
+ false;
+ const int attachment_id_;
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_
+ RTC_GUARDED_BY(worker_thread_);
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
RTC_GUARDED_BY(worker_thread_);
- // Allows to thread safely change jitter buffer delay. Handles caching cases
+ // Stores the minimum jitter buffer delay. Handles caching cases
// if |SetJitterBufferMinimumDelay| is called before start.
- rtc::scoped_refptr<JitterBufferDelayInterface> delay_;
+ JitterBufferDelay delay_ RTC_GUARDED_BY(worker_thread_);
+
// Records if we should generate a keyframe when |media_channel_| gets set up
// or switched.
bool saved_generate_keyframe_ RTC_GUARDED_BY(worker_thread_) = false;
diff --git a/pc/video_rtp_receiver_unittest.cc b/pc/video_rtp_receiver_unittest.cc
index b3eb6e6e35..3a8099d30f 100644
--- a/pc/video_rtp_receiver_unittest.cc
+++ b/pc/video_rtp_receiver_unittest.cc
@@ -17,8 +17,10 @@
#include "test/gmock.h"
using ::testing::_;
+using ::testing::AnyNumber;
using ::testing::InSequence;
using ::testing::Mock;
+using ::testing::NiceMock;
using ::testing::SaveArg;
using ::testing::StrictMock;
@@ -29,9 +31,11 @@ class VideoRtpReceiverTest : public testing::Test {
protected:
class MockVideoMediaChannel : public cricket::FakeVideoMediaChannel {
public:
- MockVideoMediaChannel(cricket::FakeVideoEngine* engine,
- const cricket::VideoOptions& options)
- : FakeVideoMediaChannel(engine, options) {}
+ MockVideoMediaChannel(
+ cricket::FakeVideoEngine* engine,
+ const cricket::VideoOptions& options,
+ TaskQueueBase* network_thread = rtc::Thread::Current())
+ : FakeVideoMediaChannel(engine, options, network_thread) {}
MOCK_METHOD(void,
SetRecordableEncodedFrameCallback,
(uint32_t, std::function<void(const RecordableEncodedFrame&)>),
@@ -51,19 +55,26 @@ class VideoRtpReceiverTest : public testing::Test {
VideoRtpReceiverTest()
: worker_thread_(rtc::Thread::Create()),
channel_(nullptr, cricket::VideoOptions()),
- receiver_(new VideoRtpReceiver(worker_thread_.get(),
- "receiver",
- {"stream"})) {
+ receiver_(rtc::make_ref_counted<VideoRtpReceiver>(
+ worker_thread_.get(),
+ std::string("receiver"),
+ std::vector<std::string>({"stream"}))) {
worker_thread_->Start();
receiver_->SetMediaChannel(&channel_);
}
+ ~VideoRtpReceiverTest() override {
+ // Clear expectations that tests may have set up before calling Stop().
+ Mock::VerifyAndClearExpectations(&channel_);
+ receiver_->Stop();
+ }
+
webrtc::VideoTrackSourceInterface* Source() {
return receiver_->streams()[0]->FindVideoTrack("receiver")->GetSource();
}
std::unique_ptr<rtc::Thread> worker_thread_;
- MockVideoMediaChannel channel_;
+ NiceMock<MockVideoMediaChannel> channel_;
rtc::scoped_refptr<VideoRtpReceiver> receiver_;
};
@@ -96,6 +107,10 @@ TEST_F(VideoRtpReceiverTest,
// Switching to a new channel should now not cause calls to GenerateKeyFrame.
StrictMock<MockVideoMediaChannel> channel4(nullptr, cricket::VideoOptions());
receiver_->SetMediaChannel(&channel4);
+
+ // We must call Stop() here since the mock media channels live on the stack
+ // and `receiver_` still has a pointer to those objects.
+ receiver_->Stop();
}
TEST_F(VideoRtpReceiverTest, EnablesEncodedOutput) {
@@ -129,6 +144,10 @@ TEST_F(VideoRtpReceiverTest, DisablesEnablesEncodedOutputOnChannelSwitch) {
Source()->RemoveEncodedSink(&sink);
StrictMock<MockVideoMediaChannel> channel3(nullptr, cricket::VideoOptions());
receiver_->SetMediaChannel(&channel3);
+
+ // We must call Stop() here since the mock media channels live on the stack
+ // and `receiver_` still has a pointer to those objects.
+ receiver_->Stop();
}
TEST_F(VideoRtpReceiverTest, BroadcastsEncodedFramesWhenEnabled) {
diff --git a/pc/video_rtp_track_source_unittest.cc b/pc/video_rtp_track_source_unittest.cc
index ea1b4cacf8..5666b77d5f 100644
--- a/pc/video_rtp_track_source_unittest.cc
+++ b/pc/video_rtp_track_source_unittest.cc
@@ -30,9 +30,7 @@ class MockSink : public rtc::VideoSinkInterface<RecordableEncodedFrame> {
rtc::scoped_refptr<VideoRtpTrackSource> MakeSource(
VideoRtpTrackSource::Callback* callback) {
- rtc::scoped_refptr<VideoRtpTrackSource> source(
- new rtc::RefCountedObject<VideoRtpTrackSource>(callback));
- return source;
+ return rtc::make_ref_counted<VideoRtpTrackSource>(callback);
}
TEST(VideoRtpTrackSourceTest, CreatesWithRemoteAtttributeSet) {
diff --git a/pc/video_track.cc b/pc/video_track.cc
index d67d4f6cd2..d0246faa87 100644
--- a/pc/video_track.cc
+++ b/pc/video_track.cc
@@ -11,6 +11,7 @@
#include "pc/video_track.h"
#include <string>
+#include <utility>
#include <vector>
#include "api/notifier.h"
@@ -28,10 +29,16 @@ VideoTrack::VideoTrack(const std::string& label,
worker_thread_(worker_thread),
video_source_(video_source),
content_hint_(ContentHint::kNone) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_);
+ // Detach the thread checker for VideoSourceBaseGuarded since we'll make calls
+ // to VideoSourceBaseGuarded on the worker thread, but we're currently on the
+ // signaling thread.
+ source_sequence_.Detach();
video_source_->RegisterObserver(this);
}
VideoTrack::~VideoTrack() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_);
video_source_->UnregisterObserver(this);
}
@@ -43,26 +50,31 @@ std::string VideoTrack::kind() const {
// thread.
void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
- RTC_DCHECK(worker_thread_->IsCurrent());
- VideoSourceBase::AddOrUpdateSink(sink, wants);
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ VideoSourceBaseGuarded::AddOrUpdateSink(sink, wants);
rtc::VideoSinkWants modified_wants = wants;
modified_wants.black_frames = !enabled();
video_source_->AddOrUpdateSink(sink, modified_wants);
}
void VideoTrack::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
- RTC_DCHECK(worker_thread_->IsCurrent());
- VideoSourceBase::RemoveSink(sink);
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ VideoSourceBaseGuarded::RemoveSink(sink);
video_source_->RemoveSink(sink);
}
+VideoTrackSourceInterface* VideoTrack::GetSource() const {
+ // Callable from any thread.
+ return video_source_.get();
+}
+
VideoTrackInterface::ContentHint VideoTrack::content_hint() const {
- RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
return content_hint_;
}
void VideoTrack::set_content_hint(ContentHint hint) {
- RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
if (content_hint_ == hint)
return;
content_hint_ = hint;
@@ -70,34 +82,43 @@ void VideoTrack::set_content_hint(ContentHint hint) {
}
bool VideoTrack::set_enabled(bool enable) {
- RTC_DCHECK(signaling_thread_checker_.IsCurrent());
- worker_thread_->Invoke<void>(RTC_FROM_HERE, [enable, this] {
- RTC_DCHECK(worker_thread_->IsCurrent());
- for (auto& sink_pair : sink_pairs()) {
- rtc::VideoSinkWants modified_wants = sink_pair.wants;
- modified_wants.black_frames = !enable;
- video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants);
- }
- });
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ for (auto& sink_pair : sink_pairs()) {
+ rtc::VideoSinkWants modified_wants = sink_pair.wants;
+ modified_wants.black_frames = !enable;
+ video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants);
+ }
return MediaStreamTrack<VideoTrackInterface>::set_enabled(enable);
}
+bool VideoTrack::enabled() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return MediaStreamTrack<VideoTrackInterface>::enabled();
+}
+
+MediaStreamTrackInterface::TrackState VideoTrack::state() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return MediaStreamTrack<VideoTrackInterface>::state();
+}
+
void VideoTrack::OnChanged() {
- RTC_DCHECK(signaling_thread_checker_.IsCurrent());
- if (video_source_->state() == MediaSourceInterface::kEnded) {
- set_state(kEnded);
- } else {
- set_state(kLive);
- }
+ RTC_DCHECK_RUN_ON(&signaling_thread_);
+ worker_thread_->Invoke<void>(
+ RTC_FROM_HERE, [this, state = video_source_->state()]() {
+ // TODO(tommi): Calling set_state() this way isn't ideal since we're
+ // currently blocking the signaling thread and set_state() may
+ // internally fire notifications via `FireOnChanged()` which may further
+ // amplify the blocking effect on the signaling thread.
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+ set_state(state == MediaSourceInterface::kEnded ? kEnded : kLive);
+ });
}
rtc::scoped_refptr<VideoTrack> VideoTrack::Create(
const std::string& id,
VideoTrackSourceInterface* source,
rtc::Thread* worker_thread) {
- rtc::RefCountedObject<VideoTrack>* track =
- new rtc::RefCountedObject<VideoTrack>(id, source, worker_thread);
- return track;
+ return rtc::make_ref_counted<VideoTrack>(id, source, worker_thread);
}
} // namespace webrtc
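
With VideoTrack now built on VideoSourceBaseGuarded, `enabled()`, `state()`, `content_hint()` and the sink methods are checked against the worker thread, so a signaling-thread caller that bypasses the proxy has to hop threads itself. A hypothetical call site (`track` and `worker_thread` are assumed to exist):

    // Sketch only: `track` was created with
    // VideoTrack::Create(id, source, worker_thread).
    bool ReadEnabledFromSignalingThread(rtc::Thread* worker_thread,
                                        webrtc::VideoTrack* track) {
      return worker_thread->Invoke<bool>(RTC_FROM_HERE,
                                         [&] { return track->enabled(); });
    }
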
diff --git a/pc/video_track.h b/pc/video_track.h
index bff63fcb96..e840c8097f 100644
--- a/pc/video_track.h
+++ b/pc/video_track.h
@@ -27,7 +27,7 @@
namespace webrtc {
class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
- public rtc::VideoSourceBase,
+ public rtc::VideoSourceBaseGuarded,
public ObserverInterface {
public:
static rtc::scoped_refptr<VideoTrack> Create(
@@ -38,13 +38,13 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+ VideoTrackSourceInterface* GetSource() const override;
- VideoTrackSourceInterface* GetSource() const override {
- return video_source_.get();
- }
ContentHint content_hint() const override;
void set_content_hint(ContentHint hint) override;
bool set_enabled(bool enable) override;
+ bool enabled() const override;
+ MediaStreamTrackInterface::TrackState state() const override;
std::string kind() const override;
protected:
@@ -57,10 +57,10 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
// Implements ObserverInterface. Observes |video_source_| state.
void OnChanged() override;
+ RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_;
rtc::Thread* const worker_thread_;
- SequenceChecker signaling_thread_checker_;
- rtc::scoped_refptr<VideoTrackSourceInterface> video_source_;
- ContentHint content_hint_ RTC_GUARDED_BY(signaling_thread_checker_);
+ const rtc::scoped_refptr<VideoTrackSourceInterface> video_source_;
+ ContentHint content_hint_ RTC_GUARDED_BY(worker_thread_);
};
} // namespace webrtc
diff --git a/pc/video_track_source.cc b/pc/video_track_source.cc
index f45d44aa32..d15eaaf43c 100644
--- a/pc/video_track_source.cc
+++ b/pc/video_track_source.cc
@@ -15,7 +15,7 @@
namespace webrtc {
VideoTrackSource::VideoTrackSource(bool remote)
- : state_(kInitializing), remote_(remote) {
+ : state_(kLive), remote_(remote) {
worker_thread_checker_.Detach();
}
diff --git a/pc/video_track_source_proxy.cc b/pc/video_track_source_proxy.cc
new file mode 100644
index 0000000000..309c1f20f8
--- /dev/null
+++ b/pc/video_track_source_proxy.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_track_source_proxy.h"
+
+#include "api/media_stream_interface.h"
+#include "api/video_track_source_proxy_factory.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoTrackSourceInterface> CreateVideoTrackSourceProxy(
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ VideoTrackSourceInterface* source) {
+ return VideoTrackSourceProxy::Create(signaling_thread, worker_thread, source);
+}
+
+} // namespace webrtc
diff --git a/api/video_track_source_proxy.h b/pc/video_track_source_proxy.h
index 0b60d20de5..8914dd0525 100644
--- a/api/video_track_source_proxy.h
+++ b/pc/video_track_source_proxy.h
@@ -8,18 +8,18 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef API_VIDEO_TRACK_SOURCE_PROXY_H_
-#define API_VIDEO_TRACK_SOURCE_PROXY_H_
+#ifndef PC_VIDEO_TRACK_SOURCE_PROXY_H_
+#define PC_VIDEO_TRACK_SOURCE_PROXY_H_
#include "api/media_stream_interface.h"
-#include "api/proxy.h"
+#include "pc/proxy.h"
namespace webrtc {
// Makes sure the real VideoTrackSourceInterface implementation is destroyed on
// the signaling thread and marshals all method calls to the signaling thread.
-// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
-// are called on is an implementation detail.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
BEGIN_PROXY_MAP(VideoTrackSource)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
PROXY_CONSTMETHOD0(SourceState, state)
@@ -42,8 +42,8 @@ PROXY_SECONDARY_METHOD1(void,
PROXY_SECONDARY_METHOD1(void,
RemoveEncodedSink,
rtc::VideoSinkInterface<RecordableEncodedFrame>*)
-END_PROXY_MAP()
+END_PROXY_MAP(VideoTrackSource)
} // namespace webrtc
-#endif // API_VIDEO_TRACK_SOURCE_PROXY_H_
+#endif // PC_VIDEO_TRACK_SOURCE_PROXY_H_
diff --git a/pc/video_track_unittest.cc b/pc/video_track_unittest.cc
index f86bec8321..ab094ec487 100644
--- a/pc/video_track_unittest.cc
+++ b/pc/video_track_unittest.cc
@@ -32,7 +32,7 @@ class VideoTrackTest : public ::testing::Test {
public:
VideoTrackTest() : frame_source_(640, 480, rtc::kNumMicrosecsPerSec / 30) {
static const char kVideoTrackId[] = "track_id";
- video_track_source_ = new rtc::RefCountedObject<FakeVideoTrackSource>(
+ video_track_source_ = rtc::make_ref_counted<FakeVideoTrackSource>(
/*is_screencast=*/false);
video_track_ = VideoTrack::Create(kVideoTrackId, video_track_source_,
rtc::Thread::Current());
diff --git a/pc/webrtc_sdp.cc b/pc/webrtc_sdp.cc
index 58dcc4f606..379b2f30c2 100644
--- a/pc/webrtc_sdp.cc
+++ b/pc/webrtc_sdp.cc
@@ -900,11 +900,11 @@ std::string SdpSerialize(const JsepSessionDescription& jdesc) {
// Time Description.
AddLine(kTimeDescription, &message);
- // Group
- if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) {
+ // BUNDLE Groups
+ std::vector<const cricket::ContentGroup*> groups =
+ desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ for (const cricket::ContentGroup* group : groups) {
std::string group_line = kAttrGroup;
- const cricket::ContentGroup* group =
- desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
RTC_DCHECK(group != NULL);
for (const std::string& content_name : group->content_names()) {
group_line.append(" ");
@@ -3049,21 +3049,6 @@ bool ParseContent(const std::string& message,
return ParseFailed(
line, "b=" + bandwidth_type + " value can't be negative.", error);
}
- // We should never use more than the default bandwidth for RTP-based
- // data channels. Don't allow SDP to set the bandwidth, because
- // that would give JS the opportunity to "break the Internet".
- // See: https://code.google.com/p/chromium/issues/detail?id=280726
- // Disallow TIAS since it shouldn't be generated for RTP data channels in
- // the first place and provides another way to get around the limitation.
- if (media_type == cricket::MEDIA_TYPE_DATA &&
- cricket::IsRtpProtocol(protocol) &&
- (b > cricket::kRtpDataMaxBandwidth / 1000 ||
- bandwidth_type == kTransportSpecificBandwidth)) {
- rtc::StringBuilder description;
- description << "RTP-based data channels may not send more than "
- << cricket::kRtpDataMaxBandwidth / 1000 << "kbps.";
- return ParseFailed(line, description.str(), error);
- }
// Convert values. Prevent integer overflow.
if (bandwidth_type == kApplicationSpecificBandwidth) {
b = std::min(b, INT_MAX / 1000) * 1000;
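With the loop above, SdpSerialize now emits one a=group line per BUNDLE group
instead of only the first one, so a description carrying two groups serializes
roughly as:

    a=group:BUNDLE audio_content_name video_content_name
    a=group:BUNDLE audio_content_name_2

(these are the content names the updated SerializeSessionDescriptionWithBundles
test below checks for).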
diff --git a/pc/webrtc_sdp_unittest.cc b/pc/webrtc_sdp_unittest.cc
index a4f75e262b..266fd3dfd6 100644
--- a/pc/webrtc_sdp_unittest.cc
+++ b/pc/webrtc_sdp_unittest.cc
@@ -56,7 +56,6 @@ using cricket::Candidate;
using cricket::ContentGroup;
using cricket::ContentInfo;
using cricket::CryptoParams;
-using cricket::DataCodec;
using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
using cricket::ICE_CANDIDATE_COMPONENT_RTP;
using cricket::kFecSsrcGroupSemantics;
@@ -1907,7 +1906,8 @@ class WebRtcSdpTest : public ::testing::Test {
os.clear();
os.str("");
// Pl type 100 preferred.
- os << "m=video 9 RTP/SAVPF 99 95\r\n"
+ os << "m=video 9 RTP/SAVPF 99 95 96\r\n"
+ "a=rtpmap:96 VP9/90000\r\n" // out-of-order wrt the m= line.
"a=rtpmap:99 VP8/90000\r\n"
"a=rtpmap:95 RTX/90000\r\n"
"a=fmtp:95 apt=99;\r\n";
@@ -1955,6 +1955,10 @@ class WebRtcSdpTest : public ::testing::Test {
EXPECT_EQ("RTX", rtx.name);
EXPECT_EQ(95, rtx.id);
VerifyCodecParameter(rtx.params, "apt", vp8.id);
+ // VP9 is listed last in the m= line so should come after VP8 and RTX.
+ cricket::VideoCodec vp9 = vcd->codecs()[2];
+ EXPECT_EQ("VP9", vp9.name);
+ EXPECT_EQ(96, vp9.id);
}
void TestDeserializeRtcpFb(JsepSessionDescription* jdesc_output,
@@ -2120,17 +2124,21 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithoutCandidates) {
EXPECT_EQ(std::string(kSdpString), message);
}
-TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundle) {
- ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
- group.AddContentName(kAudioContentName);
- group.AddContentName(kVideoContentName);
- desc_.AddGroup(group);
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundles) {
+ ContentGroup group1(cricket::GROUP_TYPE_BUNDLE);
+ group1.AddContentName(kAudioContentName);
+ group1.AddContentName(kVideoContentName);
+ desc_.AddGroup(group1);
+ ContentGroup group2(cricket::GROUP_TYPE_BUNDLE);
+ group2.AddContentName(kAudioContentName2);
+ desc_.AddGroup(group2);
ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
jdesc_.session_version()));
std::string message = webrtc::SdpSerialize(jdesc_);
std::string sdp_with_bundle = kSdpFullString;
InjectAfter(kSessionTime,
- "a=group:BUNDLE audio_content_name video_content_name\r\n",
+ "a=group:BUNDLE audio_content_name video_content_name\r\n"
+ "a=group:BUNDLE audio_content_name_2\r\n",
&sdp_with_bundle);
EXPECT_EQ(sdp_with_bundle, message);
}
diff --git a/pc/webrtc_session_description_factory.cc b/pc/webrtc_session_description_factory.cc
index 348016d2d6..33826347ff 100644
--- a/pc/webrtc_session_description_factory.cc
+++ b/pc/webrtc_session_description_factory.cc
@@ -174,8 +174,7 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
// Generate certificate.
certificate_request_state_ = CERTIFICATE_WAITING;
- rtc::scoped_refptr<WebRtcCertificateGeneratorCallback> callback(
- new rtc::RefCountedObject<WebRtcCertificateGeneratorCallback>());
+ auto callback = rtc::make_ref_counted<WebRtcCertificateGeneratorCallback>();
callback->SignalRequestFailed.connect(
this, &WebRtcSessionDescriptionFactory::OnCertificateRequestFailed);
callback->SignalCertificateReady.connect(
diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn
index 0fdf534dd5..501ca01541 100644
--- a/rtc_base/BUILD.gn
+++ b/rtc_base/BUILD.gn
@@ -101,6 +101,7 @@ rtc_library("rtc_base_approved") {
"copy_on_write_buffer.h",
"event_tracer.cc",
"event_tracer.h",
+ "hash.h",
"location.cc",
"location.h",
"numerics/histogram_percentile_counter.cc",
@@ -198,7 +199,10 @@ rtc_source_set("refcount") {
"ref_counted_object.h",
"ref_counter.h",
]
- deps = [ ":macromagic" ]
+ deps = [
+ ":macromagic",
+ "../api:scoped_refptr",
+ ]
}
rtc_library("criticalsection") {
@@ -238,7 +242,11 @@ rtc_library("platform_thread") {
":timeutils",
"../api:sequence_checker",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_library("rtc_event") {
@@ -554,7 +562,10 @@ if (is_win) {
"../api/task_queue",
"synchronization:mutex",
]
- absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
}
@@ -792,6 +803,7 @@ rtc_library("threading") {
":socket_server",
":timeutils",
"../api:function_view",
+ "../api:refcountedbase",
"../api:scoped_refptr",
"../api:sequence_checker",
"../api/task_queue",
@@ -916,6 +928,7 @@ rtc_library("rtc_base") {
":threading",
"../api:array_view",
"../api:function_view",
+ "../api:refcountedbase",
"../api:scoped_refptr",
"../api:sequence_checker",
"../api/numerics",
@@ -1332,6 +1345,7 @@ if (rtc_include_tests) {
"deprecated/recursive_critical_section_unittest.cc",
"event_tracer_unittest.cc",
"event_unittest.cc",
+ "hash_unittest.cc",
"logging_unittest.cc",
"numerics/divide_round_unittest.cc",
"numerics/histogram_percentile_counter_unittest.cc",
@@ -1403,6 +1417,7 @@ if (rtc_include_tests) {
absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
diff --git a/rtc_base/async_invoker.cc b/rtc_base/async_invoker.cc
index 995f443f73..87d039373d 100644
--- a/rtc_base/async_invoker.cc
+++ b/rtc_base/async_invoker.cc
@@ -15,12 +15,12 @@
namespace rtc {
-AsyncInvoker::AsyncInvoker()
+DEPRECATED_AsyncInvoker::DEPRECATED_AsyncInvoker()
: pending_invocations_(0),
- invocation_complete_(new RefCountedObject<Event>()),
+ invocation_complete_(make_ref_counted<Event>()),
destroying_(false) {}
-AsyncInvoker::~AsyncInvoker() {
+DEPRECATED_AsyncInvoker::~DEPRECATED_AsyncInvoker() {
destroying_.store(true, std::memory_order_relaxed);
// Messages for this need to be cleared *before* our destructor is complete.
ThreadManager::Clear(this);
@@ -37,7 +37,7 @@ AsyncInvoker::~AsyncInvoker() {
}
}
-void AsyncInvoker::OnMessage(Message* msg) {
+void DEPRECATED_AsyncInvoker::OnMessage(Message* msg) {
// Get the AsyncClosure shared ptr from this message's data.
ScopedMessageData<AsyncClosure>* data =
static_cast<ScopedMessageData<AsyncClosure>*>(msg->pdata);
@@ -46,7 +46,8 @@ void AsyncInvoker::OnMessage(Message* msg) {
delete data;
}
-void AsyncInvoker::Flush(Thread* thread, uint32_t id /*= MQID_ANY*/) {
+void DEPRECATED_AsyncInvoker::Flush(Thread* thread,
+ uint32_t id /*= MQID_ANY*/) {
// If the destructor is waiting for invocations to finish, don't start
// running even more tasks.
if (destroying_.load(std::memory_order_relaxed))
@@ -67,14 +68,14 @@ void AsyncInvoker::Flush(Thread* thread, uint32_t id /*= MQID_ANY*/) {
}
}
-void AsyncInvoker::Clear() {
+void DEPRECATED_AsyncInvoker::Clear() {
ThreadManager::Clear(this);
}
-void AsyncInvoker::DoInvoke(const Location& posted_from,
- Thread* thread,
- std::unique_ptr<AsyncClosure> closure,
- uint32_t id) {
+void DEPRECATED_AsyncInvoker::DoInvoke(const Location& posted_from,
+ Thread* thread,
+ std::unique_ptr<AsyncClosure> closure,
+ uint32_t id) {
if (destroying_.load(std::memory_order_relaxed)) {
// Note that this may be expected, if the application is AsyncInvoking
// tasks that AsyncInvoke other tasks. But otherwise it indicates a race
@@ -87,11 +88,12 @@ void AsyncInvoker::DoInvoke(const Location& posted_from,
new ScopedMessageData<AsyncClosure>(std::move(closure)));
}
-void AsyncInvoker::DoInvokeDelayed(const Location& posted_from,
- Thread* thread,
- std::unique_ptr<AsyncClosure> closure,
- uint32_t delay_ms,
- uint32_t id) {
+void DEPRECATED_AsyncInvoker::DoInvokeDelayed(
+ const Location& posted_from,
+ Thread* thread,
+ std::unique_ptr<AsyncClosure> closure,
+ uint32_t delay_ms,
+ uint32_t id) {
if (destroying_.load(std::memory_order_relaxed)) {
// See above comment.
RTC_LOG(LS_WARNING) << "Tried to invoke while destroying the invoker.";
@@ -101,7 +103,7 @@ void AsyncInvoker::DoInvokeDelayed(const Location& posted_from,
new ScopedMessageData<AsyncClosure>(std::move(closure)));
}
-AsyncClosure::AsyncClosure(AsyncInvoker* invoker)
+AsyncClosure::AsyncClosure(DEPRECATED_AsyncInvoker* invoker)
: invoker_(invoker), invocation_complete_(invoker_->invocation_complete_) {
invoker_->pending_invocations_.fetch_add(1, std::memory_order_relaxed);
}
diff --git a/rtc_base/async_invoker.h b/rtc_base/async_invoker.h
index d3bb9a22f9..fd42ca76de 100644
--- a/rtc_base/async_invoker.h
+++ b/rtc_base/async_invoker.h
@@ -15,6 +15,7 @@
#include <memory>
#include <utility>
+#include "absl/base/attributes.h"
#include "api/scoped_refptr.h"
#include "rtc_base/async_invoker_inl.h"
#include "rtc_base/constructor_magic.h"
@@ -86,10 +87,10 @@ namespace rtc {
// destruction. This can be done by starting each chain of invocations on the
// same thread on which it will be destroyed, or by using some other
// synchronization method.
-class AsyncInvoker : public MessageHandlerAutoCleanup {
+class DEPRECATED_AsyncInvoker : public MessageHandlerAutoCleanup {
public:
- AsyncInvoker();
- ~AsyncInvoker() override;
+ DEPRECATED_AsyncInvoker();
+ ~DEPRECATED_AsyncInvoker() override;
// Call |functor| asynchronously on |thread|, with no callback upon
// completion. Returns immediately.
@@ -156,7 +157,7 @@ class AsyncInvoker : public MessageHandlerAutoCleanup {
// an AsyncClosure's destructor that's about to call
// "invocation_complete_->Set()", it's not dereferenced after being
// destroyed.
- scoped_refptr<RefCountedObject<Event>> invocation_complete_;
+ rtc::Ref<Event>::Ptr invocation_complete_;
// This flag is used to ensure that if an application AsyncInvokes tasks that
// recursively AsyncInvoke other tasks ad infinitum, the cycle eventually
@@ -165,9 +166,12 @@ class AsyncInvoker : public MessageHandlerAutoCleanup {
friend class AsyncClosure;
- RTC_DISALLOW_COPY_AND_ASSIGN(AsyncInvoker);
+ RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_AsyncInvoker);
};
+using AsyncInvoker ABSL_DEPRECATED("bugs.webrtc.org/12339") =
+ DEPRECATED_AsyncInvoker;
+
} // namespace rtc
#endif // RTC_BASE_ASYNC_INVOKER_H_
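Renaming the class and re-exporting the old name through a deprecated alias is
the soft-deprecation idiom used here: existing code still compiles against
rtc::AsyncInvoker, but every use of the old spelling now warns and points at
the tracking bug. A small sketch of the same idiom with a placeholder class
name (Widget is illustrative; the message mirrors the alias above):

    #include "absl/base/attributes.h"

    class DEPRECATED_Widget {
     public:
      void DoWork() {}
    };

    // The old name still resolves, but each use emits a deprecation warning.
    using Widget ABSL_DEPRECATED("bugs.webrtc.org/12339") = DEPRECATED_Widget;

    void Caller() {
      Widget w;  // Compiles; warns under -Wdeprecated-declarations.
      w.DoWork();
    }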
diff --git a/rtc_base/async_invoker_inl.h b/rtc_base/async_invoker_inl.h
index 6151059ab5..9fb328782c 100644
--- a/rtc_base/async_invoker_inl.h
+++ b/rtc_base/async_invoker_inl.h
@@ -21,32 +21,33 @@
namespace rtc {
-class AsyncInvoker;
+class DEPRECATED_AsyncInvoker;
// Helper class for AsyncInvoker. Runs a task and triggers a callback
// on the calling thread if necessary.
class AsyncClosure {
public:
- explicit AsyncClosure(AsyncInvoker* invoker);
+ explicit AsyncClosure(DEPRECATED_AsyncInvoker* invoker);
virtual ~AsyncClosure();
// Runs the asynchronous task, and triggers a callback to the calling
// thread if needed. Should be called from the target thread.
virtual void Execute() = 0;
protected:
- AsyncInvoker* invoker_;
+ DEPRECATED_AsyncInvoker* invoker_;
// Reference counted so that if the AsyncInvoker destructor finishes before
// an AsyncClosure's destructor that's about to call
// "invocation_complete_->Set()", it's not dereferenced after being
// destroyed.
- scoped_refptr<RefCountedObject<Event>> invocation_complete_;
+ rtc::Ref<Event>::Ptr invocation_complete_;
};
// Simple closure that doesn't trigger a callback for the calling thread.
template <class FunctorT>
class FireAndForgetAsyncClosure : public AsyncClosure {
public:
- explicit FireAndForgetAsyncClosure(AsyncInvoker* invoker, FunctorT&& functor)
+ explicit FireAndForgetAsyncClosure(DEPRECATED_AsyncInvoker* invoker,
+ FunctorT&& functor)
: AsyncClosure(invoker), functor_(std::forward<FunctorT>(functor)) {}
virtual void Execute() { functor_(); }
diff --git a/rtc_base/async_resolver.cc b/rtc_base/async_resolver.cc
index 198b4984e5..d482b4e681 100644
--- a/rtc_base/async_resolver.cc
+++ b/rtc_base/async_resolver.cc
@@ -10,9 +10,14 @@
#include "rtc_base/async_resolver.h"
+#include <memory>
#include <string>
#include <utility>
+#include "api/ref_counted_base.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
#if defined(WEBRTC_WIN)
#include <ws2spi.h>
#include <ws2tcpip.h>
@@ -30,6 +35,7 @@
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/ip_address.h"
#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread...
@@ -87,30 +93,61 @@ int ResolveHostname(const std::string& hostname,
#endif // !__native_client__
}
-AsyncResolver::AsyncResolver() : error_(-1) {}
+struct AsyncResolver::State : public RefCountedBase {
+ webrtc::Mutex mutex;
+ enum class Status {
+ kLive,
+ kDead
+ } status RTC_GUARDED_BY(mutex) = Status::kLive;
+};
+
+AsyncResolver::AsyncResolver() : error_(-1), state_(new State) {}
AsyncResolver::~AsyncResolver() {
RTC_DCHECK_RUN_ON(&sequence_checker_);
+
+ // Ensure the thread isn't using a stale reference to the current task queue,
+ // or calling into ResolveDone post destruction.
+ webrtc::MutexLock lock(&state_->mutex);
+ state_->status = State::Status::kDead;
+}
+
+void RunResolution(void* obj) {
+ std::function<void()>* function_ptr =
+ static_cast<std::function<void()>*>(obj);
+ (*function_ptr)();
+ delete function_ptr;
}
void AsyncResolver::Start(const SocketAddress& addr) {
RTC_DCHECK_RUN_ON(&sequence_checker_);
RTC_DCHECK(!destroy_called_);
addr_ = addr;
- webrtc::TaskQueueBase* current_task_queue = webrtc::TaskQueueBase::Current();
- popup_thread_ = Thread::Create();
- popup_thread_->Start();
- popup_thread_->PostTask(webrtc::ToQueuedTask(
- [this, flag = safety_.flag(), addr, current_task_queue] {
+ PlatformThread::SpawnDetached(
+ [this, addr, caller_task_queue = webrtc::TaskQueueBase::Current(),
+ state = state_] {
std::vector<IPAddress> addresses;
int error =
ResolveHostname(addr.hostname().c_str(), addr.family(), &addresses);
- current_task_queue->PostTask(webrtc::ToQueuedTask(
- std::move(flag), [this, error, addresses = std::move(addresses)] {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- ResolveDone(std::move(addresses), error);
- }));
- }));
+ webrtc::MutexLock lock(&state->mutex);
+ if (state->status == State::Status::kLive) {
+ caller_task_queue->PostTask(webrtc::ToQueuedTask(
+ [this, error, addresses = std::move(addresses), state] {
+ bool live;
+ {
+ // ResolveDone can lead to instance destruction, so make sure
+ // we don't deadlock.
+ webrtc::MutexLock lock(&state->mutex);
+ live = state->status == State::Status::kLive;
+ }
+ if (live) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ ResolveDone(std::move(addresses), error);
+ }
+ }));
+ }
+ },
+ "AsyncResolver");
}
bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const {
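The destructor/worker handshake above replaces the previous ScopedTaskSafety
flag and owned popup thread: the resolver and the detached lookup thread now
share a single ref-counted State, the destructor marks it kDead under the
mutex, and the worker only posts back (and ResolveDone only runs) while the
state is still kLive. A standalone sketch of the same liveness-flag idea,
using std primitives instead of the WebRTC types and with purely illustrative
names; the patch itself additionally bounces the result back to the caller's
task queue and re-checks the flag there, so ResolveDone stays on the
construction sequence:

    #include <memory>
    #include <mutex>
    #include <thread>

    struct SharedState {
      std::mutex mu;
      bool live = true;
    };

    class Owner {
     public:
      Owner() : state_(std::make_shared<SharedState>()) {}

      ~Owner() {
        // After this, any still-running worker sees live == false and bails.
        std::lock_guard<std::mutex> lock(state_->mu);
        state_->live = false;
      }

      void StartLookup() {
        // The worker keeps the shared state alive via its own shared_ptr copy.
        std::thread([this, state = state_] {
          int result = 42;  // Stand-in for the blocking hostname resolution.
          std::lock_guard<std::mutex> lock(state->mu);
          if (state->live) {
            OnDone(result);  // Owner cannot finish destruction while mu is held.
          }
        }).detach();
      }

     private:
      void OnDone(int /*result*/) {}

      std::shared_ptr<SharedState> state_;
    };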
diff --git a/rtc_base/async_resolver.h b/rtc_base/async_resolver.h
index c43685a4d8..0c053eed81 100644
--- a/rtc_base/async_resolver.h
+++ b/rtc_base/async_resolver.h
@@ -17,12 +17,13 @@
#include <winsock2.h> // NOLINT
#endif
-#include <memory>
#include <vector>
#include "api/sequence_checker.h"
#include "rtc_base/async_resolver_interface.h"
+#include "rtc_base/event.h"
#include "rtc_base/ip_address.h"
+#include "rtc_base/ref_counted_object.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/system/rtc_export.h"
@@ -52,6 +53,9 @@ class RTC_EXPORT AsyncResolver : public AsyncResolverInterface {
const std::vector<IPAddress>& addresses() const;
private:
+ // Fwd decl.
+ struct State;
+
void ResolveDone(std::vector<IPAddress> addresses, int error)
RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_);
void MaybeSelfDestruct();
@@ -59,11 +63,10 @@ class RTC_EXPORT AsyncResolver : public AsyncResolverInterface {
SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_);
std::vector<IPAddress> addresses_ RTC_GUARDED_BY(sequence_checker_);
int error_ RTC_GUARDED_BY(sequence_checker_);
- webrtc::ScopedTaskSafety safety_ RTC_GUARDED_BY(sequence_checker_);
- std::unique_ptr<Thread> popup_thread_ RTC_GUARDED_BY(sequence_checker_);
bool recursion_check_ =
false; // Protects against SignalDone calling into Destroy.
bool destroy_called_ = false;
+ scoped_refptr<State> state_;
RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_;
};
diff --git a/rtc_base/bit_buffer.cc b/rtc_base/bit_buffer.cc
index 540141fe52..d212ef5637 100644
--- a/rtc_base/bit_buffer.cc
+++ b/rtc_base/bit_buffer.cc
@@ -83,36 +83,36 @@ uint64_t BitBuffer::RemainingBitCount() const {
return (static_cast<uint64_t>(byte_count_) - byte_offset_) * 8 - bit_offset_;
}
-bool BitBuffer::ReadUInt8(uint8_t* val) {
+bool BitBuffer::ReadUInt8(uint8_t& val) {
uint32_t bit_val;
- if (!ReadBits(&bit_val, sizeof(uint8_t) * 8)) {
+ if (!ReadBits(sizeof(uint8_t) * 8, bit_val)) {
return false;
}
RTC_DCHECK(bit_val <= std::numeric_limits<uint8_t>::max());
- *val = static_cast<uint8_t>(bit_val);
+ val = static_cast<uint8_t>(bit_val);
return true;
}
-bool BitBuffer::ReadUInt16(uint16_t* val) {
+bool BitBuffer::ReadUInt16(uint16_t& val) {
uint32_t bit_val;
- if (!ReadBits(&bit_val, sizeof(uint16_t) * 8)) {
+ if (!ReadBits(sizeof(uint16_t) * 8, bit_val)) {
return false;
}
RTC_DCHECK(bit_val <= std::numeric_limits<uint16_t>::max());
- *val = static_cast<uint16_t>(bit_val);
+ val = static_cast<uint16_t>(bit_val);
return true;
}
-bool BitBuffer::ReadUInt32(uint32_t* val) {
- return ReadBits(val, sizeof(uint32_t) * 8);
+bool BitBuffer::ReadUInt32(uint32_t& val) {
+ return ReadBits(sizeof(uint32_t) * 8, val);
}
-bool BitBuffer::PeekBits(uint32_t* val, size_t bit_count) {
+bool BitBuffer::PeekBits(size_t bit_count, uint32_t& val) {
// TODO(nisse): Could allow bit_count == 0 and always return success. But
// current code reads one byte beyond end of buffer in the case that
// RemainingBitCount() == 0 and bit_count == 0.
RTC_DCHECK(bit_count > 0);
- if (!val || bit_count > RemainingBitCount() || bit_count > 32) {
+ if (bit_count > RemainingBitCount() || bit_count > 32) {
return false;
}
const uint8_t* bytes = bytes_ + byte_offset_;
@@ -121,7 +121,7 @@ bool BitBuffer::PeekBits(uint32_t* val, size_t bit_count) {
// If we're reading fewer bits than what's left in the current byte, just
// return the portion of this byte that we need.
if (bit_count < remaining_bits_in_current_byte) {
- *val = HighestBits(bits, bit_offset_ + bit_count);
+ val = HighestBits(bits, bit_offset_ + bit_count);
return true;
}
// Otherwise, subtract what we've read from the bit count and read as many
@@ -137,12 +137,50 @@ bool BitBuffer::PeekBits(uint32_t* val, size_t bit_count) {
bits <<= bit_count;
bits |= HighestBits(*bytes, bit_count);
}
- *val = bits;
+ val = bits;
return true;
}
-bool BitBuffer::ReadBits(uint32_t* val, size_t bit_count) {
- return PeekBits(val, bit_count) && ConsumeBits(bit_count);
+bool BitBuffer::PeekBits(size_t bit_count, uint64_t& val) {
+ // TODO(nisse): Could allow bit_count == 0 and always return success. But
+ // current code reads one byte beyond end of buffer in the case that
+ // RemainingBitCount() == 0 and bit_count == 0.
+ RTC_DCHECK(bit_count > 0);
+ if (bit_count > RemainingBitCount() || bit_count > 64) {
+ return false;
+ }
+ const uint8_t* bytes = bytes_ + byte_offset_;
+ size_t remaining_bits_in_current_byte = 8 - bit_offset_;
+ uint64_t bits = LowestBits(*bytes++, remaining_bits_in_current_byte);
+ // If we're reading fewer bits than what's left in the current byte, just
+ // return the portion of this byte that we need.
+ if (bit_count < remaining_bits_in_current_byte) {
+ val = HighestBits(bits, bit_offset_ + bit_count);
+ return true;
+ }
+ // Otherwise, subtract what we've read from the bit count and read as many
+ // full bytes as we can into bits.
+ bit_count -= remaining_bits_in_current_byte;
+ while (bit_count >= 8) {
+ bits = (bits << 8) | *bytes++;
+ bit_count -= 8;
+ }
+ // Whatever we have left is smaller than a byte, so grab just the bits we need
+ // and shift them into the lowest bits.
+ if (bit_count > 0) {
+ bits <<= bit_count;
+ bits |= HighestBits(*bytes, bit_count);
+ }
+ val = bits;
+ return true;
+}
+
+bool BitBuffer::ReadBits(size_t bit_count, uint32_t& val) {
+ return PeekBits(bit_count, val) && ConsumeBits(bit_count);
+}
+
+bool BitBuffer::ReadBits(size_t bit_count, uint64_t& val) {
+ return PeekBits(bit_count, val) && ConsumeBits(bit_count);
}
bool BitBuffer::ConsumeBytes(size_t byte_count) {
@@ -159,39 +197,36 @@ bool BitBuffer::ConsumeBits(size_t bit_count) {
return true;
}
-bool BitBuffer::ReadNonSymmetric(uint32_t* val, uint32_t num_values) {
+bool BitBuffer::ReadNonSymmetric(uint32_t num_values, uint32_t& val) {
RTC_DCHECK_GT(num_values, 0);
RTC_DCHECK_LE(num_values, uint32_t{1} << 31);
if (num_values == 1) {
// When there is only one possible value, it requires zero bits to store it.
// But ReadBits doesn't support reading zero bits.
- *val = 0;
+ val = 0;
return true;
}
size_t count_bits = CountBits(num_values);
uint32_t num_min_bits_values = (uint32_t{1} << count_bits) - num_values;
- if (!ReadBits(val, count_bits - 1)) {
+ if (!ReadBits(count_bits - 1, val)) {
return false;
}
- if (*val < num_min_bits_values) {
+ if (val < num_min_bits_values) {
return true;
}
uint32_t extra_bit;
- if (!ReadBits(&extra_bit, /*bit_count=*/1)) {
+ if (!ReadBits(/*bit_count=*/1, extra_bit)) {
return false;
}
- *val = (*val << 1) + extra_bit - num_min_bits_values;
+ val = (val << 1) + extra_bit - num_min_bits_values;
return true;
}
-bool BitBuffer::ReadExponentialGolomb(uint32_t* val) {
- if (!val) {
- return false;
- }
+bool BitBuffer::ReadExponentialGolomb(uint32_t& val) {
// Store off the current byte/bit offset, in case we want to restore them due
// to a failed parse.
size_t original_byte_offset = byte_offset_;
@@ -200,35 +235,35 @@ bool BitBuffer::ReadExponentialGolomb(uint32_t* val) {
// Count the number of leading 0 bits by peeking/consuming them one at a time.
size_t zero_bit_count = 0;
uint32_t peeked_bit;
- while (PeekBits(&peeked_bit, 1) && peeked_bit == 0) {
+ while (PeekBits(1, peeked_bit) && peeked_bit == 0) {
zero_bit_count++;
ConsumeBits(1);
}
// We should either be at the end of the stream, or the next bit should be 1.
- RTC_DCHECK(!PeekBits(&peeked_bit, 1) || peeked_bit == 1);
+ RTC_DCHECK(!PeekBits(1, peeked_bit) || peeked_bit == 1);
// The bit count of the value is the number of zeros + 1. Make sure that many
// bits fits in a uint32_t and that we have enough bits left for it, and then
// read the value.
size_t value_bit_count = zero_bit_count + 1;
- if (value_bit_count > 32 || !ReadBits(val, value_bit_count)) {
+ if (value_bit_count > 32 || !ReadBits(value_bit_count, val)) {
RTC_CHECK(Seek(original_byte_offset, original_bit_offset));
return false;
}
- *val -= 1;
+ val -= 1;
return true;
}
-bool BitBuffer::ReadSignedExponentialGolomb(int32_t* val) {
+bool BitBuffer::ReadSignedExponentialGolomb(int32_t& val) {
uint32_t unsigned_val;
- if (!ReadExponentialGolomb(&unsigned_val)) {
+ if (!ReadExponentialGolomb(unsigned_val)) {
return false;
}
if ((unsigned_val & 1) == 0) {
- *val = -static_cast<int32_t>(unsigned_val / 2);
+ val = -static_cast<int32_t>(unsigned_val / 2);
} else {
- *val = (unsigned_val + 1) / 2;
+ val = (unsigned_val + 1) / 2;
}
return true;
}
diff --git a/rtc_base/bit_buffer.h b/rtc_base/bit_buffer.h
index de7bf02d56..388218e698 100644
--- a/rtc_base/bit_buffer.h
+++ b/rtc_base/bit_buffer.h
@@ -14,6 +14,7 @@
#include <stddef.h> // For size_t.
#include <stdint.h> // For integer types.
+#include "absl/base/attributes.h"
#include "rtc_base/constructor_magic.h"
namespace rtc {
@@ -38,18 +39,35 @@ class BitBuffer {
// Reads byte-sized values from the buffer. Returns false if there isn't
// enough data left for the specified type.
- bool ReadUInt8(uint8_t* val);
- bool ReadUInt16(uint16_t* val);
- bool ReadUInt32(uint32_t* val);
+ bool ReadUInt8(uint8_t& val);
+ bool ReadUInt16(uint16_t& val);
+ bool ReadUInt32(uint32_t& val);
+ ABSL_DEPRECATED("") bool ReadUInt8(uint8_t* val) {
+ return val ? ReadUInt8(*val) : false;
+ }
+ ABSL_DEPRECATED("") bool ReadUInt16(uint16_t* val) {
+ return val ? ReadUInt16(*val) : false;
+ }
+ ABSL_DEPRECATED("") bool ReadUInt32(uint32_t* val) {
+ return val ? ReadUInt32(*val) : false;
+ }
// Reads bit-sized values from the buffer. Returns false if there isn't enough
// data left for the specified bit count.
- bool ReadBits(uint32_t* val, size_t bit_count);
+ bool ReadBits(size_t bit_count, uint32_t& val);
+ bool ReadBits(size_t bit_count, uint64_t& val);
+ ABSL_DEPRECATED("") bool ReadBits(uint32_t* val, size_t bit_count) {
+ return val ? ReadBits(bit_count, *val) : false;
+ }
// Peeks bit-sized values from the buffer. Returns false if there isn't enough
// data left for the specified number of bits. Doesn't move the current
// offset.
- bool PeekBits(uint32_t* val, size_t bit_count);
+ bool PeekBits(size_t bit_count, uint32_t& val);
+ bool PeekBits(size_t bit_count, uint64_t& val);
+ ABSL_DEPRECATED("") bool PeekBits(uint32_t* val, size_t bit_count) {
+ return val ? PeekBits(bit_count, *val) : false;
+ }
// Reads value in range [0, num_values - 1].
// This encoding is similar to ReadBits(val, Ceil(Log2(num_values)),
@@ -61,7 +79,11 @@ class BitBuffer {
// Value v in range [k, num_values - 1] is encoded as (v+k) in n bits.
// https://aomediacodec.github.io/av1-spec/#nsn
// Returns false if there isn't enough data left.
- bool ReadNonSymmetric(uint32_t* val, uint32_t num_values);
+ bool ReadNonSymmetric(uint32_t num_values, uint32_t& val);
+ ABSL_DEPRECATED("")
+ bool ReadNonSymmetric(uint32_t* val, uint32_t num_values) {
+ return val ? ReadNonSymmetric(num_values, *val) : false;
+ }
// Reads the exponential golomb encoded value at the current offset.
// Exponential golomb values are encoded as:
@@ -71,11 +93,18 @@ class BitBuffer {
// and increment the result by 1.
// Returns false if there isn't enough data left for the specified type, or if
// the value wouldn't fit in a uint32_t.
- bool ReadExponentialGolomb(uint32_t* val);
+ bool ReadExponentialGolomb(uint32_t& val);
+ ABSL_DEPRECATED("") bool ReadExponentialGolomb(uint32_t* val) {
+ return val ? ReadExponentialGolomb(*val) : false;
+ }
+
// Reads signed exponential golomb values at the current offset. Signed
// exponential golomb values are just the unsigned values mapped to the
// sequence 0, 1, -1, 2, -2, etc. in order.
- bool ReadSignedExponentialGolomb(int32_t* val);
+ bool ReadSignedExponentialGolomb(int32_t& val);
+ ABSL_DEPRECATED("") bool ReadSignedExponentialGolomb(int32_t* val) {
+ return val ? ReadSignedExponentialGolomb(*val) : false;
+ }
// Moves current position |byte_count| bytes forward. Returns false if
// there aren't enough bytes left in the buffer.
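The pointer-based readers stay behind ABSL_DEPRECATED shims so existing
callers keep building, while new code passes the bit count first and the
output by reference, and can now read or peek up to 64 bits at a time. A
short usage sketch of the new signatures (the byte values are arbitrary):

    #include "rtc_base/bit_buffer.h"

    bool ReadExample() {
      const uint8_t bytes[] = {0x4D, 0x32, 0xAB, 0x54};
      rtc::BitBuffer buffer(bytes, sizeof(bytes));

      uint8_t small_val;
      uint32_t bits_val;
      uint64_t wide_val;
      return buffer.ReadUInt8(small_val) &&   // output by reference
             buffer.ReadBits(3, bits_val) &&  // bit count comes first now
             buffer.PeekBits(20, wide_val);   // new 64-bit overload
    }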
diff --git a/rtc_base/bit_buffer_unittest.cc b/rtc_base/bit_buffer_unittest.cc
index 656682c2ef..e6bb4270c7 100644
--- a/rtc_base/bit_buffer_unittest.cc
+++ b/rtc_base/bit_buffer_unittest.cc
@@ -49,13 +49,13 @@ TEST(BitBufferTest, ReadBytesAligned) {
uint16_t val16;
uint32_t val32;
BitBuffer buffer(bytes, 8);
- EXPECT_TRUE(buffer.ReadUInt8(&val8));
+ EXPECT_TRUE(buffer.ReadUInt8(val8));
EXPECT_EQ(0x0Au, val8);
- EXPECT_TRUE(buffer.ReadUInt8(&val8));
+ EXPECT_TRUE(buffer.ReadUInt8(val8));
EXPECT_EQ(0xBCu, val8);
- EXPECT_TRUE(buffer.ReadUInt16(&val16));
+ EXPECT_TRUE(buffer.ReadUInt16(val16));
EXPECT_EQ(0xDEF1u, val16);
- EXPECT_TRUE(buffer.ReadUInt32(&val32));
+ EXPECT_TRUE(buffer.ReadUInt32(val32));
EXPECT_EQ(0x23456789u, val32);
}
@@ -68,13 +68,13 @@ TEST(BitBufferTest, ReadBytesOffset4) {
BitBuffer buffer(bytes, 9);
EXPECT_TRUE(buffer.ConsumeBits(4));
- EXPECT_TRUE(buffer.ReadUInt8(&val8));
+ EXPECT_TRUE(buffer.ReadUInt8(val8));
EXPECT_EQ(0xABu, val8);
- EXPECT_TRUE(buffer.ReadUInt8(&val8));
+ EXPECT_TRUE(buffer.ReadUInt8(val8));
EXPECT_EQ(0xCDu, val8);
- EXPECT_TRUE(buffer.ReadUInt16(&val16));
+ EXPECT_TRUE(buffer.ReadUInt16(val16));
EXPECT_EQ(0xEF12u, val16);
- EXPECT_TRUE(buffer.ReadUInt32(&val32));
+ EXPECT_TRUE(buffer.ReadUInt32(val32));
EXPECT_EQ(0x34567890u, val32);
}
@@ -102,15 +102,15 @@ TEST(BitBufferTest, ReadBytesOffset3) {
uint32_t val32;
BitBuffer buffer(bytes, 8);
EXPECT_TRUE(buffer.ConsumeBits(3));
- EXPECT_TRUE(buffer.ReadUInt8(&val8));
+ EXPECT_TRUE(buffer.ReadUInt8(val8));
EXPECT_EQ(0xFEu, val8);
- EXPECT_TRUE(buffer.ReadUInt16(&val16));
+ EXPECT_TRUE(buffer.ReadUInt16(val16));
EXPECT_EQ(0xDCBAu, val16);
- EXPECT_TRUE(buffer.ReadUInt32(&val32));
+ EXPECT_TRUE(buffer.ReadUInt32(val32));
EXPECT_EQ(0x98765432u, val32);
// 5 bits left unread. Not enough to read a uint8_t.
EXPECT_EQ(5u, buffer.RemainingBitCount());
- EXPECT_FALSE(buffer.ReadUInt8(&val8));
+ EXPECT_FALSE(buffer.ReadUInt8(val8));
}
TEST(BitBufferTest, ReadBits) {
@@ -120,26 +120,58 @@ TEST(BitBufferTest, ReadBits) {
const uint8_t bytes[] = {0x4D, 0x32};
uint32_t val;
BitBuffer buffer(bytes, 2);
- EXPECT_TRUE(buffer.ReadBits(&val, 3));
+ EXPECT_TRUE(buffer.ReadBits(3, val));
// 0b010
EXPECT_EQ(0x2u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 2));
+ EXPECT_TRUE(buffer.ReadBits(2, val));
// 0b01
EXPECT_EQ(0x1u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 7));
+ EXPECT_TRUE(buffer.ReadBits(7, val));
// 0b1010011
EXPECT_EQ(0x53u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 2));
+ EXPECT_TRUE(buffer.ReadBits(2, val));
// 0b00
EXPECT_EQ(0x0u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 1));
+ EXPECT_TRUE(buffer.ReadBits(1, val));
// 0b1
EXPECT_EQ(0x1u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 1));
+ EXPECT_TRUE(buffer.ReadBits(1, val));
// 0b0
EXPECT_EQ(0x0u, val);
- EXPECT_FALSE(buffer.ReadBits(&val, 1));
+ EXPECT_FALSE(buffer.ReadBits(1, val));
+}
+
+TEST(BitBufferTest, ReadBits64) {
+ const uint8_t bytes[] = {0x4D, 0x32, 0xAB, 0x54, 0x00, 0xFF, 0xFE, 0x01,
+ 0xAB, 0xCD, 0xEF, 0x01, 0x23, 0x45, 0x67, 0x89};
+ BitBuffer buffer(bytes, 16);
+ uint64_t val;
+
+ // Peek and read first 33 bits.
+ EXPECT_TRUE(buffer.PeekBits(33, val));
+ EXPECT_EQ(0x4D32AB5400FFFE01ull >> (64 - 33), val);
+ val = 0;
+ EXPECT_TRUE(buffer.ReadBits(33, val));
+ EXPECT_EQ(0x4D32AB5400FFFE01ull >> (64 - 33), val);
+
+ // Peek and read next 31 bits.
+ constexpr uint64_t kMask31Bits = (1ull << 31) - 1;
+ EXPECT_TRUE(buffer.PeekBits(31, val));
+ EXPECT_EQ(0x4D32AB5400FFFE01ull & kMask31Bits, val);
+ val = 0;
+ EXPECT_TRUE(buffer.ReadBits(31, val));
+ EXPECT_EQ(0x4D32AB5400FFFE01ull & kMask31Bits, val);
+
+ // Peek and read remaining 64 bits.
+ EXPECT_TRUE(buffer.PeekBits(64, val));
+ EXPECT_EQ(0xABCDEF0123456789ull, val);
+ val = 0;
+ EXPECT_TRUE(buffer.ReadBits(64, val));
+ EXPECT_EQ(0xABCDEF0123456789ull, val);
+
+ // Nothing more to read.
+ EXPECT_FALSE(buffer.ReadBits(1, val));
}
TEST(BitBufferDeathTest, SetOffsetValues) {
@@ -187,10 +219,10 @@ TEST(BitBufferTest, ReadNonSymmetricSameNumberOfBitsWhenNumValuesPowerOf2) {
uint32_t values[4];
ASSERT_EQ(reader.RemainingBitCount(), 16u);
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[0], /*num_values=*/1 << 4));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[1], /*num_values=*/1 << 4));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[2], /*num_values=*/1 << 4));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[3], /*num_values=*/1 << 4));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/1 << 4, values[0]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/1 << 4, values[1]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/1 << 4, values[2]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/1 << 4, values[3]));
ASSERT_EQ(reader.RemainingBitCount(), 0u);
EXPECT_THAT(values, ElementsAre(0xf, 0x3, 0xa, 0x0));
@@ -244,12 +276,12 @@ TEST(BitBufferWriterTest, NonSymmetricReadsMatchesWrites) {
rtc::BitBuffer reader(bytes, 2);
uint32_t values[6];
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[0], /*num_values=*/6));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[1], /*num_values=*/6));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[2], /*num_values=*/6));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[3], /*num_values=*/6));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[4], /*num_values=*/6));
- EXPECT_TRUE(reader.ReadNonSymmetric(&values[5], /*num_values=*/6));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/6, values[0]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/6, values[1]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/6, values[2]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/6, values[3]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/6, values[4]));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/6, values[5]));
EXPECT_THAT(values, ElementsAre(0, 1, 2, 3, 4, 5));
}
@@ -260,7 +292,7 @@ TEST(BitBufferTest, ReadNonSymmetricOnlyValueConsumesNoBits) {
uint32_t value = 0xFFFFFFFF;
ASSERT_EQ(reader.RemainingBitCount(), 16u);
- EXPECT_TRUE(reader.ReadNonSymmetric(&value, /*num_values=*/1));
+ EXPECT_TRUE(reader.ReadNonSymmetric(/*num_values=*/1, value));
EXPECT_EQ(value, 0u);
EXPECT_EQ(reader.RemainingBitCount(), 16u);
@@ -302,7 +334,7 @@ TEST(BitBufferTest, GolombUint32Values) {
byteBuffer.WriteUInt64(encoded_val);
uint32_t decoded_val;
EXPECT_TRUE(buffer.Seek(0, 0));
- EXPECT_TRUE(buffer.ReadExponentialGolomb(&decoded_val));
+ EXPECT_TRUE(buffer.ReadExponentialGolomb(decoded_val));
EXPECT_EQ(i, decoded_val);
}
}
@@ -319,7 +351,7 @@ TEST(BitBufferTest, SignedGolombValues) {
for (size_t i = 0; i < sizeof(golomb_bits); ++i) {
BitBuffer buffer(&golomb_bits[i], 1);
int32_t decoded_val;
- ASSERT_TRUE(buffer.ReadSignedExponentialGolomb(&decoded_val));
+ ASSERT_TRUE(buffer.ReadSignedExponentialGolomb(decoded_val));
EXPECT_EQ(expected[i], decoded_val)
<< "Mismatch in expected/decoded value for golomb_bits[" << i
<< "]: " << static_cast<int>(golomb_bits[i]);
@@ -332,13 +364,13 @@ TEST(BitBufferTest, NoGolombOverread) {
// If it didn't, the above buffer would be valid at 3 bytes.
BitBuffer buffer(bytes, 1);
uint32_t decoded_val;
- EXPECT_FALSE(buffer.ReadExponentialGolomb(&decoded_val));
+ EXPECT_FALSE(buffer.ReadExponentialGolomb(decoded_val));
BitBuffer longer_buffer(bytes, 2);
- EXPECT_FALSE(longer_buffer.ReadExponentialGolomb(&decoded_val));
+ EXPECT_FALSE(longer_buffer.ReadExponentialGolomb(decoded_val));
BitBuffer longest_buffer(bytes, 3);
- EXPECT_TRUE(longest_buffer.ReadExponentialGolomb(&decoded_val));
+ EXPECT_TRUE(longest_buffer.ReadExponentialGolomb(decoded_val));
// Golomb should have read 9 bits, so 0x01FF, and since it is golomb, the
// result is 0x01FF - 1 = 0x01FE.
EXPECT_EQ(0x01FEu, decoded_val);
@@ -360,20 +392,20 @@ TEST(BitBufferWriterTest, SymmetricReadWrite) {
EXPECT_TRUE(buffer.Seek(0, 0));
uint32_t val;
- EXPECT_TRUE(buffer.ReadBits(&val, 3));
+ EXPECT_TRUE(buffer.ReadBits(3, val));
EXPECT_EQ(0x2u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 2));
+ EXPECT_TRUE(buffer.ReadBits(2, val));
EXPECT_EQ(0x1u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 7));
+ EXPECT_TRUE(buffer.ReadBits(7, val));
EXPECT_EQ(0x53u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 2));
+ EXPECT_TRUE(buffer.ReadBits(2, val));
EXPECT_EQ(0x0u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 1));
+ EXPECT_TRUE(buffer.ReadBits(1, val));
EXPECT_EQ(0x1u, val);
- EXPECT_TRUE(buffer.ReadBits(&val, 17));
+ EXPECT_TRUE(buffer.ReadBits(17, val));
EXPECT_EQ(0x1ABCDu, val);
// And there should be nothing left.
- EXPECT_FALSE(buffer.ReadBits(&val, 1));
+ EXPECT_FALSE(buffer.ReadBits(1, val));
}
TEST(BitBufferWriterTest, SymmetricBytesMisaligned) {
@@ -390,11 +422,11 @@ TEST(BitBufferWriterTest, SymmetricBytesMisaligned) {
uint8_t val8;
uint16_t val16;
uint32_t val32;
- EXPECT_TRUE(buffer.ReadUInt8(&val8));
+ EXPECT_TRUE(buffer.ReadUInt8(val8));
EXPECT_EQ(0x12u, val8);
- EXPECT_TRUE(buffer.ReadUInt16(&val16));
+ EXPECT_TRUE(buffer.ReadUInt16(val16));
EXPECT_EQ(0x3456u, val16);
- EXPECT_TRUE(buffer.ReadUInt32(&val32));
+ EXPECT_TRUE(buffer.ReadUInt32(val32));
EXPECT_EQ(0x789ABCDEu, val32);
}
@@ -408,7 +440,7 @@ TEST(BitBufferWriterTest, SymmetricGolomb) {
buffer.Seek(0, 0);
for (size_t i = 0; i < arraysize(test_string); ++i) {
uint32_t val;
- EXPECT_TRUE(buffer.ReadExponentialGolomb(&val));
+ EXPECT_TRUE(buffer.ReadExponentialGolomb(val));
EXPECT_LE(val, std::numeric_limits<uint8_t>::max());
EXPECT_EQ(test_string[i], static_cast<char>(val));
}
diff --git a/rtc_base/cpu_time_unittest.cc b/rtc_base/cpu_time_unittest.cc
index 675e86307c..94f82f4306 100644
--- a/rtc_base/cpu_time_unittest.cc
+++ b/rtc_base/cpu_time_unittest.cc
@@ -30,8 +30,7 @@ const int kProcessingTimeMillisecs = 500;
const int kWorkingThreads = 2;
// Consumes approximately kProcessingTimeMillisecs of CPU time in single thread.
-void WorkingFunction(void* counter_pointer) {
- int64_t* counter = reinterpret_cast<int64_t*>(counter_pointer);
+void WorkingFunction(int64_t* counter) {
*counter = 0;
int64_t stop_cpu_time =
rtc::GetThreadCpuTimeNanos() +
@@ -62,14 +61,12 @@ TEST(CpuTimeTest, MAYBE_TEST(TwoThreads)) {
int64_t thread_start_time_nanos = GetThreadCpuTimeNanos();
int64_t counter1;
int64_t counter2;
- PlatformThread thread1(WorkingFunction, reinterpret_cast<void*>(&counter1),
- "Thread1");
- PlatformThread thread2(WorkingFunction, reinterpret_cast<void*>(&counter2),
- "Thread2");
- thread1.Start();
- thread2.Start();
- thread1.Stop();
- thread2.Stop();
+ auto thread1 = PlatformThread::SpawnJoinable(
+ [&counter1] { WorkingFunction(&counter1); }, "Thread1");
+ auto thread2 = PlatformThread::SpawnJoinable(
+ [&counter2] { WorkingFunction(&counter2); }, "Thread2");
+ thread1.Finalize();
+ thread2.Finalize();
EXPECT_GE(counter1, 0);
EXPECT_GE(counter2, 0);
diff --git a/rtc_base/deprecated/recursive_critical_section_unittest.cc b/rtc_base/deprecated/recursive_critical_section_unittest.cc
index 3fb7c519c1..9256a76f58 100644
--- a/rtc_base/deprecated/recursive_critical_section_unittest.cc
+++ b/rtc_base/deprecated/recursive_critical_section_unittest.cc
@@ -329,33 +329,28 @@ class PerfTestData {
class PerfTestThread {
public:
- PerfTestThread() : thread_(&ThreadFunc, this, "CsPerf") {}
-
void Start(PerfTestData* data, int repeats, int id) {
- RTC_DCHECK(!thread_.IsRunning());
RTC_DCHECK(!data_);
data_ = data;
repeats_ = repeats;
my_id_ = id;
- thread_.Start();
+ thread_ = PlatformThread::SpawnJoinable(
+ [this] {
+ for (int i = 0; i < repeats_; ++i)
+ data_->AddToCounter(my_id_);
+ },
+ "CsPerf");
}
void Stop() {
- RTC_DCHECK(thread_.IsRunning());
RTC_DCHECK(data_);
- thread_.Stop();
+ thread_.Finalize();
repeats_ = 0;
data_ = nullptr;
my_id_ = 0;
}
private:
- static void ThreadFunc(void* param) {
- PerfTestThread* me = static_cast<PerfTestThread*>(param);
- for (int i = 0; i < me->repeats_; ++i)
- me->data_->AddToCounter(me->my_id_);
- }
-
PlatformThread thread_;
PerfTestData* data_ = nullptr;
int repeats_ = 0;
diff --git a/rtc_base/event_tracer.cc b/rtc_base/event_tracer.cc
index 9e3ee6071c..1a2b41ec5c 100644
--- a/rtc_base/event_tracer.cc
+++ b/rtc_base/event_tracer.cc
@@ -79,19 +79,12 @@ namespace rtc {
namespace tracing {
namespace {
-static void EventTracingThreadFunc(void* params);
-
// Atomic-int fast path for avoiding logging when disabled.
static volatile int g_event_logging_active = 0;
// TODO(pbos): Log metadata for all threads, etc.
class EventLogger final {
public:
- EventLogger()
- : logging_thread_(EventTracingThreadFunc,
- this,
- "EventTracingThread",
- kLowPriority) {}
~EventLogger() { RTC_DCHECK(thread_checker_.IsCurrent()); }
void AddTraceEvent(const char* name,
@@ -209,7 +202,8 @@ class EventLogger final {
rtc::AtomicOps::CompareAndSwap(&g_event_logging_active, 0, 1));
// Finally start, everything should be set up now.
- logging_thread_.Start();
+ logging_thread_ =
+ PlatformThread::SpawnJoinable([this] { Log(); }, "EventTracingThread");
TRACE_EVENT_INSTANT0("webrtc", "EventLogger::Start");
}
@@ -223,7 +217,7 @@ class EventLogger final {
// Wake up logging thread to finish writing.
shutdown_event_.Set();
// Join the logging thread.
- logging_thread_.Stop();
+ logging_thread_.Finalize();
}
private:
@@ -326,10 +320,6 @@ class EventLogger final {
bool output_file_owned_ = false;
};
-static void EventTracingThreadFunc(void* params) {
- static_cast<EventLogger*>(params)->Log();
-}
-
static EventLogger* volatile g_event_logger = nullptr;
static const char* const kDisabledTracePrefix = TRACE_DISABLED_BY_DEFAULT("");
const unsigned char* InternalGetCategoryEnabled(const char* name) {
diff --git a/rtc_base/event_unittest.cc b/rtc_base/event_unittest.cc
index 31118877cf..a634d6e426 100644
--- a/rtc_base/event_unittest.cc
+++ b/rtc_base/event_unittest.cc
@@ -43,22 +43,21 @@ TEST(EventTest, AutoReset) {
class SignalerThread {
public:
- SignalerThread() : thread_(&ThreadFn, this, "EventPerf") {}
void Start(Event* writer, Event* reader) {
writer_ = writer;
reader_ = reader;
- thread_.Start();
+ thread_ = PlatformThread::SpawnJoinable(
+ [this] {
+ while (!stop_event_.Wait(0)) {
+ writer_->Set();
+ reader_->Wait(Event::kForever);
+ }
+ },
+ "EventPerf");
}
void Stop() {
stop_event_.Set();
- thread_.Stop();
- }
- static void ThreadFn(void* param) {
- auto* me = static_cast<SignalerThread*>(param);
- while (!me->stop_event_.Wait(0)) {
- me->writer_->Set();
- me->reader_->Wait(Event::kForever);
- }
+ thread_.Finalize();
}
Event stop_event_;
Event* writer_;
diff --git a/rtc_base/hash.h b/rtc_base/hash.h
new file mode 100644
index 0000000000..56d581cdf1
--- /dev/null
+++ b/rtc_base/hash.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef RTC_BASE_HASH_H_
+#define RTC_BASE_HASH_H_
+
+#include <stddef.h>
+
+#include <functional>
+#include <utility>
+
+namespace webrtc {
+
+// A custom hash function for std::pair, to be able to be used as key in a
+// std::unordered_map. If absl::flat_hash_map would ever be used, this is
+// unnecessary as it already has a hash function for std::pair.
+struct PairHash {
+ template <class T1, class T2>
+ size_t operator()(const std::pair<T1, T2>& p) const {
+ return (3 * std::hash<T1>{}(p.first)) ^ std::hash<T2>{}(p.second);
+ }
+};
+
+} // namespace webrtc
+
+#endif // RTC_BASE_HASH_H_
diff --git a/rtc_base/hash_unittest.cc b/rtc_base/hash_unittest.cc
new file mode 100644
index 0000000000..e86c8a8586
--- /dev/null
+++ b/rtc_base/hash_unittest.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "rtc_base/hash.h"
+
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+
+#include "test/gmock.h"
+
+namespace webrtc {
+namespace {
+
+TEST(PairHashTest, CanInsertIntoSet) {
+ using MyPair = std::pair<int, int>;
+
+ std::unordered_set<MyPair, PairHash> pairs;
+
+ pairs.insert({1, 2});
+ pairs.insert({3, 4});
+
+ EXPECT_NE(pairs.find({1, 2}), pairs.end());
+ EXPECT_NE(pairs.find({3, 4}), pairs.end());
+ EXPECT_EQ(pairs.find({1, 3}), pairs.end());
+ EXPECT_EQ(pairs.find({3, 3}), pairs.end());
+}
+
+TEST(PairHashTest, CanInsertIntoMap) {
+ using MyPair = std::pair<std::string, int>;
+
+ std::unordered_map<MyPair, int, PairHash> pairs;
+
+ pairs[{"1", 2}] = 99;
+ pairs[{"3", 4}] = 100;
+
+ EXPECT_EQ((pairs[{"1", 2}]), 99);
+ EXPECT_EQ((pairs[{"3", 4}]), 100);
+ EXPECT_EQ(pairs.find({"1", 3}), pairs.end());
+ EXPECT_EQ(pairs.find({"3", 3}), pairs.end());
+}
+} // namespace
+} // namespace webrtc
diff --git a/rtc_base/ip_address.cc b/rtc_base/ip_address.cc
index 9f1df58255..86f42e0bf9 100644
--- a/rtc_base/ip_address.cc
+++ b/rtc_base/ip_address.cc
@@ -149,10 +149,6 @@ std::string IPAddress::ToString() const {
}
std::string IPAddress::ToSensitiveString() const {
-#if !defined(NDEBUG)
- // Return non-stripped in debug.
- return ToString();
-#else
switch (family_) {
case AF_INET: {
std::string address = ToString();
@@ -176,7 +172,6 @@ std::string IPAddress::ToSensitiveString() const {
}
}
return std::string();
-#endif
}
IPAddress IPAddress::Normalized() const {
diff --git a/rtc_base/ip_address_unittest.cc b/rtc_base/ip_address_unittest.cc
index d79a7b4bd6..f94649cfee 100644
--- a/rtc_base/ip_address_unittest.cc
+++ b/rtc_base/ip_address_unittest.cc
@@ -938,15 +938,9 @@ TEST(IPAddressTest, TestToSensitiveString) {
EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToString());
EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToString());
EXPECT_EQ(kIPv6PublicAddr2String, addr_v6_2.ToString());
-#if defined(NDEBUG)
EXPECT_EQ(kIPv4PublicAddrAnonymizedString, addr_v4.ToSensitiveString());
EXPECT_EQ(kIPv6PublicAddrAnonymizedString, addr_v6.ToSensitiveString());
EXPECT_EQ(kIPv6PublicAddr2AnonymizedString, addr_v6_2.ToSensitiveString());
-#else
- EXPECT_EQ(kIPv4PublicAddrString, addr_v4.ToSensitiveString());
- EXPECT_EQ(kIPv6PublicAddrString, addr_v6.ToSensitiveString());
- EXPECT_EQ(kIPv6PublicAddr2String, addr_v6_2.ToSensitiveString());
-#endif // defined(NDEBUG)
}
TEST(IPAddressTest, TestInterfaceAddress) {
diff --git a/rtc_base/logging_unittest.cc b/rtc_base/logging_unittest.cc
index 225d66d13d..dc1208f3f6 100644
--- a/rtc_base/logging_unittest.cc
+++ b/rtc_base/logging_unittest.cc
@@ -160,18 +160,13 @@ TEST(LogTest, MultipleStreams) {
class LogThread {
public:
- LogThread() : thread_(&ThreadEntry, this, "LogThread") {}
- ~LogThread() { thread_.Stop(); }
-
- void Start() { thread_.Start(); }
+ void Start() {
+ thread_ = PlatformThread::SpawnJoinable(
+ [] { RTC_LOG(LS_VERBOSE) << "RTC_LOG"; }, "LogThread");
+ }
private:
- void Run() { RTC_LOG(LS_VERBOSE) << "RTC_LOG"; }
-
- static void ThreadEntry(void* p) { static_cast<LogThread*>(p)->Run(); }
-
PlatformThread thread_;
- Event event_;
};
// Ensure we don't crash when adding/removing streams while threads are going.
diff --git a/rtc_base/nat_socket_factory.cc b/rtc_base/nat_socket_factory.cc
index 3edf4cecf4..effbb5a6c3 100644
--- a/rtc_base/nat_socket_factory.cc
+++ b/rtc_base/nat_socket_factory.cc
@@ -428,14 +428,15 @@ NATSocketServer::Translator::Translator(NATSocketServer* server,
// Create a new private network, and a NATServer running on the private
// network that bridges to the external network. Also tell the private
// network to use the same message queue as us.
- VirtualSocketServer* internal_server = new VirtualSocketServer();
- internal_server->SetMessageQueue(server_->queue());
- internal_factory_.reset(internal_server);
- nat_server_.reset(new NATServer(type, internal_server, int_ip, int_ip,
- ext_factory, ext_ip));
+ internal_server_ = std::make_unique<VirtualSocketServer>();
+ internal_server_->SetMessageQueue(server_->queue());
+ nat_server_ = std::make_unique<NATServer>(
+ type, internal_server_.get(), int_ip, int_ip, ext_factory, ext_ip);
}
-NATSocketServer::Translator::~Translator() = default;
+NATSocketServer::Translator::~Translator() {
+ internal_server_->SetMessageQueue(nullptr);
+}
NATSocketServer::Translator* NATSocketServer::Translator::GetTranslator(
const SocketAddress& ext_ip) {
diff --git a/rtc_base/nat_socket_factory.h b/rtc_base/nat_socket_factory.h
index e649d19a8e..70030d834e 100644
--- a/rtc_base/nat_socket_factory.h
+++ b/rtc_base/nat_socket_factory.h
@@ -107,7 +107,7 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory {
const SocketAddress& ext_addr);
~Translator();
- SocketFactory* internal_factory() { return internal_factory_.get(); }
+ SocketFactory* internal_factory() { return internal_server_.get(); }
SocketAddress internal_udp_address() const {
return nat_server_->internal_udp_address();
}
@@ -129,7 +129,7 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory {
private:
NATSocketServer* server_;
- std::unique_ptr<SocketFactory> internal_factory_;
+ std::unique_ptr<SocketServer> internal_server_;
std::unique_ptr<NATServer> nat_server_;
TranslatorMap nats_;
std::set<SocketAddress> clients_;
diff --git a/rtc_base/network.cc b/rtc_base/network.cc
index 1b0ba367e6..f4a349bae0 100644
--- a/rtc_base/network.cc
+++ b/rtc_base/network.cc
@@ -487,7 +487,7 @@ BasicNetworkManager::BasicNetworkManager(
allow_mac_based_ipv6_(
webrtc::field_trial::IsEnabled("WebRTC-AllowMACBasedIPv6")),
bind_using_ifname_(
- webrtc::field_trial::IsEnabled("WebRTC-BindUsingInterfaceName")) {}
+ !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName")) {}
BasicNetworkManager::~BasicNetworkManager() {}
diff --git a/rtc_base/network_unittest.cc b/rtc_base/network_unittest.cc
index abad4796fe..75856634ab 100644
--- a/rtc_base/network_unittest.cc
+++ b/rtc_base/network_unittest.cc
@@ -1307,9 +1307,6 @@ TEST_F(NetworkTest, WebRTC_AllowMACBasedIPv6Address) {
#if defined(WEBRTC_POSIX)
TEST_F(NetworkTest, WebRTC_BindUsingInterfaceName) {
- webrtc::test::ScopedFieldTrials field_trials(
- "WebRTC-BindUsingInterfaceName/Enabled/");
-
char if_name1[20] = "wlan0";
char if_name2[20] = "v4-wlan0";
ifaddrs* list = nullptr;
diff --git a/rtc_base/openssl_adapter.cc b/rtc_base/openssl_adapter.cc
index e5c2c42761..563fe0f9d9 100644
--- a/rtc_base/openssl_adapter.cc
+++ b/rtc_base/openssl_adapter.cc
@@ -981,6 +981,9 @@ SSL_CTX* OpenSSLAdapter::CreateContext(SSLMode mode, bool enable_cache) {
SSL_CTX_set_custom_verify(ctx, SSL_VERIFY_PEER, SSLVerifyCallback);
#else
SSL_CTX_set_verify(ctx, SSL_VERIFY_PEER, SSLVerifyCallback);
+ // Verify certificate chains up to a depth of 4. This is not
+ // needed for DTLS-SRTP which uses self-signed certificates
+ // (so the depth is 0) but is required to support TURN/TLS.
SSL_CTX_set_verify_depth(ctx, 4);
#endif
// Use defaults, but disable HMAC-SHA256 and HMAC-SHA384 ciphers
diff --git a/rtc_base/openssl_stream_adapter.cc b/rtc_base/openssl_stream_adapter.cc
index ab2289b1f3..aa0bc3d40c 100644
--- a/rtc_base/openssl_stream_adapter.cc
+++ b/rtc_base/openssl_stream_adapter.cc
@@ -834,7 +834,12 @@ void OpenSSLStreamAdapter::SetTimeout(int delay_ms) {
if (flag->alive()) {
RTC_DLOG(LS_INFO) << "DTLS timeout expired";
timeout_task_.Stop();
- DTLSv1_handle_timeout(ssl_);
+ int res = DTLSv1_handle_timeout(ssl_);
+ if (res > 0) {
+ RTC_LOG(LS_INFO) << "DTLS retransmission";
+ } else if (res < 0) {
+ RTC_LOG(LS_INFO) << "DTLSv1_handle_timeout() return -1";
+ }
ContinueSSL();
} else {
RTC_NOTREACHED();
diff --git a/rtc_base/platform_thread.cc b/rtc_base/platform_thread.cc
index 8a5f2c9d6d..6d369d747e 100644
--- a/rtc_base/platform_thread.cc
+++ b/rtc_base/platform_thread.cc
@@ -10,131 +10,37 @@
#include "rtc_base/platform_thread.h"
+#include <algorithm>
+#include <memory>
+
#if !defined(WEBRTC_WIN)
#include <sched.h>
#endif
-#include <stdint.h>
-#include <time.h>
-
-#include <algorithm>
#include "rtc_base/checks.h"
namespace rtc {
namespace {
-#if !defined(WEBRTC_WIN)
-struct ThreadAttributes {
- ThreadAttributes() { pthread_attr_init(&attr); }
- ~ThreadAttributes() { pthread_attr_destroy(&attr); }
- pthread_attr_t* operator&() { return &attr; }
- pthread_attr_t attr;
-};
-#endif // defined(WEBRTC_WIN)
-} // namespace
-
-PlatformThread::PlatformThread(ThreadRunFunction func,
- void* obj,
- absl::string_view thread_name,
- ThreadPriority priority /*= kNormalPriority*/)
- : run_function_(func), priority_(priority), obj_(obj), name_(thread_name) {
- RTC_DCHECK(func);
- RTC_DCHECK(!name_.empty());
- // TODO(tommi): Consider lowering the limit to 15 (limit on Linux).
- RTC_DCHECK(name_.length() < 64);
- spawned_thread_checker_.Detach();
-}
-
-PlatformThread::~PlatformThread() {
- RTC_DCHECK(thread_checker_.IsCurrent());
-#if defined(WEBRTC_WIN)
- RTC_DCHECK(!thread_);
- RTC_DCHECK(!thread_id_);
-#endif // defined(WEBRTC_WIN)
-}
-
-#if defined(WEBRTC_WIN)
-DWORD WINAPI PlatformThread::StartThread(void* param) {
- // The GetLastError() function only returns valid results when it is called
- // after a Win32 API function that returns a "failed" result. A crash dump
- // contains the result from GetLastError() and to make sure it does not
- // falsely report a Windows error we call SetLastError here.
- ::SetLastError(ERROR_SUCCESS);
- static_cast<PlatformThread*>(param)->Run();
- return 0;
-}
-#else
-void* PlatformThread::StartThread(void* param) {
- static_cast<PlatformThread*>(param)->Run();
- return 0;
-}
-#endif // defined(WEBRTC_WIN)
-
-void PlatformThread::Start() {
- RTC_DCHECK(thread_checker_.IsCurrent());
- RTC_DCHECK(!thread_) << "Thread already started?";
-#if defined(WEBRTC_WIN)
- // See bug 2902 for background on STACK_SIZE_PARAM_IS_A_RESERVATION.
- // Set the reserved stack stack size to 1M, which is the default on Windows
- // and Linux.
- thread_ = ::CreateThread(nullptr, 1024 * 1024, &StartThread, this,
- STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id_);
- RTC_CHECK(thread_) << "CreateThread failed";
- RTC_DCHECK(thread_id_);
-#else
- ThreadAttributes attr;
- // Set the stack stack size to 1M.
- pthread_attr_setstacksize(&attr, 1024 * 1024);
- RTC_CHECK_EQ(0, pthread_create(&thread_, &attr, &StartThread, this));
-#endif // defined(WEBRTC_WIN)
-}
-bool PlatformThread::IsRunning() const {
- RTC_DCHECK(thread_checker_.IsCurrent());
#if defined(WEBRTC_WIN)
- return thread_ != nullptr;
-#else
- return thread_ != 0;
-#endif // defined(WEBRTC_WIN)
-}
-
-PlatformThreadRef PlatformThread::GetThreadRef() const {
-#if defined(WEBRTC_WIN)
- return thread_id_;
-#else
- return thread_;
-#endif // defined(WEBRTC_WIN)
-}
-
-void PlatformThread::Stop() {
- RTC_DCHECK(thread_checker_.IsCurrent());
- if (!IsRunning())
- return;
-
-#if defined(WEBRTC_WIN)
- WaitForSingleObject(thread_, INFINITE);
- CloseHandle(thread_);
- thread_ = nullptr;
- thread_id_ = 0;
-#else
- RTC_CHECK_EQ(0, pthread_join(thread_, nullptr));
- thread_ = 0;
-#endif // defined(WEBRTC_WIN)
- spawned_thread_checker_.Detach();
-}
-
-void PlatformThread::Run() {
- // Attach the worker thread checker to this thread.
- RTC_DCHECK(spawned_thread_checker_.IsCurrent());
- rtc::SetCurrentThreadName(name_.c_str());
- SetPriority(priority_);
- run_function_(obj_);
+int Win32PriorityFromThreadPriority(ThreadPriority priority) {
+ switch (priority) {
+ case ThreadPriority::kLow:
+ return THREAD_PRIORITY_BELOW_NORMAL;
+ case ThreadPriority::kNormal:
+ return THREAD_PRIORITY_NORMAL;
+ case ThreadPriority::kHigh:
+ return THREAD_PRIORITY_ABOVE_NORMAL;
+ case ThreadPriority::kRealtime:
+ return THREAD_PRIORITY_TIME_CRITICAL;
+ }
}
+#endif
-bool PlatformThread::SetPriority(ThreadPriority priority) {
- RTC_DCHECK(spawned_thread_checker_.IsCurrent());
-
+bool SetPriority(ThreadPriority priority) {
#if defined(WEBRTC_WIN)
- return SetThreadPriority(thread_, priority) != FALSE;
+ return SetThreadPriority(GetCurrentThread(),
+ Win32PriorityFromThreadPriority(priority)) != FALSE;
#elif defined(__native_client__) || defined(WEBRTC_FUCHSIA)
// Setting thread priorities is not supported in NaCl or Fuchsia.
return true;
@@ -158,35 +64,148 @@ bool PlatformThread::SetPriority(ThreadPriority priority) {
const int top_prio = max_prio - 1;
const int low_prio = min_prio + 1;
switch (priority) {
- case kLowPriority:
+ case ThreadPriority::kLow:
param.sched_priority = low_prio;
break;
- case kNormalPriority:
+ case ThreadPriority::kNormal:
// The -1 ensures that the kHighPriority is always greater or equal to
// kNormalPriority.
param.sched_priority = (low_prio + top_prio - 1) / 2;
break;
- case kHighPriority:
+ case ThreadPriority::kHigh:
param.sched_priority = std::max(top_prio - 2, low_prio);
break;
- case kHighestPriority:
- param.sched_priority = std::max(top_prio - 1, low_prio);
- break;
- case kRealtimePriority:
+ case ThreadPriority::kRealtime:
param.sched_priority = top_prio;
break;
}
- return pthread_setschedparam(thread_, policy, &param) == 0;
+ return pthread_setschedparam(pthread_self(), policy, &param) == 0;
#endif // defined(WEBRTC_WIN)
}
#if defined(WEBRTC_WIN)
-bool PlatformThread::QueueAPC(PAPCFUNC function, ULONG_PTR data) {
- RTC_DCHECK(thread_checker_.IsCurrent());
- RTC_DCHECK(IsRunning());
+DWORD WINAPI RunPlatformThread(void* param) {
+ // The GetLastError() function only returns valid results when it is called
+ // after a Win32 API function that returns a "failed" result. A crash dump
+ // contains the result from GetLastError() and to make sure it does not
+ // falsely report a Windows error we call SetLastError here.
+ ::SetLastError(ERROR_SUCCESS);
+ auto function = static_cast<std::function<void()>*>(param);
+ (*function)();
+ delete function;
+ return 0;
+}
+#else
+void* RunPlatformThread(void* param) {
+ auto function = static_cast<std::function<void()>*>(param);
+ (*function)();
+ delete function;
+ return 0;
+}
+#endif // defined(WEBRTC_WIN)
+
+} // namespace
+
+PlatformThread::PlatformThread(Handle handle, bool joinable)
+ : handle_(handle), joinable_(joinable) {}
+
+PlatformThread::PlatformThread(PlatformThread&& rhs)
+ : handle_(rhs.handle_), joinable_(rhs.joinable_) {
+ rhs.handle_ = absl::nullopt;
+}
+
+PlatformThread& PlatformThread::operator=(PlatformThread&& rhs) {
+ Finalize();
+ handle_ = rhs.handle_;
+ joinable_ = rhs.joinable_;
+ rhs.handle_ = absl::nullopt;
+ return *this;
+}
+
+PlatformThread::~PlatformThread() {
+ Finalize();
+}
+
+PlatformThread PlatformThread::SpawnJoinable(
+ std::function<void()> thread_function,
+ absl::string_view name,
+ ThreadAttributes attributes) {
+ return SpawnThread(std::move(thread_function), name, attributes,
+ /*joinable=*/true);
+}
+
+PlatformThread PlatformThread::SpawnDetached(
+ std::function<void()> thread_function,
+ absl::string_view name,
+ ThreadAttributes attributes) {
+ return SpawnThread(std::move(thread_function), name, attributes,
+ /*joinable=*/false);
+}
+
+absl::optional<PlatformThread::Handle> PlatformThread::GetHandle() const {
+ return handle_;
+}
- return QueueUserAPC(function, thread_, data) != FALSE;
+#if defined(WEBRTC_WIN)
+bool PlatformThread::QueueAPC(PAPCFUNC function, ULONG_PTR data) {
+ RTC_DCHECK(handle_.has_value());
+ return handle_.has_value() ? QueueUserAPC(function, *handle_, data) != FALSE
+ : false;
}
#endif
+void PlatformThread::Finalize() {
+ if (!handle_.has_value())
+ return;
+#if defined(WEBRTC_WIN)
+ if (joinable_)
+ WaitForSingleObject(*handle_, INFINITE);
+ CloseHandle(*handle_);
+#else
+ if (joinable_)
+ RTC_CHECK_EQ(0, pthread_join(*handle_, nullptr));
+#endif
+ handle_ = absl::nullopt;
+}
+
+PlatformThread PlatformThread::SpawnThread(
+ std::function<void()> thread_function,
+ absl::string_view name,
+ ThreadAttributes attributes,
+ bool joinable) {
+ RTC_DCHECK(thread_function);
+ RTC_DCHECK(!name.empty());
+ // TODO(tommi): Consider lowering the limit to 15 (limit on Linux).
+ RTC_DCHECK(name.length() < 64);
+ auto start_thread_function_ptr =
+ new std::function<void()>([thread_function = std::move(thread_function),
+ name = std::string(name), attributes] {
+ rtc::SetCurrentThreadName(name.c_str());
+ SetPriority(attributes.priority);
+ thread_function();
+ });
+#if defined(WEBRTC_WIN)
+ // See bug 2902 for background on STACK_SIZE_PARAM_IS_A_RESERVATION.
+ // Set the reserved stack size to 1M, which is the default on Windows
+ // and Linux.
+ DWORD thread_id = 0;
+ PlatformThread::Handle handle = ::CreateThread(
+ nullptr, 1024 * 1024, &RunPlatformThread, start_thread_function_ptr,
+ STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id);
+ RTC_CHECK(handle) << "CreateThread failed";
+#else
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ // Set the stack size to 1M.
+ pthread_attr_setstacksize(&attr, 1024 * 1024);
+ pthread_attr_setdetachstate(
+ &attr, joinable ? PTHREAD_CREATE_JOINABLE : PTHREAD_CREATE_DETACHED);
+ PlatformThread::Handle handle;
+ RTC_CHECK_EQ(0, pthread_create(&handle, &attr, &RunPlatformThread,
+ start_thread_function_ptr));
+ pthread_attr_destroy(&attr);
+#endif // defined(WEBRTC_WIN)
+ return PlatformThread(handle, joinable);
+}
+
} // namespace rtc
diff --git a/rtc_base/platform_thread.h b/rtc_base/platform_thread.h
index 64a74d8b27..11ccfae3d0 100644
--- a/rtc_base/platform_thread.h
+++ b/rtc_base/platform_thread.h
@@ -11,92 +11,101 @@
#ifndef RTC_BASE_PLATFORM_THREAD_H_
#define RTC_BASE_PLATFORM_THREAD_H_
-#ifndef WEBRTC_WIN
-#include <pthread.h>
-#endif
+#include <functional>
#include <string>
#include "absl/strings/string_view.h"
-#include "api/sequence_checker.h"
-#include "rtc_base/constructor_magic.h"
+#include "absl/types/optional.h"
#include "rtc_base/platform_thread_types.h"
namespace rtc {
-// Callback function that the spawned thread will enter once spawned.
-typedef void (*ThreadRunFunction)(void*);
+enum class ThreadPriority {
+ kLow = 1,
+ kNormal,
+ kHigh,
+ kRealtime,
+};
-enum ThreadPriority {
-#ifdef WEBRTC_WIN
- kLowPriority = THREAD_PRIORITY_BELOW_NORMAL,
- kNormalPriority = THREAD_PRIORITY_NORMAL,
- kHighPriority = THREAD_PRIORITY_ABOVE_NORMAL,
- kHighestPriority = THREAD_PRIORITY_HIGHEST,
- kRealtimePriority = THREAD_PRIORITY_TIME_CRITICAL
-#else
- kLowPriority = 1,
- kNormalPriority = 2,
- kHighPriority = 3,
- kHighestPriority = 4,
- kRealtimePriority = 5
-#endif
+struct ThreadAttributes {
+ ThreadPriority priority = ThreadPriority::kNormal;
+ ThreadAttributes& SetPriority(ThreadPriority priority_param) {
+ priority = priority_param;
+ return *this;
+ }
};
-// Represents a simple worker thread. The implementation must be assumed
-// to be single threaded, meaning that all methods of the class, must be
-// called from the same thread, including instantiation.
-class PlatformThread {
+// Represents a simple worker thread.
+class PlatformThread final {
public:
- PlatformThread(ThreadRunFunction func,
- void* obj,
- absl::string_view thread_name,
- ThreadPriority priority = kNormalPriority);
+ // Handle is the base platform thread handle.
+#if defined(WEBRTC_WIN)
+ using Handle = HANDLE;
+#else
+ using Handle = pthread_t;
+#endif // defined(WEBRTC_WIN)
+ // This ctor creates the PlatformThread with an unset handle (returning true
+ // in empty()) and is provided for convenience.
+ // TODO(bugs.webrtc.org/12727) Look into if default and move support can be
+ // removed.
+ PlatformThread() = default;
+
+ // Moves |rhs| into this, storing an empty state in |rhs|.
+ // TODO(bugs.webrtc.org/12727) Look into if default and move support can be
+ // removed.
+ PlatformThread(PlatformThread&& rhs);
+
+ // Moves |rhs| into this, storing an empty state in |rhs|.
+ // TODO(bugs.webrtc.org/12727) Look into if default and move support can be
+ // removed.
+ PlatformThread& operator=(PlatformThread&& rhs);
+
+ // For a PlatformThread that's been spawned joinable, the destructor suspends
+ // the calling thread until the created thread exits unless the thread has
+ // already exited.
virtual ~PlatformThread();
- const std::string& name() const { return name_; }
-
- // Spawns a thread and tries to set thread priority according to the priority
- // from when CreateThread was called.
- void Start();
+ // Finalizes any allocated resources.
+ // For a PlatformThread that's been spawned joinable, Finalize() suspends
+ // the calling thread until the created thread exits unless the thread has
+ // already exited.
+ // empty() returns true after completion.
+ void Finalize();
+
+ // Returns true if default constructed, moved from, or Finalize()ed.
+ bool empty() const { return !handle_.has_value(); }
+
+ // Creates a started joinable thread which will be joined when the returned
+ // PlatformThread destructs or Finalize() is called.
+ static PlatformThread SpawnJoinable(
+ std::function<void()> thread_function,
+ absl::string_view name,
+ ThreadAttributes attributes = ThreadAttributes());
+
+ // Creates a started detached thread. The caller has to use external
+ // synchronization as nothing is provided by the PlatformThread construct.
+ static PlatformThread SpawnDetached(
+ std::function<void()> thread_function,
+ absl::string_view name,
+ ThreadAttributes attributes = ThreadAttributes());
+
+ // Returns the base platform thread handle of this thread.
+ absl::optional<Handle> GetHandle() const;
- bool IsRunning() const;
-
- // Returns an identifier for the worker thread that can be used to do
- // thread checks.
- PlatformThreadRef GetThreadRef() const;
-
- // Stops (joins) the spawned thread.
- void Stop();
-
- protected:
#if defined(WEBRTC_WIN)
- // Exposed to derived classes to allow for special cases specific to Windows.
+ // Queue a Windows APC function that runs when the thread is alertable.
bool QueueAPC(PAPCFUNC apc_function, ULONG_PTR data);
#endif
private:
- void Run();
- bool SetPriority(ThreadPriority priority);
-
- ThreadRunFunction const run_function_ = nullptr;
- const ThreadPriority priority_ = kNormalPriority;
- void* const obj_;
- // TODO(pbos): Make sure call sites use string literals and update to a const
- // char* instead of a std::string.
- const std::string name_;
- webrtc::SequenceChecker thread_checker_;
- webrtc::SequenceChecker spawned_thread_checker_;
-#if defined(WEBRTC_WIN)
- static DWORD WINAPI StartThread(void* param);
-
- HANDLE thread_ = nullptr;
- DWORD thread_id_ = 0;
-#else
- static void* StartThread(void* param);
-
- pthread_t thread_ = 0;
-#endif // defined(WEBRTC_WIN)
- RTC_DISALLOW_COPY_AND_ASSIGN(PlatformThread);
+ PlatformThread(Handle handle, bool joinable);
+ static PlatformThread SpawnThread(std::function<void()> thread_function,
+ absl::string_view name,
+ ThreadAttributes attributes,
+ bool joinable);
+
+ absl::optional<Handle> handle_;
+ bool joinable_ = false;
};
} // namespace rtc
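
A minimal usage sketch of the new PlatformThread API declared above; the lambda body, thread name, and priority are illustrative and not part of this patch:

    auto worker = rtc::PlatformThread::SpawnJoinable(
        [] { /* work that runs on the spawned thread */ }, "Worker",
        rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh));
    // ... later, join explicitly (or let the destructor do it):
    worker.Finalize();

SpawnDetached() has the same shape, but the returned PlatformThread never joins, so the caller must provide its own lifetime synchronization.
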
diff --git a/rtc_base/platform_thread_unittest.cc b/rtc_base/platform_thread_unittest.cc
index a52e4cd9f5..0da822cf85 100644
--- a/rtc_base/platform_thread_unittest.cc
+++ b/rtc_base/platform_thread_unittest.cc
@@ -10,51 +10,95 @@
#include "rtc_base/platform_thread.h"
-#include "test/gtest.h"
+#include "absl/types/optional.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/gmock.h"
namespace rtc {
-namespace {
-void NullRunFunction(void* obj) {}
+TEST(PlatformThreadTest, DefaultConstructedIsEmpty) {
+ PlatformThread thread;
+ EXPECT_EQ(thread.GetHandle(), absl::nullopt);
+ EXPECT_TRUE(thread.empty());
+}
-// Function that sets a boolean.
-void SetFlagRunFunction(void* obj) {
- bool* obj_as_bool = static_cast<bool*>(obj);
- *obj_as_bool = true;
+TEST(PlatformThreadTest, StartFinalize) {
+ PlatformThread thread = PlatformThread::SpawnJoinable([] {}, "1");
+ EXPECT_NE(thread.GetHandle(), absl::nullopt);
+ EXPECT_FALSE(thread.empty());
+ thread.Finalize();
+ EXPECT_TRUE(thread.empty());
+ thread = PlatformThread::SpawnDetached([] {}, "2");
+ EXPECT_FALSE(thread.empty());
+ thread.Finalize();
+ EXPECT_TRUE(thread.empty());
}
-} // namespace
+TEST(PlatformThreadTest, MovesEmpty) {
+ PlatformThread thread1;
+ PlatformThread thread2 = std::move(thread1);
+ EXPECT_TRUE(thread1.empty());
+ EXPECT_TRUE(thread2.empty());
+}
-TEST(PlatformThreadTest, StartStop) {
- PlatformThread thread(&NullRunFunction, nullptr, "PlatformThreadTest");
- EXPECT_TRUE(thread.name() == "PlatformThreadTest");
- EXPECT_TRUE(thread.GetThreadRef() == 0);
- thread.Start();
- EXPECT_TRUE(thread.GetThreadRef() != 0);
- thread.Stop();
- EXPECT_TRUE(thread.GetThreadRef() == 0);
+TEST(PlatformThreadTest, MovesHandles) {
+ PlatformThread thread1 = PlatformThread::SpawnJoinable([] {}, "1");
+ PlatformThread thread2 = std::move(thread1);
+ EXPECT_TRUE(thread1.empty());
+ EXPECT_FALSE(thread2.empty());
+ thread1 = PlatformThread::SpawnDetached([] {}, "2");
+ thread2 = std::move(thread1);
+ EXPECT_TRUE(thread1.empty());
+ EXPECT_FALSE(thread2.empty());
}
-TEST(PlatformThreadTest, StartStop2) {
- PlatformThread thread1(&NullRunFunction, nullptr, "PlatformThreadTest1");
- PlatformThread thread2(&NullRunFunction, nullptr, "PlatformThreadTest2");
- EXPECT_TRUE(thread1.GetThreadRef() == thread2.GetThreadRef());
- thread1.Start();
- thread2.Start();
- EXPECT_TRUE(thread1.GetThreadRef() != thread2.GetThreadRef());
- thread2.Stop();
- thread1.Stop();
+TEST(PlatformThreadTest,
+ TwoThreadHandlesAreDifferentWhenStartedAndEqualWhenJoined) {
+ PlatformThread thread1 = PlatformThread();
+ PlatformThread thread2 = PlatformThread();
+ EXPECT_EQ(thread1.GetHandle(), thread2.GetHandle());
+ thread1 = PlatformThread::SpawnJoinable([] {}, "1");
+ thread2 = PlatformThread::SpawnJoinable([] {}, "2");
+ EXPECT_NE(thread1.GetHandle(), thread2.GetHandle());
+ thread1.Finalize();
+ EXPECT_NE(thread1.GetHandle(), thread2.GetHandle());
+ thread2.Finalize();
+ EXPECT_EQ(thread1.GetHandle(), thread2.GetHandle());
}
TEST(PlatformThreadTest, RunFunctionIsCalled) {
bool flag = false;
- PlatformThread thread(&SetFlagRunFunction, &flag, "RunFunctionIsCalled");
- thread.Start();
+ PlatformThread::SpawnJoinable([&] { flag = true; }, "T");
+ EXPECT_TRUE(flag);
+}
- // At this point, the flag may be either true or false.
- thread.Stop();
+TEST(PlatformThreadTest, JoinsThread) {
+ // This test flakes if there are problems with the join implementation.
+ rtc::Event event;
+ PlatformThread::SpawnJoinable([&] { event.Set(); }, "T");
+ EXPECT_TRUE(event.Wait(/*give_up_after_ms=*/0));
+}
- // We expect the thread to have run at least once.
+TEST(PlatformThreadTest, StopsBeforeDetachedThreadExits) {
+ // This test flakes if there are problems with the detached thread
+ // implementation.
+ bool flag = false;
+ rtc::Event thread_started;
+ rtc::Event thread_continue;
+ rtc::Event thread_exiting;
+ PlatformThread::SpawnDetached(
+ [&] {
+ thread_started.Set();
+ thread_continue.Wait(Event::kForever);
+ flag = true;
+ thread_exiting.Set();
+ },
+ "T");
+ thread_started.Wait(Event::kForever);
+ EXPECT_FALSE(flag);
+ thread_continue.Set();
+ thread_exiting.Wait(Event::kForever);
EXPECT_TRUE(flag);
}
diff --git a/rtc_base/rate_limiter_unittest.cc b/rtc_base/rate_limiter_unittest.cc
index 8ebf8aa67b..eda644b4ca 100644
--- a/rtc_base/rate_limiter_unittest.cc
+++ b/rtc_base/rate_limiter_unittest.cc
@@ -127,10 +127,6 @@ class ThreadTask {
rtc::Event end_signal_;
};
-void RunTask(void* thread_task) {
- reinterpret_cast<ThreadTask*>(thread_task)->Run();
-}
-
TEST_F(RateLimitTest, MultiThreadedUsage) {
// Simple sanity test, with different threads calling the various methods.
// Runs a few simple tasks, each on its own thread, but coordinated with
@@ -149,8 +145,8 @@ TEST_F(RateLimitTest, MultiThreadedUsage) {
EXPECT_TRUE(rate_limiter_->SetWindowSize(kWindowSizeMs / 2));
}
} set_window_size_task(rate_limiter.get());
- rtc::PlatformThread thread1(RunTask, &set_window_size_task, "Thread1");
- thread1.Start();
+ auto thread1 = rtc::PlatformThread::SpawnJoinable(
+ [&set_window_size_task] { set_window_size_task.Run(); }, "Thread1");
class SetMaxRateTask : public ThreadTask {
public:
@@ -160,8 +156,8 @@ TEST_F(RateLimitTest, MultiThreadedUsage) {
void DoRun() override { rate_limiter_->SetMaxRate(kMaxRateBps * 2); }
} set_max_rate_task(rate_limiter.get());
- rtc::PlatformThread thread2(RunTask, &set_max_rate_task, "Thread2");
- thread2.Start();
+ auto thread2 = rtc::PlatformThread::SpawnJoinable(
+ [&set_max_rate_task] { set_max_rate_task.Run(); }, "Thread2");
class UseRateTask : public ThreadTask {
public:
@@ -177,8 +173,8 @@ TEST_F(RateLimitTest, MultiThreadedUsage) {
SimulatedClock* const clock_;
} use_rate_task(rate_limiter.get(), &clock_);
- rtc::PlatformThread thread3(RunTask, &use_rate_task, "Thread3");
- thread3.Start();
+ auto thread3 = rtc::PlatformThread::SpawnJoinable(
+ [&use_rate_task] { use_rate_task.Run(); }, "Thread3");
set_window_size_task.start_signal_.Set();
EXPECT_TRUE(set_window_size_task.end_signal_.Wait(kMaxTimeoutMs));
@@ -191,10 +187,6 @@ TEST_F(RateLimitTest, MultiThreadedUsage) {
// All rate consumed.
EXPECT_FALSE(rate_limiter->TryUseRate(1));
-
- thread1.Stop();
- thread2.Stop();
- thread3.Stop();
}
} // namespace webrtc
diff --git a/rtc_base/ref_counted_object.h b/rtc_base/ref_counted_object.h
index 873eaccd47..331132c569 100644
--- a/rtc_base/ref_counted_object.h
+++ b/rtc_base/ref_counted_object.h
@@ -13,6 +13,7 @@
#include <type_traits>
#include <utility>
+#include "api/scoped_refptr.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counter.h"
@@ -33,9 +34,9 @@ class RefCountedObject : public T {
std::forward<P1>(p1),
std::forward<Args>(args)...) {}
- virtual void AddRef() const { ref_count_.IncRef(); }
+ void AddRef() const override { ref_count_.IncRef(); }
- virtual RefCountReleaseStatus Release() const {
+ RefCountReleaseStatus Release() const override {
const auto status = ref_count_.DecRef();
if (status == RefCountReleaseStatus::kDroppedLastRef) {
delete this;
@@ -52,7 +53,7 @@ class RefCountedObject : public T {
virtual bool HasOneRef() const { return ref_count_.HasOneRef(); }
protected:
- virtual ~RefCountedObject() {}
+ ~RefCountedObject() override {}
mutable webrtc::webrtc_impl::RefCounter ref_count_{0};
@@ -63,9 +64,11 @@ template <class T>
class FinalRefCountedObject final : public T {
public:
using T::T;
- // Until c++17 compilers are allowed not to inherit the default constructor,
- // and msvc doesn't. Thus the default constructor is forwarded explicitly.
+ // Until C++17, compilers are allowed not to inherit the default constructors.
+ // Thus the default constructors are forwarded explicitly.
FinalRefCountedObject() = default;
+ explicit FinalRefCountedObject(const T& other) : T(other) {}
+ explicit FinalRefCountedObject(T&& other) : T(std::move(other)) {}
FinalRefCountedObject(const FinalRefCountedObject&) = delete;
FinalRefCountedObject& operator=(const FinalRefCountedObject&) = delete;
@@ -83,6 +86,113 @@ class FinalRefCountedObject final : public T {
mutable webrtc::webrtc_impl::RefCounter ref_count_{0};
};
+// General utilities for constructing a reference counted class and the
+// appropriate reference count implementation for that class.
+//
+// These utilities select either the `RefCountedObject` implementation or
+// `FinalRefCountedObject` depending on whether the to-be-shared class is
+// derived from the RefCountInterface interface or not (respectively).
+
+// `make_ref_counted`:
+//
+// Use this when you want to construct a reference counted object of type T and
+// get a `scoped_refptr<>` back. Example:
+//
+// auto p = make_ref_counted<Foo>("bar", 123);
+//
+// For a class that inherits from RefCountInterface, this is equivalent to:
+//
+// auto p = scoped_refptr<Foo>(new RefCountedObject<Foo>("bar", 123));
+//
+// If the class does not inherit from RefCountInterface, the example is
+// equivalent to:
+//
+// auto p = scoped_refptr<FinalRefCountedObject<Foo>>(
+// new FinalRefCountedObject<Foo>("bar", 123));
+//
+// In these cases, `make_ref_counted` reduces the amount of boilerplate code but
+// also helps with the most commonly intended usage of RefCountedObject whereby
+// methods for reference counting are virtual and designed to satisfy the need
+// of an interface. When such a need does not exist, it is more efficient to use
+// the `FinalRefCountedObject` template, which does not add the vtable overhead.
+//
+// Note that in some cases, using RefCountedObject directly may still be what's
+// needed.
+
+// `make_ref_counted` for classes that are convertible to RefCountInterface.
+template <
+ typename T,
+ typename... Args,
+ typename std::enable_if<std::is_convertible<T*, RefCountInterface*>::value,
+ T>::type* = nullptr>
+scoped_refptr<T> make_ref_counted(Args&&... args) {
+ return new RefCountedObject<T>(std::forward<Args>(args)...);
+}
+
+// `make_ref_counted` for complete classes that are not convertible to
+// RefCountInterface.
+template <
+ typename T,
+ typename... Args,
+ typename std::enable_if<!std::is_convertible<T*, RefCountInterface*>::value,
+ T>::type* = nullptr>
+scoped_refptr<FinalRefCountedObject<T>> make_ref_counted(Args&&... args) {
+ return new FinalRefCountedObject<T>(std::forward<Args>(args)...);
+}
+
+// `Ref<>`, `Ref<>::Type` and `Ref<>::Ptr`:
+//
+// `Ref` is a type declaring utility that is compatible with `make_ref_counted`
+// and can be used in classes and methods where it's more convenient (or
+// readable) to have the compiler figure out the fully fleshed out type for a
+// class rather than spell it out verbatim in all places the type occurs (which
+// can mean maintenance work if the class layout changes).
+//
+// Usage examples:
+//
+// If you want to declare the parameter type that's always compatible with
+// this code:
+//
+// Bar(make_ref_counted<Foo>());
+//
+// You can use `Ref<>::Ptr` to declare a compatible scoped_refptr type:
+//
+// void Bar(Ref<Foo>::Ptr p);
+//
+// This might be more practically useful in templates though.
+//
+// In rare cases you might need to be able to declare a parameter that's fully
+// compatible with the reference counted T type - and just using T* is not
+// enough. To give a code example, we can declare a function, `Foo` that is
+// compatible with this code:
+// auto p = make_ref_counted<Foo>();
+// Foo(p.get());
+//
+// void Foo(Ref<Foo>::Type* foo_ptr);
+//
+// Alternatively this would be:
+// void Foo(Foo* foo_ptr);
+// or
+// void Foo(FinalRefCountedObject<Foo>* foo_ptr);
+
+// Declares the appropriate reference counted type for T depending on whether
+// T is convertible to RefCountInterface or not.
+// For classes that are convertible, the type will simply be T.
+// For classes that cannot be converted to RefCountInterface, the type will be
+// FinalRefCountedObject<T>.
+// This is most useful for declaring a scoped_refptr<T> instance for a class
+// that may or may not implement a virtual reference counted interface:
+// * scoped_refptr<Ref<Foo>::Type> my_ptr;
+template <typename T>
+struct Ref {
+ typedef typename std::conditional<
+ std::is_convertible<T*, RefCountInterface*>::value,
+ T,
+ FinalRefCountedObject<T>>::type Type;
+
+ typedef scoped_refptr<Type> Ptr;
+};
+
} // namespace rtc
#endif // RTC_BASE_REF_COUNTED_OBJECT_H_
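
A compact sketch of how `make_ref_counted` and `Ref<>` compose, mirroring the examples in the comments above; `Counter` is a hypothetical class that does not derive from RefCountInterface:

    class Counter {
     public:
      explicit Counter(int start) : value_(start) {}
      int value() const { return value_; }
     private:
      int value_;
    };

    // scoped_refptr<FinalRefCountedObject<Counter>> without spelling that out:
    rtc::Ref<Counter>::Ptr counter = rtc::make_ref_counted<Counter>(5);
    // A parameter type that is compatible with counter.get():
    void Consume(rtc::Ref<Counter>::Type* counter_ptr);

For a class that does derive from RefCountInterface, the same `make_ref_counted<T>(...)` call instead returns a plain scoped_refptr<T> backed by RefCountedObject<T>.
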
diff --git a/rtc_base/ref_counted_object_unittest.cc b/rtc_base/ref_counted_object_unittest.cc
index 05380b7ca9..ab7bb09191 100644
--- a/rtc_base/ref_counted_object_unittest.cc
+++ b/rtc_base/ref_counted_object_unittest.cc
@@ -64,6 +64,20 @@ class RefClassWithMixedValues : public RefCountInterface {
std::string c_;
};
+class Foo {
+ public:
+ Foo() {}
+ Foo(int i, int j) : foo_(i + j) {}
+ int foo_ = 0;
+};
+
+class FooItf : public RefCountInterface {
+ public:
+ FooItf() {}
+ FooItf(int i, int j) : foo_(i + j) {}
+ int foo_ = 0;
+};
+
} // namespace
TEST(RefCountedObject, HasOneRef) {
@@ -111,4 +125,58 @@ TEST(FinalRefCountedObject, CanWrapIntoScopedRefptr) {
EXPECT_TRUE(ref2->HasOneRef());
}
+TEST(FinalRefCountedObject, CanCreateFromMovedType) {
+ class MoveOnly {
+ public:
+ MoveOnly(int a) : a_(a) {}
+ MoveOnly(MoveOnly&&) = default;
+
+ int a() { return a_; }
+
+ private:
+ int a_;
+ };
+ MoveOnly foo(5);
+ auto ref = make_ref_counted<MoveOnly>(std::move(foo));
+ EXPECT_EQ(ref->a(), 5);
+}
+
+// This test is mostly a compile-time test for scoped_refptr compatibility.
+TEST(RefCounted, SmartPointers) {
+ // Sanity compile-time tests. FooItf is virtual, Foo is not, FooItf inherits
+ // from RefCountInterface, Foo does not.
+ static_assert(std::is_base_of<RefCountInterface, FooItf>::value, "");
+ static_assert(!std::is_base_of<RefCountInterface, Foo>::value, "");
+ static_assert(std::is_polymorphic<FooItf>::value, "");
+ static_assert(!std::is_polymorphic<Foo>::value, "");
+
+ // Check if Ref generates the expected types for Foo and FooItf.
+ static_assert(std::is_base_of<Foo, Ref<Foo>::Type>::value &&
+ !std::is_same<Foo, Ref<Foo>::Type>::value,
+ "");
+ static_assert(std::is_same<FooItf, Ref<FooItf>::Type>::value, "");
+
+ {
+ // Test with FooItf, a class that inherits from RefCountInterface.
+ // Check that we get a valid FooItf reference counted object.
+ auto p = make_ref_counted<FooItf>(2, 3);
+ EXPECT_NE(p.get(), nullptr);
+ EXPECT_EQ(p->foo_, 5); // the FooItf ctor just stores 2+3 in foo_.
+
+ // Use a couple of different ways of declaring what should result in the
+ // same type as `p` is of.
+ scoped_refptr<Ref<FooItf>::Type> p2 = p;
+ Ref<FooItf>::Ptr p3 = p;
+ }
+
+ {
+ // Same for `Foo`
+ auto p = make_ref_counted<Foo>(2, 3);
+ EXPECT_NE(p.get(), nullptr);
+ EXPECT_EQ(p->foo_, 5);
+ scoped_refptr<Ref<Foo>::Type> p2 = p;
+ Ref<Foo>::Ptr p3 = p;
+ }
+}
+
} // namespace rtc
diff --git a/rtc_base/rtc_certificate.cc b/rtc_base/rtc_certificate.cc
index 04ae99685d..937defc6c2 100644
--- a/rtc_base/rtc_certificate.cc
+++ b/rtc_base/rtc_certificate.cc
@@ -13,7 +13,6 @@
#include <memory>
#include "rtc_base/checks.h"
-#include "rtc_base/ref_counted_object.h"
#include "rtc_base/ssl_certificate.h"
#include "rtc_base/ssl_identity.h"
#include "rtc_base/time_utils.h"
@@ -22,14 +21,14 @@ namespace rtc {
scoped_refptr<RTCCertificate> RTCCertificate::Create(
std::unique_ptr<SSLIdentity> identity) {
- return new RefCountedObject<RTCCertificate>(identity.release());
+ return new RTCCertificate(identity.release());
}
RTCCertificate::RTCCertificate(SSLIdentity* identity) : identity_(identity) {
RTC_DCHECK(identity_);
}
-RTCCertificate::~RTCCertificate() {}
+RTCCertificate::~RTCCertificate() = default;
uint64_t RTCCertificate::Expires() const {
int64_t expires = GetSSLCertificate().CertificateExpirationTime();
@@ -67,7 +66,7 @@ scoped_refptr<RTCCertificate> RTCCertificate::FromPEM(
SSLIdentity::CreateFromPEMStrings(pem.private_key(), pem.certificate()));
if (!identity)
return nullptr;
- return new RefCountedObject<RTCCertificate>(identity.release());
+ return new RTCCertificate(identity.release());
}
bool RTCCertificate::operator==(const RTCCertificate& certificate) const {
diff --git a/rtc_base/rtc_certificate.h b/rtc_base/rtc_certificate.h
index 45e51b5b1b..ce9aa47512 100644
--- a/rtc_base/rtc_certificate.h
+++ b/rtc_base/rtc_certificate.h
@@ -16,8 +16,8 @@
#include <memory>
#include <string>
+#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
-#include "rtc_base/ref_count.h"
#include "rtc_base/system/rtc_export.h"
namespace rtc {
@@ -49,7 +49,8 @@ class RTCCertificatePEM {
// A thin abstraction layer between "lower level crypto stuff" like
// SSLCertificate and WebRTC usage. Takes ownership of some lower level objects,
// reference counting protects these from premature destruction.
-class RTC_EXPORT RTCCertificate : public RefCountInterface {
+class RTC_EXPORT RTCCertificate final
+ : public RefCountedNonVirtual<RTCCertificate> {
public:
// Takes ownership of |identity|.
static scoped_refptr<RTCCertificate> Create(
@@ -82,7 +83,9 @@ class RTC_EXPORT RTCCertificate : public RefCountInterface {
protected:
explicit RTCCertificate(SSLIdentity* identity);
- ~RTCCertificate() override;
+
+ friend class RefCountedNonVirtual<RTCCertificate>;
+ ~RTCCertificate();
private:
// The SSLIdentity is the owner of the SSLCertificate. To protect our
diff --git a/rtc_base/socket_address.cc b/rtc_base/socket_address.cc
index 639be52c54..2996ede9d2 100644
--- a/rtc_base/socket_address.cc
+++ b/rtc_base/socket_address.cc
@@ -178,6 +178,16 @@ std::string SocketAddress::ToSensitiveString() const {
return sb.str();
}
+std::string SocketAddress::ToResolvedSensitiveString() const {
+ if (IsUnresolvedIP()) {
+ return "";
+ }
+ char buf[1024];
+ rtc::SimpleStringBuilder sb(buf);
+ sb << ipaddr().ToSensitiveString() << ":" << port();
+ return sb.str();
+}
+
bool SocketAddress::FromString(const std::string& str) {
if (str.at(0) == '[') {
std::string::size_type closebracket = str.rfind(']');
diff --git a/rtc_base/socket_address.h b/rtc_base/socket_address.h
index f459407f54..570a71281e 100644
--- a/rtc_base/socket_address.h
+++ b/rtc_base/socket_address.h
@@ -124,6 +124,10 @@ class RTC_EXPORT SocketAddress {
// Same as ToString but anonymizes it by hiding the last part.
std::string ToSensitiveString() const;
+ // Returns hostname:port string if address is resolved, otherwise returns
+ // empty string.
+ std::string ToResolvedSensitiveString() const;
+
// Parses hostname:port and [hostname]:port.
bool FromString(const std::string& str);
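
A brief sketch of the expected behavior of the new accessor, based on the comment above (the addresses are illustrative):

    rtc::SocketAddress resolved("1.2.3.4", 5678);
    resolved.ToResolvedSensitiveString();    // "1.2.3.x:5678" - IP literal, already resolved.
    rtc::SocketAddress unresolved("host.example.org", 5678);
    unresolved.ToResolvedSensitiveString();  // "" - hostname not resolved yet.
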
diff --git a/rtc_base/socket_address_unittest.cc b/rtc_base/socket_address_unittest.cc
index 14da8cb519..d1c911abff 100644
--- a/rtc_base/socket_address_unittest.cc
+++ b/rtc_base/socket_address_unittest.cc
@@ -323,25 +323,15 @@ TEST(SocketAddressTest, TestToSensitiveString) {
EXPECT_EQ("1.2.3.4", addr_v4.HostAsURIString());
EXPECT_EQ("1.2.3.4:5678", addr_v4.ToString());
-#if defined(NDEBUG)
EXPECT_EQ("1.2.3.x", addr_v4.HostAsSensitiveURIString());
EXPECT_EQ("1.2.3.x:5678", addr_v4.ToSensitiveString());
-#else
- EXPECT_EQ("1.2.3.4", addr_v4.HostAsSensitiveURIString());
- EXPECT_EQ("1.2.3.4:5678", addr_v4.ToSensitiveString());
-#endif // defined(NDEBUG)
SocketAddress addr_v6(kTestV6AddrString, 5678);
EXPECT_EQ("[" + kTestV6AddrString + "]", addr_v6.HostAsURIString());
EXPECT_EQ(kTestV6AddrFullString, addr_v6.ToString());
-#if defined(NDEBUG)
EXPECT_EQ("[" + kTestV6AddrAnonymizedString + "]",
addr_v6.HostAsSensitiveURIString());
EXPECT_EQ(kTestV6AddrFullAnonymizedString, addr_v6.ToSensitiveString());
-#else
- EXPECT_EQ("[" + kTestV6AddrString + "]", addr_v6.HostAsSensitiveURIString());
- EXPECT_EQ(kTestV6AddrFullString, addr_v6.ToSensitiveString());
-#endif // defined(NDEBUG)
}
} // namespace rtc
diff --git a/rtc_base/socket_server.h b/rtc_base/socket_server.h
index 98971e4d84..face04dbc2 100644
--- a/rtc_base/socket_server.h
+++ b/rtc_base/socket_server.h
@@ -33,9 +33,10 @@ class SocketServer : public SocketFactory {
static const int kForever = -1;
static std::unique_ptr<SocketServer> CreateDefault();
- // When the socket server is installed into a Thread, this function is
- // called to allow the socket server to use the thread's message queue for
- // any messaging that it might need to perform.
+ // When the socket server is installed into a Thread, this function is called
+ // to allow the socket server to use the thread's message queue for any
+ // messaging that it might need to perform. It is also called with a null
+ // argument before the thread is destroyed.
virtual void SetMessageQueue(Thread* queue) {}
// Sleeps until:
diff --git a/rtc_base/ssl_fingerprint.cc b/rtc_base/ssl_fingerprint.cc
index 5b261e0f53..358402eb03 100644
--- a/rtc_base/ssl_fingerprint.cc
+++ b/rtc_base/ssl_fingerprint.cc
@@ -103,9 +103,6 @@ SSLFingerprint::SSLFingerprint(const std::string& algorithm,
size_t digest_len)
: SSLFingerprint(algorithm, MakeArrayView(digest_in, digest_len)) {}
-SSLFingerprint::SSLFingerprint(const SSLFingerprint& from)
- : algorithm(from.algorithm), digest(from.digest) {}
-
bool SSLFingerprint::operator==(const SSLFingerprint& other) const {
return algorithm == other.algorithm && digest == other.digest;
}
diff --git a/rtc_base/ssl_fingerprint.h b/rtc_base/ssl_fingerprint.h
index d65d665d83..add3ab7911 100644
--- a/rtc_base/ssl_fingerprint.h
+++ b/rtc_base/ssl_fingerprint.h
@@ -57,7 +57,8 @@ struct RTC_EXPORT SSLFingerprint {
const uint8_t* digest_in,
size_t digest_len);
- SSLFingerprint(const SSLFingerprint& from);
+ SSLFingerprint(const SSLFingerprint& from) = default;
+ SSLFingerprint& operator=(const SSLFingerprint& from) = default;
bool operator==(const SSLFingerprint& other) const;
diff --git a/rtc_base/string_utils.h b/rtc_base/string_utils.h
index 23c55cb893..d844e5e125 100644
--- a/rtc_base/string_utils.h
+++ b/rtc_base/string_utils.h
@@ -88,6 +88,43 @@ std::string string_trim(const std::string& s);
// TODO(jonasolsson): replace with absl::Hex when that becomes available.
std::string ToHex(const int i);
+// CompileTimeString is a string-like object that can be used as a regular
+// const char* at compile time and supports concatenation. Useful for
+// concatenating constexpr strings in, for example, macro declarations.
+namespace rtc_base_string_utils_internal {
+template <int NPlus1>
+struct CompileTimeString {
+ char string[NPlus1] = {0};
+ constexpr CompileTimeString() = default;
+ template <int MPlus1>
+ explicit constexpr CompileTimeString(const char (&chars)[MPlus1]) {
+ char* chars_pointer = string;
+ for (auto c : chars)
+ *chars_pointer++ = c;
+ }
+ template <int MPlus1>
+ constexpr auto Concat(CompileTimeString<MPlus1> b) {
+ CompileTimeString<NPlus1 + MPlus1 - 1> result;
+ char* chars_pointer = result.string;
+ for (auto c : string)
+ *chars_pointer++ = c;
+ chars_pointer = result.string + NPlus1 - 1;
+ for (auto c : b.string)
+ *chars_pointer++ = c;
+ result.string[NPlus1 + MPlus1 - 2] = 0;
+ return result;
+ }
+ constexpr operator const char*() { return string; }
+};
+} // namespace rtc_base_string_utils_internal
+
+// Makes a constexpr CompileTimeString<X> without having to specify X
+// explicitly.
+template <int N>
+constexpr auto MakeCompileTimeString(const char (&a)[N]) {
+ return rtc_base_string_utils_internal::CompileTimeString<N>(a);
+}
+
} // namespace rtc
#endif // RTC_BASE_STRING_UTILS_H_
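
A small sketch of the compile-time concatenation this helper enables; the joined literals are illustrative only:

    constexpr auto kJoined = rtc::MakeCompileTimeString("WebRTC.")
                                 .Concat(rtc::MakeCompileTimeString("Audio"));
    static_assert(sizeof(kJoined.string) == sizeof("WebRTC.Audio"),
                  "Concatenation happens entirely at compile time.");
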
diff --git a/rtc_base/string_utils_unittest.cc b/rtc_base/string_utils_unittest.cc
index 2fa1f220ac..120f7e60f5 100644
--- a/rtc_base/string_utils_unittest.cc
+++ b/rtc_base/string_utils_unittest.cc
@@ -39,4 +39,29 @@ TEST(string_toutf, Empty) {
#endif // WEBRTC_WIN
+TEST(CompileTimeString, MakeActsLikeAString) {
+ EXPECT_STREQ(MakeCompileTimeString("abc123"), "abc123");
+}
+
+TEST(CompileTimeString, ConvertibleToStdString) {
+ EXPECT_EQ(std::string(MakeCompileTimeString("abab")), "abab");
+}
+
+namespace detail {
+constexpr bool StringEquals(const char* a, const char* b) {
+ while (*a && *a == *b)
+ a++, b++;
+ return *a == *b;
+}
+} // namespace detail
+
+static_assert(detail::StringEquals(MakeCompileTimeString("handellm"),
+ "handellm"),
+ "String should initialize.");
+
+static_assert(detail::StringEquals(MakeCompileTimeString("abc123").Concat(
+ MakeCompileTimeString("def456ghi")),
+ "abc123def456ghi"),
+ "Strings should concatenate.");
+
} // namespace rtc
diff --git a/rtc_base/synchronization/mutex_race_check.h b/rtc_base/synchronization/mutex_race_check.h
index 7a79d8a965..cada6292b5 100644
--- a/rtc_base/synchronization/mutex_race_check.h
+++ b/rtc_base/synchronization/mutex_race_check.h
@@ -13,6 +13,7 @@
#include <atomic>
+#include "absl/base/attributes.h"
#include "rtc_base/checks.h"
#include "rtc_base/system/unused.h"
#include "rtc_base/thread_annotations.h"
@@ -41,7 +42,7 @@ class RTC_LOCKABLE MutexImpl final {
RTC_CHECK(was_free)
<< "WEBRTC_RACE_CHECK_MUTEX: mutex locked concurrently.";
}
- RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ ABSL_MUST_USE_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
bool was_free = free_.exchange(false, std::memory_order_acquire);
return was_free;
}
diff --git a/rtc_base/task_queue_libevent.cc b/rtc_base/task_queue_libevent.cc
index 38660cd5a2..909698611e 100644
--- a/rtc_base/task_queue_libevent.cc
+++ b/rtc_base/task_queue_libevent.cc
@@ -93,16 +93,12 @@ void EventAssign(struct event* ev,
rtc::ThreadPriority TaskQueuePriorityToThreadPriority(Priority priority) {
switch (priority) {
case Priority::HIGH:
- return rtc::kRealtimePriority;
+ return rtc::ThreadPriority::kRealtime;
case Priority::LOW:
- return rtc::kLowPriority;
+ return rtc::ThreadPriority::kLow;
case Priority::NORMAL:
- return rtc::kNormalPriority;
- default:
- RTC_NOTREACHED();
- break;
+ return rtc::ThreadPriority::kNormal;
}
- return rtc::kNormalPriority;
}
class TaskQueueLibevent final : public TaskQueueBase {
@@ -120,7 +116,6 @@ class TaskQueueLibevent final : public TaskQueueBase {
~TaskQueueLibevent() override = default;
- static void ThreadMain(void* context);
static void OnWakeup(int socket, short flags, void* context); // NOLINT
static void RunTimer(int fd, short flags, void* context); // NOLINT
@@ -172,8 +167,7 @@ class TaskQueueLibevent::SetTimerTask : public QueuedTask {
TaskQueueLibevent::TaskQueueLibevent(absl::string_view queue_name,
rtc::ThreadPriority priority)
- : event_base_(event_base_new()),
- thread_(&TaskQueueLibevent::ThreadMain, this, queue_name, priority) {
+ : event_base_(event_base_new()) {
int fds[2];
RTC_CHECK(pipe(fds) == 0);
SetNonBlocking(fds[0]);
@@ -184,7 +178,18 @@ TaskQueueLibevent::TaskQueueLibevent(absl::string_view queue_name,
EventAssign(&wakeup_event_, event_base_, wakeup_pipe_out_,
EV_READ | EV_PERSIST, OnWakeup, this);
event_add(&wakeup_event_, 0);
- thread_.Start();
+ thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ {
+ CurrentTaskQueueSetter set_current(this);
+ while (is_active_)
+ event_base_loop(event_base_, 0);
+ }
+
+ for (TimerEvent* timer : pending_timers_)
+ delete timer;
+ },
+ queue_name, rtc::ThreadAttributes().SetPriority(priority));
}
void TaskQueueLibevent::Delete() {
@@ -199,7 +204,7 @@ void TaskQueueLibevent::Delete() {
nanosleep(&ts, nullptr);
}
- thread_.Stop();
+ thread_.Finalize();
event_del(&wakeup_event_);
@@ -253,20 +258,6 @@ void TaskQueueLibevent::PostDelayedTask(std::unique_ptr<QueuedTask> task,
}
// static
-void TaskQueueLibevent::ThreadMain(void* context) {
- TaskQueueLibevent* me = static_cast<TaskQueueLibevent*>(context);
-
- {
- CurrentTaskQueueSetter set_current(me);
- while (me->is_active_)
- event_base_loop(me->event_base_, 0);
- }
-
- for (TimerEvent* timer : me->pending_timers_)
- delete timer;
-}
-
-// static
void TaskQueueLibevent::OnWakeup(int socket,
short flags, // NOLINT
void* context) {
diff --git a/rtc_base/task_queue_stdlib.cc b/rtc_base/task_queue_stdlib.cc
index 5de634512e..41da285ee7 100644
--- a/rtc_base/task_queue_stdlib.cc
+++ b/rtc_base/task_queue_stdlib.cc
@@ -36,14 +36,11 @@ rtc::ThreadPriority TaskQueuePriorityToThreadPriority(
TaskQueueFactory::Priority priority) {
switch (priority) {
case TaskQueueFactory::Priority::HIGH:
- return rtc::kRealtimePriority;
+ return rtc::ThreadPriority::kRealtime;
case TaskQueueFactory::Priority::LOW:
- return rtc::kLowPriority;
+ return rtc::ThreadPriority::kLow;
case TaskQueueFactory::Priority::NORMAL:
- return rtc::kNormalPriority;
- default:
- RTC_NOTREACHED();
- return rtc::kNormalPriority;
+ return rtc::ThreadPriority::kNormal;
}
}
@@ -78,8 +75,6 @@ class TaskQueueStdlib final : public TaskQueueBase {
NextTask GetNextTask();
- static void ThreadMain(void* context);
-
void ProcessTasks();
void NotifyWake();
@@ -87,16 +82,9 @@ class TaskQueueStdlib final : public TaskQueueBase {
// Indicates if the thread has started.
rtc::Event started_;
- // Indicates if the thread has stopped.
- rtc::Event stopped_;
-
// Signaled whenever a new task is pending.
rtc::Event flag_notify_;
- // Contains the active worker thread assigned to processing
- // tasks (including delayed tasks).
- rtc::PlatformThread thread_;
-
Mutex pending_lock_;
// Indicates if the worker thread needs to shutdown now.
@@ -119,15 +107,25 @@ class TaskQueueStdlib final : public TaskQueueBase {
// std::unique_ptr out of the queue without the presence of a hack.
std::map<DelayedEntryTimeout, std::unique_ptr<QueuedTask>> delayed_queue_
RTC_GUARDED_BY(pending_lock_);
+
+ // Contains the active worker thread assigned to processing
+ // tasks (including delayed tasks).
+ // Placing this last ensures the thread doesn't touch uninitialized attributes
+ // throughout its lifetime.
+ rtc::PlatformThread thread_;
};
TaskQueueStdlib::TaskQueueStdlib(absl::string_view queue_name,
rtc::ThreadPriority priority)
: started_(/*manual_reset=*/false, /*initially_signaled=*/false),
- stopped_(/*manual_reset=*/false, /*initially_signaled=*/false),
flag_notify_(/*manual_reset=*/false, /*initially_signaled=*/false),
- thread_(&TaskQueueStdlib::ThreadMain, this, queue_name, priority) {
- thread_.Start();
+ thread_(rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ CurrentTaskQueueSetter set_current(this);
+ ProcessTasks();
+ },
+ queue_name,
+ rtc::ThreadAttributes().SetPriority(priority))) {
started_.Wait(rtc::Event::kForever);
}
@@ -141,8 +139,6 @@ void TaskQueueStdlib::Delete() {
NotifyWake();
- stopped_.Wait(rtc::Event::kForever);
- thread_.Stop();
delete this;
}
@@ -219,13 +215,6 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() {
return result;
}
-// static
-void TaskQueueStdlib::ThreadMain(void* context) {
- TaskQueueStdlib* me = static_cast<TaskQueueStdlib*>(context);
- CurrentTaskQueueSetter set_current(me);
- me->ProcessTasks();
-}
-
void TaskQueueStdlib::ProcessTasks() {
started_.Set();
@@ -250,8 +239,6 @@ void TaskQueueStdlib::ProcessTasks() {
else
flag_notify_.Wait(task.sleep_time_ms_);
}
-
- stopped_.Set();
}
void TaskQueueStdlib::NotifyWake() {
diff --git a/rtc_base/task_queue_win.cc b/rtc_base/task_queue_win.cc
index 5eb3776cea..d797d478f4 100644
--- a/rtc_base/task_queue_win.cc
+++ b/rtc_base/task_queue_win.cc
@@ -29,16 +29,18 @@
#include <utility>
#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
#include "api/task_queue/queued_task.h"
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
+#include "rtc_base/constructor_magic.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/platform_thread.h"
-#include "rtc_base/time_utils.h"
#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/time_utils.h"
namespace webrtc {
namespace {
@@ -56,16 +58,12 @@ rtc::ThreadPriority TaskQueuePriorityToThreadPriority(
TaskQueueFactory::Priority priority) {
switch (priority) {
case TaskQueueFactory::Priority::HIGH:
- return rtc::kRealtimePriority;
+ return rtc::ThreadPriority::kRealtime;
case TaskQueueFactory::Priority::LOW:
- return rtc::kLowPriority;
+ return rtc::ThreadPriority::kLow;
case TaskQueueFactory::Priority::NORMAL:
- return rtc::kNormalPriority;
- default:
- RTC_NOTREACHED();
- break;
+ return rtc::ThreadPriority::kNormal;
}
- return rtc::kNormalPriority;
}
int64_t GetTick() {
@@ -167,21 +165,6 @@ class TaskQueueWin : public TaskQueueBase {
void RunPendingTasks();
private:
- static void ThreadMain(void* context);
-
- class WorkerThread : public rtc::PlatformThread {
- public:
- WorkerThread(rtc::ThreadRunFunction func,
- void* obj,
- absl::string_view thread_name,
- rtc::ThreadPriority priority)
- : PlatformThread(func, obj, thread_name, priority) {}
-
- bool QueueAPC(PAPCFUNC apc_function, ULONG_PTR data) {
- return rtc::PlatformThread::QueueAPC(apc_function, data);
- }
- };
-
void RunThreadMain();
bool ProcessQueuedMessages();
void RunDueTasks();
@@ -204,7 +187,7 @@ class TaskQueueWin : public TaskQueueBase {
greater<DelayedTaskInfo>>
timer_tasks_;
UINT_PTR timer_id_ = 0;
- WorkerThread thread_;
+ rtc::PlatformThread thread_;
Mutex pending_lock_;
std::queue<std::unique_ptr<QueuedTask>> pending_
RTC_GUARDED_BY(pending_lock_);
@@ -213,10 +196,12 @@ class TaskQueueWin : public TaskQueueBase {
TaskQueueWin::TaskQueueWin(absl::string_view queue_name,
rtc::ThreadPriority priority)
- : thread_(&TaskQueueWin::ThreadMain, this, queue_name, priority),
- in_queue_(::CreateEvent(nullptr, true, false, nullptr)) {
+ : in_queue_(::CreateEvent(nullptr, true, false, nullptr)) {
RTC_DCHECK(in_queue_);
- thread_.Start();
+ thread_ = rtc::PlatformThread::SpawnJoinable(
+ [this] { RunThreadMain(); }, queue_name,
+ rtc::ThreadAttributes().SetPriority(priority));
+
rtc::Event event(false, false);
RTC_CHECK(thread_.QueueAPC(&InitializeQueueThread,
reinterpret_cast<ULONG_PTR>(&event)));
@@ -225,11 +210,13 @@ TaskQueueWin::TaskQueueWin(absl::string_view queue_name,
void TaskQueueWin::Delete() {
RTC_DCHECK(!IsCurrent());
- while (!::PostThreadMessage(thread_.GetThreadRef(), WM_QUIT, 0, 0)) {
+ RTC_CHECK(thread_.GetHandle() != absl::nullopt);
+ while (
+ !::PostThreadMessage(GetThreadId(*thread_.GetHandle()), WM_QUIT, 0, 0)) {
RTC_CHECK_EQ(ERROR_NOT_ENOUGH_QUOTA, ::GetLastError());
Sleep(1);
}
- thread_.Stop();
+ thread_.Finalize();
::CloseHandle(in_queue_);
delete this;
}
@@ -252,7 +239,9 @@ void TaskQueueWin::PostDelayedTask(std::unique_ptr<QueuedTask> task,
// and WPARAM is 32bits in 32bit builds. Otherwise, we could pass the
// task pointer and timestamp as LPARAM and WPARAM.
auto* task_info = new DelayedTaskInfo(milliseconds, std::move(task));
- if (!::PostThreadMessage(thread_.GetThreadRef(), WM_QUEUE_DELAYED_TASK, 0,
+ RTC_CHECK(thread_.GetHandle() != absl::nullopt);
+ if (!::PostThreadMessage(GetThreadId(*thread_.GetHandle()),
+ WM_QUEUE_DELAYED_TASK, 0,
reinterpret_cast<LPARAM>(task_info))) {
delete task_info;
}
@@ -274,11 +263,6 @@ void TaskQueueWin::RunPendingTasks() {
}
}
-// static
-void TaskQueueWin::ThreadMain(void* context) {
- static_cast<TaskQueueWin*>(context)->RunThreadMain();
-}
-
void TaskQueueWin::RunThreadMain() {
CurrentTaskQueueSetter set_current(this);
HANDLE handles[2] = {*timer_.event_for_wait(), in_queue_};
diff --git a/rtc_base/task_utils/BUILD.gn b/rtc_base/task_utils/BUILD.gn
index 39e4ba1100..64a041908e 100644
--- a/rtc_base/task_utils/BUILD.gn
+++ b/rtc_base/task_utils/BUILD.gn
@@ -14,6 +14,7 @@ rtc_library("repeating_task") {
"repeating_task.h",
]
deps = [
+ ":pending_task_safety_flag",
":to_queued_task",
"..:logging",
"..:timeutils",
@@ -33,7 +34,7 @@ rtc_library("pending_task_safety_flag") {
]
deps = [
"..:checks",
- "..:refcount",
+ "../../api:refcountedbase",
"../../api:scoped_refptr",
"../../api:sequence_checker",
"../system:no_unique_address",
diff --git a/rtc_base/task_utils/pending_task_safety_flag.cc b/rtc_base/task_utils/pending_task_safety_flag.cc
index b83d714916..57b3f6ce88 100644
--- a/rtc_base/task_utils/pending_task_safety_flag.cc
+++ b/rtc_base/task_utils/pending_task_safety_flag.cc
@@ -10,19 +10,17 @@
#include "rtc_base/task_utils/pending_task_safety_flag.h"
-#include "rtc_base/ref_counted_object.h"
-
namespace webrtc {
// static
rtc::scoped_refptr<PendingTaskSafetyFlag> PendingTaskSafetyFlag::Create() {
- return new rtc::RefCountedObject<PendingTaskSafetyFlag>(true);
+ return new PendingTaskSafetyFlag(true);
}
rtc::scoped_refptr<PendingTaskSafetyFlag>
PendingTaskSafetyFlag::CreateDetached() {
rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag(
- new rtc::RefCountedObject<PendingTaskSafetyFlag>(true));
+ new PendingTaskSafetyFlag(true));
safety_flag->main_sequence_.Detach();
return safety_flag;
}
@@ -30,7 +28,7 @@ PendingTaskSafetyFlag::CreateDetached() {
rtc::scoped_refptr<PendingTaskSafetyFlag>
PendingTaskSafetyFlag::CreateDetachedInactive() {
rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag(
- new rtc::RefCountedObject<PendingTaskSafetyFlag>(false));
+ new PendingTaskSafetyFlag(false));
safety_flag->main_sequence_.Detach();
return safety_flag;
}
diff --git a/rtc_base/task_utils/pending_task_safety_flag.h b/rtc_base/task_utils/pending_task_safety_flag.h
index 4864b5de3b..fc1b5bd878 100644
--- a/rtc_base/task_utils/pending_task_safety_flag.h
+++ b/rtc_base/task_utils/pending_task_safety_flag.h
@@ -11,10 +11,10 @@
#ifndef RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_
#define RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_
+#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "api/sequence_checker.h"
#include "rtc_base/checks.h"
-#include "rtc_base/ref_count.h"
#include "rtc_base/system/no_unique_address.h"
namespace webrtc {
@@ -55,7 +55,8 @@ namespace webrtc {
// my_task_queue_->PostTask(ToQueuedTask(pending_task_safety_flag_,
// [this]() { MyMethod(); }));
//
-class PendingTaskSafetyFlag : public rtc::RefCountInterface {
+class PendingTaskSafetyFlag final
+ : public rtc::RefCountedNonVirtual<PendingTaskSafetyFlag> {
public:
static rtc::scoped_refptr<PendingTaskSafetyFlag> Create();
@@ -113,7 +114,7 @@ class PendingTaskSafetyFlag : public rtc::RefCountInterface {
// This should be used by the class that wants tasks dropped after destruction.
// The requirement is that the instance has to be constructed and destructed on
// the same thread as the potentially dropped tasks would be running on.
-class ScopedTaskSafety {
+class ScopedTaskSafety final {
public:
ScopedTaskSafety() = default;
~ScopedTaskSafety() { flag_->SetNotAlive(); }
@@ -128,7 +129,7 @@ class ScopedTaskSafety {
// Like ScopedTaskSafety, but allows construction on a different thread than
// where the flag will be used.
-class ScopedTaskSafetyDetached {
+class ScopedTaskSafetyDetached final {
public:
ScopedTaskSafetyDetached() = default;
~ScopedTaskSafetyDetached() { flag_->SetNotAlive(); }
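
For context on the RepeatingTask changes that follow, a minimal sketch of the flag guarding a posted closure, in the spirit of the usage comment in this header; `Pinger` and its methods are hypothetical:

    #include "api/task_queue/task_queue_base.h"
    #include "rtc_base/task_utils/pending_task_safety_flag.h"
    #include "rtc_base/task_utils/to_queued_task.h"

    class Pinger {
     public:
      void Schedule(webrtc::TaskQueueBase* queue) {
        // The closure only runs if the flag is still alive at execution time.
        queue->PostTask(webrtc::ToQueuedTask(safety_flag_, [this] { Ping(); }));
      }
      ~Pinger() { safety_flag_->SetNotAlive(); }
     private:
      void Ping() {}
      rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety_flag_ =
          webrtc::PendingTaskSafetyFlag::Create();
    };
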
diff --git a/rtc_base/task_utils/repeating_task.cc b/rtc_base/task_utils/repeating_task.cc
index 574e6331f1..9636680cb4 100644
--- a/rtc_base/task_utils/repeating_task.cc
+++ b/rtc_base/task_utils/repeating_task.cc
@@ -12,32 +12,36 @@
#include "absl/memory/memory.h"
#include "rtc_base/logging.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
namespace webrtc_repeating_task_impl {
-RepeatingTaskBase::RepeatingTaskBase(TaskQueueBase* task_queue,
- TimeDelta first_delay,
- Clock* clock)
+RepeatingTaskBase::RepeatingTaskBase(
+ TaskQueueBase* task_queue,
+ TimeDelta first_delay,
+ Clock* clock,
+ rtc::scoped_refptr<PendingTaskSafetyFlag> alive_flag)
: task_queue_(task_queue),
clock_(clock),
- next_run_time_(clock_->CurrentTime() + first_delay) {}
+ next_run_time_(clock_->CurrentTime() + first_delay),
+ alive_flag_(std::move(alive_flag)) {}
RepeatingTaskBase::~RepeatingTaskBase() = default;
bool RepeatingTaskBase::Run() {
RTC_DCHECK_RUN_ON(task_queue_);
// Return true to tell the TaskQueue to destruct this object.
- if (next_run_time_.IsPlusInfinity())
+ if (!alive_flag_->alive())
return true;
TimeDelta delay = RunClosure();
// The closure might have stopped this task, in which case we return true to
// destruct this object.
- if (next_run_time_.IsPlusInfinity())
+ if (!alive_flag_->alive())
return true;
RTC_DCHECK(delay.IsFinite());
@@ -53,33 +57,11 @@ bool RepeatingTaskBase::Run() {
return false;
}
-void RepeatingTaskBase::Stop() {
- RTC_DCHECK_RUN_ON(task_queue_);
- RTC_DCHECK(next_run_time_.IsFinite());
- next_run_time_ = Timestamp::PlusInfinity();
-}
-
} // namespace webrtc_repeating_task_impl
-RepeatingTaskHandle::RepeatingTaskHandle(RepeatingTaskHandle&& other)
- : repeating_task_(other.repeating_task_) {
- other.repeating_task_ = nullptr;
-}
-
-RepeatingTaskHandle& RepeatingTaskHandle::operator=(
- RepeatingTaskHandle&& other) {
- repeating_task_ = other.repeating_task_;
- other.repeating_task_ = nullptr;
- return *this;
-}
-
-RepeatingTaskHandle::RepeatingTaskHandle(
- webrtc_repeating_task_impl::RepeatingTaskBase* repeating_task)
- : repeating_task_(repeating_task) {}
-
void RepeatingTaskHandle::Stop() {
if (repeating_task_) {
- repeating_task_->Stop();
+ repeating_task_->SetNotAlive();
repeating_task_ = nullptr;
}
}
diff --git a/rtc_base/task_utils/repeating_task.h b/rtc_base/task_utils/repeating_task.h
index 487b7d19d4..d5066fdb5c 100644
--- a/rtc_base/task_utils/repeating_task.h
+++ b/rtc_base/task_utils/repeating_task.h
@@ -19,22 +19,19 @@
#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
-
-class RepeatingTaskHandle;
-
namespace webrtc_repeating_task_impl {
class RepeatingTaskBase : public QueuedTask {
public:
RepeatingTaskBase(TaskQueueBase* task_queue,
TimeDelta first_delay,
- Clock* clock);
+ Clock* clock,
+ rtc::scoped_refptr<PendingTaskSafetyFlag> alive_flag);
~RepeatingTaskBase() override;
- void Stop();
-
private:
virtual TimeDelta RunClosure() = 0;
@@ -42,9 +39,10 @@ class RepeatingTaskBase : public QueuedTask {
TaskQueueBase* const task_queue_;
Clock* const clock_;
- // This is always finite, except for the special case where it's PlusInfinity
- // to signal that the task should stop.
+ // This is always finite.
Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_);
+ rtc::scoped_refptr<PendingTaskSafetyFlag> alive_flag_
+ RTC_GUARDED_BY(task_queue_);
};
// The template closure pattern is based on rtc::ClosureTask.
@@ -54,8 +52,12 @@ class RepeatingTaskImpl final : public RepeatingTaskBase {
RepeatingTaskImpl(TaskQueueBase* task_queue,
TimeDelta first_delay,
Closure&& closure,
- Clock* clock)
- : RepeatingTaskBase(task_queue, first_delay, clock),
+ Clock* clock,
+ rtc::scoped_refptr<PendingTaskSafetyFlag> alive_flag)
+ : RepeatingTaskBase(task_queue,
+ first_delay,
+ clock,
+ std::move(alive_flag)),
closure_(std::forward<Closure>(closure)) {
static_assert(
std::is_same<TimeDelta,
@@ -81,28 +83,27 @@ class RepeatingTaskHandle {
public:
RepeatingTaskHandle() = default;
~RepeatingTaskHandle() = default;
- RepeatingTaskHandle(RepeatingTaskHandle&& other);
- RepeatingTaskHandle& operator=(RepeatingTaskHandle&& other);
+ RepeatingTaskHandle(RepeatingTaskHandle&& other) = default;
+ RepeatingTaskHandle& operator=(RepeatingTaskHandle&& other) = default;
RepeatingTaskHandle(const RepeatingTaskHandle&) = delete;
RepeatingTaskHandle& operator=(const RepeatingTaskHandle&) = delete;
// Start can be used to start a task that will be reposted with a delay
// determined by the return value of the provided closure. The actual task is
// owned by the TaskQueue and will live until it has been stopped or the
- // TaskQueue is destroyed. Note that this means that trying to stop the
- // repeating task after the TaskQueue is destroyed is an error. However, it's
- // perfectly fine to destroy the handle while the task is running, since the
- // repeated task is owned by the TaskQueue.
+ // TaskQueue deletes it. It's perfectly fine to destroy the handle while the
+ // task is running, since the repeated task is owned by the TaskQueue.
template <class Closure>
static RepeatingTaskHandle Start(TaskQueueBase* task_queue,
Closure&& closure,
Clock* clock = Clock::GetRealTimeClock()) {
- auto repeating_task = std::make_unique<
- webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
- task_queue, TimeDelta::Zero(), std::forward<Closure>(closure), clock);
- auto* repeating_task_ptr = repeating_task.get();
- task_queue->PostTask(std::move(repeating_task));
- return RepeatingTaskHandle(repeating_task_ptr);
+ auto alive_flag = PendingTaskSafetyFlag::CreateDetached();
+ task_queue->PostTask(
+ std::make_unique<
+ webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
+ task_queue, TimeDelta::Zero(), std::forward<Closure>(closure),
+ clock, alive_flag));
+ return RepeatingTaskHandle(std::move(alive_flag));
}
// DelayedStart is equivalent to Start except that the first invocation of the
@@ -113,12 +114,14 @@ class RepeatingTaskHandle {
TimeDelta first_delay,
Closure&& closure,
Clock* clock = Clock::GetRealTimeClock()) {
- auto repeating_task = std::make_unique<
- webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
- task_queue, first_delay, std::forward<Closure>(closure), clock);
- auto* repeating_task_ptr = repeating_task.get();
- task_queue->PostDelayedTask(std::move(repeating_task), first_delay.ms());
- return RepeatingTaskHandle(repeating_task_ptr);
+ auto alive_flag = PendingTaskSafetyFlag::CreateDetached();
+ task_queue->PostDelayedTask(
+ std::make_unique<
+ webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
+ task_queue, first_delay, std::forward<Closure>(closure), clock,
+ alive_flag),
+ first_delay.ms());
+ return RepeatingTaskHandle(std::move(alive_flag));
}
// Stops future invocations of the repeating task closure. Can only be called
@@ -127,15 +130,15 @@ class RepeatingTaskHandle {
// closure itself.
void Stop();
- // Returns true if Start() or DelayedStart() was called most recently. Returns
- // false initially and if Stop() or PostStop() was called most recently.
+ // Returns true until Stop() was called.
+ // Can only be called from the TaskQueue where the task is running.
bool Running() const;
private:
explicit RepeatingTaskHandle(
- webrtc_repeating_task_impl::RepeatingTaskBase* repeating_task);
- // Owned by the task queue.
- webrtc_repeating_task_impl::RepeatingTaskBase* repeating_task_ = nullptr;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> alive_flag)
+ : repeating_task_(std::move(alive_flag)) {}
+ rtc::scoped_refptr<PendingTaskSafetyFlag> repeating_task_;
};
} // namespace webrtc
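
A short sketch of the calling pattern, to make the reworked Stop() semantics concrete; `task_queue` and the intervals are illustrative:

    auto handle = webrtc::RepeatingTaskHandle::DelayedStart(
        task_queue, webrtc::TimeDelta::Millis(100), [] {
          // Periodic work; return the delay until the next run.
          return webrtc::TimeDelta::Millis(100);
        });
    // Later, on the same task queue: flips the safety flag, so the
    // TaskQueue-owned task drops itself on its next wakeup.
    handle.Stop();

Because the handle now holds only a PendingTaskSafetyFlag, Stop() remains safe even after the TaskQueue has already deleted the task object, which is what the new unit test below exercises.
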
diff --git a/rtc_base/task_utils/repeating_task_unittest.cc b/rtc_base/task_utils/repeating_task_unittest.cc
index 2fb15d1e5a..b23284f988 100644
--- a/rtc_base/task_utils/repeating_task_unittest.cc
+++ b/rtc_base/task_utils/repeating_task_unittest.cc
@@ -276,4 +276,22 @@ TEST(RepeatingTaskTest, ClockIntegration) {
handle.Stop();
}
+TEST(RepeatingTaskTest, CanBeStoppedAfterTaskQueueDeletedTheRepeatingTask) {
+ std::unique_ptr<QueuedTask> repeating_task;
+
+ MockTaskQueue task_queue;
+ EXPECT_CALL(task_queue, PostDelayedTask)
+ .WillOnce([&](std::unique_ptr<QueuedTask> task, uint32_t milliseconds) {
+ repeating_task = std::move(task);
+ });
+
+ RepeatingTaskHandle handle =
+ RepeatingTaskHandle::DelayedStart(&task_queue, TimeDelta::Millis(100),
+ [] { return TimeDelta::Millis(100); });
+
+ // Shut down the task queue: delete all pending tasks and run the 'regular' task.
+ repeating_task = nullptr;
+ handle.Stop();
+}
+
} // namespace webrtc
diff --git a/rtc_base/thread.cc b/rtc_base/thread.cc
index b0b0e52a07..8ca9ce76a8 100644
--- a/rtc_base/thread.cc
+++ b/rtc_base/thread.cc
@@ -429,13 +429,11 @@ void Thread::DoDestroy() {
// The signal is done from here to ensure
// that it always gets called when the queue
// is going away.
- SignalQueueDestroyed();
- ThreadManager::Remove(this);
- ClearInternal(nullptr, MQID_ANY, nullptr);
-
if (ss_) {
ss_->SetMessageQueue(nullptr);
}
+ ThreadManager::Remove(this);
+ ClearInternal(nullptr, MQID_ANY, nullptr);
}
SocketServer* Thread::socketserver() {
@@ -931,6 +929,7 @@ void Thread::Send(const Location& posted_from,
msg.pdata = pdata;
if (IsCurrent()) {
#if RTC_DCHECK_IS_ON
+ RTC_DCHECK(this->IsInvokeToThreadAllowed(this));
RTC_DCHECK_RUN_ON(this);
could_be_blocking_call_count_++;
#endif
diff --git a/rtc_base/thread.h b/rtc_base/thread.h
index e031677f1b..6e68f1a679 100644
--- a/rtc_base/thread.h
+++ b/rtc_base/thread.h
@@ -336,10 +336,6 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
}
}
- // When this signal is sent out, any references to this queue should
- // no longer be used.
- sigslot::signal0<> SignalQueueDestroyed;
-
bool IsCurrent() const;
// Sleeps the calling thread for the specified number of milliseconds, during
diff --git a/rtc_base/thread_unittest.cc b/rtc_base/thread_unittest.cc
index 2a24d6ca37..789bdd943e 100644
--- a/rtc_base/thread_unittest.cc
+++ b/rtc_base/thread_unittest.cc
@@ -521,7 +521,7 @@ TEST(ThreadTest, ThreeThreadsInvoke) {
// Asynchronously invoke SetAndInvokeSet on |thread1| and wait until
// |thread1| starts the call.
- static void AsyncInvokeSetAndWait(AsyncInvoker* invoker,
+ static void AsyncInvokeSetAndWait(DEPRECATED_AsyncInvoker* invoker,
Thread* thread1,
Thread* thread2,
LockedBool* out) {
@@ -536,7 +536,7 @@ TEST(ThreadTest, ThreeThreadsInvoke) {
}
};
- AsyncInvoker invoker;
+ DEPRECATED_AsyncInvoker invoker;
LockedBool thread_a_called(false);
// Start the sequence A --(invoke)--> B --(async invoke)--> C --(invoke)--> A.
@@ -553,36 +553,6 @@ TEST(ThreadTest, ThreeThreadsInvoke) {
EXPECT_TRUE_WAIT(thread_a_called.Get(), 2000);
}
-// Set the name on a thread when the underlying QueueDestroyed signal is
-// triggered. This causes an error if the object is already partially
-// destroyed.
-class SetNameOnSignalQueueDestroyedTester : public sigslot::has_slots<> {
- public:
- SetNameOnSignalQueueDestroyedTester(Thread* thread) : thread_(thread) {
- thread->SignalQueueDestroyed.connect(
- this, &SetNameOnSignalQueueDestroyedTester::OnQueueDestroyed);
- }
-
- void OnQueueDestroyed() {
- // Makes sure that if we access the Thread while it's being destroyed, that
- // it doesn't cause a problem because the vtable has been modified.
- thread_->SetName("foo", nullptr);
- }
-
- private:
- Thread* thread_;
-};
-
-TEST(ThreadTest, SetNameOnSignalQueueDestroyed) {
- auto thread1 = Thread::CreateWithSocketServer();
- SetNameOnSignalQueueDestroyedTester tester1(thread1.get());
- thread1.reset();
-
- Thread* thread2 = new AutoThread();
- SetNameOnSignalQueueDestroyedTester tester2(thread2);
- delete thread2;
-}
-
class ThreadQueueTest : public ::testing::Test, public Thread {
public:
ThreadQueueTest() : Thread(CreateDefaultSocketServer(), true) {}
@@ -791,7 +761,7 @@ class AsyncInvokeTest : public ::testing::Test {
};
TEST_F(AsyncInvokeTest, FireAndForget) {
- AsyncInvoker invoker;
+ DEPRECATED_AsyncInvoker invoker;
// Create and start the thread.
auto thread = Thread::CreateWithSocketServer();
thread->Start();
@@ -803,7 +773,7 @@ TEST_F(AsyncInvokeTest, FireAndForget) {
}
TEST_F(AsyncInvokeTest, NonCopyableFunctor) {
- AsyncInvoker invoker;
+ DEPRECATED_AsyncInvoker invoker;
// Create and start the thread.
auto thread = Thread::CreateWithSocketServer();
thread->Start();
@@ -834,7 +804,7 @@ TEST_F(AsyncInvokeTest, KillInvokerDuringExecute) {
EXPECT_FALSE(invoker_destroyed);
functor_finished.Set();
};
- AsyncInvoker invoker;
+ DEPRECATED_AsyncInvoker invoker;
invoker.AsyncInvoke<void>(RTC_FROM_HERE, thread.get(), functor);
functor_started.Wait(Event::kForever);
@@ -863,7 +833,7 @@ TEST_F(AsyncInvokeTest, KillInvokerDuringExecuteWithReentrantInvoke) {
Thread thread(std::make_unique<NullSocketServer>());
thread.Start();
{
- AsyncInvoker invoker;
+ DEPRECATED_AsyncInvoker invoker;
auto reentrant_functor = [&reentrant_functor_run] {
reentrant_functor_run = true;
};
@@ -882,7 +852,7 @@ TEST_F(AsyncInvokeTest, KillInvokerDuringExecuteWithReentrantInvoke) {
}
TEST_F(AsyncInvokeTest, Flush) {
- AsyncInvoker invoker;
+ DEPRECATED_AsyncInvoker invoker;
AtomicBool flag1;
AtomicBool flag2;
// Queue two async calls to the current thread.
@@ -898,7 +868,7 @@ TEST_F(AsyncInvokeTest, Flush) {
}
TEST_F(AsyncInvokeTest, FlushWithIds) {
- AsyncInvoker invoker;
+ DEPRECATED_AsyncInvoker invoker;
AtomicBool flag1;
AtomicBool flag2;
// Queue two async calls to the current thread, one with a message id.
diff --git a/rtc_base/virtual_socket_server.cc b/rtc_base/virtual_socket_server.cc
index 80d7f3c047..f5e993645e 100644
--- a/rtc_base/virtual_socket_server.cc
+++ b/rtc_base/virtual_socket_server.cc
@@ -19,7 +19,6 @@
#include "absl/algorithm/container.h"
#include "rtc_base/checks.h"
-#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/logging.h"
#include "rtc_base/physical_socket_server.h"
@@ -164,6 +163,8 @@ int VirtualSocket::Close() {
}
if (SOCK_STREAM == type_) {
+ webrtc::MutexLock lock(&mutex_);
+
// Cancel pending sockets
if (listen_queue_) {
while (!listen_queue_->empty()) {
@@ -173,7 +174,6 @@ int VirtualSocket::Close() {
server_->Disconnect(addr);
listen_queue_->pop_front();
}
- delete listen_queue_;
listen_queue_ = nullptr;
}
// Disconnect stream sockets
@@ -231,6 +231,8 @@ int VirtualSocket::RecvFrom(void* pv,
if (timestamp) {
*timestamp = -1;
}
+
+ webrtc::MutexLock lock(&mutex_);
// If we don't have a packet, then either error or wait for one to arrive.
if (recv_buffer_.empty()) {
if (async_) {
@@ -273,6 +275,7 @@ int VirtualSocket::RecvFrom(void* pv,
}
int VirtualSocket::Listen(int backlog) {
+ webrtc::MutexLock lock(&mutex_);
RTC_DCHECK(SOCK_STREAM == type_);
RTC_DCHECK(CS_CLOSED == state_);
if (local_addr_.IsNil()) {
@@ -280,12 +283,13 @@ int VirtualSocket::Listen(int backlog) {
return -1;
}
RTC_DCHECK(nullptr == listen_queue_);
- listen_queue_ = new ListenQueue;
+ listen_queue_ = std::make_unique<ListenQueue>();
state_ = CS_CONNECTING;
return 0;
}
VirtualSocket* VirtualSocket::Accept(SocketAddress* paddr) {
+ webrtc::MutexLock lock(&mutex_);
if (nullptr == listen_queue_) {
error_ = EINVAL;
return nullptr;
@@ -304,7 +308,7 @@ VirtualSocket* VirtualSocket::Accept(SocketAddress* paddr) {
delete socket;
continue;
}
- socket->CompleteConnect(remote_addr, false);
+ socket->CompleteConnect(remote_addr);
if (paddr) {
*paddr = remote_addr;
}
@@ -341,47 +345,57 @@ int VirtualSocket::SetOption(Option opt, int value) {
}
void VirtualSocket::OnMessage(Message* pmsg) {
- if (pmsg->message_id == MSG_ID_PACKET) {
- RTC_DCHECK(nullptr != pmsg->pdata);
- Packet* packet = static_cast<Packet*>(pmsg->pdata);
-
- recv_buffer_.push_back(packet);
-
- if (async_) {
- SignalReadEvent(this);
- }
- } else if (pmsg->message_id == MSG_ID_CONNECT) {
- RTC_DCHECK(nullptr != pmsg->pdata);
- MessageAddress* data = static_cast<MessageAddress*>(pmsg->pdata);
- if (listen_queue_ != nullptr) {
- listen_queue_->push_back(data->addr);
- if (async_) {
- SignalReadEvent(this);
+ bool signal_read_event = false;
+ bool signal_close_event = false;
+ bool signal_connect_event = false;
+ int error_to_signal = 0;
+ {
+ webrtc::MutexLock lock(&mutex_);
+ if (pmsg->message_id == MSG_ID_PACKET) {
+ RTC_DCHECK(nullptr != pmsg->pdata);
+ Packet* packet = static_cast<Packet*>(pmsg->pdata);
+
+ recv_buffer_.push_back(packet);
+ signal_read_event = async_;
+ } else if (pmsg->message_id == MSG_ID_CONNECT) {
+ RTC_DCHECK(nullptr != pmsg->pdata);
+ MessageAddress* data = static_cast<MessageAddress*>(pmsg->pdata);
+ if (listen_queue_ != nullptr) {
+ listen_queue_->push_back(data->addr);
+ signal_read_event = async_;
+ } else if ((SOCK_STREAM == type_) && (CS_CONNECTING == state_)) {
+ CompleteConnect(data->addr);
+ signal_connect_event = async_;
+ } else {
+ RTC_LOG(LS_VERBOSE)
+ << "Socket at " << local_addr_.ToString() << " is not listening";
+ server_->Disconnect(data->addr);
}
- } else if ((SOCK_STREAM == type_) && (CS_CONNECTING == state_)) {
- CompleteConnect(data->addr, true);
- } else {
- RTC_LOG(LS_VERBOSE) << "Socket at " << local_addr_.ToString()
- << " is not listening";
- server_->Disconnect(data->addr);
- }
- delete data;
- } else if (pmsg->message_id == MSG_ID_DISCONNECT) {
- RTC_DCHECK(SOCK_STREAM == type_);
- if (CS_CLOSED != state_) {
- int error = (CS_CONNECTING == state_) ? ECONNREFUSED : 0;
- state_ = CS_CLOSED;
- remote_addr_.Clear();
- if (async_) {
- SignalCloseEvent(this, error);
+ delete data;
+ } else if (pmsg->message_id == MSG_ID_DISCONNECT) {
+ RTC_DCHECK(SOCK_STREAM == type_);
+ if (CS_CLOSED != state_) {
+ error_to_signal = (CS_CONNECTING == state_) ? ECONNREFUSED : 0;
+ state_ = CS_CLOSED;
+ remote_addr_.Clear();
+ signal_close_event = async_;
}
+ } else if (pmsg->message_id == MSG_ID_SIGNALREADEVENT) {
+ signal_read_event = !recv_buffer_.empty();
+ } else {
+ RTC_NOTREACHED();
}
- } else if (pmsg->message_id == MSG_ID_SIGNALREADEVENT) {
- if (!recv_buffer_.empty()) {
- SignalReadEvent(this);
- }
- } else {
- RTC_NOTREACHED();
+ }
+ // Signal events without holding `mutex_`, to avoid recursive locking, as well
+ // as issues with sigslot and lock order.
+ if (signal_read_event) {
+ SignalReadEvent(this);
+ }
+ if (signal_close_event) {
+ SignalCloseEvent(this, error_to_signal);
+ }
+ if (signal_connect_event) {
+ SignalConnectEvent(this);
}
}
@@ -416,14 +430,11 @@ int VirtualSocket::InitiateConnect(const SocketAddress& addr, bool use_delay) {
return 0;
}
-void VirtualSocket::CompleteConnect(const SocketAddress& addr, bool notify) {
+void VirtualSocket::CompleteConnect(const SocketAddress& addr) {
RTC_DCHECK(CS_CONNECTING == state_);
remote_addr_ = addr;
state_ = CS_CONNECTED;
server_->AddConnection(remote_addr_, local_addr_, this);
- if (async_ && notify) {
- SignalConnectEvent(this);
- }
}
int VirtualSocket::SendUdp(const void* pv,
@@ -475,7 +486,7 @@ void VirtualSocket::OnSocketServerReadyToSend() {
}
void VirtualSocket::SetToBlocked() {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
ready_to_send_ = false;
error_ = EWOULDBLOCK;
}
@@ -525,7 +536,7 @@ int64_t VirtualSocket::UpdateOrderedDelivery(int64_t ts) {
}
size_t VirtualSocket::PurgeNetworkPackets(int64_t cur_time) {
- CritScope cs(&crit_);
+ webrtc::MutexLock lock(&mutex_);
while (!network_.empty() && (network_.front().done_time <= cur_time)) {
RTC_DCHECK(network_size_ >= network_.front().size);
@@ -613,10 +624,6 @@ VirtualSocket* VirtualSocketServer::CreateSocketInternal(int family, int type) {
void VirtualSocketServer::SetMessageQueue(Thread* msg_queue) {
msg_queue_ = msg_queue;
- if (msg_queue_) {
- msg_queue_->SignalQueueDestroyed.connect(
- this, &VirtualSocketServer::OnMessageQueueDestroyed);
- }
}
bool VirtualSocketServer::Wait(int cmsWait, bool process_io) {
diff --git a/rtc_base/virtual_socket_server.h b/rtc_base/virtual_socket_server.h
index 54de578793..6c58a4bdfe 100644
--- a/rtc_base/virtual_socket_server.h
+++ b/rtc_base/virtual_socket_server.h
@@ -17,11 +17,11 @@
#include "rtc_base/checks.h"
#include "rtc_base/constructor_magic.h"
-#include "rtc_base/deprecated/recursive_critical_section.h"
#include "rtc_base/event.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/socket_server.h"
+#include "rtc_base/synchronization/mutex.h"
namespace rtc {
@@ -33,7 +33,7 @@ class SocketAddressPair;
// interface can create as many addresses as you want. All of the sockets
// created by this network will be able to communicate with one another, unless
// they are bound to addresses from incompatible families.
-class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
+class VirtualSocketServer : public SocketServer {
public:
VirtualSocketServer();
// This constructor needs to be used if the test uses a fake clock and
@@ -259,11 +259,6 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
uint32_t samples);
static double Evaluate(const Function* f, double x);
- // Null out our message queue if it goes away. Necessary in the case where
- // our lifetime is greater than that of the thread we are using, since we
- // try to send Close messages for all connected sockets when we shutdown.
- void OnMessageQueueDestroyed() { msg_queue_ = nullptr; }
-
// Determine if two sockets should be able to communicate.
// We don't (currently) specify an address family for sockets; instead,
// the currently bound address is used to infer the address family.
@@ -399,22 +394,23 @@ class VirtualSocket : public AsyncSocket,
typedef std::map<Option, int> OptionsMap;
int InitiateConnect(const SocketAddress& addr, bool use_delay);
- void CompleteConnect(const SocketAddress& addr, bool notify);
+ void CompleteConnect(const SocketAddress& addr);
int SendUdp(const void* pv, size_t cb, const SocketAddress& addr);
int SendTcp(const void* pv, size_t cb);
void OnSocketServerReadyToSend();
- VirtualSocketServer* server_;
- int type_;
- bool async_;
+ VirtualSocketServer* const server_;
+ const int type_;
+ const bool async_;
ConnState state_;
int error_;
SocketAddress local_addr_;
SocketAddress remote_addr_;
// Pending sockets which can be Accepted
- ListenQueue* listen_queue_;
+ std::unique_ptr<ListenQueue> listen_queue_ RTC_GUARDED_BY(mutex_)
+ RTC_PT_GUARDED_BY(mutex_);
// Data which tcp has buffered for sending
SendBuffer send_buffer_;
@@ -422,8 +418,8 @@ class VirtualSocket : public AsyncSocket,
// Set back to true when the socket can send again.
bool ready_to_send_ = true;
- // Critical section to protect the recv_buffer and queue_
- RecursiveCriticalSection crit_;
+ // Mutex to protect the recv_buffer and listen_queue_
+ webrtc::Mutex mutex_;
// Network model that enforces bandwidth and capacity constraints
NetworkQueue network_;
@@ -433,7 +429,7 @@ class VirtualSocket : public AsyncSocket,
int64_t last_delivery_time_ = 0;
// Data which has been received from the network
- RecvBuffer recv_buffer_;
+ RecvBuffer recv_buffer_ RTC_GUARDED_BY(mutex_);
// The amount of data which is in flight or in recv_buffer_
size_t recv_buffer_size_;
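Replacing the RecursiveCriticalSection with a plain webrtc::Mutex forces VirtualSocket to decide what to signal while the lock is held and to fire the sigslot signals only after releasing it, as the rewritten OnMessage above does. A generic sketch of that pattern, with a hypothetical Receiver class and a std::function standing in for the sigslot signals:

#include <cstdint>
#include <functional>
#include <utility>
#include <vector>

#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

class Receiver {
 public:
  explicit Receiver(std::function<void()> on_read)
      : on_read_(std::move(on_read)) {}

  void OnPacket(std::vector<uint8_t> packet) {
    bool signal_read = false;
    {
      webrtc::MutexLock lock(&mutex_);
      buffer_.push_back(std::move(packet));
      signal_read = true;  // Record the decision under the lock...
    }
    if (signal_read) {
      on_read_();  // ...and notify with the mutex released, so a re-entrant
                   // call back into this object cannot deadlock.
    }
  }

 private:
  webrtc::Mutex mutex_;
  std::vector<std::vector<uint8_t>> buffer_ RTC_GUARDED_BY(mutex_);
  const std::function<void()> on_read_;
};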
diff --git a/rtc_tools/BUILD.gn b/rtc_tools/BUILD.gn
index 202095789c..b841228a8e 100644
--- a/rtc_tools/BUILD.gn
+++ b/rtc_tools/BUILD.gn
@@ -243,6 +243,7 @@ if (!is_component_build) {
"../call:call_interfaces",
"../common_video",
"../media:rtc_internal_video_codecs",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
"../modules/video_coding:video_coding_utility",
"../rtc_base:checks",
"../rtc_base:rtc_json",
@@ -397,11 +398,13 @@ if (!build_with_chromium) {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_numerics",
"../rtc_base:stringutils",
+ "../system_wrappers",
"../test:explicit_key_value_config",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/functional:bind_front",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/rtc_tools/frame_analyzer/video_geometry_aligner.cc b/rtc_tools/frame_analyzer/video_geometry_aligner.cc
index db397bc3a5..88da26d4d0 100644
--- a/rtc_tools/frame_analyzer/video_geometry_aligner.cc
+++ b/rtc_tools/frame_analyzer/video_geometry_aligner.cc
@@ -61,7 +61,7 @@ rtc::scoped_refptr<I420BufferInterface> CropAndZoom(
adjusted_frame->MutableDataY(), adjusted_frame->StrideY(),
adjusted_frame->MutableDataU(), adjusted_frame->StrideU(),
adjusted_frame->MutableDataV(), adjusted_frame->StrideV(),
- frame->width(), frame->height(), libyuv::kFilterBilinear);
+ frame->width(), frame->height(), libyuv::kFilterBox);
return adjusted_frame;
}
diff --git a/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
index becc0044ab..51cc3b9245 100644
--- a/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
+++ b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
@@ -314,7 +314,7 @@ std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun(
std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
- new rtc::RefCountedObject<ReplacementAudioDecoderFactory>(
+ rtc::make_ref_counted<ReplacementAudioDecoderFactory>(
replacement_file_name, file_sample_rate_hz);
test::NetEqTest::DecoderMap codecs = {
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer.cc b/rtc_tools/rtc_event_log_visualizer/analyzer.cc
index d6acda9ec3..0f727f2815 100644
--- a/rtc_tools/rtc_event_log_visualizer/analyzer.cc
+++ b/rtc_tools/rtc_event_log_visualizer/analyzer.cc
@@ -19,6 +19,7 @@
#include <utility>
#include "absl/algorithm/container.h"
+#include "absl/functional/bind_front.h"
#include "absl/strings/string_view.h"
#include "api/function_view.h"
#include "api/network_state_predictor.h"
@@ -1266,7 +1267,7 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
const RtpPacketType& rtp_packet = *rtp_iterator->second;
if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) {
RtpPacketSendInfo packet_info;
- packet_info.ssrc = rtp_packet.rtp.header.ssrc;
+ packet_info.media_ssrc = rtp_packet.rtp.header.ssrc;
packet_info.transport_sequence_number =
rtp_packet.rtp.header.extension.transportSequenceNumber;
packet_info.rtp_sequence_number = rtp_packet.rtp.header.sequenceNumber;
@@ -1367,13 +1368,11 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
void EventLogAnalyzer::CreateReceiveSideBweSimulationGraph(Plot* plot) {
using RtpPacketType = LoggedRtpPacketIncoming;
- class RembInterceptingPacketRouter : public PacketRouter {
+ class RembInterceptor {
public:
- void OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
- uint32_t bitrate_bps) override {
+ void SendRemb(uint32_t bitrate_bps, std::vector<uint32_t> ssrcs) {
last_bitrate_bps_ = bitrate_bps;
bitrate_updated_ = true;
- PacketRouter::OnReceiveBitrateChanged(ssrcs, bitrate_bps);
}
uint32_t last_bitrate_bps() const { return last_bitrate_bps_; }
bool GetAndResetBitrateUpdated() {
@@ -1400,10 +1399,10 @@ void EventLogAnalyzer::CreateReceiveSideBweSimulationGraph(Plot* plot) {
}
SimulatedClock clock(0);
- RembInterceptingPacketRouter packet_router;
- // TODO(terelius): The PacketRouter is used as the RemoteBitrateObserver.
- // Is this intentional?
- ReceiveSideCongestionController rscc(&clock, &packet_router);
+ RembInterceptor remb_interceptor;
+ ReceiveSideCongestionController rscc(
+ &clock, [](auto...) {},
+ absl::bind_front(&RembInterceptor::SendRemb, &remb_interceptor), nullptr);
// TODO(holmer): Log the call config and use that here instead.
// static const uint32_t kDefaultStartBitrateBps = 300000;
// rscc.SetBweBitrates(0, kDefaultStartBitrateBps, -1);
@@ -1428,9 +1427,9 @@ void EventLogAnalyzer::CreateReceiveSideBweSimulationGraph(Plot* plot) {
float x = config_.GetCallTimeSec(clock.TimeInMicroseconds());
acked_time_series.points.emplace_back(x, y);
}
- if (packet_router.GetAndResetBitrateUpdated() ||
+ if (remb_interceptor.GetAndResetBitrateUpdated() ||
clock.TimeInMicroseconds() - last_update_us >= 1e6) {
- uint32_t y = packet_router.last_bitrate_bps() / 1000;
+ uint32_t y = remb_interceptor.last_bitrate_bps() / 1000;
float x = config_.GetCallTimeSec(clock.TimeInMicroseconds());
time_series.points.emplace_back(x, y);
last_update_us = clock.TimeInMicroseconds();
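Instead of subclassing PacketRouter, the analyzer now passes plain callbacks into ReceiveSideCongestionController and uses absl::bind_front to adapt RembInterceptor::SendRemb into one of them. A small stand-alone illustration of absl::bind_front with a hypothetical Printer type, unrelated to the analyzer code itself:

#include <cstdint>
#include <iostream>
#include <vector>

#include "absl/functional/bind_front.h"

struct Printer {
  void SendRemb(uint32_t bitrate_bps, std::vector<uint32_t> ssrcs) {
    std::cout << "REMB " << bitrate_bps << " bps covering " << ssrcs.size()
              << " ssrc(s)\n";
  }
};

int main() {
  Printer printer;
  // Binds the member function to `&printer`; equivalent to a lambda that
  // captures the pointer and forwards both arguments.
  auto send_remb = absl::bind_front(&Printer::SendRemb, &printer);
  send_remb(300000, std::vector<uint32_t>{0x1234u});
  return 0;
}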
diff --git a/rtc_tools/rtc_event_log_visualizer/log_simulation.cc b/rtc_tools/rtc_event_log_visualizer/log_simulation.cc
index 0e5b5d04a9..c0b418de4b 100644
--- a/rtc_tools/rtc_event_log_visualizer/log_simulation.cc
+++ b/rtc_tools/rtc_event_log_visualizer/log_simulation.cc
@@ -14,6 +14,7 @@
#include "logging/rtc_event_log/rtc_event_processor.h"
#include "modules/rtp_rtcp/source/time_util.h"
+#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -83,7 +84,7 @@ void LogBasedNetworkControllerSimulation::OnPacketSent(
}
RtpPacketSendInfo packet_info;
- packet_info.ssrc = packet.ssrc;
+ packet_info.media_ssrc = packet.ssrc;
packet_info.transport_sequence_number = packet.transport_seq_no;
packet_info.rtp_sequence_number = packet.stream_seq_no;
packet_info.length = packet.size;
@@ -142,11 +143,13 @@ void LogBasedNetworkControllerSimulation::OnReceiverReport(
HandleStateUpdate(controller_->OnTransportLossReport(msg));
}
+ Clock* clock = Clock::GetRealTimeClock();
TimeDelta rtt = TimeDelta::PlusInfinity();
for (auto& rb : report.rr.report_blocks()) {
if (rb.last_sr()) {
+ Timestamp report_log_time = Timestamp::Micros(report.log_time_us());
uint32_t receive_time_ntp =
- CompactNtp(TimeMicrosToNtp(report.log_time_us()));
+ CompactNtp(clock->ConvertTimestampToNtpTime(report_log_time));
uint32_t rtt_ntp =
receive_time_ntp - rb.delay_since_last_sr() - rb.last_sr();
rtt = std::min(rtt, TimeDelta::Millis(CompactNtpRttToMs(rtt_ntp)));
diff --git a/rtc_tools/rtp_generator/rtp_generator.cc b/rtc_tools/rtp_generator/rtp_generator.cc
index 21826c8dff..3180897ec5 100644
--- a/rtc_tools/rtp_generator/rtp_generator.cc
+++ b/rtc_tools/rtp_generator/rtp_generator.cc
@@ -188,15 +188,17 @@ RtpGenerator::RtpGenerator(const RtpGeneratorOptions& options)
PayloadStringToCodecType(video_config.rtp.payload_name);
if (video_config.rtp.payload_name == cricket::kVp8CodecName) {
VideoCodecVP8 settings = VideoEncoder::GetDefaultVp8Settings();
- encoder_config.encoder_specific_settings = new rtc::RefCountedObject<
- VideoEncoderConfig::Vp8EncoderSpecificSettings>(settings);
+ encoder_config.encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+ settings);
} else if (video_config.rtp.payload_name == cricket::kVp9CodecName) {
VideoCodecVP9 settings = VideoEncoder::GetDefaultVp9Settings();
- encoder_config.encoder_specific_settings = new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(settings);
+ encoder_config.encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ settings);
} else if (video_config.rtp.payload_name == cricket::kH264CodecName) {
VideoCodecH264 settings = VideoEncoder::GetDefaultH264Settings();
- encoder_config.encoder_specific_settings = new rtc::RefCountedObject<
+ encoder_config.encoder_specific_settings = rtc::make_ref_counted<
VideoEncoderConfig::H264EncoderSpecificSettings>(settings);
}
encoder_config.video_format.name = video_config.rtp.payload_name;
@@ -217,7 +219,7 @@ RtpGenerator::RtpGenerator(const RtpGeneratorOptions& options)
}
encoder_config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
video_config.rtp.payload_name, /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
diff --git a/rtc_tools/video_file_reader.cc b/rtc_tools/video_file_reader.cc
index b01fc0fcdd..bfdcba45fa 100644
--- a/rtc_tools/video_file_reader.cc
+++ b/rtc_tools/video_file_reader.cc
@@ -224,8 +224,8 @@ rtc::scoped_refptr<Video> OpenY4mFile(const std::string& file_name) {
}
RTC_LOG(LS_INFO) << "Video has " << frame_positions.size() << " frames";
- return new rtc::RefCountedObject<VideoFile>(*width, *height, frame_positions,
- file);
+ return rtc::make_ref_counted<VideoFile>(*width, *height, frame_positions,
+ file);
}
rtc::scoped_refptr<Video> OpenYuvFile(const std::string& file_name,
@@ -266,8 +266,7 @@ rtc::scoped_refptr<Video> OpenYuvFile(const std::string& file_name,
}
RTC_LOG(LS_INFO) << "Video has " << frame_positions.size() << " frames";
- return new rtc::RefCountedObject<VideoFile>(width, height, frame_positions,
- file);
+ return rtc::make_ref_counted<VideoFile>(width, height, frame_positions, file);
}
rtc::scoped_refptr<Video> OpenYuvOrY4mFile(const std::string& file_name,
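Several hunks in this change (here and in the SDK files below) replace `new rtc::RefCountedObject<T>(args)` with rtc::make_ref_counted<T>(args), which returns an rtc::scoped_refptr<T> directly. A minimal before/after sketch with a hypothetical Foo type; in this revision the helper is assumed to come from rtc_base/ref_counted_object.h:

#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counted_object.h"

class Foo : public rtc::RefCountInterface {
 public:
  explicit Foo(int value) : value_(value) {}
  int value() const { return value_; }

 private:
  const int value_;
};

void Example() {
  // Old spelling: construct the RefCountedObject wrapper explicitly.
  rtc::scoped_refptr<Foo> a(new rtc::RefCountedObject<Foo>(42));
  // New spelling used throughout this change: same object, less boilerplate.
  rtc::scoped_refptr<Foo> b = rtc::make_ref_counted<Foo>(42);
}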
diff --git a/rtc_tools/video_replay.cc b/rtc_tools/video_replay.cc
index 1faad28ff1..d96c7be410 100644
--- a/rtc_tools/video_replay.cc
+++ b/rtc_tools/video_replay.cc
@@ -25,6 +25,7 @@
#include "call/call.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/engine/internal_decoder_factory.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "modules/video_coding/utility/ivf_file_writer.h"
#include "rtc_base/checks.h"
#include "rtc_base/string_to_number.h"
@@ -39,7 +40,6 @@
#include "test/gtest.h"
#include "test/null_transport.h"
#include "test/rtp_file_reader.h"
-#include "test/rtp_header_parser.h"
#include "test/run_loop.h"
#include "test/run_test.h"
#include "test/test_video_capturer.h"
@@ -119,6 +119,18 @@ ABSL_FLAG(std::string, decoder_ivf_filename, "", "Decoder ivf output file");
// Flag for video codec.
ABSL_FLAG(std::string, codec, "VP8", "Video codec");
+// Flags for rtp start and stop timestamp.
+ABSL_FLAG(uint32_t,
+ start_timestamp,
+ 0,
+          "RTP start timestamp; packets with a smaller timestamp are ignored "
+ "(no wraparound)");
+ABSL_FLAG(uint32_t,
+ stop_timestamp,
+ 4294967295,
+          "RTP stop timestamp; packets with a larger timestamp are ignored "
+ "(no wraparound)");
+
namespace {
static bool ValidatePayloadType(int32_t payload_type) {
@@ -520,6 +532,8 @@ class RtpReplayer final {
int num_packets = 0;
std::map<uint32_t, int> unknown_packets;
rtc::Event event(/*manual_reset=*/false, /*initially_signalled=*/false);
+ uint32_t start_timestamp = absl::GetFlag(FLAGS_start_timestamp);
+ uint32_t stop_timestamp = absl::GetFlag(FLAGS_stop_timestamp);
while (true) {
int64_t now_ms = rtc::TimeMillis();
if (replay_start_ms == -1) {
@@ -530,6 +544,13 @@ class RtpReplayer final {
if (!rtp_reader->NextPacket(&packet)) {
break;
}
+ rtc::CopyOnWriteBuffer packet_buffer(packet.data, packet.length);
+ RtpPacket header;
+ header.Parse(packet_buffer);
+ if (header.Timestamp() < start_timestamp ||
+ header.Timestamp() > stop_timestamp) {
+ continue;
+ }
int64_t deliver_in_ms = replay_start_ms + packet.time_ms - now_ms;
if (deliver_in_ms > 0) {
@@ -539,10 +560,9 @@ class RtpReplayer final {
++num_packets;
PacketReceiver::DeliveryStatus result = PacketReceiver::DELIVERY_OK;
worker_thread->PostTask(ToQueuedTask([&]() {
- result = call->Receiver()->DeliverPacket(
- webrtc::MediaType::VIDEO,
- rtc::CopyOnWriteBuffer(packet.data, packet.length),
- /* packet_time_us */ -1);
+ result = call->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
+ std::move(packet_buffer),
+ /* packet_time_us */ -1);
event.Set();
}));
event.Wait(/*give_up_after_ms=*/10000);
@@ -550,25 +570,17 @@ class RtpReplayer final {
case PacketReceiver::DELIVERY_OK:
break;
case PacketReceiver::DELIVERY_UNKNOWN_SSRC: {
- RTPHeader header;
- std::unique_ptr<RtpHeaderParser> parser(
- RtpHeaderParser::CreateForTest());
- parser->Parse(packet.data, packet.length, &header);
- if (unknown_packets[header.ssrc] == 0)
- fprintf(stderr, "Unknown SSRC: %u!\n", header.ssrc);
- ++unknown_packets[header.ssrc];
+ if (unknown_packets[header.Ssrc()] == 0)
+ fprintf(stderr, "Unknown SSRC: %u!\n", header.Ssrc());
+ ++unknown_packets[header.Ssrc()];
break;
}
case PacketReceiver::DELIVERY_PACKET_ERROR: {
fprintf(stderr,
"Packet error, corrupt packets or incorrect setup?\n");
- RTPHeader header;
- std::unique_ptr<RtpHeaderParser> parser(
- RtpHeaderParser::CreateForTest());
- parser->Parse(packet.data, packet.length, &header);
fprintf(stderr, "Packet len=%zu pt=%u seq=%u ts=%u ssrc=0x%8x\n",
- packet.length, header.payloadType, header.sequenceNumber,
- header.timestamp, header.ssrc);
+ packet.length, header.PayloadType(), header.SequenceNumber(),
+ header.Timestamp(), header.Ssrc());
break;
}
}
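The replay tool now parses each packet once with webrtc::RtpPacket and skips packets whose RTP timestamp falls outside the new --start_timestamp/--stop_timestamp flags. A condensed sketch of that filter, assuming modules/rtp_rtcp/source/rtp_packet.h; the helper name is illustrative:

#include <cstdint>

#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "rtc_base/copy_on_write_buffer.h"

// Returns true if `packet_buffer` parses as RTP and its timestamp lies inside
// [start_timestamp, stop_timestamp]. Mirrors the check added to video_replay;
// unlike the tool, it also rejects packets that fail to parse.
bool InTimestampRange(const rtc::CopyOnWriteBuffer& packet_buffer,
                      uint32_t start_timestamp,
                      uint32_t stop_timestamp) {
  webrtc::RtpPacket header;
  if (!header.Parse(packet_buffer)) {
    return false;
  }
  return header.Timestamp() >= start_timestamp &&
         header.Timestamp() <= stop_timestamp;
}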
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index e01ab97a6e..e7d186aa4b 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -390,6 +390,7 @@ if (is_ios || is_mac) {
"../api/video:video_rtp_headers",
"../common_video",
"../media:rtc_media_base",
+ "../pc:rtc_pc_base",
"../rtc_base",
"../rtc_base:checks",
"../rtc_base:threading",
@@ -487,7 +488,10 @@ if (is_ios || is_mac) {
rtc_library("ui_objc") {
visibility = [ "*" ]
- allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "default_task_queue",
+ ]
if (is_ios) {
sources = [
"objc/components/renderer/opengl/RTCDisplayLinkTimer.h",
@@ -506,61 +510,60 @@ if (is_ios || is_mac) {
deps = [
":base_objc",
":helpers_objc",
+ ":metal_objc",
":video_objc",
":videocapture_objc",
":videoframebuffer_objc",
]
}
- if (rtc_use_metal_rendering) {
- rtc_library("metal_objc") {
- visibility = [ "*" ]
- allow_poison = [
- "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
- "default_task_queue",
- ]
- sources = [
- "objc/components/renderer/metal/RTCMTLI420Renderer.h",
- "objc/components/renderer/metal/RTCMTLI420Renderer.mm",
- "objc/components/renderer/metal/RTCMTLRenderer+Private.h",
- "objc/components/renderer/metal/RTCMTLRenderer.h",
- "objc/components/renderer/metal/RTCMTLRenderer.mm",
- ]
- if (is_ios) {
- sources += [
- "objc/components/renderer/metal/RTCMTLNV12Renderer.h",
- "objc/components/renderer/metal/RTCMTLNV12Renderer.mm",
- "objc/components/renderer/metal/RTCMTLRGBRenderer.h",
- "objc/components/renderer/metal/RTCMTLRGBRenderer.mm",
- "objc/components/renderer/metal/RTCMTLVideoView.h",
- "objc/components/renderer/metal/RTCMTLVideoView.m",
- ]
- }
- frameworks = [
- "CoreVideo.framework",
- "Metal.framework",
- "MetalKit.framework",
+ rtc_library("metal_objc") {
+ visibility = [ "*" ]
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "default_task_queue",
+ ]
+ sources = [
+ "objc/components/renderer/metal/RTCMTLI420Renderer.h",
+ "objc/components/renderer/metal/RTCMTLI420Renderer.mm",
+ "objc/components/renderer/metal/RTCMTLNV12Renderer.h",
+ "objc/components/renderer/metal/RTCMTLNV12Renderer.mm",
+ "objc/components/renderer/metal/RTCMTLRGBRenderer.h",
+ "objc/components/renderer/metal/RTCMTLRGBRenderer.mm",
+ "objc/components/renderer/metal/RTCMTLRenderer+Private.h",
+ "objc/components/renderer/metal/RTCMTLRenderer.h",
+ "objc/components/renderer/metal/RTCMTLRenderer.mm",
+ ]
+ frameworks = [
+ "CoreVideo.framework",
+ "Metal.framework",
+ "MetalKit.framework",
+ ]
+ if (is_ios) {
+ sources += [
+ "objc/components/renderer/metal/RTCMTLVideoView.h",
+ "objc/components/renderer/metal/RTCMTLVideoView.m",
]
- if (is_mac) {
- sources += [
- "objc/components/renderer/metal/RTCMTLNSVideoView.h",
- "objc/components/renderer/metal/RTCMTLNSVideoView.m",
- ]
- frameworks += [ "AppKit.framework" ]
- }
- deps = [
- ":base_objc",
- ":peerconnectionfactory_base_objc",
- ":video_objc",
- ":videoframebuffer_objc",
- "../api/video:video_frame",
- "../api/video:video_rtp_headers",
- "../rtc_base:checks",
- "../rtc_base:rtc_base_approved",
+ }
+ if (is_mac) {
+ sources += [
+ "objc/components/renderer/metal/RTCMTLNSVideoView.h",
+ "objc/components/renderer/metal/RTCMTLNSVideoView.m",
]
- configs += [ "..:common_objc" ]
- public_configs = [ ":common_config_objc" ]
+ frameworks += [ "AppKit.framework" ]
}
+ deps = [
+ ":base_objc",
+ ":peerconnectionfactory_base_objc",
+ ":video_objc",
+ ":videoframebuffer_objc",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../rtc_base:checks",
+ "../rtc_base:rtc_base_approved",
+ ]
+ configs += [ "..:common_objc" ]
+ public_configs = [ ":common_config_objc" ]
}
# TODO(bugs.webrtc.org/9627): Remove this target.
@@ -1026,6 +1029,7 @@ if (is_ios || is_mac) {
"objc/unittests/RTCEncodedImage_xctest.mm",
"objc/unittests/RTCFileVideoCapturer_xctest.mm",
"objc/unittests/RTCH264ProfileLevelId_xctest.m",
+ "objc/unittests/RTCMTLVideoView_xctest.m",
"objc/unittests/RTCNV12TextureCache_xctest.m",
"objc/unittests/RTCPeerConnectionFactory_xctest.m",
"objc/unittests/frame_buffer_helpers.h",
@@ -1050,6 +1054,7 @@ if (is_ios || is_mac) {
":callback_logger_objc",
":framework_objc",
":mediaconstraints_objc",
+ ":metal_objc",
":native_api",
":native_api_audio_device_module",
":native_video",
@@ -1073,11 +1078,6 @@ if (is_ios || is_mac) {
"//third_party/libyuv",
]
- if (rtc_use_metal_rendering) {
- sources += [ "objc/unittests/RTCMTLVideoView_xctest.m" ]
- deps += [ ":metal_objc" ]
- }
-
public_deps = [
"//build/config/ios:xctest",
"//third_party/ocmock",
@@ -1318,9 +1318,6 @@ if (is_ios || is_mac) {
":videotoolbox_objc",
"../rtc_base:rtc_base_approved",
]
- if (rtc_use_metal_rendering) {
- deps += [ ":metal_objc" ]
- }
if (!build_with_chromium) {
deps += [
":callback_logger_objc",
@@ -1452,9 +1449,6 @@ if (is_ios || is_mac) {
":videotoolbox_objc",
"../rtc_base:rtc_base_approved",
]
- if (rtc_use_metal_rendering) {
- deps += [ ":metal_objc" ]
- }
if (!build_with_chromium) {
deps += [
":callback_logger_objc",
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 9b6a5dcdb1..be15a7d643 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -43,6 +43,7 @@ if (is_android) {
":filevideo_java",
":hwcodecs_java",
":java_audio_device_module_java",
+ ":libaom_av1_java",
":libjingle_peerconnection_java",
":libjingle_peerconnection_metrics_default_java",
":libvpx_vp8_java",
@@ -233,6 +234,7 @@ if (is_android) {
"src/java/org/webrtc/GlGenericDrawer.java",
"src/java/org/webrtc/H264Utils.java",
"src/java/org/webrtc/NV21Buffer.java",
+ "src/java/org/webrtc/VideoCodecMimeType.java",
"src/java/org/webrtc/VideoDecoderWrapper.java",
"src/java/org/webrtc/VideoEncoderWrapper.java",
"src/java/org/webrtc/WrappedNativeI420Buffer.java",
@@ -402,7 +404,6 @@ if (is_android) {
"src/java/org/webrtc/MediaCodecWrapperFactory.java",
"src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java",
"src/java/org/webrtc/NV12Buffer.java",
- "src/java/org/webrtc/VideoCodecMimeType.java",
]
deps = [
@@ -500,6 +501,20 @@ if (is_android) {
]
}
+ rtc_android_library("libaom_av1_java") {
+ visibility = [ "*" ]
+ sources = [
+ "api/org/webrtc/LibaomAv1Decoder.java",
+ "api/org/webrtc/LibaomAv1Encoder.java",
+ ]
+ deps = [
+ ":base_java",
+ ":video_api_java",
+ ":video_java",
+ "//rtc_base:base_java",
+ ]
+ }
+
rtc_android_library("swcodecs_java") {
visibility = [ "*" ]
sources = [
@@ -509,6 +524,7 @@ if (is_android) {
deps = [
":base_java",
+ ":libaom_av1_java",
":libvpx_vp8_java",
":libvpx_vp9_java",
":video_api_java",
@@ -825,10 +841,24 @@ if (current_os == "linux" || is_android) {
]
}
+ rtc_library("libaom_av1_jni") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [ "src/jni/av1_codec.cc" ]
+ deps = [
+ ":base_jni",
+ ":generated_libaom_av1_jni",
+ ":video_jni",
+ "../../modules/video_coding/codecs/av1:libaom_av1_decoder",
+ "../../modules/video_coding/codecs/av1:libaom_av1_encoder",
+ ]
+ }
+
rtc_library("swcodecs_jni") {
visibility = [ "*" ]
allow_poison = [ "software_video_codecs" ]
deps = [
+ ":libaom_av1_jni",
":libvpx_vp8_jni",
":libvpx_vp9_jni",
]
@@ -1264,6 +1294,16 @@ if (current_os == "linux" || is_android) {
jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
}
+ generate_jni("generated_libaom_av1_jni") {
+ sources = [
+ "api/org/webrtc/LibaomAv1Decoder.java",
+ "api/org/webrtc/LibaomAv1Encoder.java",
+ ]
+
+ namespace = "webrtc::jni"
+ jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h"
+ }
+
generate_jni("generated_peerconnection_jni") {
sources = [
"api/org/webrtc/AddIceObserver.java",
diff --git a/sdk/android/api/org/webrtc/Camera2Enumerator.java b/sdk/android/api/org/webrtc/Camera2Enumerator.java
index b32b3ad302..2c6bb57b68 100644
--- a/sdk/android/api/org/webrtc/Camera2Enumerator.java
+++ b/sdk/android/api/org/webrtc/Camera2Enumerator.java
@@ -55,7 +55,7 @@ public class Camera2Enumerator implements CameraEnumerator {
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
- Logging.e(TAG, "Camera access exception: " + e);
+ Logging.e(TAG, "Camera access exception", e);
return new String[] {};
}
}
@@ -97,7 +97,7 @@ public class Camera2Enumerator implements CameraEnumerator {
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
} catch (/* CameraAccessException */ AndroidException e) {
- Logging.e(TAG, "Camera access exception: " + e);
+ Logging.e(TAG, "Camera access exception", e);
return null;
}
}
@@ -123,8 +123,8 @@ public class Camera2Enumerator implements CameraEnumerator {
// On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
// catch statement with an Exception from a newer API, even if the code is never executed.
// https://code.google.com/p/android/issues/detail?id=209129
- } catch (/* CameraAccessException */ AndroidException e) {
- Logging.e(TAG, "Camera access exception: " + e);
+ } catch (/* CameraAccessException */ AndroidException | RuntimeException e) {
+ Logging.e(TAG, "Failed to check if camera2 is supported", e);
return false;
}
return true;
@@ -186,7 +186,7 @@ public class Camera2Enumerator implements CameraEnumerator {
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (Exception ex) {
- Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+ Logging.e(TAG, "getCameraCharacteristics()", ex);
return new ArrayList<CaptureFormat>();
}
diff --git a/sdk/android/api/org/webrtc/DataChannel.java b/sdk/android/api/org/webrtc/DataChannel.java
index f51dab97af..bcbf6f093f 100644
--- a/sdk/android/api/org/webrtc/DataChannel.java
+++ b/sdk/android/api/org/webrtc/DataChannel.java
@@ -123,6 +123,7 @@ public class DataChannel {
public void unregisterObserver() {
checkDataChannelExists();
nativeUnregisterObserver(nativeObserver);
+ nativeObserver = 0;
}
public String label() {
diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
index 8fe8b36909..c9831c1843 100644
--- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
+++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
@@ -94,7 +94,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
return null;
}
- VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.name);
+ VideoCodecMimeType type = VideoCodecMimeType.fromSdpCodecName(input.getName());
MediaCodecInfo info = findCodecForType(type);
if (info == null) {
@@ -137,12 +137,12 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
- // VP8, VP9, H264 (high profile), and H264 (baseline profile).
- for (VideoCodecMimeType type : new VideoCodecMimeType[] {
- VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, VideoCodecMimeType.H264}) {
+ // VP8, VP9, H264 (high profile), H264 (baseline profile) and AV1.
+ for (VideoCodecMimeType type : new VideoCodecMimeType[] {VideoCodecMimeType.VP8,
+ VideoCodecMimeType.VP9, VideoCodecMimeType.H264, VideoCodecMimeType.AV1}) {
MediaCodecInfo codec = findCodecForType(type);
if (codec != null) {
- String name = type.name();
+ String name = type.toSdpCodecName();
// TODO(sakal): Always add H264 HP once WebRTC correctly removes codecs that are not
// supported by the decoder.
if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
@@ -202,6 +202,8 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
return isHardwareSupportedInCurrentSdkVp9(info);
case H264:
return isHardwareSupportedInCurrentSdkH264(info);
+ case AV1:
+ return false;
}
return false;
}
@@ -248,6 +250,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
switch (type) {
case VP8: // Fallthrough intended.
case VP9:
+ case AV1:
return 100;
case H264:
return 20;
diff --git a/sdk/android/api/org/webrtc/LibaomAv1Decoder.java b/sdk/android/api/org/webrtc/LibaomAv1Decoder.java
new file mode 100644
index 0000000000..609203fe3f
--- /dev/null
+++ b/sdk/android/api/org/webrtc/LibaomAv1Decoder.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibaomAv1Decoder extends WrappedNativeVideoDecoder {
+ @Override
+ public long createNativeVideoDecoder() {
+ return nativeCreateDecoder();
+ }
+
+ static native long nativeCreateDecoder();
+
+ static native boolean nativeIsSupported();
+}
diff --git a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
new file mode 100644
index 0000000000..26648c589e
--- /dev/null
+++ b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibaomAv1Encoder extends WrappedNativeVideoEncoder {
+ @Override
+ public long createNativeVideoEncoder() {
+ return nativeCreateEncoder();
+ }
+
+ static native long nativeCreateEncoder();
+
+ @Override
+ public boolean isHardwareEncoder() {
+ return false;
+ }
+
+ static native boolean nativeIsSupported();
+}
diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java
index 38d2d38deb..67705ba3d5 100644
--- a/sdk/android/api/org/webrtc/PeerConnection.java
+++ b/sdk/android/api/org/webrtc/PeerConnection.java
@@ -141,7 +141,14 @@ public class PeerConnection {
* Triggered when a new track is signaled by the remote peer, as a result of
* setRemoteDescription.
*/
- @CalledByNative("Observer") void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams);
+ @CalledByNative("Observer")
+ default void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams){};
+
+ /**
+ * Triggered when a previously added remote track is removed by the remote
+ * peer, as a result of setRemoteDescription.
+ */
+ @CalledByNative("Observer") default void onRemoveTrack(RtpReceiver receiver){};
/**
* Triggered when the signaling from SetRemoteDescription indicates that a transceiver
diff --git a/sdk/android/api/org/webrtc/RTCStats.java b/sdk/android/api/org/webrtc/RTCStats.java
index 7ad7634c82..573d95300f 100644
--- a/sdk/android/api/org/webrtc/RTCStats.java
+++ b/sdk/android/api/org/webrtc/RTCStats.java
@@ -62,6 +62,7 @@ public class RTCStats {
* - Double
* - String
* - The array form of any of the above (e.g., Integer[])
+ * - Map of String keys to BigInteger / Double values
*/
public Map<String, Object> getMembers() {
return members;
diff --git a/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
index 7abe3505a6..c59db3b47b 100644
--- a/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
@@ -16,22 +16,22 @@ import java.util.HashMap;
import java.util.List;
public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
- @Deprecated
@Nullable
@Override
- public VideoDecoder createDecoder(String codecType) {
- return createDecoder(new VideoCodecInfo(codecType, new HashMap<>()));
- }
+ public VideoDecoder createDecoder(VideoCodecInfo codecInfo) {
+ String codecName = codecInfo.getName();
- @Nullable
- @Override
- public VideoDecoder createDecoder(VideoCodecInfo codecType) {
- if (codecType.getName().equalsIgnoreCase("VP8")) {
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP8.toSdpCodecName())) {
return new LibvpxVp8Decoder();
}
- if (codecType.getName().equalsIgnoreCase("VP9") && LibvpxVp9Decoder.nativeIsSupported()) {
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP9.toSdpCodecName())
+ && LibvpxVp9Decoder.nativeIsSupported()) {
return new LibvpxVp9Decoder();
}
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.AV1.toSdpCodecName())
+ && LibaomAv1Decoder.nativeIsSupported()) {
+ return new LibaomAv1Decoder();
+ }
return null;
}
@@ -44,9 +44,12 @@ public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
static VideoCodecInfo[] supportedCodecs() {
List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
- codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP8.toSdpCodecName(), new HashMap<>()));
if (LibvpxVp9Decoder.nativeIsSupported()) {
- codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP9.toSdpCodecName(), new HashMap<>()));
+ }
+ if (LibaomAv1Decoder.nativeIsSupported()) {
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.AV1.toSdpCodecName(), new HashMap<>()));
}
return codecs.toArray(new VideoCodecInfo[codecs.size()]);
diff --git a/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
index ed70d228ef..4de39dcdba 100644
--- a/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
+++ b/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
@@ -18,13 +18,20 @@ import java.util.List;
public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
@Nullable
@Override
- public VideoEncoder createEncoder(VideoCodecInfo info) {
- if (info.name.equalsIgnoreCase("VP8")) {
+ public VideoEncoder createEncoder(VideoCodecInfo codecInfo) {
+ String codecName = codecInfo.getName();
+
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP8.toSdpCodecName())) {
return new LibvpxVp8Encoder();
}
- if (info.name.equalsIgnoreCase("VP9") && LibvpxVp9Encoder.nativeIsSupported()) {
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP9.toSdpCodecName())
+ && LibvpxVp9Encoder.nativeIsSupported()) {
return new LibvpxVp9Encoder();
}
+ if (codecName.equalsIgnoreCase(VideoCodecMimeType.AV1.toSdpCodecName())
+ && LibaomAv1Encoder.nativeIsSupported()) {
+ return new LibaomAv1Encoder();
+ }
return null;
}
@@ -37,9 +44,12 @@ public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
static VideoCodecInfo[] supportedCodecs() {
List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
- codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP8.toSdpCodecName(), new HashMap<>()));
if (LibvpxVp9Encoder.nativeIsSupported()) {
- codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP9.toSdpCodecName(), new HashMap<>()));
+ }
+ if (LibaomAv1Encoder.nativeIsSupported()) {
+ codecs.add(new VideoCodecInfo(VideoCodecMimeType.AV1.toSdpCodecName(), new HashMap<>()));
}
return codecs.toArray(new VideoCodecInfo[codecs.size()]);
diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java
index 8dd9295fd7..e11782dedd 100644
--- a/sdk/android/api/org/webrtc/VideoCodecInfo.java
+++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java
@@ -69,6 +69,11 @@ public class VideoCodecInfo {
return Arrays.hashCode(values);
}
+ @Override
+ public String toString() {
+ return "VideoCodec{" + name + " " + params + "}";
+ }
+
@CalledByNative
String getName() {
return name;
diff --git a/sdk/android/api/org/webrtc/VideoDecoderFactory.java b/sdk/android/api/org/webrtc/VideoDecoderFactory.java
index 2dd09670bd..3f0168f23e 100644
--- a/sdk/android/api/org/webrtc/VideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/VideoDecoderFactory.java
@@ -18,18 +18,7 @@ public interface VideoDecoderFactory {
* Creates a VideoDecoder for the given codec. Supports the same codecs supported by
* VideoEncoderFactory.
*/
- @Deprecated
- @Nullable
- default VideoDecoder createDecoder(String codecType) {
- throw new UnsupportedOperationException("Deprecated and not implemented.");
- }
-
- /** Creates a decoder for the given video codec. */
- @Nullable
- @CalledByNative
- default VideoDecoder createDecoder(VideoCodecInfo info) {
- return createDecoder(info.getName());
- }
+ @Nullable @CalledByNative VideoDecoder createDecoder(VideoCodecInfo info);
/**
* Enumerates the list of supported video codecs.
diff --git a/sdk/android/api/org/webrtc/VideoEncoder.java b/sdk/android/api/org/webrtc/VideoEncoder.java
index cb8eb81767..460428192d 100644
--- a/sdk/android/api/org/webrtc/VideoEncoder.java
+++ b/sdk/android/api/org/webrtc/VideoEncoder.java
@@ -86,6 +86,8 @@ public interface VideoEncoder {
public class CodecSpecificInfoH264 extends CodecSpecificInfo {}
+ public class CodecSpecificInfoAV1 extends CodecSpecificInfo {}
+
/**
* Represents bitrate allocated for an encoder to produce frames. Bitrate can be divided between
* spatial and temporal layers.
diff --git a/sdk/android/api/org/webrtc/YuvConverter.java b/sdk/android/api/org/webrtc/YuvConverter.java
index 0e2d5055f7..9c00678900 100644
--- a/sdk/android/api/org/webrtc/YuvConverter.java
+++ b/sdk/android/api/org/webrtc/YuvConverter.java
@@ -12,6 +12,8 @@ package org.webrtc;
import android.graphics.Matrix;
import android.opengl.GLES20;
+import android.opengl.GLException;
+import android.support.annotation.Nullable;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;
import org.webrtc.VideoFrame.TextureBuffer;
@@ -20,7 +22,9 @@ import org.webrtc.VideoFrame.TextureBuffer;
* Class for converting OES textures to a YUV ByteBuffer. It can be constructed on any thread, but
* should only be operated from a single thread with an active EGL context.
*/
-public class YuvConverter {
+public final class YuvConverter {
+ private static final String TAG = "YuvConverter";
+
private static final String FRAGMENT_SHADER =
// Difference in texture coordinate corresponding to one
// sub-pixel in the x direction.
@@ -122,9 +126,17 @@ public class YuvConverter {
}
/** Converts the texture buffer to I420. */
+ @Nullable
public I420Buffer convert(TextureBuffer inputTextureBuffer) {
- threadChecker.checkIsOnValidThread();
+ try {
+ return convertInternal(inputTextureBuffer);
+ } catch (GLException e) {
+ Logging.w(TAG, "Failed to convert TextureBuffer", e);
+ }
+ return null;
+ }
+ private I420Buffer convertInternal(TextureBuffer inputTextureBuffer) {
TextureBuffer preparedBuffer = (TextureBuffer) videoFrameDrawer.prepareBufferForViewportSize(
inputTextureBuffer, inputTextureBuffer.getWidth(), inputTextureBuffer.getHeight());
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
index 69b0129c36..8135e80eaf 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
@@ -70,13 +70,14 @@ public class DefaultVideoEncoderFactoryTest {
VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, true);
DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
- assertEquals(4, videoCodecs.length);
+ assertEquals(5, videoCodecs.length);
assertEquals("VP8", videoCodecs[0].name);
assertEquals("VP9", videoCodecs[1].name);
- assertEquals("H264", videoCodecs[2].name);
- assertEquals("42e01f", videoCodecs[2].params.get("profile-level-id"));
+ assertEquals("AV1X", videoCodecs[2].name);
assertEquals("H264", videoCodecs[3].name);
- assertEquals("640c1f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("H264", videoCodecs[4].name);
+ assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id"));
}
@SmallTest
@@ -85,11 +86,12 @@ public class DefaultVideoEncoderFactoryTest {
VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, false);
DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
- assertEquals(3, videoCodecs.length);
+ assertEquals(4, videoCodecs.length);
assertEquals("VP8", videoCodecs[0].name);
assertEquals("VP9", videoCodecs[1].name);
- assertEquals("H264", videoCodecs[2].name);
- assertEquals("42e01f", videoCodecs[2].params.get("profile-level-id"));
+ assertEquals("AV1X", videoCodecs[2].name);
+ assertEquals("H264", videoCodecs[3].name);
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
}
@SmallTest
@@ -98,12 +100,13 @@ public class DefaultVideoEncoderFactoryTest {
VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(false, true);
DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
- assertEquals(4, videoCodecs.length);
+ assertEquals(5, videoCodecs.length);
assertEquals("VP8", videoCodecs[0].name);
assertEquals("VP9", videoCodecs[1].name);
- assertEquals("H264", videoCodecs[2].name);
- assertEquals("42e01f", videoCodecs[2].params.get("profile-level-id"));
+ assertEquals("AV1X", videoCodecs[2].name);
assertEquals("H264", videoCodecs[3].name);
- assertEquals("640c1f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("H264", videoCodecs[4].name);
+ assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id"));
}
}
diff --git a/sdk/android/native_api/video/video_source.cc b/sdk/android/native_api/video/video_source.cc
index 56a56722eb..4f1409ef7b 100644
--- a/sdk/android/native_api/video/video_source.cc
+++ b/sdk/android/native_api/video/video_source.cc
@@ -29,7 +29,7 @@ class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface {
bool is_screencast,
bool align_timestamps)
: android_video_track_source_(
- new rtc::RefCountedObject<jni::AndroidVideoTrackSource>(
+ rtc::make_ref_counted<jni::AndroidVideoTrackSource>(
signaling_thread,
env,
is_screencast,
@@ -109,7 +109,7 @@ rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
rtc::Thread* signaling_thread,
bool is_screencast,
bool align_timestamps) {
- return new rtc::RefCountedObject<JavaVideoTrackSourceImpl>(
+ return rtc::make_ref_counted<JavaVideoTrackSourceImpl>(
jni, signaling_thread, is_screencast, align_timestamps);
}
diff --git a/sdk/android/native_unittests/android_network_monitor_unittest.cc b/sdk/android/native_unittests/android_network_monitor_unittest.cc
index 20e756ae7b..c342ce692e 100644
--- a/sdk/android/native_unittests/android_network_monitor_unittest.cc
+++ b/sdk/android/native_unittests/android_network_monitor_unittest.cc
@@ -127,7 +127,6 @@ TEST_F(AndroidNetworkMonitorTest,
}
TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIfName) {
- ScopedFieldTrials field_trials("WebRTC-BindUsingInterfaceName/Enabled/");
// Start() updates the states introduced by the field trial.
network_monitor_->Start();
jni::NetworkHandle ipv6_handle = 200;
diff --git a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
index fcd9c9b8f1..b77d86719f 100644
--- a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
+++ b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
@@ -153,28 +153,24 @@ class SleepDeadlock : public DeadlockInterface {
}
};
-// This is the function that is exectued by the thread that will deadlock and
-// have its stacktrace captured.
-void ThreadFunction(void* void_params) {
- ThreadParams* params = static_cast<ThreadParams*>(void_params);
- params->tid = gettid();
-
- params->deadlock_region_start_address = GetCurrentRelativeExecutionAddress();
- params->deadlock_start_event.Set();
- params->deadlock_impl->Deadlock();
- params->deadlock_region_end_address = GetCurrentRelativeExecutionAddress();
-
- params->deadlock_done_event.Set();
-}
-
void TestStacktrace(std::unique_ptr<DeadlockInterface> deadlock_impl) {
// Set params that will be sent to other thread.
ThreadParams params;
params.deadlock_impl = deadlock_impl.get();
// Spawn thread.
- rtc::PlatformThread thread(&ThreadFunction, &params, "StacktraceTest");
- thread.Start();
+ auto thread = rtc::PlatformThread::SpawnJoinable(
+ [&params] {
+ params.tid = gettid();
+ params.deadlock_region_start_address =
+ GetCurrentRelativeExecutionAddress();
+ params.deadlock_start_event.Set();
+ params.deadlock_impl->Deadlock();
+ params.deadlock_region_end_address =
+ GetCurrentRelativeExecutionAddress();
+ params.deadlock_done_event.Set();
+ },
+ "StacktraceTest");
// Wait until the thread has entered the deadlock region, and take a very
// brief nap to give it time to reach the actual deadlock.
@@ -198,8 +194,6 @@ void TestStacktrace(std::unique_ptr<DeadlockInterface> deadlock_impl) {
<< rtc::ToHex(params.deadlock_region_start_address) << ", "
<< rtc::ToHex(params.deadlock_region_end_address)
<< "] not contained in: " << StackTraceToString(stack_trace);
-
- thread.Stop();
}
class LookoutLogSink final : public rtc::LogSink {
@@ -259,13 +253,9 @@ TEST(Stacktrace, TestRtcEventDeadlockDetection) {
// Start a thread that waits for an event.
rtc::Event ev;
- rtc::PlatformThread thread(
- [](void* arg) {
- auto* ev = static_cast<rtc::Event*>(arg);
- ev->Wait(rtc::Event::kForever);
- },
- &ev, "TestRtcEventDeadlockDetection");
- thread.Start();
+ auto thread = rtc::PlatformThread::SpawnJoinable(
+ [&ev] { ev.Wait(rtc::Event::kForever); },
+ "TestRtcEventDeadlockDetection");
// The message should appear after 3 sec. We'll wait up to 10 sec in an
// attempt to not be flaky.
@@ -273,7 +263,7 @@ TEST(Stacktrace, TestRtcEventDeadlockDetection) {
// Unblock the thread and shut it down.
ev.Set();
- thread.Stop();
+ thread.Finalize();
rtc::LogMessage::RemoveLogToStream(&sink);
}
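The test updates above track the new rtc::PlatformThread API: SpawnJoinable() takes a closure instead of a void*-function pair, and Finalize() (or the destructor) replaces the old Start()/Stop() calls. A hedged sketch of that usage, not taken from the tests themselves:

```
// Sketch of the SpawnJoinable/Finalize usage the updated tests rely on.
#include "rtc_base/event.h"
#include "rtc_base/platform_thread.h"

void RunOnBackgroundThread() {
  rtc::Event done;
  auto thread = rtc::PlatformThread::SpawnJoinable(
      [&done] { done.Set(); }, "ExampleThread");
  done.Wait(rtc::Event::kForever);
  thread.Finalize();  // Optional here; the destructor also joins the thread.
}
```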
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
index f6d98bd14f..f116fefc83 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -169,7 +169,7 @@ class HardwareVideoEncoder implements VideoEncoder {
* intervals, and bitrateAdjuster.
*
* @param codecName the hardware codec implementation to use
- * @param codecType the type of the given video codec (eg. VP8, VP9, or H264)
+ * @param codecType the type of the given video codec (e.g. VP8, VP9, H264, or AV1)
* @param surfaceColorFormat color format for surface mode or null if not available
* @param yuvColorFormat color format for bytebuffer mode
* @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
diff --git a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
index cd43098015..b634fb34f5 100644
--- a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
+++ b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
@@ -91,6 +91,7 @@ class MediaCodecUtils {
switch (type) {
case VP8:
case VP9:
+ case AV1:
return new HashMap<String, String>();
case H264:
return H264Utils.getDefaultH264Params(highProfile);
diff --git a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
index 036aca5822..5a1d63e1c5 100644
--- a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
+++ b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
@@ -46,7 +46,7 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
@Nullable
@Override
public VideoDecoder createDecoder(VideoCodecInfo codecType) {
- VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName());
+ VideoCodecMimeType type = VideoCodecMimeType.fromSdpCodecName(codecType.getName());
MediaCodecInfo info = findCodecForType(type);
if (info == null) {
@@ -64,11 +64,11 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory {
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
// VP8, VP9, H264 (high profile), and H264 (baseline profile).
- for (VideoCodecMimeType type : new VideoCodecMimeType[] {
- VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, VideoCodecMimeType.H264}) {
+ for (VideoCodecMimeType type : new VideoCodecMimeType[] {VideoCodecMimeType.VP8,
+ VideoCodecMimeType.VP9, VideoCodecMimeType.H264, VideoCodecMimeType.AV1}) {
MediaCodecInfo codec = findCodecForType(type);
if (codec != null) {
- String name = type.name();
+ String name = type.toSdpCodecName();
if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
supportedCodecInfos.add(new VideoCodecInfo(
name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
diff --git a/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java b/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
index f27a9176cf..93a9286165 100644
--- a/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
+++ b/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java
@@ -14,7 +14,8 @@ package org.webrtc;
enum VideoCodecMimeType {
VP8("video/x-vnd.on2.vp8"),
VP9("video/x-vnd.on2.vp9"),
- H264("video/avc");
+ H264("video/avc"),
+ AV1("video/av01");
private final String mimeType;
@@ -25,4 +26,12 @@ enum VideoCodecMimeType {
String mimeType() {
return mimeType;
}
+
+ static VideoCodecMimeType fromSdpCodecName(String codecName) {
+ return codecName.equals("AV1X") ? AV1 : valueOf(codecName);
+ }
+
+ String toSdpCodecName() {
+ return this == AV1 ? "AV1X" : name();
+ }
}
diff --git a/sdk/android/src/jni/android_network_monitor.cc b/sdk/android/src/jni/android_network_monitor.cc
index fa82816431..686f94e1e6 100644
--- a/sdk/android/src/jni/android_network_monitor.cc
+++ b/sdk/android/src/jni/android_network_monitor.cc
@@ -247,7 +247,7 @@ void AndroidNetworkMonitor::Start() {
webrtc::field_trial::IsEnabled(
"WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart");
bind_using_ifname_ =
- webrtc::field_trial::IsEnabled("WebRTC-BindUsingInterfaceName");
+ !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName");
// This pointer is also accessed by the methods called from java threads.
// Assigning it here is safe, because the java monitor is in a stopped state,
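Flipping IsEnabled() to !IsDisabled() makes interface-name binding the default unless the trial is explicitly set to Disabled, which is also why the unittest above no longer needs ScopedFieldTrials to opt in. A small sketch under the assumed field-trial semantics:

```
// Assumed semantics: IsEnabled(name) is true only when the trial string sets
// "name/Enabled/", IsDisabled(name) only for "name/Disabled/". Checking
// !IsDisabled therefore defaults the feature to on.
#include "system_wrappers/include/field_trial.h"

bool BindUsingIfnameByDefault() {
  return !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName");
}
```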
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.cc b/sdk/android/src/jni/audio_device/audio_device_module.cc
index 74a0db00f9..4c9c36b7ac 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.cc
+++ b/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -641,7 +641,7 @@ rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
std::unique_ptr<AudioInput> audio_input,
std::unique_ptr<AudioOutput> audio_output) {
RTC_DLOG(INFO) << __FUNCTION__;
- return new rtc::RefCountedObject<AndroidAudioDeviceModule>(
+ return rtc::make_ref_counted<AndroidAudioDeviceModule>(
audio_layer, is_stereo_playout_supported, is_stereo_record_supported,
playout_delay_ms, std::move(audio_input), std::move(audio_output));
}
diff --git a/sdk/android/src/jni/audio_device/opensles_common.cc b/sdk/android/src/jni/audio_device/opensles_common.cc
index 04c3ae9f7a..0f35b2712a 100644
--- a/sdk/android/src/jni/audio_device/opensles_common.cc
+++ b/sdk/android/src/jni/audio_device/opensles_common.cc
@@ -106,8 +106,6 @@ OpenSLEngineManager::OpenSLEngineManager() {
thread_checker_.Detach();
}
-OpenSLEngineManager::~OpenSLEngineManager() = default;
-
SLObjectItf OpenSLEngineManager::GetOpenSLEngine() {
RTC_LOG(INFO) << "GetOpenSLEngine";
RTC_DCHECK(thread_checker_.IsCurrent());
diff --git a/sdk/android/src/jni/audio_device/opensles_common.h b/sdk/android/src/jni/audio_device/opensles_common.h
index d812b920ff..9dd1e0f7d7 100644
--- a/sdk/android/src/jni/audio_device/opensles_common.h
+++ b/sdk/android/src/jni/audio_device/opensles_common.h
@@ -68,10 +68,11 @@ typedef ScopedSLObject<SLObjectItf, const SLObjectItf_*> ScopedSLObjectItf;
// Subsequent calls returns the already created engine.
// Note: This class must be used single threaded and this is enforced by a
// thread checker.
-class OpenSLEngineManager : public rtc::RefCountedBase {
+class OpenSLEngineManager
+ : public rtc::RefCountedNonVirtual<OpenSLEngineManager> {
public:
OpenSLEngineManager();
- ~OpenSLEngineManager() override;
+ ~OpenSLEngineManager() = default;
SLObjectItf GetOpenSLEngine();
private:
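OpenSLEngineManager here (and AddIceCandidateObserverJni below) move from rtc::RefCountedBase to the CRTP rtc::RefCountedNonVirtual<T>, which supplies AddRef()/Release() without forcing a virtual destructor. A minimal sketch, assuming the template is declared in api/ref_counted_base.h; `Counter` is illustrative:

```
// Illustrative use of rtc::RefCountedNonVirtual<T>.
#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"

class Counter : public rtc::RefCountedNonVirtual<Counter> {
 public:
  void Increment() { ++value_; }
  int value() const { return value_; }

 private:
  int value_ = 0;
};

int UseCounter() {
  rtc::scoped_refptr<Counter> counter(new Counter());
  counter->Increment();
  return counter->value();  // Last reference gone: Counter is deleted non-virtually.
}
```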
diff --git a/sdk/android/src/jni/av1_codec.cc b/sdk/android/src/jni/av1_codec.cc
new file mode 100644
index 0000000000..02070f7901
--- /dev/null
+++ b/sdk/android/src/jni/av1_codec.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
+#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+#include "sdk/android/generated_libaom_av1_jni/LibaomAv1Decoder_jni.h"
+#include "sdk/android/generated_libaom_av1_jni/LibaomAv1Encoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release());
+}
+
+static jboolean JNI_LibaomAv1Encoder_IsSupported(JNIEnv* jni) {
+ return webrtc::kIsLibaomAv1EncoderSupported;
+}
+
+static jlong JNI_LibaomAv1Decoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateLibaomAv1Decoder().release());
+}
+
+static jboolean JNI_LibaomAv1Decoder_IsSupported(JNIEnv* jni) {
+ return webrtc::kIsLibaomAv1DecoderSupported;
+}
+
+} // namespace jni
+} // namespace webrtc
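The new JNI file only forwards to the existing libaom factories; the native-side equivalent of those calls looks roughly like the sketch below (MaybeCreateAv1Encoder is an illustrative helper, not part of the CL):

```
// Illustrative native-side counterpart of the JNI bindings above.
#include <memory>

#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"

std::unique_ptr<webrtc::VideoEncoder> MaybeCreateAv1Encoder() {
  if (!webrtc::kIsLibaomAv1EncoderSupported) {
    return nullptr;
  }
  return webrtc::CreateLibaomAv1Encoder();
}
```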
diff --git a/sdk/android/src/jni/encoded_image.cc b/sdk/android/src/jni/encoded_image.cc
index 839f6a8f6a..189d7e95e4 100644
--- a/sdk/android/src/jni/encoded_image.cc
+++ b/sdk/android/src/jni/encoded_image.cc
@@ -90,7 +90,7 @@ EncodedImage JavaToNativeEncodedImage(JNIEnv* env,
const size_t buffer_size = env->GetDirectBufferCapacity(j_buffer.obj());
EncodedImage frame;
- frame.SetEncodedData(new rtc::RefCountedObject<JavaEncodedImageBuffer>(
+ frame.SetEncodedData(rtc::make_ref_counted<JavaEncodedImageBuffer>(
env, j_encoded_image, buffer, buffer_size));
frame._encodedWidth = Java_EncodedImage_getEncodedWidth(env, j_encoded_image);
diff --git a/sdk/android/src/jni/pc/add_ice_candidate_observer.h b/sdk/android/src/jni/pc/add_ice_candidate_observer.h
index ed72de9df6..1128385389 100644
--- a/sdk/android/src/jni/pc/add_ice_candidate_observer.h
+++ b/sdk/android/src/jni/pc/add_ice_candidate_observer.h
@@ -20,10 +20,11 @@
namespace webrtc {
namespace jni {
-class AddIceCandidateObserverJni final : public rtc::RefCountedBase {
+class AddIceCandidateObserverJni final
+ : public rtc::RefCountedNonVirtual<AddIceCandidateObserverJni> {
public:
AddIceCandidateObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
- ~AddIceCandidateObserverJni() override = default;
+ ~AddIceCandidateObserverJni() = default;
void OnComplete(RTCError error);
diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc
index 35f6ad56af..09b8f33edb 100644
--- a/sdk/android/src/jni/pc/peer_connection.cc
+++ b/sdk/android/src/jni/pc/peer_connection.cc
@@ -410,6 +410,16 @@ void PeerConnectionObserverJni::OnAddTrack(
NativeToJavaMediaStreamArray(env, streams));
}
+void PeerConnectionObserverJni::OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_rtp_receiver =
+ NativeToJavaRtpReceiver(env, receiver);
+ rtp_receivers_.emplace_back(env, j_rtp_receiver);
+
+ Java_Observer_onRemoveTrack(env, j_observer_global_, j_rtp_receiver);
+}
+
void PeerConnectionObserverJni::OnTrack(
rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
@@ -538,10 +548,12 @@ static ScopedJavaLocalRef<jobject> JNI_PeerConnection_CreateDataChannel(
const JavaParamRef<jstring>& j_label,
const JavaParamRef<jobject>& j_init) {
DataChannelInit init = JavaToNativeDataChannelInit(jni, j_init);
- rtc::scoped_refptr<DataChannelInterface> channel(
- ExtractNativePC(jni, j_pc)->CreateDataChannel(
- JavaToNativeString(jni, j_label), &init));
- return WrapNativeDataChannel(jni, channel);
+ auto result = ExtractNativePC(jni, j_pc)->CreateDataChannelOrError(
+ JavaToNativeString(jni, j_label), &init);
+ if (!result.ok()) {
+ return WrapNativeDataChannel(jni, nullptr);
+ }
+ return WrapNativeDataChannel(jni, result.MoveValue());
}
static void JNI_PeerConnection_CreateOffer(
@@ -551,9 +563,8 @@ static void JNI_PeerConnection_CreateOffer(
const JavaParamRef<jobject>& j_constraints) {
std::unique_ptr<MediaConstraints> constraints =
JavaToNativeMediaConstraints(jni, j_constraints);
- rtc::scoped_refptr<CreateSdpObserverJni> observer(
- new rtc::RefCountedObject<CreateSdpObserverJni>(jni, j_observer,
- std::move(constraints)));
+ auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+ jni, j_observer, std::move(constraints));
PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
ExtractNativePC(jni, j_pc)->CreateOffer(observer, options);
@@ -566,9 +577,8 @@ static void JNI_PeerConnection_CreateAnswer(
const JavaParamRef<jobject>& j_constraints) {
std::unique_ptr<MediaConstraints> constraints =
JavaToNativeMediaConstraints(jni, j_constraints);
- rtc::scoped_refptr<CreateSdpObserverJni> observer(
- new rtc::RefCountedObject<CreateSdpObserverJni>(jni, j_observer,
- std::move(constraints)));
+ auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+ jni, j_observer, std::move(constraints));
PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
ExtractNativePC(jni, j_pc)->CreateAnswer(observer, options);
@@ -578,8 +588,8 @@ static void JNI_PeerConnection_SetLocalDescriptionAutomatically(
JNIEnv* jni,
const JavaParamRef<jobject>& j_pc,
const JavaParamRef<jobject>& j_observer) {
- rtc::scoped_refptr<SetLocalSdpObserverJni> observer(
- new rtc::RefCountedObject<SetLocalSdpObserverJni>(jni, j_observer));
+ auto observer =
+ rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
ExtractNativePC(jni, j_pc)->SetLocalDescription(observer);
}
@@ -588,8 +598,8 @@ static void JNI_PeerConnection_SetLocalDescription(
const JavaParamRef<jobject>& j_pc,
const JavaParamRef<jobject>& j_observer,
const JavaParamRef<jobject>& j_sdp) {
- rtc::scoped_refptr<SetLocalSdpObserverJni> observer(
- new rtc::RefCountedObject<SetLocalSdpObserverJni>(jni, j_observer));
+ auto observer =
+ rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
ExtractNativePC(jni, j_pc)->SetLocalDescription(
JavaToNativeSessionDescription(jni, j_sdp), observer);
}
@@ -599,8 +609,8 @@ static void JNI_PeerConnection_SetRemoteDescription(
const JavaParamRef<jobject>& j_pc,
const JavaParamRef<jobject>& j_observer,
const JavaParamRef<jobject>& j_sdp) {
- rtc::scoped_refptr<SetRemoteSdpObserverJni> observer(
- new rtc::RefCountedObject<SetRemoteSdpObserverJni>(jni, j_observer));
+ auto observer =
+ rtc::make_ref_counted<SetRemoteSdpObserverJni>(jni, j_observer);
ExtractNativePC(jni, j_pc)->SetRemoteDescription(
JavaToNativeSessionDescription(jni, j_sdp), observer);
}
@@ -799,8 +809,7 @@ static jboolean JNI_PeerConnection_OldGetStats(
const JavaParamRef<jobject>& j_pc,
const JavaParamRef<jobject>& j_observer,
jlong native_track) {
- rtc::scoped_refptr<StatsObserverJni> observer(
- new rtc::RefCountedObject<StatsObserverJni>(jni, j_observer));
+ auto observer = rtc::make_ref_counted<StatsObserverJni>(jni, j_observer);
return ExtractNativePC(jni, j_pc)->GetStats(
observer, reinterpret_cast<MediaStreamTrackInterface*>(native_track),
PeerConnectionInterface::kStatsOutputLevelStandard);
@@ -810,9 +819,8 @@ static void JNI_PeerConnection_NewGetStats(
JNIEnv* jni,
const JavaParamRef<jobject>& j_pc,
const JavaParamRef<jobject>& j_callback) {
- rtc::scoped_refptr<RTCStatsCollectorCallbackWrapper> callback(
- new rtc::RefCountedObject<RTCStatsCollectorCallbackWrapper>(jni,
- j_callback));
+ auto callback =
+ rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
ExtractNativePC(jni, j_pc)->GetStats(callback);
}
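CreateDataChannel is replaced by CreateDataChannelOrError, which reports failures through webrtc::RTCErrorOr instead of a null pointer; CreatePeerConnectionOrError in the factory hunk further below follows the same pattern. A hedged sketch of how a caller consumes such a result:

```
// Sketch of consuming the RTCErrorOr-returning data-channel factory.
#include "api/peer_connection_interface.h"
#include "rtc_base/logging.h"

rtc::scoped_refptr<webrtc::DataChannelInterface> OpenChannel(
    webrtc::PeerConnectionInterface* pc) {
  webrtc::DataChannelInit init;
  auto result = pc->CreateDataChannelOrError("data", &init);
  if (!result.ok()) {
    RTC_LOG(LS_ERROR) << "CreateDataChannel failed: "
                      << result.error().message();
    return nullptr;
  }
  return result.MoveValue();
}
```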
diff --git a/sdk/android/src/jni/pc/peer_connection.h b/sdk/android/src/jni/pc/peer_connection.h
index a9e2af2a47..86d99f31c4 100644
--- a/sdk/android/src/jni/pc/peer_connection.h
+++ b/sdk/android/src/jni/pc/peer_connection.h
@@ -72,6 +72,8 @@ class PeerConnectionObserverJni : public PeerConnectionObserver {
streams) override;
void OnTrack(
rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
private:
typedef std::map<MediaStreamInterface*, JavaMediaStream>
diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc
index 2392db2403..53e715bd08 100644
--- a/sdk/android/src/jni/pc/peer_connection_factory.cc
+++ b/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -471,14 +471,14 @@ static jlong JNI_PeerConnectionFactory_CreatePeerConnection(
jni, j_sslCertificateVerifier);
}
- rtc::scoped_refptr<PeerConnectionInterface> pc =
- PeerConnectionFactoryFromJava(factory)->CreatePeerConnection(
+ auto result =
+ PeerConnectionFactoryFromJava(factory)->CreatePeerConnectionOrError(
rtc_config, std::move(peer_connection_dependencies));
- if (!pc)
+ if (!result.ok())
return 0;
- return jlongFromPointer(
- new OwnedPeerConnection(pc, std::move(observer), std::move(constraints)));
+ return jlongFromPointer(new OwnedPeerConnection(
+ result.MoveValue(), std::move(observer), std::move(constraints)));
}
static jlong JNI_PeerConnectionFactory_CreateVideoSource(
diff --git a/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc b/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
index b334bb4a72..baa3f276e7 100644
--- a/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
+++ b/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
@@ -94,6 +94,23 @@ ScopedJavaLocalRef<jobject> MemberToJava(
case RTCStatsMemberInterface::kSequenceString:
return NativeToJavaStringArray(
env, *member.cast_to<RTCStatsMember<std::vector<std::string>>>());
+
+ case RTCStatsMemberInterface::kMapStringUint64:
+ return NativeToJavaMap(
+ env,
+ *member.cast_to<RTCStatsMember<std::map<std::string, uint64_t>>>(),
+ [](JNIEnv* env, const auto& entry) {
+ return std::make_pair(NativeToJavaString(env, entry.first),
+ NativeToJavaBigInteger(env, entry.second));
+ });
+
+ case RTCStatsMemberInterface::kMapStringDouble:
+ return NativeToJavaMap(
+ env, *member.cast_to<RTCStatsMember<std::map<std::string, double>>>(),
+ [](JNIEnv* env, const auto& entry) {
+ return std::make_pair(NativeToJavaString(env, entry.first),
+ NativeToJavaDouble(env, entry.second));
+ });
}
RTC_NOTREACHED();
return nullptr;
diff --git a/sdk/android/src/jni/pc/video.cc b/sdk/android/src/jni/pc/video.cc
index 513a0059f2..ee5ecbea6f 100644
--- a/sdk/android/src/jni/pc/video.cc
+++ b/sdk/android/src/jni/pc/video.cc
@@ -46,9 +46,8 @@ void* CreateVideoSource(JNIEnv* env,
rtc::Thread* worker_thread,
jboolean is_screencast,
jboolean align_timestamps) {
- rtc::scoped_refptr<AndroidVideoTrackSource> source(
- new rtc::RefCountedObject<AndroidVideoTrackSource>(
- signaling_thread, env, is_screencast, align_timestamps));
+ auto source = rtc::make_ref_counted<AndroidVideoTrackSource>(
+ signaling_thread, env, is_screencast, align_timestamps);
return source.release();
}
diff --git a/sdk/android/src/jni/video_frame.cc b/sdk/android/src/jni/video_frame.cc
index c92561afc2..98728032e8 100644
--- a/sdk/android/src/jni/video_frame.cc
+++ b/sdk/android/src/jni/video_frame.cc
@@ -77,8 +77,8 @@ rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt(
int width,
int height,
const JavaRef<jobject>& j_video_frame_buffer) {
- return new rtc::RefCountedObject<AndroidVideoI420Buffer>(
- jni, width, height, j_video_frame_buffer);
+ return rtc::make_ref_counted<AndroidVideoI420Buffer>(jni, width, height,
+ j_video_frame_buffer);
}
AndroidVideoI420Buffer::AndroidVideoI420Buffer(
@@ -123,8 +123,7 @@ int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni,
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
JNIEnv* jni,
const JavaRef<jobject>& j_video_frame_buffer) {
- return new rtc::RefCountedObject<AndroidVideoBuffer>(jni,
- j_video_frame_buffer);
+ return rtc::make_ref_counted<AndroidVideoBuffer>(jni, j_video_frame_buffer);
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create(
@@ -180,6 +179,10 @@ rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedJavaLocalRef<jobject> j_i420_buffer =
Java_Buffer_toI420(jni, j_video_frame_buffer_);
+ // In case I420 conversion fails, we propagate the nullptr.
+ if (j_i420_buffer.is_null()) {
+ return nullptr;
+ }
// We don't need to retain the buffer because toI420 returns a new object that
// we are assumed to take the ownership of.
diff --git a/sdk/media_constraints.cc b/sdk/media_constraints.cc
index f4d72bdf36..6f4901c97e 100644
--- a/sdk/media_constraints.cc
+++ b/sdk/media_constraints.cc
@@ -118,7 +118,6 @@ const char MediaConstraints::kUseRtpMux[] = "googUseRtpMUX";
// Below constraints should be used during PeerConnection construction.
const char MediaConstraints::kEnableDtlsSrtp[] = "DtlsSrtpKeyAgreement";
-const char MediaConstraints::kEnableRtpDataChannels[] = "RtpDataChannels";
// Google-specific constraint keys.
const char MediaConstraints::kEnableDscp[] = "googDscp";
const char MediaConstraints::kEnableIPv6[] = "googIPv6";
diff --git a/sdk/media_constraints.h b/sdk/media_constraints.h
index b85dc472e0..15cb363f7c 100644
--- a/sdk/media_constraints.h
+++ b/sdk/media_constraints.h
@@ -85,8 +85,6 @@ class MediaConstraints {
// PeerConnection constraint keys.
// Temporary pseudo-constraints used to enable DTLS-SRTP
static const char kEnableDtlsSrtp[]; // Enable DTLS-SRTP
- // Temporary pseudo-constraints used to enable DataChannels
- static const char kEnableRtpDataChannels[]; // Enable RTP DataChannels
// Google-specific constraint keys.
// Temporary pseudo-constraint for enabling DSCP through JS.
static const char kEnableDscp[]; // googDscp
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
index 1ded45d670..cb75f061d8 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
@@ -23,14 +23,12 @@
std::string labelString = [NSString stdStringForString:label];
const webrtc::DataChannelInit nativeInit =
configuration.nativeDataChannelInit;
- rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel =
- self.nativePeerConnection->CreateDataChannel(labelString,
- &nativeInit);
- if (!dataChannel) {
+ auto result = self.nativePeerConnection->CreateDataChannelOrError(labelString, &nativeInit);
+ if (!result.ok()) {
return nil;
}
return [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:self.factory
- nativeDataChannel:dataChannel];
+ nativeDataChannel:result.MoveValue()];
}
@end
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
index 8a47d22772..05fe581d08 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -348,11 +348,12 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
webrtc::PeerConnectionDependencies deps = std::move(*dependencies.release());
deps.observer = _observer.get();
- _peerConnection = factory.nativeFactory->CreatePeerConnection(*config, std::move(deps));
+ auto result = factory.nativeFactory->CreatePeerConnectionOrError(*config, std::move(deps));
- if (!_peerConnection) {
+ if (!result.ok()) {
return nil;
}
+ _peerConnection = result.MoveValue();
_factory = factory;
_localStreams = [[NSMutableArray alloc] init];
_delegate = delegate;
diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
index afa2fd5fe2..6135223720 100644
--- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
+++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
@@ -31,7 +31,7 @@ RTC_EXTERN const NSString *const kRTCVp8CodecName;
RTC_EXTERN const NSString *const kRTCVp9CodecName;
RTC_EXTERN const NSString *const kRTCH264CodecName;
-/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTC_OBJC_TYPE(RTCRtpCodecParameters) */
+/** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */
RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) : NSObject
diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport.h b/sdk/objc/api/peerconnection/RTCStatisticsReport.h
index 38d93e8771..06dbf48d88 100644
--- a/sdk/objc/api/peerconnection/RTCStatisticsReport.h
+++ b/sdk/objc/api/peerconnection/RTCStatisticsReport.h
@@ -44,8 +44,8 @@ RTC_OBJC_EXPORT
@property(nonatomic, readonly) NSString *type;
/** The keys and values of the subreport, e.g. "totalFramesDuration = 5.551".
- The values are either NSNumbers or NSStrings, or NSArrays encapsulating NSNumbers
- or NSStrings. */
+ The values are either NSNumbers or NSStrings or NSArrays encapsulating NSNumbers
+ or NSStrings, or NSDictionary of NSString keys to NSNumber values. */
@property(nonatomic, readonly) NSDictionary<NSString *, NSObject *> *values;
- (instancetype)init NS_UNAVAILABLE;
diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport.mm b/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
index 1dd72772ed..967683fc91 100644
--- a/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
+++ b/sdk/objc/api/peerconnection/RTCStatisticsReport.mm
@@ -16,7 +16,7 @@
namespace webrtc {
/** Converts a single value to a suitable NSNumber, NSString or NSArray containing NSNumbers
- or NSStrings.*/
+ or NSStrings, or NSDictionary of NSString keys to NSNumber values.*/
NSObject *ValueFromStatsMember(const RTCStatsMemberInterface *member) {
if (member->is_defined()) {
switch (member->type()) {
@@ -91,6 +91,26 @@ NSObject *ValueFromStatsMember(const RTCStatsMemberInterface *member) {
}
return [array copy];
}
+ case RTCStatsMemberInterface::kMapStringUint64: {
+ std::map<std::string, uint64_t> map =
+ *member->cast_to<RTCStatsMember<std::map<std::string, uint64_t>>>();
+ NSMutableDictionary<NSString *, NSNumber *> *dictionary =
+ [NSMutableDictionary dictionaryWithCapacity:map.size()];
+ for (const auto &item : map) {
+ dictionary[[NSString stringForStdString:item.first]] = @(item.second);
+ }
+ return [dictionary copy];
+ }
+ case RTCStatsMemberInterface::kMapStringDouble: {
+ std::map<std::string, double> map =
+ *member->cast_to<RTCStatsMember<std::map<std::string, double>>>();
+ NSMutableDictionary<NSString *, NSNumber *> *dictionary =
+ [NSMutableDictionary dictionaryWithCapacity:map.size()];
+ for (const auto &item : map) {
+ dictionary[[NSString stringForStdString:item.first]] = @(item.second);
+ }
+ return [dictionary copy];
+ }
default:
RTC_NOTREACHED();
}
diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm
index 15b0d6f1be..3a1ea6a322 100644
--- a/sdk/objc/api/peerconnection/RTCVideoSource.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm
@@ -10,7 +10,7 @@
#import "RTCVideoSource+Private.h"
-#include "api/video_track_source_proxy.h"
+#include "pc/video_track_source_proxy.h"
#include "rtc_base/checks.h"
#include "sdk/objc/native/src/objc_video_track_source.h"
diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
index 843b6ad001..ea2a459360 100644
--- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
+++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
@@ -69,4 +69,14 @@
return nil;
}
+- (NSInteger)resolutionAlignment {
+ RTC_NOTREACHED();
+ return 1;
+}
+
+- (BOOL)applyAlignmentToAllSimulcastLayers {
+ RTC_NOTREACHED();
+ return NO;
+}
+
@end
diff --git a/sdk/objc/base/RTCVideoEncoder.h b/sdk/objc/base/RTCVideoEncoder.h
index 29e8a89901..26cf4ec03f 100644
--- a/sdk/objc/base/RTCVideoEncoder.h
+++ b/sdk/objc/base/RTCVideoEncoder.h
@@ -28,7 +28,7 @@ RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
(RTCVideoEncoder)<NSObject>
- - (void)setCallback : (RTCVideoEncoderCallback)callback;
+- (void)setCallback:(nullable RTCVideoEncoderCallback)callback;
- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
numberOfCores:(int)numberOfCores;
- (NSInteger)releaseEncoder;
@@ -43,6 +43,13 @@ RTC_OBJC_EXPORT
* disables quality scaling. */
- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings;
+/** Resolutions should be aligned to this value. */
+@property(nonatomic, readonly) NSInteger resolutionAlignment;
+
+/** If enabled, resolution alignment is applied to all simulcast layers simultaneously so that when
+ scaled, all resolutions comply with 'resolutionAlignment'. */
+@property(nonatomic, readonly) BOOL applyAlignmentToAllSimulcastLayers;
+
@end
NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h
index f917e327a4..79658e3c81 100644
--- a/sdk/objc/components/audio/RTCAudioSession.h
+++ b/sdk/objc/components/audio/RTCAudioSession.h
@@ -137,8 +137,6 @@ RTC_OBJC_EXPORT
* AVAudioSession.
*/
@property(nonatomic, readonly) BOOL isActive;
-/** Whether RTCAudioSession is currently locked for configuration. */
-@property(nonatomic, readonly) BOOL isLocked;
/** If YES, WebRTC will not initialize the audio unit automatically when an
* audio track is ready for playout or recording. Instead, applications should
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
index 5678112ade..3320d12076 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
+++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
@@ -21,8 +21,6 @@ NS_ASSUME_NONNULL_BEGIN
*
* It has id<RTCVideoRenderer> property that renders video frames in the view's
* bounds using Metal.
- * NOTE: always check if metal is available on the running device via
- * RTC_SUPPORTS_METAL macro before initializing this class.
*/
NS_CLASS_AVAILABLE_IOS(9)
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
index f5be7c061c..4c50bcf9c1 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
+++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
@@ -86,11 +86,7 @@
#pragma mark - Private
+ (BOOL)isMetalAvailable {
-#if defined(RTC_SUPPORTS_METAL)
return MTLCreateSystemDefaultDevice() != nil;
-#else
- return NO;
-#endif
}
+ (MTKView *)createMetalView:(CGRect)frame {
diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
index e64f61912a..345170388d 100644
--- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
+++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
@@ -529,6 +529,14 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
return WEBRTC_VIDEO_CODEC_OK;
}
+- (NSInteger)resolutionAlignment {
+ return 1;
+}
+
+- (BOOL)applyAlignmentToAllSimulcastLayers {
+ return NO;
+}
+
#pragma mark - Private
- (NSInteger)releaseEncoder {
@@ -759,6 +767,10 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
renderTimeMs:(int64_t)renderTimeMs
timestamp:(uint32_t)timestamp
rotation:(RTCVideoRotation)rotation {
+ RTCVideoEncoderCallback callback = _callback;
+ if (!callback) {
+ return;
+ }
if (status != noErr) {
RTC_LOG(LS_ERROR) << "H264 encode failed with code: " << status;
return;
@@ -805,7 +817,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id)
_h264BitstreamParser.ParseBitstream(*buffer);
frame.qp = @(_h264BitstreamParser.GetLastSliceQp().value_or(-1));
- BOOL res = _callback(frame, codecSpecificInfo);
+ BOOL res = callback(frame, codecSpecificInfo);
if (!res) {
RTC_LOG(LS_ERROR) << "Encode callback failed";
return;
diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm
index 6dd0edbcd9..cae7a50318 100644
--- a/sdk/objc/native/api/video_capturer.mm
+++ b/sdk/objc/native/api/video_capturer.mm
@@ -11,7 +11,8 @@
#include "sdk/objc/native/api/video_capturer.h"
#include "absl/memory/memory.h"
-#include "api/video_track_source_proxy.h"
+#include "api/video_track_source_proxy_factory.h"
+#include "rtc_base/ref_counted_object.h"
#include "sdk/objc/native/src/objc_video_track_source.h"
namespace webrtc {
@@ -24,8 +25,7 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source(
new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(adapter));
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source =
- webrtc::VideoTrackSourceProxy::Create(
- signaling_thread, worker_thread, objc_video_track_source);
+ webrtc::CreateVideoTrackSourceProxy(signaling_thread, worker_thread, objc_video_track_source);
objc_video_capturer.delegate = adapter;
diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm
index e51fc9d319..b66554b1a4 100644
--- a/sdk/objc/native/src/objc_video_encoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_encoder_factory.mm
@@ -48,21 +48,24 @@ class ObjCVideoEncoder : public VideoEncoder {
}
int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override {
- [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame,
- id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info) {
- EncodedImage encodedImage = [frame nativeEncodedImage];
-
- // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases.
- CodecSpecificInfo codecSpecificInfo;
- if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) {
- codecSpecificInfo =
- [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
- }
-
- EncodedImageCallback::Result res = callback->OnEncodedImage(encodedImage, &codecSpecificInfo);
- return res.error == EncodedImageCallback::Result::OK;
- }];
-
+ if (callback) {
+ [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame,
+ id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info) {
+ EncodedImage encodedImage = [frame nativeEncodedImage];
+
+ // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases.
+ CodecSpecificInfo codecSpecificInfo;
+ if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) {
+ codecSpecificInfo =
+ [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
+ }
+
+ EncodedImageCallback::Result res = callback->OnEncodedImage(encodedImage, &codecSpecificInfo);
+ return res.error == EncodedImageCallback::Result::OK;
+ }];
+ } else {
+ [encoder_ setCallback:nil];
+ }
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -95,6 +98,8 @@ class ObjCVideoEncoder : public VideoEncoder {
info.scaling_settings = qp_thresholds ? ScalingSettings(qp_thresholds.low, qp_thresholds.high) :
ScalingSettings::kOff;
+ info.requested_resolution_alignment = encoder_.resolutionAlignment > 0 ? encoder_.resolutionAlignment : 1;
+ info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers;
info.is_hardware_accelerated = true;
info.has_internal_source = false;
return info;
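The wrapper now forwards two EncoderInfo fields from the Objective-C encoder. A short sketch of what those fields mean on the C++ side (the values here are made up):

```
// Illustrative EncoderInfo carrying the two fields surfaced by this change.
#include "api/video_codecs/video_encoder.h"

webrtc::VideoEncoder::EncoderInfo MakeAlignedEncoderInfo() {
  webrtc::VideoEncoder::EncoderInfo info;
  // Encoded width/height are expected to be multiples of this value.
  info.requested_resolution_alignment = 16;
  // Apply that alignment to every simulcast layer, not just the top one.
  info.apply_alignment_to_all_simulcast_layers = true;
  return info;
}
```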
diff --git a/stats/rtc_stats.cc b/stats/rtc_stats.cc
index 59de664c0e..4895edc738 100644
--- a/stats/rtc_stats.cc
+++ b/stats/rtc_stats.cc
@@ -65,6 +65,20 @@ std::string VectorOfStringsToString(const std::vector<T>& strings) {
}
template <typename T>
+std::string MapToString(const std::map<std::string, T>& map) {
+ rtc::StringBuilder sb;
+ sb << "{";
+ const char* separator = "";
+ for (const auto& element : map) {
+ sb << separator << rtc::ToString(element.first) << ":"
+ << rtc::ToString(element.second);
+ separator = ",";
+ }
+ sb << "}";
+ return sb.Release();
+}
+
+template <typename T>
std::string ToStringAsDouble(const T value) {
// JSON represents numbers as floating point numbers with about 15 decimal
// digits of precision.
@@ -88,6 +102,20 @@ std::string VectorToStringAsDouble(const std::vector<T>& vector) {
return sb.Release();
}
+template <typename T>
+std::string MapToStringAsDouble(const std::map<std::string, T>& map) {
+ rtc::StringBuilder sb;
+ sb << "{";
+ const char* separator = "";
+ for (const auto& element : map) {
+ sb << separator << "\"" << rtc::ToString(element.first)
+ << "\":" << ToStringAsDouble(element.second);
+ separator = ",";
+ }
+ sb << "}";
+ return sb.Release();
+}
+
} // namespace
bool RTCStats::operator==(const RTCStats& other) const {
@@ -248,6 +276,18 @@ WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector<std::string>,
false,
VectorOfStringsToString(value_),
VectorOfStringsToString(value_));
+WEBRTC_DEFINE_RTCSTATSMEMBER(rtc_stats_internal::MapStringUint64,
+ kMapStringUint64,
+ false,
+ false,
+ MapToString(value_),
+ MapToStringAsDouble(value_));
+WEBRTC_DEFINE_RTCSTATSMEMBER(rtc_stats_internal::MapStringDouble,
+ kMapStringDouble,
+ false,
+ false,
+ MapToString(value_),
+ MapToStringAsDouble(value_));
template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT)
RTCNonStandardStatsMember<bool>;
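rtc_stats.cc gains string and JSON renderers for the two new map-valued member types (kMapStringUint64, kMapStringDouble); qualityLimitationDurations further below is the first real stat to use one. A hedged sketch of populating such a member:

```
// Sketch: filling a map-valued RTCStatsMember.
#include <map>
#include <string>

#include "api/stats/rtc_stats.h"

void FillDurations(
    webrtc::RTCStatsMember<std::map<std::string, double>>* member) {
  std::map<std::string, double> durations{{"cpu", 1.5}, {"bandwidth", 0.25}};
  *member = durations;  // ToJson()/ToString() render it as {"cpu":1.5,...}.
}
```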
diff --git a/stats/rtc_stats_report.cc b/stats/rtc_stats_report.cc
index d29d819fc3..4fbd82508e 100644
--- a/stats/rtc_stats_report.cc
+++ b/stats/rtc_stats_report.cc
@@ -56,15 +56,12 @@ bool RTCStatsReport::ConstIterator::operator!=(
rtc::scoped_refptr<RTCStatsReport> RTCStatsReport::Create(
int64_t timestamp_us) {
- return rtc::scoped_refptr<RTCStatsReport>(
- new rtc::RefCountedObject<RTCStatsReport>(timestamp_us));
+ return rtc::scoped_refptr<RTCStatsReport>(new RTCStatsReport(timestamp_us));
}
RTCStatsReport::RTCStatsReport(int64_t timestamp_us)
: timestamp_us_(timestamp_us) {}
-RTCStatsReport::~RTCStatsReport() {}
-
rtc::scoped_refptr<RTCStatsReport> RTCStatsReport::Copy() const {
rtc::scoped_refptr<RTCStatsReport> copy = Create(timestamp_us_);
for (auto it = stats_.begin(); it != stats_.end(); ++it) {
diff --git a/stats/rtc_stats_unittest.cc b/stats/rtc_stats_unittest.cc
index b159977858..aff6ab30bd 100644
--- a/stats/rtc_stats_unittest.cc
+++ b/stats/rtc_stats_unittest.cc
@@ -71,7 +71,7 @@ TEST(RTCStatsTest, RTCStatsAndMembers) {
EXPECT_EQ(stats.id(), "testId");
EXPECT_EQ(stats.timestamp_us(), static_cast<int64_t>(42));
std::vector<const RTCStatsMemberInterface*> members = stats.Members();
- EXPECT_EQ(members.size(), static_cast<size_t>(14));
+ EXPECT_EQ(members.size(), static_cast<size_t>(16));
for (const RTCStatsMemberInterface* member : members) {
EXPECT_FALSE(member->is_defined());
}
@@ -98,6 +98,9 @@ TEST(RTCStatsTest, RTCStatsAndMembers) {
std::vector<std::string> sequence_string;
sequence_string.push_back(std::string("six"));
+ std::map<std::string, uint64_t> map_string_uint64{{"seven", 8}};
+ std::map<std::string, double> map_string_double{{"nine", 10.0}};
+
stats.m_sequence_bool = sequence_bool;
stats.m_sequence_int32 = sequence_int32;
stats.m_sequence_uint32 = sequence_uint32;
@@ -106,6 +109,8 @@ TEST(RTCStatsTest, RTCStatsAndMembers) {
stats.m_sequence_uint64 = sequence_uint64;
stats.m_sequence_double = sequence_double;
stats.m_sequence_string = sequence_string;
+ stats.m_map_string_uint64 = map_string_uint64;
+ stats.m_map_string_double = map_string_double;
for (const RTCStatsMemberInterface* member : members) {
EXPECT_TRUE(member->is_defined());
}
@@ -123,6 +128,8 @@ TEST(RTCStatsTest, RTCStatsAndMembers) {
EXPECT_EQ(*stats.m_sequence_uint64, sequence_uint64);
EXPECT_EQ(*stats.m_sequence_double, sequence_double);
EXPECT_EQ(*stats.m_sequence_string, sequence_string);
+ EXPECT_EQ(*stats.m_map_string_uint64, map_string_uint64);
+ EXPECT_EQ(*stats.m_map_string_double, map_string_double);
int32_t numbers[] = {4, 8, 15, 16, 23, 42};
std::vector<int32_t> numbers_sequence(&numbers[0], &numbers[6]);
@@ -152,6 +159,8 @@ TEST(RTCStatsTest, EqualityOperator) {
stats_with_all_values.m_sequence_uint64 = std::vector<uint64_t>();
stats_with_all_values.m_sequence_double = std::vector<double>();
stats_with_all_values.m_sequence_string = std::vector<std::string>();
+ stats_with_all_values.m_map_string_uint64 = std::map<std::string, uint64_t>();
+ stats_with_all_values.m_map_string_double = std::map<std::string, double>();
EXPECT_NE(stats_with_all_values, empty_stats);
EXPECT_EQ(stats_with_all_values, stats_with_all_values);
EXPECT_NE(stats_with_all_values.m_int32, stats_with_all_values.m_uint32);
@@ -180,6 +189,8 @@ TEST(RTCStatsTest, EqualityOperator) {
one_member_different[11].m_sequence_uint64->push_back(321);
one_member_different[12].m_sequence_double->push_back(321.0);
one_member_different[13].m_sequence_string->push_back("321");
+ (*one_member_different[13].m_map_string_uint64)["321"] = 321;
+ (*one_member_different[13].m_map_string_double)["321"] = 321.0;
for (size_t i = 0; i < 14; ++i) {
EXPECT_NE(stats_with_all_values, one_member_different[i]);
}
@@ -238,6 +249,11 @@ TEST(RTCStatsTest, RTCStatsPrintsValidJson) {
std::vector<std::string> sequence_string;
sequence_string.push_back(std::string("four"));
+ std::map<std::string, uint64_t> map_string_uint64{
+ {"long", static_cast<uint64_t>(1234567890123456499L)}};
+ std::map<std::string, double> map_string_double{
+ {"three", 123.4567890123456499}, {"thirteen", 123.4567890123456499}};
+
RTCTestStats stats(id, timestamp);
stats.m_bool = m_bool;
stats.m_int32 = m_int32;
@@ -249,6 +265,8 @@ TEST(RTCStatsTest, RTCStatsPrintsValidJson) {
stats.m_sequence_int64 = sequence_int64;
stats.m_sequence_double = sequence_double;
stats.m_sequence_string = sequence_string;
+ stats.m_map_string_uint64 = map_string_uint64;
+ stats.m_map_string_double = map_string_double;
Json::Value json_output;
EXPECT_TRUE(Json::Reader().parse(stats.ToJson(), json_output));
@@ -278,6 +296,16 @@ TEST(RTCStatsTest, RTCStatsPrintsValidJson) {
rtc::GetValueFromJsonObject(json_output, "mSequenceString", &json_array));
EXPECT_TRUE(rtc::JsonArrayToStringVector(json_array, &sequence_string));
+ Json::Value json_map;
+ EXPECT_TRUE(
+ rtc::GetValueFromJsonObject(json_output, "mMapStringDouble", &json_map));
+ for (const auto& entry : map_string_double) {
+ double double_output = 0.0;
+ EXPECT_TRUE(
+ rtc::GetDoubleFromJsonObject(json_map, entry.first, &double_output));
+ EXPECT_NEAR(double_output, entry.second, GetExpectedError(entry.second));
+ }
+
EXPECT_EQ(id, stats.id());
EXPECT_EQ(timestamp, stats.timestamp_us());
EXPECT_EQ(m_bool, *stats.m_bool);
@@ -286,6 +314,7 @@ TEST(RTCStatsTest, RTCStatsPrintsValidJson) {
EXPECT_EQ(sequence_bool, *stats.m_sequence_bool);
EXPECT_EQ(sequence_int32, *stats.m_sequence_int32);
EXPECT_EQ(sequence_string, *stats.m_sequence_string);
+ EXPECT_EQ(map_string_double, *stats.m_map_string_double);
EXPECT_NEAR(m_double, *stats.m_double, GetExpectedError(*stats.m_double));
@@ -295,6 +324,13 @@ TEST(RTCStatsTest, RTCStatsPrintsValidJson) {
GetExpectedError(stats.m_sequence_double->at(i)));
}
+ EXPECT_EQ(map_string_double.size(), stats.m_map_string_double->size());
+ for (const auto& entry : map_string_double) {
+ auto it = stats.m_map_string_double->find(entry.first);
+ EXPECT_NE(it, stats.m_map_string_double->end());
+ EXPECT_NEAR(entry.second, it->second, GetExpectedError(it->second));
+ }
+
// We read mInt64 as double since JSON stores all numbers as doubles, so there
// is not enough precision to represent large numbers.
double m_int64_as_double;
@@ -320,6 +356,19 @@ TEST(RTCStatsTest, RTCStatsPrintsValidJson) {
GetExpectedError(stats_value_as_double));
}
+ // Similarly, read Uint64 as double
+ EXPECT_TRUE(
+ rtc::GetValueFromJsonObject(json_output, "mMapStringUint64", &json_map));
+ for (const auto& entry : map_string_uint64) {
+ const double stats_value_as_double =
+ static_cast<double>((*stats.m_map_string_uint64)[entry.first]);
+ double double_output = 0.0;
+ EXPECT_TRUE(
+ rtc::GetDoubleFromJsonObject(json_map, entry.first, &double_output));
+ EXPECT_NEAR(double_output, stats_value_as_double,
+ GetExpectedError(stats_value_as_double));
+ }
+
// Neither stats.m_uint32 nor stats.m_uint64 are defined, so "mUint64" and
// "mUint32" should not be part of the generated JSON object.
int m_uint32;
diff --git a/stats/rtcstats_objects.cc b/stats/rtcstats_objects.cc
index dcd2aeb776..0db833c7fa 100644
--- a/stats/rtcstats_objects.cc
+++ b/stats/rtcstats_objects.cc
@@ -811,6 +811,7 @@ WEBRTC_RTCSTATS_IMPL(
&huge_frames_sent,
&total_packet_send_delay,
&quality_limitation_reason,
+ &quality_limitation_durations,
&quality_limitation_resolution_changes,
&content_type,
&encoder_implementation,
@@ -847,6 +848,7 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id,
huge_frames_sent("hugeFramesSent"),
total_packet_send_delay("totalPacketSendDelay"),
quality_limitation_reason("qualityLimitationReason"),
+ quality_limitation_durations("qualityLimitationDurations"),
quality_limitation_resolution_changes(
"qualityLimitationResolutionChanges"),
content_type("contentType"),
@@ -879,6 +881,7 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(
huge_frames_sent(other.huge_frames_sent),
total_packet_send_delay(other.total_packet_send_delay),
quality_limitation_reason(other.quality_limitation_reason),
+ quality_limitation_durations(other.quality_limitation_durations),
quality_limitation_resolution_changes(
other.quality_limitation_resolution_changes),
content_type(other.content_type),
diff --git a/stats/test/rtc_test_stats.cc b/stats/test/rtc_test_stats.cc
index d8bcbb19eb..e73da76fa9 100644
--- a/stats/test/rtc_test_stats.cc
+++ b/stats/test/rtc_test_stats.cc
@@ -30,7 +30,9 @@ WEBRTC_RTCSTATS_IMPL(RTCTestStats,
&m_sequence_int64,
&m_sequence_uint64,
&m_sequence_double,
- &m_sequence_string)
+ &m_sequence_string,
+ &m_map_string_uint64,
+ &m_map_string_double)
RTCTestStats::RTCTestStats(const std::string& id, int64_t timestamp_us)
: RTCStats(id, timestamp_us),
@@ -47,7 +49,9 @@ RTCTestStats::RTCTestStats(const std::string& id, int64_t timestamp_us)
m_sequence_int64("mSequenceInt64"),
m_sequence_uint64("mSequenceUint64"),
m_sequence_double("mSequenceDouble"),
- m_sequence_string("mSequenceString") {}
+ m_sequence_string("mSequenceString"),
+ m_map_string_uint64("mMapStringUint64"),
+ m_map_string_double("mMapStringDouble") {}
RTCTestStats::RTCTestStats(const RTCTestStats& other)
: RTCStats(other.id(), other.timestamp_us()),
@@ -64,7 +68,9 @@ RTCTestStats::RTCTestStats(const RTCTestStats& other)
m_sequence_int64(other.m_sequence_int64),
m_sequence_uint64(other.m_sequence_uint64),
m_sequence_double(other.m_sequence_double),
- m_sequence_string(other.m_sequence_string) {}
+ m_sequence_string(other.m_sequence_string),
+ m_map_string_uint64(other.m_map_string_uint64),
+ m_map_string_double(other.m_map_string_double) {}
RTCTestStats::~RTCTestStats() {}
diff --git a/stats/test/rtc_test_stats.h b/stats/test/rtc_test_stats.h
index 1db32c25c1..0feb07e78e 100644
--- a/stats/test/rtc_test_stats.h
+++ b/stats/test/rtc_test_stats.h
@@ -12,6 +12,7 @@
#define STATS_TEST_RTC_TEST_STATS_H_
#include <cstdint>
+#include <map>
#include <string>
#include <vector>
@@ -42,6 +43,8 @@ class RTC_EXPORT RTCTestStats : public RTCStats {
RTCStatsMember<std::vector<uint64_t>> m_sequence_uint64;
RTCStatsMember<std::vector<double>> m_sequence_double;
RTCStatsMember<std::vector<std::string>> m_sequence_string;
+ RTCStatsMember<std::map<std::string, uint64_t>> m_map_string_uint64;
+ RTCStatsMember<std::map<std::string, double>> m_map_string_double;
};
} // namespace webrtc
diff --git a/style-guide.md b/style-guide.md
deleted file mode 100644
index dd4fb527d5..0000000000
--- a/style-guide.md
+++ /dev/null
@@ -1,272 +0,0 @@
-# WebRTC coding style guide
-
-## **General advice**
-
-Some older parts of the code violate the style guide in various ways.
-
-* If making small changes to such code, follow the style guide when
- it’s reasonable to do so, but in matters of formatting etc., it is
- often better to be consistent with the surrounding code.
-* If making large changes to such code, consider first cleaning it up
- in a separate CL.
-
-## **C++**
-
-WebRTC follows the [Chromium][chr-style] and [Google][goog-style] C++
-style guides. In cases where they conflict, the Chromium style guide
-trumps the Google style guide, and the rules in this file trump them
-both.
-
-[chr-style]: https://chromium.googlesource.com/chromium/src/+/HEAD/styleguide/c++/c++.md
-[goog-style]: https://google.github.io/styleguide/cppguide.html
-
-### C++ version
-
-WebRTC is written in C++14, but with some restrictions:
-
-* We only allow the subset of C++14 (language and library) that is not
- banned by Chromium; see [this page][chromium-cpp].
-* We only allow the subset of C++14 that is also valid C++17;
- otherwise, users would not be able to compile WebRTC in C++17 mode.
-
-[chromium-cpp]: https://chromium-cpp.appspot.com/
-
-Unlike the Chromium and Google C++ style guides, we do not allow C++20-style
-designated initializers, because we want to stay compatible with compilers that
-do not yet support them.
-
-### Abseil
-
-You may use a subset of the utilities provided by the [Abseil][abseil]
-library when writing WebRTC C++ code. [Details](abseil-in-webrtc.md).
-
-[abseil]: https://abseil.io/about/
-
-### <a name="h-cc-pairs"></a>`.h` and `.cc` files come in pairs
-
-`.h` and `.cc` files should come in pairs, with the same name (except
-for the file type suffix), in the same directory, in the same build
-target.
-
-* If a declaration in `path/to/foo.h` has a definition in some `.cc`
- file, it should be in `path/to/foo.cc`.
-* If a definition in `path/to/foo.cc` file has a declaration in some
- `.h` file, it should be in `path/to/foo.h`.
-* Omit the `.cc` file if it would have been empty, but still list the
- `.h` file in a build target.
-* Omit the `.h` file if it would have been empty. (This can happen
- with unit test `.cc` files, and with `.cc` files that define
- `main`.)
-
-This makes the source code easier to navigate and organize, and
-precludes some questionable build system practices such as having
-build targets that don’t pull in definitions for everything they
-declare.
-
-[Examples and exceptions](style-guide/h-cc-pairs.md).
-
-### TODO comments
-
-Follow the [Google style][goog-style-todo]. When referencing a WebRTC bug,
-prefer the url form, e.g.
-```
-// TODO(bugs.webrtc.org/12345): Delete the hack when blocking bugs are resolved.
-```
-
-[goog-style-todo]: https://google.github.io/styleguide/cppguide.html#TODO_Comments
-
-### Deprecation
-
-Annotate the declarations of deprecated functions and classes with
-[ABSL_DEPRECATED][ABSL_DEPRECATED] to cause an error when they're used inside
-webrtc and a compiler warning when they're used by dependant projects. Like so:
-
-```
-ABSL_DEPRECATED("bugs.webrtc.org/12345")
-std::pony PonyPlz(const std::pony_spec& ps);
-```
-
-NOTE 1: The annotation goes on the declaration in the .h file, not the
-definition in the .cc file!
-
-NOTE 2: In order to have unit tests that use the deprecated function without
-getting errors, do something like this:
-
-```
-std::pony DEPRECATED_PonyPlz(const std::pony_spec& ps);
-ABSL_DEPRECATED("bugs.webrtc.org/12345")
-inline std::pony PonyPlz(const std::pony_spec& ps) {
- return DEPRECATED_PonyPlz(ps);
-}
-```
-
-In other words, rename the existing function, and provide an inline wrapper
-using the original name that calls it. That way, callers who are willing to
-call it using the DEPRECATED_-prefixed name don't get the warning.
-
-[ABSL_DEPRECATED]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/abseil-cpp/absl/base/attributes.h?q=ABSL_DEPRECATED
-
-### ArrayView
-
-When passing an array of values to a function, use `rtc::ArrayView`
-whenever possible—that is, whenever you’re not passing ownership of
-the array, and don’t allow the callee to change the array size.
-
-For example,
-
-instead of | use
-------------------------------------|---------------------
-`const std::vector<T>&` | `ArrayView<const T>`
-`const T* ptr, size_t num_elements` | `ArrayView<const T>`
-`T* ptr, size_t num_elements` | `ArrayView<T>`
-
-See [the source](api/array_view.h) for more detailed docs.
-
-### sigslot
-
-SIGSLOT IS DEPRECATED.
-
-Prefer webrtc::CallbackList, and manage thread safety yourself.
-
-### Smart pointers
-
-The following smart pointer types are recommended:
-
- * std::unique_ptr for all singly-owned objects
- * rtc::scoped_refptr for all objects with shared ownership
-
-Use of std::shared_ptr is *not permitted*. It is
-[banned](https://chromium-cpp.appspot.com/#library-blocklist) in the Chromium
-style guide (overriding the Google style guide), and offers no compelling
-advantage over rtc::scoped_refptr (which is cloned from the corresponding
-Chromium type).
-
-In most cases, one will want to explicitly control lifetimes, and therefore
-use std::unique_ptr, but in some cases, for instance where references have
-to exist both from the API users and internally, with no way to
-invalidate pointers held by the API user, rtc::scoped_refptr can be
-appropriate.
-
-### std::bind
-
-Don’t use `std::bind`—there are pitfalls, and lambdas are almost as
-succinct and already familiar to modern C++ programmers.
-
-### std::function
-
-`std::function` is allowed, but remember that it’s not the right tool
-for every occasion. Prefer to use interfaces when that makes sense,
-and consider `rtc::FunctionView` for cases where the callee will not
-save the function object.
-
-### Forward declarations
-
-WebRTC follows the [Google][goog-forward-declarations] C++ style guide
-with respect to forward declarations. In summary: avoid using forward
-declarations where possible; just `#include` the headers you need.
-
-[goog-forward-declarations]: https://google.github.io/styleguide/cppguide.html#Forward_Declarations
-
-## **C**
-
-There’s a substantial chunk of legacy C code in WebRTC, and a lot of
-it is old enough that it violates the parts of the C++ style guide
-that also applies to C (naming etc.) for the simple reason that it
-pre-dates the use of the current C++ style guide for this code base.
-
-* If making small changes to C code, mimic the style of the
- surrounding code.
-* If making large changes to C code, consider converting the whole
- thing to C++ first.
-
-## **Java**
-
-WebRTC follows the [Google Java style guide][goog-java-style].
-
-[goog-java-style]: https://google.github.io/styleguide/javaguide.html
-
-## **Objective-C and Objective-C++**
-
-WebRTC follows the
-[Chromium Objective-C and Objective-C++ style guide][chr-objc-style].
-
-[chr-objc-style]: https://chromium.googlesource.com/chromium/src/+/HEAD/styleguide/objective-c/objective-c.md
-
-## **Python**
-
-WebRTC follows [Chromium’s Python style][chr-py-style].
-
-[chr-py-style]: https://chromium.googlesource.com/chromium/src/+/HEAD/styleguide/styleguide.md#python
-
-## **Build files**
-
-The WebRTC build files are written in [GN][gn], and we follow
-the [Chromium GN style guide][chr-gn-style]. Additionally, there are
-some WebRTC-specific rules below; in case of conflict, they trump the
-Chromium style guide.
-
-[gn]: https://chromium.googlesource.com/chromium/src/tools/gn/
-[chr-gn-style]: https://chromium.googlesource.com/chromium/src/tools/gn/+/HEAD/docs/style_guide.md
-
-### <a name="webrtc-gn-templates"></a>WebRTC-specific GN templates
-
-Use the following [GN templates][gn-templ] to ensure that all
-our [targets][gn-target] are built with the same configuration:
-
-instead of | use
------------------|---------------------
-`executable` | `rtc_executable`
-`shared_library` | `rtc_shared_library`
-`source_set` | `rtc_source_set`
-`static_library` | `rtc_static_library`
-`test` | `rtc_test`
-
-[gn-templ]: https://chromium.googlesource.com/chromium/src/tools/gn/+/HEAD/docs/language.md#Templates
-[gn-target]: https://chromium.googlesource.com/chromium/src/tools/gn/+/HEAD/docs/language.md#Targets
-
-### Target visibility and the native API
-
-The [WebRTC-specific GN templates](#webrtc-gn-templates) declare build
-targets whose default `visibility` allows all other targets in the
-WebRTC tree (and no targets outside the tree) to depend on them.
-
-Prefer to restrict the visibility if possible:
-
-* If a target is used by only one or a tiny number of other targets,
- prefer to list them explicitly: `visibility = [ ":foo", ":bar" ]`
-* If a target is used only by targets in the same `BUILD.gn` file:
- `visibility = [ ":*" ]`.
-
-Setting `visibility = [ "*" ]` means that targets outside the WebRTC
-tree can depend on this target; use this only for build targets whose
-headers are part of the [native API](native-api.md).
-
-### Conditional compilation with the C preprocessor
-
-Avoid using the C preprocessor to conditionally enable or disable
-pieces of code. But if you can’t avoid it, introduce a GN variable,
-and then set a preprocessor constant to either 0 or 1 in the build
-targets that need it:
-
-```
-if (apm_debug_dump) {
- defines = [ "WEBRTC_APM_DEBUG_DUMP=1" ]
-} else {
- defines = [ "WEBRTC_APM_DEBUG_DUMP=0" ]
-}
-```
-
-In C, C++, or Objective-C files, use `#if` when testing the flag,
-not `#ifdef` or `#if defined()`:
-
-```
-#if WEBRTC_APM_DEBUG_DUMP
-// One way.
-#else
-// Or another.
-#endif
-```
-
-When combined with the `-Wundef` compiler option, this produces
-compile time warnings if preprocessor symbols are misspelled, or used
-without corresponding build rules to set them.
diff --git a/style-guide/OWNERS.webrtc b/style-guide/OWNERS.webrtc
deleted file mode 100644
index a3ecbc9948..0000000000
--- a/style-guide/OWNERS.webrtc
+++ /dev/null
@@ -1 +0,0 @@
-danilchap@webrtc.org
diff --git a/system_wrappers/include/clock.h b/system_wrappers/include/clock.h
index bcb7feaa7d..271291c214 100644
--- a/system_wrappers/include/clock.h
+++ b/system_wrappers/include/clock.h
@@ -34,22 +34,23 @@ class RTC_EXPORT Clock {
virtual ~Clock() {}
// Return a timestamp relative to an unspecified epoch.
- // TODO(bugs.webrtc.org/11327): Make this a pure virtual function.
- virtual Timestamp CurrentTime() {
- return Timestamp::Micros(TimeInMicroseconds());
- }
-
- // TODO(bugs.webrtc.org/11327): Make the following two methods non-virtual
- // or completely remove them.
- virtual int64_t TimeInMilliseconds() { return CurrentTime().ms(); }
- virtual int64_t TimeInMicroseconds() { return CurrentTime().us(); }
+ virtual Timestamp CurrentTime() = 0;
+ int64_t TimeInMilliseconds() { return CurrentTime().ms(); }
+ int64_t TimeInMicroseconds() { return CurrentTime().us(); }
// Retrieve an NTP absolute timestamp (with an epoch of Jan 1, 1900).
- virtual NtpTime CurrentNtpTime() = 0;
+ // TODO(bugs.webrtc.org/11327): Make this non-virtual once
+ // "WebRTC-SystemIndependentNtpTimeKillSwitch" is removed.
+ virtual NtpTime CurrentNtpTime() {
+ return ConvertTimestampToNtpTime(CurrentTime());
+ }
+ int64_t CurrentNtpInMilliseconds() { return CurrentNtpTime().ToMs(); }
- // TODO(bugs.webrtc.org/11327): Make the following method non-virtual
- // or completely remove it.
- virtual int64_t CurrentNtpInMilliseconds() { return CurrentNtpTime().ToMs(); }
+ // Converts a relative timestamp returned by this clock to NTP time.
+ virtual NtpTime ConvertTimestampToNtpTime(Timestamp timestamp) = 0;
+ int64_t ConvertTimestampToNtpTimeInMilliseconds(int64_t timestamp_ms) {
+ return ConvertTimestampToNtpTime(Timestamp::Millis(timestamp_ms)).ToMs();
+ }
// Returns an instance of the real-time system clock implementation.
static Clock* GetRealTimeClock();
@@ -65,7 +66,7 @@ class SimulatedClock : public Clock {
// Return a timestamp with an epoch of Jan 1, 1970.
Timestamp CurrentTime() override;
- NtpTime CurrentNtpTime() override;
+ NtpTime ConvertTimestampToNtpTime(Timestamp timestamp) override;
// Advance the simulated clock with a given number of milliseconds or
// microseconds.
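
With this change a `Clock` implementation only has to override `CurrentTime()` and `ConvertTimestampToNtpTime()`; the millisecond and NTP helpers become non-virtual wrappers inherited from the base class. A minimal sketch of a custom clock under the new interface (the `OffsetClock` name is illustrative and not part of this patch):

```
// Sketch only: wraps another Clock and shifts its timeline by a fixed offset.
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/ntp_time.h"

namespace example {

class OffsetClock : public webrtc::Clock {
 public:
  OffsetClock(webrtc::Clock* base, webrtc::TimeDelta offset)
      : base_(base), offset_(offset) {}

  // The only two methods that still need to be provided.
  webrtc::Timestamp CurrentTime() override {
    return base_->CurrentTime() + offset_;
  }
  webrtc::NtpTime ConvertTimestampToNtpTime(
      webrtc::Timestamp timestamp) override {
    return base_->ConvertTimestampToNtpTime(timestamp);
  }

 private:
  webrtc::Clock* const base_;
  const webrtc::TimeDelta offset_;
};

}  // namespace example
```

The inherited helpers (`TimeInMilliseconds()`, `CurrentNtpInMilliseconds()`, `ConvertTimestampToNtpTimeInMilliseconds()`) then derive their values from these two overrides.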
diff --git a/system_wrappers/source/clock.cc b/system_wrappers/source/clock.cc
index 2c3981a5a4..77c1d36327 100644
--- a/system_wrappers/source/clock.cc
+++ b/system_wrappers/source/clock.cc
@@ -93,6 +93,12 @@ class RealTimeClock : public Clock {
: SystemDependentNtpTime();
}
+ NtpTime ConvertTimestampToNtpTime(Timestamp timestamp) override {
+ // This method does not check |use_system_independent_ntp_time_| because
+ // no callers ever relied on the old behavior of |CurrentNtpTime|.
+ return TimeMicrosToNtp(timestamp.us());
+ }
+
protected:
virtual timeval CurrentTimeVal() = 0;
@@ -276,11 +282,11 @@ Timestamp SimulatedClock::CurrentTime() {
return Timestamp::Micros(time_us_.load(std::memory_order_relaxed));
}
-NtpTime SimulatedClock::CurrentNtpTime() {
- int64_t now_ms = TimeInMilliseconds();
- uint32_t seconds = (now_ms / 1000) + kNtpJan1970;
- uint32_t fractions =
- static_cast<uint32_t>((now_ms % 1000) * kMagicNtpFractionalUnit / 1000);
+NtpTime SimulatedClock::ConvertTimestampToNtpTime(Timestamp timestamp) {
+ int64_t now_us = timestamp.us();
+ uint32_t seconds = (now_us / 1'000'000) + kNtpJan1970;
+ uint32_t fractions = static_cast<uint32_t>(
+ (now_us % 1'000'000) * kMagicNtpFractionalUnit / 1'000'000);
return NtpTime(seconds, fractions);
}
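
For reference, the conversion above maps microseconds since the simulated epoch to NTP seconds plus 1/2^32-second fractions, using `kNtpJan1970` (2208988800, the seconds between 1900 and 1970) and `kMagicNtpFractionalUnit` (2^32). A spot check of the arithmetic; the `static_assert` is illustrative and not part of the patch:

```
// 1.5 s since the epoch -> (1 + kNtpJan1970) NTP seconds and exactly half of
// 2^32 NTP fractions.
static_assert((1'500'000 % 1'000'000) * 4'294'967'296LL / 1'000'000 ==
                  2'147'483'648LL,
              "0.5 s corresponds to 2^31 NTP fractions");
```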
diff --git a/test/BUILD.gn b/test/BUILD.gn
index 988d15fd30..77f003cfbe 100644
--- a/test/BUILD.gn
+++ b/test/BUILD.gn
@@ -212,6 +212,7 @@ rtc_library("rtp_test_utils") {
"../rtc_base/synchronization:mutex",
"../rtc_base/system:arch",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("field_trial") {
diff --git a/test/call_test.cc b/test/call_test.cc
index 0ba947ce08..11230dae2f 100644
--- a/test/call_test.cc
+++ b/test/call_test.cc
@@ -409,7 +409,7 @@ void CallTest::CreateMatchingAudioAndFecConfigs(
if (num_flexfec_streams_ == 1) {
CreateMatchingFecConfig(rtcp_send_transport, *GetVideoSendConfig());
for (const RtpExtension& extension : GetVideoSendConfig()->rtp.extensions)
- GetFlexFecConfig()->rtp_header_extensions.push_back(extension);
+ GetFlexFecConfig()->rtp.extensions.push_back(extension);
}
}
@@ -444,9 +444,9 @@ void CallTest::CreateMatchingFecConfig(
const VideoSendStream::Config& send_config) {
FlexfecReceiveStream::Config config(transport);
config.payload_type = send_config.rtp.flexfec.payload_type;
- config.remote_ssrc = send_config.rtp.flexfec.ssrc;
+ config.rtp.remote_ssrc = send_config.rtp.flexfec.ssrc;
config.protected_media_ssrcs = send_config.rtp.flexfec.protected_media_ssrcs;
- config.local_ssrc = kReceiverLocalVideoSsrc;
+ config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
if (!video_receive_configs_.empty()) {
video_receive_configs_[0].rtp.protected_by_flexfec = true;
video_receive_configs_[0].rtp.packet_sink_ = this;
diff --git a/test/drifting_clock.cc b/test/drifting_clock.cc
index 1a5154557e..47c8e56916 100644
--- a/test/drifting_clock.cc
+++ b/test/drifting_clock.cc
@@ -28,22 +28,18 @@ TimeDelta DriftingClock::Drift() const {
return (now - start_time_) * drift_;
}
-Timestamp DriftingClock::CurrentTime() {
- return clock_->CurrentTime() + Drift() / 1000.;
+Timestamp DriftingClock::Drift(Timestamp timestamp) const {
+ return timestamp + Drift() / 1000.;
}
-NtpTime DriftingClock::CurrentNtpTime() {
+NtpTime DriftingClock::Drift(NtpTime ntp_time) const {
// NTP precision is 1/2^32 seconds, i.e. 2^32 ntp fractions = 1 second.
const double kNtpFracPerMicroSecond = 4294.967296; // = 2^32 / 10^6
- NtpTime ntp = clock_->CurrentNtpTime();
- uint64_t total_fractions = static_cast<uint64_t>(ntp);
+ uint64_t total_fractions = static_cast<uint64_t>(ntp_time);
total_fractions += Drift().us() * kNtpFracPerMicroSecond;
return NtpTime(total_fractions);
}
-int64_t DriftingClock::CurrentNtpInMilliseconds() {
- return clock_->CurrentNtpInMilliseconds() + Drift().ms();
-}
} // namespace test
} // namespace webrtc
diff --git a/test/drifting_clock.h b/test/drifting_clock.h
index 2539b61786..3471c008a1 100644
--- a/test/drifting_clock.h
+++ b/test/drifting_clock.h
@@ -30,12 +30,16 @@ class DriftingClock : public Clock {
return 1.0f - percent / 100.0f;
}
- Timestamp CurrentTime() override;
- NtpTime CurrentNtpTime() override;
- int64_t CurrentNtpInMilliseconds() override;
+ Timestamp CurrentTime() override { return Drift(clock_->CurrentTime()); }
+ NtpTime CurrentNtpTime() override { return Drift(clock_->CurrentNtpTime()); }
+ NtpTime ConvertTimestampToNtpTime(Timestamp timestamp) override {
+ return Drift(clock_->ConvertTimestampToNtpTime(timestamp));
+ }
private:
TimeDelta Drift() const;
+ Timestamp Drift(Timestamp timestamp) const;
+ NtpTime Drift(NtpTime ntp_time) const;
Clock* const clock_;
const float drift_;
diff --git a/test/encoder_settings.cc b/test/encoder_settings.cc
index 1248d8fe6d..c8251883fd 100644
--- a/test/encoder_settings.cc
+++ b/test/encoder_settings.cc
@@ -120,7 +120,7 @@ void FillEncoderConfiguration(VideoCodecType codec_type,
configuration->codec_type = codec_type;
configuration->number_of_streams = num_streams;
configuration->video_stream_factory =
- new rtc::RefCountedObject<DefaultVideoStreamFactory>();
+ rtc::make_ref_counted<DefaultVideoStreamFactory>();
configuration->max_bitrate_bps = 0;
configuration->simulcast_layers = std::vector<VideoStream>(num_streams);
for (size_t i = 0; i < num_streams; ++i) {
diff --git a/test/fake_texture_frame.cc b/test/fake_texture_frame.cc
index 4fa5e9d242..3f155184ab 100644
--- a/test/fake_texture_frame.cc
+++ b/test/fake_texture_frame.cc
@@ -23,7 +23,7 @@ VideoFrame FakeNativeBuffer::CreateFrame(int width,
VideoRotation rotation) {
return VideoFrame::Builder()
.set_video_frame_buffer(
- new rtc::RefCountedObject<FakeNativeBuffer>(width, height))
+ rtc::make_ref_counted<FakeNativeBuffer>(width, height))
.set_timestamp_rtp(timestamp)
.set_timestamp_ms(render_time_ms)
.set_rotation(rotation)
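
This hunk, like many others in the patch, replaces `new rtc::RefCountedObject<T>(...)` with `rtc::make_ref_counted<T>(...)`, which hides the `RefCountedObject` wrapper and returns an `rtc::scoped_refptr<T>` directly. A self-contained sketch of the pattern; `FooBuffer` is a stand-in type rather than WebRTC API, and the header paths are assumed from this revision:

```
#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counted_object.h"

namespace example {

class FooBuffer : public rtc::RefCountInterface {
 public:
  FooBuffer(int width, int height) : width_(width), height_(height) {}
  int width() const { return width_; }
  int height() const { return height_; }

 private:
  const int width_;
  const int height_;
};

rtc::scoped_refptr<FooBuffer> CreateBuffer(int width, int height) {
  // Before: rtc::scoped_refptr<FooBuffer>(
  //             new rtc::RefCountedObject<FooBuffer>(width, height));
  return rtc::make_ref_counted<FooBuffer>(width, height);
}

}  // namespace example
```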
diff --git a/test/fuzzers/BUILD.gn b/test/fuzzers/BUILD.gn
index 9395c7a8f1..23ad728dba 100644
--- a/test/fuzzers/BUILD.gn
+++ b/test/fuzzers/BUILD.gn
@@ -245,6 +245,7 @@ webrtc_fuzzer_test("congestion_controller_feedback_fuzzer") {
"../../modules/remote_bitrate_estimator",
"../../modules/rtp_rtcp:rtp_rtcp_format",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/functional:bind_front" ]
}
rtc_library("audio_decoder_fuzzer") {
@@ -604,6 +605,17 @@ webrtc_fuzzer_test("sctp_utils_fuzzer") {
]
}
+webrtc_fuzzer_test("dcsctp_socket_fuzzer") {
+ sources = [ "dcsctp_socket_fuzzer.cc" ]
+ deps = [
+ "../../net/dcsctp/fuzzers:dcsctp_fuzzers",
+ "../../net/dcsctp/public:socket",
+ "../../net/dcsctp/public:types",
+ "../../net/dcsctp/socket:dcsctp_socket",
+ "../../rtc_base:rtc_base_approved",
+ ]
+}
+
webrtc_fuzzer_test("rtp_header_parser_fuzzer") {
sources = [ "rtp_header_parser_fuzzer.cc" ]
deps = [ "../:rtp_test_utils" ]
@@ -623,6 +635,30 @@ webrtc_fuzzer_test("vp8_replay_fuzzer") {
seed_corpus = "corpora/rtpdump-corpus/vp8"
}
+if (rtc_build_libvpx) {
+ webrtc_fuzzer_test("vp9_encoder_references_fuzzer") {
+ sources = [ "vp9_encoder_references_fuzzer.cc" ]
+ deps = [
+ "..:test_support",
+ "../../api:array_view",
+ "../../api/transport:webrtc_key_value_config",
+ "../../api/video:video_frame",
+ "../../api/video_codecs:video_codecs_api",
+ "../../modules/video_coding:frame_dependencies_calculator",
+ "../../modules/video_coding:mock_libvpx_interface",
+ "../../modules/video_coding:webrtc_vp9",
+ "../../rtc_base:safe_compare",
+ rtc_libvpx_dir,
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ ]
+ defines = [ "RTC_ENABLE_VP9" ]
+ }
+}
+
webrtc_fuzzer_test("vp9_replay_fuzzer") {
sources = [ "vp9_replay_fuzzer.cc" ]
deps = [
diff --git a/test/fuzzers/congestion_controller_feedback_fuzzer.cc b/test/fuzzers/congestion_controller_feedback_fuzzer.cc
index 084c8c300a..06a73b0434 100644
--- a/test/fuzzers/congestion_controller_feedback_fuzzer.cc
+++ b/test/fuzzers/congestion_controller_feedback_fuzzer.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "absl/functional/bind_front.h"
#include "modules/congestion_controller/include/receive_side_congestion_controller.h"
#include "modules/pacing/packet_router.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
@@ -21,7 +22,10 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
return;
SimulatedClock clock(data[i++]);
PacketRouter packet_router;
- ReceiveSideCongestionController cc(&clock, &packet_router);
+ ReceiveSideCongestionController cc(
+ &clock,
+ absl::bind_front(&PacketRouter::SendCombinedRtcpPacket, &packet_router),
+ absl::bind_front(&PacketRouter::SendRemb, &packet_router), nullptr);
RemoteBitrateEstimator* rbe = cc.GetRemoteBitrateEstimator(true);
RTPHeader header;
header.ssrc = ByteReader<uint32_t>::ReadBigEndian(&data[i]);
diff --git a/test/fuzzers/dcsctp_socket_fuzzer.cc b/test/fuzzers/dcsctp_socket_fuzzer.cc
new file mode 100644
index 0000000000..390cbb7f6c
--- /dev/null
+++ b/test/fuzzers/dcsctp_socket_fuzzer.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/fuzzers/dcsctp_fuzzers.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/socket/dcsctp_socket.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ dcsctp::dcsctp_fuzzers::FuzzerCallbacks cb;
+ dcsctp::DcSctpOptions options;
+ options.disable_checksum_verification = true;
+ dcsctp::DcSctpSocket socket("A", cb, nullptr, options);
+
+ dcsctp::dcsctp_fuzzers::FuzzSocket(socket, cb,
+ rtc::ArrayView<const uint8_t>(data, size));
+}
+} // namespace webrtc
diff --git a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
index aeeb5c03a4..fdb4aa5f3c 100644
--- a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
+++ b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
@@ -12,9 +12,7 @@
#include "api/rtp_packet_infos.h"
#include "modules/video_coding/frame_object.h"
-#include "modules/video_coding/packet_buffer.h"
#include "modules/video_coding/rtp_frame_reference_finder.h"
-#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -58,10 +56,6 @@ class DataReader {
size_t offset_ = 0;
};
-class NullCallback : public OnCompleteFrameCallback {
- void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override {}
-};
-
absl::optional<RTPVideoHeader::GenericDescriptorInfo>
GenerateGenericFrameDependencies(DataReader* reader) {
absl::optional<RTPVideoHeader::GenericDescriptorInfo> result;
@@ -91,8 +85,7 @@ GenerateGenericFrameDependencies(DataReader* reader) {
void FuzzOneInput(const uint8_t* data, size_t size) {
DataReader reader(data, size);
- NullCallback cb;
- RtpFrameReferenceFinder reference_finder(&cb);
+ RtpFrameReferenceFinder reference_finder;
auto codec = static_cast<VideoCodecType>(reader.GetNum<uint8_t>() % 5);
diff --git a/test/fuzzers/rtp_header_parser_fuzzer.cc b/test/fuzzers/rtp_header_parser_fuzzer.cc
index d6af5ca3ce..cb5bea2456 100644
--- a/test/fuzzers/rtp_header_parser_fuzzer.cc
+++ b/test/fuzzers/rtp_header_parser_fuzzer.cc
@@ -22,27 +22,6 @@ namespace webrtc {
void FuzzOneInput(const uint8_t* data, size_t size) {
RtpHeaderParser::IsRtcp(data, size);
RtpHeaderParser::GetSsrc(data, size);
- RTPHeader rtp_header;
-
- std::unique_ptr<RtpHeaderParser> rtp_header_parser(
- RtpHeaderParser::CreateForTest());
-
- rtp_header_parser->Parse(data, size, &rtp_header);
- for (int i = 1; i < kRtpExtensionNumberOfExtensions; ++i) {
- if (size > 0 && i >= data[size - 1]) {
- RTPExtensionType add_extension = static_cast<RTPExtensionType>(i);
- rtp_header_parser->RegisterRtpHeaderExtension(add_extension, i);
- }
- }
- rtp_header_parser->Parse(data, size, &rtp_header);
-
- for (int i = 1; i < kRtpExtensionNumberOfExtensions; ++i) {
- if (size > 1 && i >= data[size - 2]) {
- RTPExtensionType remove_extension = static_cast<RTPExtensionType>(i);
- rtp_header_parser->DeregisterRtpHeaderExtension(remove_extension);
- }
- }
- rtp_header_parser->Parse(data, size, &rtp_header);
}
} // namespace webrtc
diff --git a/test/fuzzers/rtp_packet_fuzzer.cc b/test/fuzzers/rtp_packet_fuzzer.cc
index 9e8fd6f4c1..3f2fc5e668 100644
--- a/test/fuzzers/rtp_packet_fuzzer.cc
+++ b/test/fuzzers/rtp_packet_fuzzer.cc
@@ -9,6 +9,7 @@
*/
#include <bitset>
+#include <vector>
#include "absl/types/optional.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
@@ -76,6 +77,11 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
uint8_t audio_level;
packet.GetExtension<AudioLevel>(&voice_activity, &audio_level);
break;
+ case kRtpExtensionCsrcAudioLevel: {
+ std::vector<uint8_t> audio_levels;
+ packet.GetExtension<CsrcAudioLevel>(&audio_levels);
+ break;
+ }
case kRtpExtensionAbsoluteSendTime:
uint32_t sendtime;
packet.GetExtension<AbsoluteSendTime>(&sendtime);
@@ -109,10 +115,11 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
VideoContentType content_type;
packet.GetExtension<VideoContentTypeExtension>(&content_type);
break;
- case kRtpExtensionVideoTiming:
+ case kRtpExtensionVideoTiming: {
VideoSendTiming timing;
packet.GetExtension<VideoTimingExtension>(&timing);
break;
+ }
case kRtpExtensionRtpStreamId: {
std::string rsid;
packet.GetExtension<RtpStreamId>(&rsid);
diff --git a/test/fuzzers/sdp_integration_fuzzer.cc b/test/fuzzers/sdp_integration_fuzzer.cc
index dba09721bb..bc181f0573 100644
--- a/test/fuzzers/sdp_integration_fuzzer.cc
+++ b/test/fuzzers/sdp_integration_fuzzer.cc
@@ -34,8 +34,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
// Note - we do not do test.ConnectFakeSignaling(); all signals
// generated are discarded.
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> srd_observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto srd_observer =
+ rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
webrtc::SdpParseError error;
std::unique_ptr<webrtc::SessionDescriptionInterface> sdp(
@@ -47,8 +47,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
EXPECT_TRUE_WAIT(srd_observer->called(), 100);
// If set-remote-description was successful, try to answer.
- rtc::scoped_refptr<MockSetSessionDescriptionObserver> sld_observer(
- new rtc::RefCountedObject<MockSetSessionDescriptionObserver>());
+ auto sld_observer =
+ rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
if (srd_observer->result()) {
test.caller()->pc()->SetLocalDescription(sld_observer.get());
EXPECT_TRUE_WAIT(sld_observer->called(), 100);
diff --git a/test/fuzzers/utils/BUILD.gn b/test/fuzzers/utils/BUILD.gn
index 6249156058..3e0782f39d 100644
--- a/test/fuzzers/utils/BUILD.gn
+++ b/test/fuzzers/utils/BUILD.gn
@@ -24,6 +24,7 @@ rtc_library("rtp_replayer") {
"../../../call:call_interfaces",
"../../../common_video",
"../../../media:rtc_internal_video_codecs",
+ "../../../modules/rtp_rtcp:rtp_rtcp_format",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:rtc_base_tests_utils",
diff --git a/test/fuzzers/utils/rtp_replayer.cc b/test/fuzzers/utils/rtp_replayer.cc
index a664adb31d..43b1fc2ea4 100644
--- a/test/fuzzers/utils/rtp_replayer.cc
+++ b/test/fuzzers/utils/rtp_replayer.cc
@@ -17,13 +17,13 @@
#include "api/task_queue/default_task_queue_factory.h"
#include "api/transport/field_trial_based_config.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "rtc_base/strings/json.h"
#include "system_wrappers/include/clock.h"
#include "test/call_config_utils.h"
#include "test/encoder_settings.h"
#include "test/fake_decoder.h"
#include "test/rtp_file_reader.h"
-#include "test/rtp_header_parser.h"
#include "test/run_loop.h"
namespace webrtc {
@@ -164,37 +164,32 @@ void RtpReplayer::ReplayPackets(rtc::FakeClock* clock,
std::min(deliver_in_ms, static_cast<int64_t>(100))));
}
+ rtc::CopyOnWriteBuffer packet_buffer(packet.data, packet.length);
++num_packets;
- switch (call->Receiver()->DeliverPacket(
- webrtc::MediaType::VIDEO,
- rtc::CopyOnWriteBuffer(packet.data, packet.length),
- /* packet_time_us */ -1)) {
+ switch (call->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
+ packet_buffer,
+ /* packet_time_us */ -1)) {
case PacketReceiver::DELIVERY_OK:
break;
case PacketReceiver::DELIVERY_UNKNOWN_SSRC: {
- RTPHeader header;
- std::unique_ptr<RtpHeaderParser> parser(
- RtpHeaderParser::CreateForTest());
-
- parser->Parse(packet.data, packet.length, &header);
- if (unknown_packets[header.ssrc] == 0) {
- RTC_LOG(LS_ERROR) << "Unknown SSRC: " << header.ssrc;
+ webrtc::RtpPacket header;
+ header.Parse(packet_buffer);
+ if (unknown_packets[header.Ssrc()] == 0) {
+ RTC_LOG(LS_ERROR) << "Unknown SSRC: " << header.Ssrc();
}
- ++unknown_packets[header.ssrc];
+ ++unknown_packets[header.Ssrc()];
break;
}
case PacketReceiver::DELIVERY_PACKET_ERROR: {
RTC_LOG(LS_ERROR)
<< "Packet error, corrupt packets or incorrect setup?";
- RTPHeader header;
- std::unique_ptr<RtpHeaderParser> parser(
- RtpHeaderParser::CreateForTest());
- parser->Parse(packet.data, packet.length, &header);
+ webrtc::RtpPacket header;
+ header.Parse(packet_buffer);
RTC_LOG(LS_ERROR) << "Packet packet_length=" << packet.length
- << " payload_type=" << header.payloadType
- << " sequence_number=" << header.sequenceNumber
- << " time_stamp=" << header.timestamp
- << " ssrc=" << header.ssrc;
+ << " payload_type=" << header.PayloadType()
+ << " sequence_number=" << header.SequenceNumber()
+ << " time_stamp=" << header.Timestamp()
+ << " ssrc=" << header.Ssrc();
break;
}
}
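
The replayer now inspects undeliverable packets with `webrtc::RtpPacket` instead of the removed test-only `RtpHeaderParser`. The same pattern in isolation (the `LogRtpHeader` helper is hypothetical, not part of this patch); only fixed-header fields are read, so no header-extension map has to be registered:

```
#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/logging.h"

// Hypothetical helper: parse an RTP packet and log its fixed-header fields.
void LogRtpHeader(const rtc::CopyOnWriteBuffer& packet_buffer) {
  webrtc::RtpPacket packet;
  if (!packet.Parse(packet_buffer)) {
    RTC_LOG(LS_ERROR) << "Not a valid RTP packet";
    return;
  }
  RTC_LOG(LS_INFO) << "ssrc=" << packet.Ssrc() << " payload_type="
                   << static_cast<int>(packet.PayloadType())
                   << " sequence_number=" << packet.SequenceNumber()
                   << " timestamp=" << packet.Timestamp();
}
```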
diff --git a/test/fuzzers/vp9_encoder_references_fuzzer.cc b/test/fuzzers/vp9_encoder_references_fuzzer.cc
new file mode 100644
index 0000000000..9c793ae9aa
--- /dev/null
+++ b/test/fuzzers/vp9_encoder_references_fuzzer.cc
@@ -0,0 +1,498 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+
+#include "absl/algorithm/container.h"
+#include "absl/base/macros.h"
+#include "absl/container/inlined_vector.h"
+#include "api/array_view.h"
+#include "api/transport/webrtc_key_value_config.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/interface/mock_libvpx_interface.h"
+#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h"
+#include "modules/video_coding/frame_dependencies_calculator.h"
+#include "rtc_base/numerics/safe_compare.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+#include "test/gmock.h"
+
+// The fuzzer simulates various SVC configurations and the libvpx encoder
+// dropping layer frames, and validates that the VP9 encoder wrapper produces
+// consistent frame references.
+namespace webrtc {
+namespace {
+
+using test::FuzzDataHelper;
+using ::testing::NiceMock;
+
+class FrameValidator : public EncodedImageCallback {
+ public:
+ ~FrameValidator() override = default;
+
+ Result OnEncodedImage(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info) override {
+ RTC_CHECK(codec_specific_info);
+ RTC_CHECK_EQ(codec_specific_info->codecType, kVideoCodecVP9);
+ if (codec_specific_info->codecSpecific.VP9.first_frame_in_picture) {
+ ++picture_id_;
+ }
+ int64_t frame_id = frame_id_++;
+ LayerFrame& layer_frame = frames_[frame_id % kMaxFrameHistorySize];
+ layer_frame.picture_id = picture_id_;
+ layer_frame.spatial_id = encoded_image.SpatialIndex().value_or(0);
+ layer_frame.frame_id = frame_id;
+ layer_frame.temporal_id =
+ codec_specific_info->codecSpecific.VP9.temporal_idx;
+ if (layer_frame.temporal_id == kNoTemporalIdx) {
+ layer_frame.temporal_id = 0;
+ }
+ layer_frame.vp9_non_ref_for_inter_layer_pred =
+ codec_specific_info->codecSpecific.VP9.non_ref_for_inter_layer_pred;
+ CheckVp9References(layer_frame, codec_specific_info->codecSpecific.VP9);
+
+ if (codec_specific_info->generic_frame_info.has_value()) {
+ absl::InlinedVector<int64_t, 5> frame_dependencies =
+ dependencies_calculator_.FromBuffersUsage(
+ frame_id,
+ codec_specific_info->generic_frame_info->encoder_buffers);
+
+ CheckGenericReferences(frame_dependencies,
+ *codec_specific_info->generic_frame_info);
+ CheckGenericAndCodecSpecificReferencesAreConsistent(
+ frame_dependencies, *codec_specific_info, layer_frame);
+ }
+
+ return Result(Result::OK);
+ }
+
+ private:
+ // With 4 spatial layers and patterns up to 8 pictures, keeping the last
+ // 32 frames should be enough to validate dependencies.
+ static constexpr size_t kMaxFrameHistorySize = 32;
+ struct LayerFrame {
+ int64_t frame_id;
+ int64_t picture_id;
+ int spatial_id;
+ int temporal_id;
+ bool vp9_non_ref_for_inter_layer_pred;
+ };
+
+ void CheckVp9References(const LayerFrame& layer_frame,
+ const CodecSpecificInfoVP9& vp9_info) {
+ if (layer_frame.frame_id == 0) {
+ RTC_CHECK(!vp9_info.inter_layer_predicted);
+ } else {
+ const LayerFrame& previous_frame = Frame(layer_frame.frame_id - 1);
+ if (vp9_info.inter_layer_predicted) {
+ RTC_CHECK(!previous_frame.vp9_non_ref_for_inter_layer_pred);
+ RTC_CHECK_EQ(layer_frame.picture_id, previous_frame.picture_id);
+ }
+ if (previous_frame.picture_id == layer_frame.picture_id) {
+ RTC_CHECK_GT(layer_frame.spatial_id, previous_frame.spatial_id);
+ // The check below would fail for temporal shift structures. Remove it or
+ // move it to the !flexible_mode section when the vp9 encoder starts
+ // supporting such structures.
+ RTC_CHECK_EQ(layer_frame.temporal_id, previous_frame.temporal_id);
+ }
+ }
+ if (!vp9_info.flexible_mode) {
+ if (vp9_info.gof.num_frames_in_gof > 0) {
+ gof_.CopyGofInfoVP9(vp9_info.gof);
+ }
+ RTC_CHECK_EQ(gof_.temporal_idx[vp9_info.gof_idx],
+ layer_frame.temporal_id);
+ }
+ }
+
+ void CheckGenericReferences(rtc::ArrayView<const int64_t> frame_dependencies,
+ const GenericFrameInfo& generic_info) const {
+ for (int64_t dependency_frame_id : frame_dependencies) {
+ RTC_CHECK_GE(dependency_frame_id, 0);
+ const LayerFrame& dependency = Frame(dependency_frame_id);
+ RTC_CHECK_GE(generic_info.spatial_id, dependency.spatial_id);
+ RTC_CHECK_GE(generic_info.temporal_id, dependency.temporal_id);
+ }
+ }
+
+ void CheckGenericAndCodecSpecificReferencesAreConsistent(
+ rtc::ArrayView<const int64_t> frame_dependencies,
+ const CodecSpecificInfo& info,
+ const LayerFrame& layer_frame) const {
+ const CodecSpecificInfoVP9& vp9_info = info.codecSpecific.VP9;
+ const GenericFrameInfo& generic_info = *info.generic_frame_info;
+
+ RTC_CHECK_EQ(generic_info.spatial_id, layer_frame.spatial_id);
+ RTC_CHECK_EQ(generic_info.temporal_id, layer_frame.temporal_id);
+ auto picture_id_diffs =
+ rtc::MakeArrayView(vp9_info.p_diff, vp9_info.num_ref_pics);
+ RTC_CHECK_EQ(
+ frame_dependencies.size(),
+ picture_id_diffs.size() + (vp9_info.inter_layer_predicted ? 1 : 0));
+ for (int64_t dependency_frame_id : frame_dependencies) {
+ RTC_CHECK_GE(dependency_frame_id, 0);
+ const LayerFrame& dependency = Frame(dependency_frame_id);
+ if (dependency.spatial_id != layer_frame.spatial_id) {
+ RTC_CHECK(vp9_info.inter_layer_predicted);
+ RTC_CHECK_EQ(layer_frame.picture_id, dependency.picture_id);
+ RTC_CHECK_GT(layer_frame.spatial_id, dependency.spatial_id);
+ } else {
+ RTC_CHECK(vp9_info.inter_pic_predicted);
+ RTC_CHECK_EQ(layer_frame.spatial_id, dependency.spatial_id);
+ RTC_CHECK(absl::c_linear_search(
+ picture_id_diffs, layer_frame.picture_id - dependency.picture_id));
+ }
+ }
+ }
+
+ const LayerFrame& Frame(int64_t frame_id) const {
+ auto& frame = frames_[frame_id % kMaxFrameHistorySize];
+ RTC_CHECK_EQ(frame.frame_id, frame_id);
+ return frame;
+ }
+
+ GofInfoVP9 gof_;
+ int64_t frame_id_ = 0;
+ int64_t picture_id_ = 1;
+ FrameDependenciesCalculator dependencies_calculator_;
+ LayerFrame frames_[kMaxFrameHistorySize];
+};
+
+class FieldTrials : public WebRtcKeyValueConfig {
+ public:
+ explicit FieldTrials(FuzzDataHelper& config)
+ : flags_(config.ReadOrDefaultValue<uint8_t>(0)) {}
+
+ ~FieldTrials() override = default;
+ std::string Lookup(absl::string_view key) const override {
+ static constexpr absl::string_view kBinaryFieldTrials[] = {
+ "WebRTC-Vp9DependencyDescriptor",
+ "WebRTC-Vp9ExternalRefCtrl",
+ "WebRTC-Vp9IssueKeyFrameOnLayerDeactivation",
+ };
+ for (size_t i = 0; i < ABSL_ARRAYSIZE(kBinaryFieldTrials); ++i) {
+ if (key == kBinaryFieldTrials[i]) {
+ return (flags_ & (1u << i)) ? "Enabled" : "Disabled";
+ }
+ }
+
+ // Ignore the following field trials.
+ if (key == "WebRTC-CongestionWindow" ||
+ key == "WebRTC-UseBaseHeavyVP8TL3RateAllocation" ||
+ key == "WebRTC-SimulcastUpswitchHysteresisPercent" ||
+ key == "WebRTC-SimulcastScreenshareUpswitchHysteresisPercent" ||
+ key == "WebRTC-VideoRateControl" ||
+ key == "WebRTC-VP9-PerformanceFlags" ||
+ key == "WebRTC-VP9VariableFramerateScreenshare" ||
+ key == "WebRTC-VP9QualityScaler") {
+ return "";
+ }
+ // Crash on an unexpected field trial to force a decision on whether it
+ // should be fuzzed or given a constant value.
+ RTC_CHECK(false) << "Unfuzzed field trial " << key << "\n";
+ }
+
+ private:
+ const uint8_t flags_;
+};
+
+VideoCodec CodecSettings(FuzzDataHelper& rng) {
+ uint16_t config = rng.ReadOrDefaultValue<uint16_t>(0);
+ // Test up to 4 spatial and 4 temporal layers.
+ int num_spatial_layers = 1 + (config & 0b11);
+ int num_temporal_layers = 1 + ((config >> 2) & 0b11);
+
+ VideoCodec codec_settings = {};
+ codec_settings.codecType = kVideoCodecVP9;
+ codec_settings.maxFramerate = 30;
+ codec_settings.width = 320 << (num_spatial_layers - 1);
+ codec_settings.height = 180 << (num_spatial_layers - 1);
+ if (num_spatial_layers > 1) {
+ for (int sid = 0; sid < num_spatial_layers; ++sid) {
+ SpatialLayer& spatial_layer = codec_settings.spatialLayers[sid];
+ codec_settings.width = 320 << sid;
+ codec_settings.height = 180 << sid;
+ spatial_layer.maxFramerate = codec_settings.maxFramerate;
+ spatial_layer.numberOfTemporalLayers = num_temporal_layers;
+ }
+ }
+ codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers;
+ codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers;
+ int inter_layer_pred = (config >> 4) & 0b11;
+ // There are only 3 valid values.
+ codec_settings.VP9()->interLayerPred = static_cast<InterLayerPredMode>(
+ inter_layer_pred < 3 ? inter_layer_pred : 0);
+ codec_settings.VP9()->flexibleMode = (config & (1u << 6)) != 0;
+ codec_settings.VP9()->frameDroppingOn = (config & (1u << 7)) != 0;
+ codec_settings.mode = VideoCodecMode::kRealtimeVideo;
+ return codec_settings;
+}
+
+VideoEncoder::Settings EncoderSettings() {
+ return VideoEncoder::Settings(VideoEncoder::Capabilities(false),
+ /*number_of_cores=*/1,
+ /*max_payload_size=*/0);
+}
+
+struct LibvpxState {
+ LibvpxState() {
+ pkt.kind = VPX_CODEC_CX_FRAME_PKT;
+ pkt.data.frame.buf = pkt_buffer;
+ pkt.data.frame.sz = ABSL_ARRAYSIZE(pkt_buffer);
+ layer_id.spatial_layer_id = -1;
+ }
+
+ uint8_t pkt_buffer[1000] = {};
+ vpx_codec_enc_cfg_t config = {};
+ vpx_codec_priv_output_cx_pkt_cb_pair_t callback = {};
+ vpx_image_t img = {};
+ vpx_svc_ref_frame_config_t ref_config = {};
+ vpx_svc_layer_id_t layer_id = {};
+ vpx_svc_frame_drop_t frame_drop = {};
+ vpx_codec_cx_pkt pkt = {};
+};
+
+class StubLibvpx : public NiceMock<MockLibvpxInterface> {
+ public:
+ explicit StubLibvpx(LibvpxState* state) : state_(state) { RTC_CHECK(state_); }
+
+ vpx_codec_err_t codec_enc_config_default(vpx_codec_iface_t* iface,
+ vpx_codec_enc_cfg_t* cfg,
+ unsigned int usage) const override {
+ state_->config = *cfg;
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_enc_init(vpx_codec_ctx_t* ctx,
+ vpx_codec_iface_t* iface,
+ const vpx_codec_enc_cfg_t* cfg,
+ vpx_codec_flags_t flags) const override {
+ RTC_CHECK(ctx);
+ ctx->err = VPX_CODEC_OK;
+ return VPX_CODEC_OK;
+ }
+
+ vpx_image_t* img_wrap(vpx_image_t* img,
+ vpx_img_fmt_t fmt,
+ unsigned int d_w,
+ unsigned int d_h,
+ unsigned int stride_align,
+ unsigned char* img_data) const override {
+ state_->img.fmt = fmt;
+ state_->img.d_w = d_w;
+ state_->img.d_h = d_h;
+ return &state_->img;
+ }
+
+ vpx_codec_err_t codec_encode(vpx_codec_ctx_t* ctx,
+ const vpx_image_t* img,
+ vpx_codec_pts_t pts,
+ uint64_t duration,
+ vpx_enc_frame_flags_t flags,
+ uint64_t deadline) const override {
+ if (flags & VPX_EFLAG_FORCE_KF) {
+ state_->pkt.data.frame.flags = VPX_FRAME_IS_KEY;
+ } else {
+ state_->pkt.data.frame.flags = 0;
+ }
+ state_->pkt.data.frame.duration = duration;
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ void* param) const override {
+ if (ctrl_id == VP9E_REGISTER_CX_CALLBACK) {
+ state_->callback =
+ *reinterpret_cast<vpx_codec_priv_output_cx_pkt_cb_pair_t*>(param);
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(
+ vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_ref_frame_config_t* param) const override {
+ switch (ctrl_id) {
+ case VP9E_SET_SVC_REF_FRAME_CONFIG:
+ state_->ref_config = *param;
+ break;
+ case VP9E_GET_SVC_REF_FRAME_CONFIG:
+ *param = state_->ref_config;
+ break;
+ default:
+ break;
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_layer_id_t* param) const override {
+ switch (ctrl_id) {
+ case VP9E_SET_SVC_LAYER_ID:
+ state_->layer_id = *param;
+ break;
+ case VP9E_GET_SVC_LAYER_ID:
+ *param = state_->layer_id;
+ break;
+ default:
+ break;
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_frame_drop_t* param) const override {
+ if (ctrl_id == VP9E_SET_SVC_FRAME_DROP_LAYER) {
+ state_->frame_drop = *param;
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_enc_config_set(
+ vpx_codec_ctx_t* ctx,
+ const vpx_codec_enc_cfg_t* cfg) const override {
+ state_->config = *cfg;
+ return VPX_CODEC_OK;
+ }
+
+ private:
+ LibvpxState* const state_;
+};
+
+enum Actions {
+ kEncode,
+ kSetRates,
+};
+
+// When a layer frame is marked for drop, also drops all layer frames of that
+// picture with larger spatial ids.
+constexpr bool DropAbove(uint8_t layers_mask, int sid) {
+ uint8_t full_mask = (uint8_t{1} << (sid + 1)) - 1;
+ return (layers_mask & full_mask) != full_mask;
+}
+// inline unittests
+static_assert(DropAbove(0b1011, /*sid=*/0) == false, "");
+static_assert(DropAbove(0b1011, /*sid=*/1) == false, "");
+static_assert(DropAbove(0b1011, /*sid=*/2) == true, "");
+static_assert(DropAbove(0b1011, /*sid=*/3) == true, "");
+
+// When a layer frame is marked for drop, also drops all layer frames of that
+// picture with smaller spatial ids.
+constexpr bool DropBelow(uint8_t layers_mask, int sid, int num_layers) {
+ return (layers_mask >> sid) != (1 << (num_layers - sid)) - 1;
+}
+// inline unittests
+static_assert(DropBelow(0b1101, /*sid=*/0, 4) == true, "");
+static_assert(DropBelow(0b1101, /*sid=*/1, 4) == true, "");
+static_assert(DropBelow(0b1101, /*sid=*/2, 4) == false, "");
+static_assert(DropBelow(0b1101, /*sid=*/3, 4) == false, "");
+
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ FuzzDataHelper helper(rtc::MakeArrayView(data, size));
+
+ FrameValidator validator;
+ FieldTrials field_trials(helper);
+ // Set up the state and callbacks for the fake libvpx interface.
+ LibvpxState state;
+
+ // Initialize encoder
+ LibvpxVp9Encoder encoder(cricket::VideoCodec(),
+ std::make_unique<StubLibvpx>(&state), field_trials);
+ VideoCodec codec = CodecSettings(helper);
+ if (encoder.InitEncode(&codec, EncoderSettings()) != WEBRTC_VIDEO_CODEC_OK) {
+ return;
+ }
+ RTC_CHECK_EQ(encoder.RegisterEncodeCompleteCallback(&validator),
+ WEBRTC_VIDEO_CODEC_OK);
+ {
+ // Enable all the layers initially. Encoder doesn't support producing
+ // frames when no layers are enabled.
+ LibvpxVp9Encoder::RateControlParameters parameters;
+ parameters.framerate_fps = 30.0;
+ for (int sid = 0; sid < codec.VP9()->numberOfSpatialLayers; ++sid) {
+ for (int tid = 0; tid < codec.VP9()->numberOfTemporalLayers; ++tid) {
+ parameters.bitrate.SetBitrate(sid, tid, 100'000);
+ }
+ }
+ encoder.SetRates(parameters);
+ }
+
+ std::vector<VideoFrameType> frame_types(1);
+ VideoFrame fake_image = VideoFrame::Builder()
+ .set_video_frame_buffer(I420Buffer::Create(
+ int{codec.width}, int{codec.height}))
+ .build();
+
+ // Start producing frames at random.
+ while (helper.CanReadBytes(1)) {
+ uint8_t action = helper.Read<uint8_t>();
+ switch (action & 0b11) {
+ case kEncode: {
+ // Bitmask of the action: SSSS-K00, where the four S bits indicate which
+ // spatial layers should be produced and the K bit indicates whether the
+ // frame should be a key frame.
+ frame_types[0] = (action & 0b100) ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+ encoder.Encode(fake_image, &frame_types);
+ uint8_t encode_spatial_layers = (action >> 4);
+ for (size_t sid = 0; sid < state.config.ss_number_layers; ++sid) {
+ bool drop = true;
+ switch (state.frame_drop.framedrop_mode) {
+ case FULL_SUPERFRAME_DROP:
+ drop = encode_spatial_layers == 0;
+ break;
+ case LAYER_DROP:
+ drop = (encode_spatial_layers & (1 << sid)) == 0;
+ break;
+ case CONSTRAINED_LAYER_DROP:
+ drop = DropBelow(encode_spatial_layers, sid,
+ state.config.ss_number_layers);
+ break;
+ case CONSTRAINED_FROM_ABOVE_DROP:
+ drop = DropAbove(encode_spatial_layers, sid);
+ break;
+ }
+ if (!drop) {
+ state.layer_id.spatial_layer_id = sid;
+ state.callback.output_cx_pkt(&state.pkt, state.callback.user_priv);
+ }
+ }
+ } break;
+ case kSetRates: {
+ // Bitmask of the action: (S3)(S1)(S0)01, where Sx is the number of temporal
+ // layers to enable for spatial layer x. In particular, Sx = 0 indicates
+ // that spatial layer x should be disabled.
+ LibvpxVp9Encoder::RateControlParameters parameters;
+ parameters.framerate_fps = 30.0;
+ for (int sid = 0; sid < codec.VP9()->numberOfSpatialLayers; ++sid) {
+ int temporal_layers = (action >> ((1 + sid) * 2)) & 0b11;
+ for (int tid = 0; tid < temporal_layers; ++tid) {
+ parameters.bitrate.SetBitrate(sid, tid, 100'000);
+ }
+ }
+ // Ignore an allocation that turns off all the layers. In that case it is
+ // up to the upper-layer code not to call Encode.
+ if (parameters.bitrate.get_sum_bps() > 0) {
+ encoder.SetRates(parameters);
+ }
+ } break;
+ default:
+ // Unspecified values are a no-op.
+ break;
+ }
+ }
+}
+} // namespace webrtc
diff --git a/test/mappable_native_buffer.cc b/test/mappable_native_buffer.cc
index cff58ff8e7..bd0b304545 100644
--- a/test/mappable_native_buffer.cc
+++ b/test/mappable_native_buffer.cc
@@ -45,14 +45,14 @@ VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms,
VideoFrameBuffer::Type mappable_type,
int width,
int height) {
- VideoFrame frame = VideoFrame::Builder()
- .set_video_frame_buffer(
- new rtc::RefCountedObject<MappableNativeBuffer>(
- mappable_type, width, height))
- .set_timestamp_rtp(99)
- .set_timestamp_ms(99)
- .set_rotation(kVideoRotation_0)
- .build();
+ VideoFrame frame =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(rtc::make_ref_counted<MappableNativeBuffer>(
+ mappable_type, width, height))
+ .set_timestamp_rtp(99)
+ .set_timestamp_ms(99)
+ .set_rotation(kVideoRotation_0)
+ .build();
frame.set_ntp_time_ms(ntp_time_ms);
return frame;
}
@@ -77,9 +77,8 @@ MappableNativeBuffer::ScaledBuffer::CropAndScale(int offset_x,
int crop_height,
int scaled_width,
int scaled_height) {
- return rtc::scoped_refptr<VideoFrameBuffer>(
- new rtc::RefCountedObject<ScaledBuffer>(parent_, scaled_width,
- scaled_height));
+ return rtc::make_ref_counted<ScaledBuffer>(parent_, scaled_width,
+ scaled_height);
}
rtc::scoped_refptr<I420BufferInterface>
@@ -146,8 +145,7 @@ bool MappableNativeBuffer::DidConvertToI420() const {
rtc::scoped_refptr<MappableNativeBuffer::ScaledBuffer>
MappableNativeBuffer::FullSizeBuffer() {
- return rtc::scoped_refptr<MappableNativeBuffer::ScaledBuffer>(
- new rtc::RefCountedObject<ScaledBuffer>(this, width_, height_));
+ return rtc::make_ref_counted<ScaledBuffer>(this, width_, height_);
}
rtc::scoped_refptr<VideoFrameBuffer>
@@ -168,11 +166,10 @@ MappableNativeBuffer::GetOrCreateMappedBuffer(int width, int height) {
break;
}
case VideoFrameBuffer::Type::kNV12: {
- rtc::scoped_refptr<NV12Buffer> nv12_buffer;
- nv12_buffer = new rtc::RefCountedObject<NV12BufferWithDidConvertToI420>(
- width, height);
+ auto nv12_buffer =
+ rtc::make_ref_counted<NV12BufferWithDidConvertToI420>(width, height);
nv12_buffer->InitializeData();
- mapped_buffer = nv12_buffer;
+ mapped_buffer = std::move(nv12_buffer);
break;
}
default:
diff --git a/test/mock_audio_decoder_factory.h b/test/mock_audio_decoder_factory.h
index cdb03d3f38..4d3eed212c 100644
--- a/test/mock_audio_decoder_factory.h
+++ b/test/mock_audio_decoder_factory.h
@@ -52,7 +52,7 @@ class MockAudioDecoderFactory : public AudioDecoderFactory {
using ::testing::Return;
rtc::scoped_refptr<webrtc::MockAudioDecoderFactory> factory =
- new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>;
+ rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
ON_CALL(*factory.get(), GetSupportedDecoders())
.WillByDefault(Return(std::vector<webrtc::AudioCodecSpec>()));
EXPECT_CALL(*factory.get(), GetSupportedDecoders()).Times(AnyNumber());
@@ -73,7 +73,7 @@ class MockAudioDecoderFactory : public AudioDecoderFactory {
using ::testing::SetArgPointee;
rtc::scoped_refptr<webrtc::MockAudioDecoderFactory> factory =
- new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>;
+ rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
ON_CALL(*factory.get(), GetSupportedDecoders())
.WillByDefault(Return(std::vector<webrtc::AudioCodecSpec>()));
EXPECT_CALL(*factory.get(), GetSupportedDecoders()).Times(AnyNumber());
diff --git a/test/network/BUILD.gn b/test/network/BUILD.gn
index a4fa900ab1..1e39a3f89b 100644
--- a/test/network/BUILD.gn
+++ b/test/network/BUILD.gn
@@ -70,7 +70,6 @@ rtc_library("emulated_network") {
"../../rtc_base/task_utils:pending_task_safety_flag",
"../../rtc_base/task_utils:repeating_task",
"../../rtc_base/task_utils:to_queued_task",
- "../../rtc_base/third_party/sigslot",
"../../system_wrappers",
"../scenario:column_printer",
"../time_controller",
diff --git a/test/network/fake_network_socket_server.cc b/test/network/fake_network_socket_server.cc
index e2e1319949..bf6ef5f12d 100644
--- a/test/network/fake_network_socket_server.cc
+++ b/test/network/fake_network_socket_server.cc
@@ -276,10 +276,6 @@ FakeNetworkSocketServer::FakeNetworkSocketServer(
wakeup_(/*manual_reset=*/false, /*initially_signaled=*/false) {}
FakeNetworkSocketServer::~FakeNetworkSocketServer() = default;
-void FakeNetworkSocketServer::OnMessageQueueDestroyed() {
- thread_ = nullptr;
-}
-
EmulatedEndpointImpl* FakeNetworkSocketServer::GetEndpointNode(
const rtc::IPAddress& ip) {
return endpoints_container_->LookupByLocalAddress(ip);
@@ -311,10 +307,6 @@ rtc::AsyncSocket* FakeNetworkSocketServer::CreateAsyncSocket(int family,
void FakeNetworkSocketServer::SetMessageQueue(rtc::Thread* thread) {
thread_ = thread;
- if (thread_) {
- thread_->SignalQueueDestroyed.connect(
- this, &FakeNetworkSocketServer::OnMessageQueueDestroyed);
- }
}
// Always returns true (if return false, it won't be invoked again...)
diff --git a/test/network/fake_network_socket_server.h b/test/network/fake_network_socket_server.h
index 2cf4d7c86d..d8be2e24b8 100644
--- a/test/network/fake_network_socket_server.h
+++ b/test/network/fake_network_socket_server.h
@@ -19,7 +19,6 @@
#include "rtc_base/event.h"
#include "rtc_base/socket_server.h"
#include "rtc_base/synchronization/mutex.h"
-#include "rtc_base/third_party/sigslot/sigslot.h"
#include "system_wrappers/include/clock.h"
#include "test/network/network_emulation.h"
@@ -28,8 +27,7 @@ namespace test {
class FakeNetworkSocket;
// FakeNetworkSocketServer must outlive any sockets it creates.
-class FakeNetworkSocketServer : public rtc::SocketServer,
- public sigslot::has_slots<> {
+class FakeNetworkSocketServer : public rtc::SocketServer {
public:
explicit FakeNetworkSocketServer(EndpointsContainer* endpoints_controller);
~FakeNetworkSocketServer() override;
@@ -52,8 +50,6 @@ class FakeNetworkSocketServer : public rtc::SocketServer,
void Unregister(FakeNetworkSocket* socket);
private:
- void OnMessageQueueDestroyed();
-
const EndpointsContainer* endpoints_container_;
rtc::Event wakeup_;
rtc::Thread* thread_ = nullptr;
diff --git a/test/network/network_emulation_pc_unittest.cc b/test/network/network_emulation_pc_unittest.cc
index 6420e36275..bd15b5ad38 100644
--- a/test/network/network_emulation_pc_unittest.cc
+++ b/test/network/network_emulation_pc_unittest.cc
@@ -99,7 +99,12 @@ rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
rtc_configuration.servers.push_back(server);
}
- return pcf->CreatePeerConnection(rtc_configuration, std::move(pc_deps));
+ auto result =
+ pcf->CreatePeerConnectionOrError(rtc_configuration, std::move(pc_deps));
+ if (!result.ok()) {
+ return nullptr;
+ }
+ return result.MoveValue();
}
} // namespace
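
This and the later call sites in this patch move from the deprecated `CreatePeerConnection` to `CreatePeerConnectionOrError`, which returns an `RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>`. A sketch of the handling pattern, where the `MakePeerConnection` wrapper is illustrative:

```
#include <utility>

#include "api/peer_connection_interface.h"
#include "rtc_base/logging.h"

// Illustrative wrapper: mirrors the error handling used above, logging and
// returning null when construction fails.
rtc::scoped_refptr<webrtc::PeerConnectionInterface> MakePeerConnection(
    webrtc::PeerConnectionFactoryInterface* factory,
    const webrtc::PeerConnectionInterface::RTCConfiguration& config,
    webrtc::PeerConnectionDependencies deps) {
  auto result = factory->CreatePeerConnectionOrError(config, std::move(deps));
  if (!result.ok()) {
    RTC_LOG(LS_ERROR) << "CreatePeerConnectionOrError failed: "
                      << result.error().message();
    return nullptr;
  }
  return result.MoveValue();
}
```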
diff --git a/test/pc/e2e/BUILD.gn b/test/pc/e2e/BUILD.gn
index 95b0a2a31c..9e9d5c2db5 100644
--- a/test/pc/e2e/BUILD.gn
+++ b/test/pc/e2e/BUILD.gn
@@ -289,6 +289,7 @@ if (!build_with_chromium) {
"../../../api:peer_connection_quality_test_fixture_api",
"../../../api/video:video_frame",
"../../../pc:peerconnection",
+ "../../../pc:session_description",
"../../../pc:video_track_source",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ]
@@ -737,6 +738,8 @@ if (!build_with_chromium) {
"../../../p2p:rtc_p2p",
"../../../pc:peerconnection",
"../../../pc:rtc_pc_base",
+ "../../../pc:session_description",
+ "../../../pc:simulcast_description",
"../../../rtc_base:stringutils",
]
absl_deps = [
diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
index 8830436b09..30c17c1ca9 100644
--- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
+++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
@@ -26,7 +26,7 @@ void DefaultAudioQualityAnalyzer::Start(std::string test_case_name,
void DefaultAudioQualityAnalyzer::OnStatsReports(
absl::string_view pc_label,
const rtc::scoped_refptr<const RTCStatsReport>& report) {
- // TODO(https://crbug.com/webrtc/11683): use "inbound-rtp" instead of "track"
+ // TODO(https://crbug.com/webrtc/11789): use "inbound-rtp" instead of "track"
// stats when required audio metrics moved there
auto stats = report->GetStatsOfType<RTCMediaStreamTrackStats>();
diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
index 7d523897e6..53fb14e606 100644
--- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
+++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
@@ -21,6 +21,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/cpu_time.h"
#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
@@ -141,12 +142,9 @@ void DefaultVideoQualityAnalyzer::Start(
int max_threads_count) {
test_label_ = std::move(test_case_name);
for (int i = 0; i < max_threads_count; i++) {
- auto thread = std::make_unique<rtc::PlatformThread>(
- &DefaultVideoQualityAnalyzer::ProcessComparisonsThread, this,
- ("DefaultVideoQualityAnalyzerWorker-" + std::to_string(i)).data(),
- rtc::ThreadPriority::kNormalPriority);
- thread->Start();
- thread_pool_.push_back(std::move(thread));
+ thread_pool_.push_back(rtc::PlatformThread::SpawnJoinable(
+ [this] { ProcessComparisons(); },
+ "DefaultVideoQualityAnalyzerWorker-" + std::to_string(i)));
}
{
MutexLock lock(&lock_);
@@ -352,17 +350,16 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode(
stream_frame_counters_.at(key).received++;
// Determine the time of the last received packet of this video frame.
RTC_DCHECK(!input_image.PacketInfos().empty());
- int64_t last_receive_time =
+ Timestamp last_receive_time =
std::max_element(input_image.PacketInfos().cbegin(),
input_image.PacketInfos().cend(),
[](const RtpPacketInfo& a, const RtpPacketInfo& b) {
- return a.receive_time_ms() < b.receive_time_ms();
+ return a.receive_time() < b.receive_time();
})
- ->receive_time_ms();
- it->second.OnFramePreDecode(
- peer_index,
- /*received_time=*/Timestamp::Millis(last_receive_time),
- /*decode_start_time=*/Now());
+ ->receive_time();
+ it->second.OnFramePreDecode(peer_index,
+ /*received_time=*/last_receive_time,
+ /*decode_start_time=*/Now());
}
void DefaultVideoQualityAnalyzer::OnFrameDecoded(
@@ -547,10 +544,6 @@ void DefaultVideoQualityAnalyzer::Stop() {
}
StopMeasuringCpuProcessTime();
comparison_available_event_.Set();
- for (auto& thread : thread_pool_) {
- thread->Stop();
- }
- // PlatformThread have to be deleted on the same thread, where it was created
thread_pool_.clear();
// Perform final Metrics update. On this place analyzer is stopped and no one
@@ -677,10 +670,6 @@ void DefaultVideoQualityAnalyzer::AddComparison(
StopExcludingCpuThreadTime();
}
-void DefaultVideoQualityAnalyzer::ProcessComparisonsThread(void* obj) {
- static_cast<DefaultVideoQualityAnalyzer*>(obj)->ProcessComparisons();
-}
-
void DefaultVideoQualityAnalyzer::ProcessComparisons() {
while (true) {
// Try to pick next comparison to perform from the queue.
@@ -926,6 +915,9 @@ void DefaultVideoQualityAnalyzer::ReportResults(
frame_counters.dropped,
"count",
/*important=*/false, ImproveDirection::kSmallerIsBetter);
+ test::PrintResult("rendered_frames", "", test_case_name,
+ frame_counters.rendered, "count", /*important=*/false,
+ ImproveDirection::kBiggerIsBetter);
ReportResult("max_skipped", test_case_name, stats.skipped_between_rendered,
"count", ImproveDirection::kSmallerIsBetter);
ReportResult("target_encode_bitrate", test_case_name,
diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
index de9419dda9..626fa246e5 100644
--- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
+++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
@@ -560,7 +560,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(comparison_lock_);
AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(comparison_lock_);
- std::vector<std::unique_ptr<rtc::PlatformThread>> thread_pool_;
+ std::vector<rtc::PlatformThread> thread_pool_;
rtc::Event comparison_available_event_;
Mutex cpu_measurement_lock_;
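
The analyzer's worker pool above switches from heap-allocated `rtc::PlatformThread` objects that had to be started and stopped manually to the value-semantics handles returned by `rtc::PlatformThread::SpawnJoinable`, which join when destroyed; that is why the explicit `thread->Stop()` loop in `Stop()` could be dropped in favor of `thread_pool_.clear()`. A minimal sketch of the pattern, with illustrative names:

```
#include <string>
#include <vector>

#include "rtc_base/platform_thread.h"

// Placeholder for the real per-thread work loop (ProcessComparisons() above).
void DoWork() {}

std::vector<rtc::PlatformThread> SpawnWorkers(int count) {
  std::vector<rtc::PlatformThread> pool;
  for (int i = 0; i < count; ++i) {
    pool.push_back(rtc::PlatformThread::SpawnJoinable(
        [] { DoWork(); }, "Worker-" + std::to_string(i)));
  }
  // Clearing or destroying the vector joins every thread.
  return pool;
}
```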
diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
index 4cdc681012..8d8a1af848 100644
--- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
+++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
@@ -63,13 +63,13 @@ VideoFrame NextFrame(test::FrameGeneratorInterface* frame_generator,
EncodedImage FakeEncode(const VideoFrame& frame) {
EncodedImage image;
std::vector<RtpPacketInfo> packet_infos;
- packet_infos.push_back(
- RtpPacketInfo(/*ssrc=*/1,
- /*csrcs=*/{},
- /*rtp_timestamp=*/frame.timestamp(),
- /*audio_level=*/absl::nullopt,
- /*absolute_capture_time=*/absl::nullopt,
- /*receive_time_ms=*/frame.timestamp_us() + 10));
+ packet_infos.push_back(RtpPacketInfo(
+ /*ssrc=*/1,
+ /*csrcs=*/{},
+ /*rtp_timestamp=*/frame.timestamp(),
+ /*audio_level=*/absl::nullopt,
+ /*absolute_capture_time=*/absl::nullopt,
+ /*receive_time=*/Timestamp::Micros(frame.timestamp_us() + 10000)));
image.SetPacketInfos(RtpPacketInfos(packet_infos));
return image;
}
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
index c8b586221c..68b76cd37d 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
@@ -255,15 +255,5 @@ QualityAnalyzingVideoDecoderFactory::CreateVideoDecoder(
peer_name_, std::move(decoder), extractor_, analyzer_);
}
-std::unique_ptr<VideoDecoder>
-QualityAnalyzingVideoDecoderFactory::LegacyCreateVideoDecoder(
- const SdpVideoFormat& format,
- const std::string& receive_stream_id) {
- std::unique_ptr<VideoDecoder> decoder =
- delegate_->LegacyCreateVideoDecoder(format, receive_stream_id);
- return std::make_unique<QualityAnalyzingVideoDecoder>(
- peer_name_, std::move(decoder), extractor_, analyzer_);
-}
-
} // namespace webrtc_pc_e2e
} // namespace webrtc
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
index a969a5b687..e150c91cb4 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
@@ -136,9 +136,6 @@ class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory {
std::vector<SdpVideoFormat> GetSupportedFormats() const override;
std::unique_ptr<VideoDecoder> CreateVideoDecoder(
const SdpVideoFormat& format) override;
- std::unique_ptr<VideoDecoder> LegacyCreateVideoDecoder(
- const SdpVideoFormat& format,
- const std::string& receive_stream_id) override;
private:
const std::string peer_name_;
diff --git a/test/pc/e2e/media/media_helper.cc b/test/pc/e2e/media/media_helper.cc
index d1c27838a6..6b1996adaa 100644
--- a/test/pc/e2e/media/media_helper.cc
+++ b/test/pc/e2e/media/media_helper.cc
@@ -64,7 +64,7 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) {
video_config.content_hint ==
VideoTrackInterface::ContentHint::kDetailed;
rtc::scoped_refptr<TestVideoCapturerVideoTrackSource> source =
- new rtc::RefCountedObject<TestVideoCapturerVideoTrackSource>(
+ rtc::make_ref_counted<TestVideoCapturerVideoTrackSource>(
std::move(capturer), is_screencast);
out.push_back(source);
RTC_LOG(INFO) << "Adding video with video_config.stream_label="
diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc
index 2bc5344165..eb676a92bd 100644
--- a/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc
+++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc
@@ -215,7 +215,7 @@ void StatsBasedNetworkQualityMetricsReporter::ReportStats(
ReportResult("bytes_discarded_no_receiver", pc_label,
network_layer_stats.stats->BytesDropped().bytes(),
"sizeInBytes");
- ReportResult("packets_discarded_no_recevier", pc_label,
+ ReportResult("packets_discarded_no_receiver", pc_label,
network_layer_stats.stats->PacketsDropped(), "unitless");
ReportResult("payload_bytes_received", pc_label,
diff --git a/test/pc/e2e/stats_poller.cc b/test/pc/e2e/stats_poller.cc
index e6973e6af1..5f1424cd29 100644
--- a/test/pc/e2e/stats_poller.cc
+++ b/test/pc/e2e/stats_poller.cc
@@ -31,7 +31,7 @@ void InternalStatsObserver::OnStatsDelivered(
StatsPoller::StatsPoller(std::vector<StatsObserverInterface*> observers,
std::map<std::string, TestPeer*> peers) {
for (auto& peer : peers) {
- pollers_.push_back(new rtc::RefCountedObject<InternalStatsObserver>(
+ pollers_.push_back(rtc::make_ref_counted<InternalStatsObserver>(
peer.first, peer.second, observers));
}
}
diff --git a/test/pc/e2e/test_peer_factory.cc b/test/pc/e2e/test_peer_factory.cc
index eceec778df..869b40f703 100644
--- a/test/pc/e2e/test_peer_factory.cc
+++ b/test/pc/e2e/test_peer_factory.cc
@@ -348,8 +348,10 @@ std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
PeerConnectionDependencies pc_deps = CreatePCDependencies(
observer.get(), std::move(components->pc_dependencies));
rtc::scoped_refptr<PeerConnectionInterface> peer_connection =
- peer_connection_factory->CreatePeerConnection(params->rtc_configuration,
- std::move(pc_deps));
+ peer_connection_factory
+ ->CreatePeerConnectionOrError(params->rtc_configuration,
+ std::move(pc_deps))
+ .MoveValue();
peer_connection->SetBitrate(params->bitrate_settings);
return absl::WrapUnique(new TestPeer(
diff --git a/test/pc/sctp/fake_sctp_transport.h b/test/pc/sctp/fake_sctp_transport.h
index fa4cdc4920..42b978a900 100644
--- a/test/pc/sctp/fake_sctp_transport.h
+++ b/test/pc/sctp/fake_sctp_transport.h
@@ -29,7 +29,8 @@ class FakeSctpTransport : public cricket::SctpTransportInternal {
}
bool OpenStream(int sid) override { return true; }
bool ResetStream(int sid) override { return true; }
- bool SendData(const cricket::SendDataParams& params,
+ bool SendData(int sid,
+ const webrtc::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result = nullptr) override {
return true;
diff --git a/test/peer_scenario/BUILD.gn b/test/peer_scenario/BUILD.gn
index 2034c9ad99..033ef4115a 100644
--- a/test/peer_scenario/BUILD.gn
+++ b/test/peer_scenario/BUILD.gn
@@ -47,6 +47,7 @@ if (rtc_include_tests) {
"../../p2p:rtc_p2p",
"../../pc:pc_test_utils",
"../../pc:rtc_pc_base",
+ "../../pc:session_description",
"../../rtc_base",
"../../rtc_base:null_socket_server",
"../../rtc_base:stringutils",
diff --git a/test/peer_scenario/peer_scenario_client.cc b/test/peer_scenario/peer_scenario_client.cc
index 681a90704f..7f3e126287 100644
--- a/test/peer_scenario/peer_scenario_client.cc
+++ b/test/peer_scenario/peer_scenario_client.cc
@@ -241,7 +241,9 @@ PeerScenarioClient::PeerScenarioClient(
pc_deps.allocator->set_flags(pc_deps.allocator->flags() |
cricket::PORTALLOCATOR_DISABLE_TCP);
peer_connection_ =
- pc_factory_->CreatePeerConnection(config.rtc_config, std::move(pc_deps));
+ pc_factory_
+ ->CreatePeerConnectionOrError(config.rtc_config, std::move(pc_deps))
+ .MoveValue();
if (log_writer_factory_) {
peer_connection_->StartRtcEventLog(log_writer_factory_->Create(".rtc.dat"),
/*output_period_ms=*/1000);
diff --git a/test/peer_scenario/tests/BUILD.gn b/test/peer_scenario/tests/BUILD.gn
index 0cf7cf3472..a8b9c2563e 100644
--- a/test/peer_scenario/tests/BUILD.gn
+++ b/test/peer_scenario/tests/BUILD.gn
@@ -25,6 +25,7 @@ if (rtc_include_tests) {
"../../../modules/rtp_rtcp:rtp_rtcp",
"../../../modules/rtp_rtcp:rtp_rtcp_format",
"../../../pc:rtc_pc_base",
+ "../../../pc:session_description",
]
}
}
diff --git a/test/peer_scenario/tests/unsignaled_stream_test.cc b/test/peer_scenario/tests/unsignaled_stream_test.cc
index 95510a24bd..edcfb36ea3 100644
--- a/test/peer_scenario/tests/unsignaled_stream_test.cc
+++ b/test/peer_scenario/tests/unsignaled_stream_test.cc
@@ -24,6 +24,31 @@ namespace webrtc {
namespace test {
namespace {
+enum class MidTestConfiguration {
+ // Legacy endpoint setup where PT demuxing is used.
+ kMidNotNegotiated,
+ // MID is negotiated but missing from packets. PT demuxing is disabled, so
+ // SSRCs have to be added to the SDP for WebRTC to forward packets correctly.
+ // Happens when the client is spec compliant but the SFU isn't; a common legacy setup.
+ kMidNegotiatedButMissingFromPackets,
+ // Fully spec-compliant: MID is present so we can safely drop packets with
+ // unknown MIDs.
+ kMidNegotiatedAndPresentInPackets,
+};
+
+// Gives the parameterized test a readable suffix.
+std::string TestParametersMidTestConfigurationToString(
+ testing::TestParamInfo<MidTestConfiguration> info) {
+ switch (info.param) {
+ case MidTestConfiguration::kMidNotNegotiated:
+ return "MidNotNegotiated";
+ case MidTestConfiguration::kMidNegotiatedButMissingFromPackets:
+ return "MidNegotiatedButMissingFromPackets";
+ case MidTestConfiguration::kMidNegotiatedAndPresentInPackets:
+ return "MidNegotiatedAndPresentInPackets";
+ }
+}
+
class FrameObserver : public rtc::VideoSinkInterface<VideoFrame> {
public:
FrameObserver() : frame_observed_(false) {}
@@ -53,19 +78,24 @@ void set_ssrc(SessionDescriptionInterface* offer, size_t index, uint32_t ssrc) {
} // namespace
-TEST(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) {
+class UnsignaledStreamTest
+ : public ::testing::Test,
+ public ::testing::WithParamInterface<MidTestConfiguration> {};
+
+TEST_P(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) {
// This test covers a scenario that might occur if a remote client starts
- // sending media packets before negotiation has completed. These packets will
- // trigger an unsignalled default stream to be created, and connects that to
- // a default video sink.
- // In some edge cases using unified plan, the default stream is create in a
- // different transceiver to where the media SSRC will actually be used.
- // This test verifies that the default stream is removed properly, and that
- // packets are demuxed and video frames reach the desired sink.
+ // sending media packets before negotiation has completed. Depending on setup,
+ // these packets either get dropped or trigger an unsignaled default stream
+ // to be created, which is then connected to a default video sink.
+ // In some edge cases using Unified Plan and PT demuxing, the default stream
+ // is created in a different transceiver than the one where the media SSRC
+ // will actually be used. This test verifies that the default stream is removed
+ // properly, and that packets are demuxed and video frames reach the desired sink.
+ const MidTestConfiguration kMidTestConfiguration = GetParam();
// Defined before PeerScenario so it gets destructed after, to avoid use after
// free.
- PeerScenario s(*test_info_);
+ PeerScenario s(*::testing::UnitTest::GetInstance()->current_test_info());
PeerScenarioClient::Config config = PeerScenarioClient::Config();
// Disable encryption so that we can inject a fake early media packet without
@@ -93,14 +123,61 @@ TEST(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) {
std::atomic<bool> got_unsignaled_packet(false);
// We will capture the media ssrc of the first added stream, and preemptively
- // inject a new media packet using a different ssrc.
- // This will create "default stream" for the second ssrc and connected it to
- // the default video sink (not set in this test).
+ // inject a new media packet using a different ssrc. What happens depends on
+ // the test configuration.
+ //
+ // MidTestConfiguration::kMidNotNegotiated:
+ // - MID is not negotiated, which means PT-based demuxing is enabled. Because
+ // the packets have no MID, the second ssrc packet gets forwarded to the
+ // first m= section. This will create a "default stream" for the second ssrc
+ // and connect it to the default video sink (not set in this test). The test
+ // verifies we can recover from this when we later get packets for the first
+ // ssrc.
+ //
+ // MidTestConfiguration::kMidNegotiatedButMissingFromPackets:
+ // - MID is negotiated which means PT-based demuxing is disabled. Because we
+ // modify the packets not to contain the MID anyway (simulating a legacy SFU
+ // that does not negotiate properly), unknown SSRCs are dropped but do not
+ // otherwise cause any issues.
+ //
+ // MidTestConfiguration::kMidNegotiatedAndPresentInPackets:
+ // - MID is negotiated which means PT-based demuxing is disabled. In this case
+ // the packets have the MID so they either get forwarded or dropped
+ // depending on whether the MID is known. The spec-compliant way is also the
+ // most straightforward one.
+
uint32_t first_ssrc = 0;
uint32_t second_ssrc = 0;
+ absl::optional<int> mid_header_extension_id = absl::nullopt;
signaling.NegotiateSdp(
- /* munge_sdp = */ {},
+ /* munge_sdp = */
+ [&](SessionDescriptionInterface* offer) {
+ // Obtain the MID header extension ID and, if the
+ // MidTestConfiguration::kMidNotNegotiated setup is wanted, remove the MID
+ // header extension through SDP munging (otherwise the SDP is not modified).
+ for (cricket::ContentInfo& content_info :
+ offer->description()->contents()) {
+ std::vector<RtpExtension> header_extensions =
+ content_info.media_description()->rtp_header_extensions();
+ for (auto it = header_extensions.begin();
+ it != header_extensions.end(); ++it) {
+ if (it->uri == RtpExtension::kMidUri) {
+ // MID header extension found!
+ mid_header_extension_id = it->id;
+ if (kMidTestConfiguration ==
+ MidTestConfiguration::kMidNotNegotiated) {
+ // Munge away the extension.
+ header_extensions.erase(it);
+ }
+ break;
+ }
+ }
+ content_info.media_description()->set_rtp_header_extensions(
+ std::move(header_extensions));
+ }
+ ASSERT_TRUE(mid_header_extension_id.has_value());
+ },
/* modify_sdp = */
[&](SessionDescriptionInterface* offer) {
first_ssrc = get_ssrc(offer, 0);
@@ -113,9 +190,40 @@ TEST(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) {
if (ByteReader<uint32_t>::ReadBigEndian(&(packet.cdata()[8])) ==
first_ssrc &&
!got_unsignaled_packet) {
- rtc::CopyOnWriteBuffer updated_buffer = packet.data;
- ByteWriter<uint32_t>::WriteBigEndian(
- updated_buffer.MutableData() + 8, second_ssrc);
+ // Parse packet and modify the SSRC to simulate a second m=
+ // section that has not been negotiated yet.
+ std::vector<RtpExtension> extensions;
+ extensions.emplace_back(RtpExtension::kMidUri,
+ mid_header_extension_id.value());
+ RtpHeaderExtensionMap extensions_map(extensions);
+ RtpPacket parsed_packet;
+ parsed_packet.IdentifyExtensions(extensions_map);
+ ASSERT_TRUE(parsed_packet.Parse(packet.data));
+ parsed_packet.SetSsrc(second_ssrc);
+ // The MID extension is present if and only if it was negotiated.
+ // If present, we either want to remove it or modify it depending
+ // on setup.
+ switch (kMidTestConfiguration) {
+ case MidTestConfiguration::kMidNotNegotiated:
+ EXPECT_FALSE(parsed_packet.HasExtension<RtpMid>());
+ break;
+ case MidTestConfiguration::kMidNegotiatedButMissingFromPackets:
+ EXPECT_TRUE(parsed_packet.HasExtension<RtpMid>());
+ ASSERT_TRUE(parsed_packet.RemoveExtension(RtpMid::kId));
+ break;
+ case MidTestConfiguration::kMidNegotiatedAndPresentInPackets:
+ EXPECT_TRUE(parsed_packet.HasExtension<RtpMid>());
+ // The simulated second m= section would have a different MID.
+ // If we don't modify it here then |second_ssrc| would end up
+ // being mapped to the first m= section which would cause SSRC
+ // conflicts if we later add the same SSRC to a second m=
+ // section. Hidden assumption: first m= section does not use
+ // MID:1.
+ ASSERT_TRUE(parsed_packet.SetExtension<RtpMid>("1"));
+ break;
+ }
+ // Inject the modified packet.
+ rtc::CopyOnWriteBuffer updated_buffer = parsed_packet.Buffer();
EmulatedIpPacket updated_packet(
packet.from, packet.to, updated_buffer, packet.arrival_time);
send_node->OnPacketReceived(std::move(updated_packet));
@@ -153,5 +261,13 @@ TEST(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) {
EXPECT_TRUE(s.WaitAndProcess(&second_sink.frame_observed_));
}
+INSTANTIATE_TEST_SUITE_P(
+ All,
+ UnsignaledStreamTest,
+ ::testing::Values(MidTestConfiguration::kMidNotNegotiated,
+ MidTestConfiguration::kMidNegotiatedButMissingFromPackets,
+ MidTestConfiguration::kMidNegotiatedAndPresentInPackets),
+ TestParametersMidTestConfigurationToString);
+
} // namespace test
} // namespace webrtc
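
The interesting new piece in the test body above is that the injected packet is no longer patched byte-by-byte; it is parsed with the negotiated MID extension registered and then edited through the RtpPacket API. A condensed sketch of that round trip, limited to the calls visible in the hunk (the extension id and the replacement MID value "1" mirror the test's own assumptions):

  #include "api/rtp_parameters.h"
  #include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
  #include "modules/rtp_rtcp/source/rtp_header_extensions.h"
  #include "modules/rtp_rtcp/source/rtp_packet.h"
  #include "rtc_base/copy_on_write_buffer.h"

  // Rewrites the SSRC of a captured RTP packet and, if a MID extension is
  // present, overwrites it so the packet maps to a different m= section.
  rtc::CopyOnWriteBuffer RewriteSsrcAndMid(const rtc::CopyOnWriteBuffer& data,
                                           int mid_extension_id,
                                           uint32_t new_ssrc) {
    webrtc::RtpHeaderExtensionMap extensions;
    extensions.RegisterByUri(mid_extension_id, webrtc::RtpExtension::kMidUri);
    webrtc::RtpPacket packet;
    packet.IdentifyExtensions(extensions);
    if (!packet.Parse(data))
      return data;  // Not a parsable RTP packet; leave it untouched.
    packet.SetSsrc(new_ssrc);
    if (packet.HasExtension<webrtc::RtpMid>())
      packet.SetExtension<webrtc::RtpMid>("1");
    return packet.Buffer();
  }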
diff --git a/test/rtp_header_parser.cc b/test/rtp_header_parser.cc
index 45686acb4c..6a7df4b095 100644
--- a/test/rtp_header_parser.cc
+++ b/test/rtp_header_parser.cc
@@ -9,41 +9,10 @@
*/
#include "test/rtp_header_parser.h"
-#include <memory>
-
-#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
-#include "rtc_base/synchronization/mutex.h"
-#include "rtc_base/thread_annotations.h"
namespace webrtc {
-class RtpHeaderParserImpl : public RtpHeaderParser {
- public:
- RtpHeaderParserImpl();
- ~RtpHeaderParserImpl() override = default;
-
- bool Parse(const uint8_t* packet,
- size_t length,
- RTPHeader* header) const override;
-
- bool RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id) override;
- bool RegisterRtpHeaderExtension(RtpExtension extension) override;
-
- bool DeregisterRtpHeaderExtension(RTPExtensionType type) override;
- bool DeregisterRtpHeaderExtension(RtpExtension extension) override;
-
- private:
- mutable Mutex mutex_;
- RtpHeaderExtensionMap rtp_header_extension_map_ RTC_GUARDED_BY(mutex_);
-};
-
-std::unique_ptr<RtpHeaderParser> RtpHeaderParser::CreateForTest() {
- return std::make_unique<RtpHeaderParserImpl>();
-}
-
-RtpHeaderParserImpl::RtpHeaderParserImpl() {}
-
bool RtpHeaderParser::IsRtcp(const uint8_t* packet, size_t length) {
RtpUtility::RtpHeaderParser rtp_parser(packet, length);
return rtp_parser.RTCP();
@@ -59,43 +28,4 @@ absl::optional<uint32_t> RtpHeaderParser::GetSsrc(const uint8_t* packet,
return absl::nullopt;
}
-bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
- size_t length,
- RTPHeader* header) const {
- RtpUtility::RtpHeaderParser rtp_parser(packet, length);
- *header = RTPHeader();
-
- RtpHeaderExtensionMap map;
- {
- MutexLock lock(&mutex_);
- map = rtp_header_extension_map_;
- }
-
- const bool valid_rtpheader = rtp_parser.Parse(header, &map);
- if (!valid_rtpheader) {
- return false;
- }
- return true;
-}
-bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RtpExtension extension) {
- MutexLock lock(&mutex_);
- return rtp_header_extension_map_.RegisterByUri(extension.id, extension.uri);
-}
-
-bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RTPExtensionType type,
- uint8_t id) {
- MutexLock lock(&mutex_);
- return rtp_header_extension_map_.RegisterByType(id, type);
-}
-
-bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RtpExtension extension) {
- MutexLock lock(&mutex_);
- return rtp_header_extension_map_.Deregister(
- rtp_header_extension_map_.GetType(extension.id));
-}
-
-bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RTPExtensionType type) {
- MutexLock lock(&mutex_);
- return rtp_header_extension_map_.Deregister(type) == 0;
-}
} // namespace webrtc
diff --git a/test/rtp_header_parser.h b/test/rtp_header_parser.h
index 851ccf3bc2..ca4eaf657c 100644
--- a/test/rtp_header_parser.h
+++ b/test/rtp_header_parser.h
@@ -10,44 +10,18 @@
#ifndef TEST_RTP_HEADER_PARSER_H_
#define TEST_RTP_HEADER_PARSER_H_
-#include <memory>
+#include <stddef.h>
+#include <stdint.h>
-#include "api/rtp_parameters.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "absl/types/optional.h"
namespace webrtc {
-struct RTPHeader;
-
class RtpHeaderParser {
public:
- static std::unique_ptr<RtpHeaderParser> CreateForTest();
- virtual ~RtpHeaderParser() {}
-
// Returns true if the packet is an RTCP packet, false otherwise.
static bool IsRtcp(const uint8_t* packet, size_t length);
static absl::optional<uint32_t> GetSsrc(const uint8_t* packet, size_t length);
-
- // Parses the packet and stores the parsed packet in |header|. Returns true on
- // success, false otherwise.
- // This method is thread-safe in the sense that it can parse multiple packets
- // at once.
- virtual bool Parse(const uint8_t* packet,
- size_t length,
- RTPHeader* header) const = 0;
-
- // Registers an RTP header extension and binds it to |id|.
- virtual bool RegisterRtpHeaderExtension(RTPExtensionType type,
- uint8_t id) = 0;
-
- // Registers an RTP header extension.
- virtual bool RegisterRtpHeaderExtension(RtpExtension extension) = 0;
-
- // De-registers an RTP header extension.
- virtual bool DeregisterRtpHeaderExtension(RTPExtensionType type) = 0;
-
- // De-registers an RTP header extension.
- virtual bool DeregisterRtpHeaderExtension(RtpExtension extension) = 0;
};
} // namespace webrtc
#endif // TEST_RTP_HEADER_PARSER_H_
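
After this change RtpHeaderParser is reduced to two static helpers; anything that needs per-extension parsing now goes through RtpPacket and RtpHeaderExtensionMap instead (see the sketch after the unsignaled_stream_test.cc hunk above). What remains can be used roughly like this, with the packet buffer assumed to come from the caller:

  #include "rtc_base/logging.h"
  #include "test/rtp_header_parser.h"

  void InspectPacket(const uint8_t* packet, size_t length) {
    if (webrtc::RtpHeaderParser::IsRtcp(packet, length))
      return;  // RTCP packets carry no media SSRC of interest here.
    absl::optional<uint32_t> ssrc =
        webrtc::RtpHeaderParser::GetSsrc(packet, length);
    if (ssrc)
      RTC_LOG(LS_INFO) << "RTP packet with SSRC " << *ssrc;
  }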
diff --git a/test/scenario/audio_stream.cc b/test/scenario/audio_stream.cc
index f3cb8320aa..63f78c8f71 100644
--- a/test/scenario/audio_stream.cc
+++ b/test/scenario/audio_stream.cc
@@ -185,7 +185,6 @@ ReceiveAudioStream::ReceiveAudioStream(
recv_config.rtp.extensions = {{RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberExtensionId}};
}
- receiver_->AddExtensions(recv_config.rtp.extensions);
recv_config.decoder_factory = decoder_factory;
recv_config.decoder_map = {
{CallTest::kAudioSendPayloadType, {"opus", 48000, 2}}};
diff --git a/test/scenario/call_client.cc b/test/scenario/call_client.cc
index f7cd47c36e..cebfc0be0d 100644
--- a/test/scenario/call_client.cc
+++ b/test/scenario/call_client.cc
@@ -17,6 +17,7 @@
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/transport/network_types.h"
#include "modules/audio_mixer/audio_mixer_impl.h"
+#include "test/rtp_header_parser.h"
namespace webrtc {
namespace test {
@@ -213,7 +214,6 @@ CallClient::CallClient(
clock_(time_controller->GetClock()),
log_writer_factory_(std::move(log_writer_factory)),
network_controller_factory_(log_writer_factory_.get(), config.transport),
- header_parser_(RtpHeaderParser::CreateForTest()),
task_queue_(time_controller->GetTaskQueueFactory()->CreateTaskQueue(
"CallClient",
TaskQueueFactory::Priority::NORMAL)) {
@@ -338,11 +338,6 @@ uint32_t CallClient::GetNextRtxSsrc() {
return kSendRtxSsrcs[next_rtx_ssrc_index_++];
}
-void CallClient::AddExtensions(std::vector<RtpExtension> extensions) {
- for (const auto& extension : extensions)
- header_parser_->RegisterRtpHeaderExtension(extension);
-}
-
void CallClient::SendTask(std::function<void()> task) {
task_queue_.SendTask(std::move(task), RTC_FROM_HERE);
}
diff --git a/test/scenario/call_client.h b/test/scenario/call_client.h
index 27ec9fa39c..08b0131350 100644
--- a/test/scenario/call_client.h
+++ b/test/scenario/call_client.h
@@ -26,7 +26,6 @@
#include "rtc_base/task_queue_for_test.h"
#include "test/logging/log_writer.h"
#include "test/network/network_emulation.h"
-#include "test/rtp_header_parser.h"
#include "test/scenario/column_printer.h"
#include "test/scenario/network_node.h"
#include "test/scenario/scenario_config.h"
@@ -137,7 +136,6 @@ class CallClient : public EmulatedNetworkReceiverInterface {
uint32_t GetNextAudioSsrc();
uint32_t GetNextAudioLocalSsrc();
uint32_t GetNextRtxSsrc();
- void AddExtensions(std::vector<RtpExtension> extensions);
int16_t Bind(EmulatedEndpoint* endpoint);
void UnBind();
@@ -149,7 +147,6 @@ class CallClient : public EmulatedNetworkReceiverInterface {
CallClientFakeAudio fake_audio_setup_;
std::unique_ptr<Call> call_;
std::unique_ptr<NetworkNodeTransport> transport_;
- std::unique_ptr<RtpHeaderParser> const header_parser_;
std::vector<std::pair<EmulatedEndpoint*, uint16_t>> endpoints_;
int next_video_ssrc_index_ = 0;
diff --git a/test/scenario/scenario_unittest.cc b/test/scenario/scenario_unittest.cc
index fc370fba77..6861151a2d 100644
--- a/test/scenario/scenario_unittest.cc
+++ b/test/scenario/scenario_unittest.cc
@@ -182,7 +182,11 @@ TEST(ScenarioTest,
s.RunFor(TimeDelta::Seconds(10));
// Make sure retransmissions have happened.
int retransmit_packets = 0;
- for (const auto& substream : video->send()->GetStats().substreams) {
+
+ VideoSendStream::Stats stats;
+ alice->SendTask([&]() { stats = video->send()->GetStats(); });
+
+ for (const auto& substream : stats.substreams) {
retransmit_packets += substream.second.rtp_stats.retransmitted.packets;
}
EXPECT_GT(retransmit_packets, 0);
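
The scenario tests above and below all apply the same fix: VideoSendStream::GetStats() (and its audio counterpart) must now be queried from the CallClient's own task queue, so the call is wrapped in CallClient::SendTask and the result copied out. A compact sketch of that wrapper, assuming the client and stream pointers come from the usual Scenario setup:

  #include "call/video_send_stream.h"
  #include "test/scenario/call_client.h"

  webrtc::VideoSendStream::Stats QueryStats(
      webrtc::test::CallClient* client,
      webrtc::VideoSendStream* send_stream) {
    webrtc::VideoSendStream::Stats stats;
    // SendTask blocks until the lambda has run on the client's task queue,
    // which is the runtime environment GetStats() expects.
    client->SendTask([&] { stats = send_stream->GetStats(); });
    return stats;
  }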
diff --git a/test/scenario/stats_collection_unittest.cc b/test/scenario/stats_collection_unittest.cc
index 593cecdaae..96b2830c76 100644
--- a/test/scenario/stats_collection_unittest.cc
+++ b/test/scenario/stats_collection_unittest.cc
@@ -33,8 +33,14 @@ void CreateAnalyzedStream(Scenario* s,
auto* audio = s->CreateAudioStream(route->forward(), AudioStreamConfig());
s->Every(TimeDelta::Seconds(1), [=] {
collectors->call.AddStats(caller->GetStats());
- collectors->video_send.AddStats(video->send()->GetStats(), s->Now());
- collectors->audio_receive.AddStats(audio->receive()->GetStats());
+
+ VideoSendStream::Stats send_stats;
+ caller->SendTask([&]() { send_stats = video->send()->GetStats(); });
+ collectors->video_send.AddStats(send_stats, s->Now());
+
+ AudioReceiveStream::Stats receive_stats;
+ caller->SendTask([&]() { receive_stats = audio->receive()->GetStats(); });
+ collectors->audio_receive.AddStats(receive_stats);
// Querying the video stats from within the expected runtime environment
// (i.e. the TQ that belongs to the CallClient, not the Scenario TQ that
diff --git a/test/scenario/video_frame_matcher.h b/test/scenario/video_frame_matcher.h
index f7f62436ac..a3aa85447d 100644
--- a/test/scenario/video_frame_matcher.h
+++ b/test/scenario/video_frame_matcher.h
@@ -52,7 +52,7 @@ class VideoFrameMatcher {
rtc::scoped_refptr<VideoFrameBuffer> thumb;
int repeat_count = 0;
};
- using DecodedFrame = rtc::RefCountedObject<DecodedFrameBase>;
+ using DecodedFrame = rtc::FinalRefCountedObject<DecodedFrameBase>;
struct CapturedFrame {
int id;
Timestamp capture_time = Timestamp::PlusInfinity();
diff --git a/test/scenario/video_stream.cc b/test/scenario/video_stream.cc
index 5525a9d203..96f6f5bc59 100644
--- a/test/scenario/video_stream.cc
+++ b/test/scenario/video_stream.cc
@@ -175,8 +175,8 @@ CreateVp9SpecificSettings(VideoStreamConfig video_config) {
vp9.automaticResizeOn = conf.single.automatic_scaling;
vp9.denoisingOn = conf.single.denoising;
}
- return new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9);
+ return rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9);
}
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
@@ -192,8 +192,8 @@ CreateVp8SpecificSettings(VideoStreamConfig config) {
vp8_settings.automaticResizeOn = config.encoder.single.automatic_scaling;
vp8_settings.denoisingOn = config.encoder.single.denoising;
}
- return new rtc::RefCountedObject<
- VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
+ return rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+ vp8_settings);
}
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
@@ -205,8 +205,8 @@ CreateH264SpecificSettings(VideoStreamConfig config) {
h264_settings.frameDroppingOn = config.encoder.frame_dropping;
h264_settings.keyFrameInterval =
config.encoder.key_frame_interval.value_or(0);
- return new rtc::RefCountedObject<
- VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
+ return rtc::make_ref_counted<VideoEncoderConfig::H264EncoderSpecificSettings>(
+ h264_settings);
}
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
@@ -248,11 +248,11 @@ VideoEncoderConfig CreateVideoEncoderConfig(VideoStreamConfig config) {
bool screenshare = config.encoder.content_type ==
VideoStreamConfig::Encoder::ContentType::kScreen;
encoder_config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
cricket_codec, kDefaultMaxQp, screenshare, screenshare);
} else {
encoder_config.video_stream_factory =
- new rtc::RefCountedObject<DefaultVideoStreamFactory>();
+ rtc::make_ref_counted<DefaultVideoStreamFactory>();
}
// TODO(srte): Base this on encoder capabilities.
@@ -571,10 +571,10 @@ ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver,
RTC_DCHECK(num_streams == 1);
FlexfecReceiveStream::Config flexfec(feedback_transport);
flexfec.payload_type = CallTest::kFlexfecPayloadType;
- flexfec.remote_ssrc = CallTest::kFlexfecSendSsrc;
+ flexfec.rtp.remote_ssrc = CallTest::kFlexfecSendSsrc;
flexfec.protected_media_ssrcs = send_stream->rtx_ssrcs_;
- flexfec.local_ssrc = recv_config.rtp.local_ssrc;
- receiver_->ssrc_media_types_[flexfec.remote_ssrc] = MediaType::VIDEO;
+ flexfec.rtp.local_ssrc = recv_config.rtp.local_ssrc;
+ receiver_->ssrc_media_types_[flexfec.rtp.remote_ssrc] = MediaType::VIDEO;
receiver_->SendTask([this, &flexfec] {
flecfec_stream_ = receiver_->call_->CreateFlexfecReceiveStream(flexfec);
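
The FlexFEC part of the hunk above reflects a config-layout change: the receive-side SSRCs moved into a nested rtp member of FlexfecReceiveStream::Config. A minimal sketch of filling in the new layout, with the transport and SSRC values assumed to be supplied by the caller:

  #include "api/call/transport.h"
  #include "call/flexfec_receive_stream.h"

  webrtc::FlexfecReceiveStream::Config MakeFlexfecConfig(
      webrtc::Transport* feedback_transport,
      int flexfec_payload_type,
      uint32_t flexfec_remote_ssrc,
      uint32_t local_media_ssrc,
      uint32_t protected_media_ssrc) {
    webrtc::FlexfecReceiveStream::Config config(feedback_transport);
    config.payload_type = flexfec_payload_type;
    config.rtp.remote_ssrc = flexfec_remote_ssrc;  // Previously config.remote_ssrc.
    config.rtp.local_ssrc = local_media_ssrc;      // Previously config.local_ssrc.
    config.protected_media_ssrcs = {protected_media_ssrc};
    return config;
  }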
diff --git a/test/scenario/video_stream_unittest.cc b/test/scenario/video_stream_unittest.cc
index 95936c763f..c1649a39b3 100644
--- a/test/scenario/video_stream_unittest.cc
+++ b/test/scenario/video_stream_unittest.cc
@@ -130,7 +130,9 @@ TEST(VideoStreamTest, SendsNacksOnLoss) {
auto video = s.CreateVideoStream(route->forward(), VideoStreamConfig());
s.RunFor(TimeDelta::Seconds(1));
int retransmit_packets = 0;
- for (const auto& substream : video->send()->GetStats().substreams) {
+ VideoSendStream::Stats stats;
+ route->first()->SendTask([&]() { stats = video->send()->GetStats(); });
+ for (const auto& substream : stats.substreams) {
retransmit_packets += substream.second.rtp_stats.retransmitted.packets;
}
EXPECT_GT(retransmit_packets, 0);
@@ -152,7 +154,8 @@ TEST(VideoStreamTest, SendsFecWithUlpFec) {
c->stream.use_ulpfec = true;
});
s.RunFor(TimeDelta::Seconds(5));
- VideoSendStream::Stats video_stats = video->send()->GetStats();
+ VideoSendStream::Stats video_stats;
+ route->first()->SendTask([&]() { video_stats = video->send()->GetStats(); });
EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
}
TEST(VideoStreamTest, SendsFecWithFlexFec) {
@@ -169,7 +172,8 @@ TEST(VideoStreamTest, SendsFecWithFlexFec) {
c->stream.use_flexfec = true;
});
s.RunFor(TimeDelta::Seconds(5));
- VideoSendStream::Stats video_stats = video->send()->GetStats();
+ VideoSendStream::Stats video_stats;
+ route->first()->SendTask([&]() { video_stats = video->send()->GetStats(); });
EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
}
diff --git a/test/time_controller/simulated_time_controller.cc b/test/time_controller/simulated_time_controller.cc
index aba8c6600e..a34abe8ced 100644
--- a/test/time_controller/simulated_time_controller.cc
+++ b/test/time_controller/simulated_time_controller.cc
@@ -226,4 +226,14 @@ void GlobalSimulatedTimeController::AdvanceTime(TimeDelta duration) {
impl_.RunReadyRunners();
}
+void GlobalSimulatedTimeController::Register(
+ sim_time_impl::SimulatedSequenceRunner* runner) {
+ impl_.Register(runner);
+}
+
+void GlobalSimulatedTimeController::Unregister(
+ sim_time_impl::SimulatedSequenceRunner* runner) {
+ impl_.Unregister(runner);
+}
+
} // namespace webrtc
diff --git a/test/time_controller/simulated_time_controller.h b/test/time_controller/simulated_time_controller.h
index 0ff3c2f894..9ded4689de 100644
--- a/test/time_controller/simulated_time_controller.h
+++ b/test/time_controller/simulated_time_controller.h
@@ -140,6 +140,17 @@ class GlobalSimulatedTimeController : public TimeController {
void AdvanceTime(TimeDelta duration) override;
+ // Makes the simulated time controller aware of a custom
+ // SimulatedSequenceRunner.
+ // TODO(bugs.webrtc.org/11581): remove method once the ModuleRtpRtcpImpl2 unit
+ // test stops using it.
+ void Register(sim_time_impl::SimulatedSequenceRunner* runner);
+ // Removes a previously installed custom SimulatedSequenceRunner from the
+ // simulated time controller.
+ // TODO(bugs.webrtc.org/11581): remove method once the ModuleRtpRtcpImpl2 unit
+ // test stops using it.
+ void Unregister(sim_time_impl::SimulatedSequenceRunner* runner);
+
private:
rtc::ScopedBaseFakeClock global_clock_;
// Provides simulated CurrentNtpInMilliseconds()
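
A hedged sketch of how the new Register/Unregister hooks are meant to be used; the runner is an abstract sim_time_impl::SimulatedSequenceRunner supplied by the calling test (its concrete implementation is outside this diff), and the controller's start-time constructor argument is an assumption:

  #include "api/units/time_delta.h"
  #include "api/units/timestamp.h"
  #include "test/time_controller/simulated_time_controller.h"

  void DriveCustomRunner(webrtc::sim_time_impl::SimulatedSequenceRunner* runner) {
    webrtc::GlobalSimulatedTimeController time_controller(
        webrtc::Timestamp::Seconds(1000));
    time_controller.Register(runner);
    time_controller.AdvanceTime(webrtc::TimeDelta::Millis(5));
    time_controller.Unregister(runner);
  }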
diff --git a/third_party/abseil-cpp/CMake/AbseilDll.cmake b/third_party/abseil-cpp/CMake/AbseilDll.cmake
index 39f85f2ffd..8ee4120f69 100644
--- a/third_party/abseil-cpp/CMake/AbseilDll.cmake
+++ b/third_party/abseil-cpp/CMake/AbseilDll.cmake
@@ -1,4 +1,5 @@
include(CMakeParseArguments)
+include(GNUInstallDirs)
set(ABSL_INTERNAL_DLL_FILES
"algorithm/algorithm.h"
@@ -196,16 +197,27 @@ set(ABSL_INTERNAL_DLL_FILES
"strings/cord.h"
"strings/escaping.cc"
"strings/escaping.h"
+ "strings/internal/charconv_bigint.cc"
+ "strings/internal/charconv_bigint.h"
+ "strings/internal/charconv_parse.cc"
+ "strings/internal/charconv_parse.h"
"strings/internal/cord_internal.cc"
"strings/internal/cord_internal.h"
"strings/internal/cord_rep_flat.h"
"strings/internal/cord_rep_ring.cc"
"strings/internal/cord_rep_ring.h"
"strings/internal/cord_rep_ring_reader.h"
- "strings/internal/charconv_bigint.cc"
- "strings/internal/charconv_bigint.h"
- "strings/internal/charconv_parse.cc"
- "strings/internal/charconv_parse.h"
+ "strings/internal/cordz_functions.cc"
+ "strings/internal/cordz_functions.h"
+ "strings/internal/cordz_handle.cc"
+ "strings/internal/cordz_handle.h"
+ "strings/internal/cordz_info.cc"
+ "strings/internal/cordz_info.h"
+ "strings/internal/cordz_sample_token.cc"
+ "strings/internal/cordz_sample_token.h"
+ "strings/internal/cordz_statistics.h"
+ "strings/internal/cordz_update_scope.h"
+ "strings/internal/cordz_update_tracker.h"
"strings/internal/stl_type_traits.h"
"strings/internal/string_constant.h"
"strings/match.cc"
@@ -500,7 +512,7 @@ function(absl_make_dll)
abseil_dll
PUBLIC
"$<BUILD_INTERFACE:${ABSL_COMMON_INCLUDE_DIRS}>"
- $<INSTALL_INTERFACE:${ABSL_INSTALL_INCLUDEDIR}>
+ $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
)
target_compile_options(
@@ -518,8 +530,8 @@ function(absl_make_dll)
${ABSL_CC_LIB_DEFINES}
)
install(TARGETS abseil_dll EXPORT ${PROJECT_NAME}Targets
- RUNTIME DESTINATION ${ABSL_INSTALL_BINDIR}
- LIBRARY DESTINATION ${ABSL_INSTALL_LIBDIR}
- ARCHIVE DESTINATION ${ABSL_INSTALL_LIBDIR}
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
)
endfunction()
diff --git a/third_party/abseil-cpp/CMake/AbseilHelpers.cmake b/third_party/abseil-cpp/CMake/AbseilHelpers.cmake
index 8502c02c9a..6a64a2c788 100644
--- a/third_party/abseil-cpp/CMake/AbseilHelpers.cmake
+++ b/third_party/abseil-cpp/CMake/AbseilHelpers.cmake
@@ -17,7 +17,6 @@
include(CMakeParseArguments)
include(AbseilConfigureCopts)
include(AbseilDll)
-include(AbseilInstallDirs)
# The IDE folder for Abseil that will be used if Abseil is included in a CMake
# project that sets
@@ -142,7 +141,8 @@ function(absl_cc_library)
endif()
# Generate a pkg-config file for every library:
- if((_build_type STREQUAL "static" OR _build_type STREQUAL "shared") AND ABSL_ENABLE_INSTALL)
+ if((_build_type STREQUAL "static" OR _build_type STREQUAL "shared")
+ AND ABSL_ENABLE_INSTALL)
if(NOT ABSL_CC_LIB_TESTONLY)
if(absl_VERSION)
set(PC_VERSION "${absl_VERSION}")
@@ -151,6 +151,10 @@ function(absl_cc_library)
endif()
foreach(dep ${ABSL_CC_LIB_DEPS})
if(${dep} MATCHES "^absl::(.*)")
+ # Join deps with commas.
+ if(PC_DEPS)
+ set(PC_DEPS "${PC_DEPS},")
+ endif()
set(PC_DEPS "${PC_DEPS} absl_${CMAKE_MATCH_1} = ${PC_VERSION}")
endif()
endforeach()
@@ -167,18 +171,18 @@ function(absl_cc_library)
FILE(GENERATE OUTPUT "${CMAKE_BINARY_DIR}/lib/pkgconfig/absl_${_NAME}.pc" CONTENT "\
prefix=${CMAKE_INSTALL_PREFIX}\n\
exec_prefix=\${prefix}\n\
-libdir=\${prefix}/lib\n\
-includedir=\${prefix}/include\n\
+libdir=${CMAKE_INSTALL_FULL_LIBDIR}\n\
+includedir=${CMAKE_INSTALL_FULL_INCLUDEDIR}\n\
\n\
Name: absl_${_NAME}\n\
Description: Abseil ${_NAME} library\n\
URL: https://abseil.io/\n\
Version: ${PC_VERSION}\n\
-Requires.private:${PC_DEPS}\n\
+Requires:${PC_DEPS}\n\
Libs: -L\${libdir} $<JOIN:${ABSL_CC_LIB_LINKOPTS}, > $<$<NOT:$<BOOL:${ABSL_CC_LIB_IS_INTERFACE}>>:-labsl_${_NAME}>\n\
Cflags: -I\${includedir}${PC_CFLAGS}\n")
INSTALL(FILES "${CMAKE_BINARY_DIR}/lib/pkgconfig/absl_${_NAME}.pc"
- DESTINATION "${CMAKE_INSTALL_PREFIX}/lib/pkgconfig")
+ DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
endif()
endif()
@@ -235,7 +239,7 @@ Cflags: -I\${includedir}${PC_CFLAGS}\n")
target_include_directories(${_NAME}
PUBLIC
"$<BUILD_INTERFACE:${ABSL_COMMON_INCLUDE_DIRS}>"
- $<INSTALL_INTERFACE:${ABSL_INSTALL_INCLUDEDIR}>
+ $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
)
target_compile_options(${_NAME}
PRIVATE ${ABSL_CC_LIB_COPTS})
@@ -260,7 +264,6 @@ Cflags: -I\${includedir}${PC_CFLAGS}\n")
if(ABSL_ENABLE_INSTALL)
set_target_properties(${_NAME} PROPERTIES
OUTPUT_NAME "absl_${_NAME}"
- # TODO(b/173696973): Figure out how to set SOVERSION for LTS releases.
SOVERSION 0
)
endif()
@@ -270,7 +273,7 @@ Cflags: -I\${includedir}${PC_CFLAGS}\n")
target_include_directories(${_NAME}
INTERFACE
"$<BUILD_INTERFACE:${ABSL_COMMON_INCLUDE_DIRS}>"
- $<INSTALL_INTERFACE:${ABSL_INSTALL_INCLUDEDIR}>
+ $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
)
if (_build_type STREQUAL "dll")
@@ -290,9 +293,9 @@ Cflags: -I\${includedir}${PC_CFLAGS}\n")
# installed abseil can't be tested.
if(NOT ABSL_CC_LIB_TESTONLY AND ABSL_ENABLE_INSTALL)
install(TARGETS ${_NAME} EXPORT ${PROJECT_NAME}Targets
- RUNTIME DESTINATION ${ABSL_INSTALL_BINDIR}
- LIBRARY DESTINATION ${ABSL_INSTALL_LIBDIR}
- ARCHIVE DESTINATION ${ABSL_INSTALL_LIBDIR}
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
)
endif()
@@ -333,8 +336,8 @@ endfunction()
# "awesome_test.cc"
# DEPS
# absl::awesome
-# gmock
-# gtest_main
+# GTest::gmock
+# GTest::gtest_main
# )
function(absl_cc_test)
if(NOT BUILD_TESTING)
diff --git a/third_party/abseil-cpp/CMake/AbseilInstallDirs.cmake b/third_party/abseil-cpp/CMake/AbseilInstallDirs.cmake
deleted file mode 100644
index 6fc914b60f..0000000000
--- a/third_party/abseil-cpp/CMake/AbseilInstallDirs.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-include(GNUInstallDirs)
-
-# absl_VERSION is only set if we are an LTS release being installed, in which
-# case it may be into a system directory and so we need to make subdirectories
-# for each installed version of Abseil. This mechanism is implemented in
-# Abseil's internal Copybara (https://github.com/google/copybara) workflows and
-# isn't visible in the CMake buildsystem itself.
-
-if(absl_VERSION)
- set(ABSL_SUBDIR "${PROJECT_NAME}_${PROJECT_VERSION}")
- set(ABSL_INSTALL_BINDIR "${CMAKE_INSTALL_BINDIR}/${ABSL_SUBDIR}")
- set(ABSL_INSTALL_CONFIGDIR "${CMAKE_INSTALL_LIBDIR}/cmake/${ABSL_SUBDIR}")
- set(ABSL_INSTALL_INCLUDEDIR "${CMAKE_INSTALL_INCLUDEDIR}/${ABSL_SUBDIR}")
- set(ABSL_INSTALL_LIBDIR "${CMAKE_INSTALL_LIBDIR}/${ABSL_SUBDIR}")
-else()
- set(ABSL_INSTALL_BINDIR "${CMAKE_INSTALL_BINDIR}")
- set(ABSL_INSTALL_CONFIGDIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")
- set(ABSL_INSTALL_INCLUDEDIR "${CMAKE_INSTALL_INCLUDEDIR}")
- set(ABSL_INSTALL_LIBDIR "${CMAKE_INSTALL_LIBDIR}")
-endif()
diff --git a/third_party/abseil-cpp/CMake/install_test_project/CMakeLists.txt b/third_party/abseil-cpp/CMake/install_test_project/CMakeLists.txt
index 06b797e9ed..eebfe617a4 100644
--- a/third_party/abseil-cpp/CMake/install_test_project/CMakeLists.txt
+++ b/third_party/abseil-cpp/CMake/install_test_project/CMakeLists.txt
@@ -18,8 +18,6 @@
cmake_minimum_required(VERSION 3.5)
project(absl_cmake_testing CXX)
-set(CMAKE_CXX_STANDARD 11)
-
add_executable(simple simple.cc)
find_package(absl REQUIRED)
diff --git a/third_party/abseil-cpp/CMakeLists.txt b/third_party/abseil-cpp/CMakeLists.txt
index e68810e3cf..42bcbe100b 100644
--- a/third_party/abseil-cpp/CMakeLists.txt
+++ b/third_party/abseil-cpp/CMakeLists.txt
@@ -41,11 +41,16 @@ if (POLICY CMP0077)
cmake_policy(SET CMP0077 NEW)
endif (POLICY CMP0077)
+# Allow the user to specify the MSVC runtime
+if (POLICY CMP0091)
+ cmake_policy(SET CMP0091 NEW)
+endif (POLICY CMP0091)
+
# Set BUILD_TESTING to OFF by default.
# This must come before the project() and include(CTest) lines.
OPTION(BUILD_TESTING "Build tests" OFF)
-project(absl CXX)
+project(absl LANGUAGES CXX)
include(CTest)
# Output directory is correct by default for most build setups. However, when
@@ -67,8 +72,8 @@ list(APPEND CMAKE_MODULE_PATH
${CMAKE_CURRENT_LIST_DIR}/absl/copts
)
-include(AbseilInstallDirs)
include(CMakePackageConfigHelpers)
+include(GNUInstallDirs)
include(AbseilDll)
include(AbseilHelpers)
@@ -97,9 +102,18 @@ endif()
## pthread
find_package(Threads REQUIRED)
+include(CMakeDependentOption)
+
option(ABSL_USE_EXTERNAL_GOOGLETEST
"If ON, Abseil will assume that the targets for GoogleTest are already provided by the including project. This makes sense when Abseil is used with add_subproject." OFF)
+cmake_dependent_option(ABSL_FIND_GOOGLETEST
+ "If ON, Abseil will use find_package(GTest) rather than assuming that GoogleTest is already provided by the including project."
+ ON
+ "ABSL_USE_EXTERNAL_GOOGLETEST"
+ OFF)
+
+
option(ABSL_USE_GOOGLETEST_HEAD
"If ON, abseil will download HEAD from GoogleTest at config time." OFF)
@@ -111,7 +125,15 @@ set(ABSL_LOCAL_GOOGLETEST_DIR "/usr/src/googletest" CACHE PATH
if(BUILD_TESTING)
## check targets
- if (NOT ABSL_USE_EXTERNAL_GOOGLETEST)
+ if (ABSL_USE_EXTERNAL_GOOGLETEST)
+ if (ABSL_FIND_GOOGLETEST)
+ find_package(GTest REQUIRED)
+ else()
+ if (NOT TARGET gtest AND NOT TARGET GTest::gtest)
+ message(FATAL_ERROR "ABSL_USE_EXTERNAL_GOOGLETEST is ON and ABSL_FIND_GOOGLETEST is OFF, which means that the top-level project must build the Google Test project. However, the target gtest was not found.")
+ endif()
+ endif()
+ else()
set(absl_gtest_build_dir ${CMAKE_BINARY_DIR}/googletest-build)
if(ABSL_USE_GOOGLETEST_HEAD AND ABSL_GOOGLETEST_DOWNLOAD_URL)
message(FATAL_ERROR "Do not set both ABSL_USE_GOOGLETEST_HEAD and ABSL_GOOGLETEST_DOWNLOAD_URL")
@@ -129,14 +151,22 @@ if(BUILD_TESTING)
include(CMake/Googletest/DownloadGTest.cmake)
endif()
- check_target(gtest)
- check_target(gtest_main)
- check_target(gmock)
+ if (NOT ABSL_FIND_GOOGLETEST)
+ # When Google Test is included directly rather than through find_package, the aliases are missing.
+ add_library(GTest::gtest_main ALIAS gtest_main)
+ add_library(GTest::gtest ALIAS gtest)
+ add_library(GTest::gmock ALIAS gmock)
+ endif()
+
+ check_target(GTest::gtest)
+ check_target(GTest::gtest_main)
+ check_target(GTest::gmock)
+ check_target(GTest::gmock_main)
list(APPEND ABSL_TEST_COMMON_LIBRARIES
- gtest_main
- gtest
- gmock
+ GTest::gtest_main
+ GTest::gtest
+ GTest::gmock
${CMAKE_THREAD_LIBS_INIT}
)
endif()
@@ -144,7 +174,6 @@ endif()
add_subdirectory(absl)
if(ABSL_ENABLE_INSTALL)
- message(FATAL_ERROR "Please do not install abseil")
# absl:lts-remove-begin(system installation is supported for LTS releases)
# We don't support system-wide installation
list(APPEND SYSTEM_INSTALL_DIRS "/usr/local" "/usr" "/opt/" "/opt/local" "c:/Program Files/${PROJECT_NAME}")
@@ -160,16 +189,16 @@ if(ABSL_ENABLE_INSTALL)
# install as a subdirectory only
install(EXPORT ${PROJECT_NAME}Targets
NAMESPACE absl::
- DESTINATION "${ABSL_INSTALL_CONFIGDIR}"
+ DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
)
configure_package_config_file(
CMake/abslConfig.cmake.in
"${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
- INSTALL_DESTINATION "${ABSL_INSTALL_CONFIGDIR}"
+ INSTALL_DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
)
install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
- DESTINATION "${ABSL_INSTALL_CONFIGDIR}"
+ DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
)
# Abseil only has a version in LTS releases. This mechanism is accomplished
@@ -182,12 +211,12 @@ if(ABSL_ENABLE_INSTALL)
)
install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake"
- DESTINATION ${ABSL_INSTALL_CONFIGDIR}
+ DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
)
endif() # absl_VERSION
install(DIRECTORY absl
- DESTINATION ${ABSL_INSTALL_INCLUDEDIR}
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.inc"
PATTERN "*.h"
diff --git a/third_party/abseil-cpp/absl/CMakeLists.txt b/third_party/abseil-cpp/absl/CMakeLists.txt
index fbfa7822b5..a41e1eeb35 100644
--- a/third_party/abseil-cpp/absl/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/CMakeLists.txt
@@ -16,6 +16,7 @@
add_subdirectory(base)
add_subdirectory(algorithm)
+add_subdirectory(cleanup)
add_subdirectory(container)
add_subdirectory(debugging)
add_subdirectory(flags)
diff --git a/third_party/abseil-cpp/absl/algorithm/CMakeLists.txt b/third_party/abseil-cpp/absl/algorithm/CMakeLists.txt
index 56cd0fb85b..609d858946 100644
--- a/third_party/abseil-cpp/absl/algorithm/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/algorithm/CMakeLists.txt
@@ -35,7 +35,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::algorithm
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -65,5 +65,5 @@ absl_cc_test(
absl::core_headers
absl::memory
absl::span
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/algorithm/container.h b/third_party/abseil-cpp/absl/algorithm/container.h
index 6398438f08..1652e7b055 100644
--- a/third_party/abseil-cpp/absl/algorithm/container.h
+++ b/third_party/abseil-cpp/absl/algorithm/container.h
@@ -905,11 +905,11 @@ void c_sort(C& c) {
// Overload of c_sort() for performing a `comp` comparison other than the
// default `operator<`.
-template <typename C, typename Compare>
-void c_sort(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+void c_sort(C& c, LessThan&& comp) {
std::sort(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_stable_sort()
@@ -925,11 +925,11 @@ void c_stable_sort(C& c) {
// Overload of c_stable_sort() for performing a `comp` comparison other than the
// default `operator<`.
-template <typename C, typename Compare>
-void c_stable_sort(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+void c_stable_sort(C& c, LessThan&& comp) {
std::stable_sort(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_sorted()
@@ -944,11 +944,11 @@ bool c_is_sorted(const C& c) {
// c_is_sorted() overload for performing a `comp` comparison other than the
// default `operator<`.
-template <typename C, typename Compare>
-bool c_is_sorted(const C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+bool c_is_sorted(const C& c, LessThan&& comp) {
return std::is_sorted(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_partial_sort()
@@ -966,14 +966,14 @@ void c_partial_sort(
// Overload of c_partial_sort() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename RandomAccessContainer, typename Compare>
+template <typename RandomAccessContainer, typename LessThan>
void c_partial_sort(
RandomAccessContainer& sequence,
container_algorithm_internal::ContainerIter<RandomAccessContainer> middle,
- Compare&& comp) {
+ LessThan&& comp) {
std::partial_sort(container_algorithm_internal::c_begin(sequence), middle,
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_partial_sort_copy()
@@ -994,15 +994,15 @@ c_partial_sort_copy(const C& sequence, RandomAccessContainer& result) {
// Overload of c_partial_sort_copy() for performing a `comp` comparison other
// than the default `operator<`.
-template <typename C, typename RandomAccessContainer, typename Compare>
+template <typename C, typename RandomAccessContainer, typename LessThan>
container_algorithm_internal::ContainerIter<RandomAccessContainer>
c_partial_sort_copy(const C& sequence, RandomAccessContainer& result,
- Compare&& comp) {
+ LessThan&& comp) {
return std::partial_sort_copy(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
container_algorithm_internal::c_begin(result),
container_algorithm_internal::c_end(result),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_sorted_until()
@@ -1018,12 +1018,12 @@ container_algorithm_internal::ContainerIter<C> c_is_sorted_until(C& c) {
// Overload of c_is_sorted_until() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename C, typename Compare>
+template <typename C, typename LessThan>
container_algorithm_internal::ContainerIter<C> c_is_sorted_until(
- C& c, Compare&& comp) {
+ C& c, LessThan&& comp) {
return std::is_sorted_until(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_nth_element()
@@ -1043,14 +1043,14 @@ void c_nth_element(
// Overload of c_nth_element() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename RandomAccessContainer, typename Compare>
+template <typename RandomAccessContainer, typename LessThan>
void c_nth_element(
RandomAccessContainer& sequence,
container_algorithm_internal::ContainerIter<RandomAccessContainer> nth,
- Compare&& comp) {
+ LessThan&& comp) {
std::nth_element(container_algorithm_internal::c_begin(sequence), nth,
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1072,12 +1072,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_lower_bound(
// Overload of c_lower_bound() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
+template <typename Sequence, typename T, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_lower_bound(
- Sequence& sequence, T&& value, Compare&& comp) {
+ Sequence& sequence, T&& value, LessThan&& comp) {
return std::lower_bound(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<T>(value), std::forward<Compare>(comp));
+ std::forward<T>(value), std::forward<LessThan>(comp));
}
// c_upper_bound()
@@ -1095,12 +1095,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_upper_bound(
// Overload of c_upper_bound() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
+template <typename Sequence, typename T, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_upper_bound(
- Sequence& sequence, T&& value, Compare&& comp) {
+ Sequence& sequence, T&& value, LessThan&& comp) {
return std::upper_bound(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<T>(value), std::forward<Compare>(comp));
+ std::forward<T>(value), std::forward<LessThan>(comp));
}
// c_equal_range()
@@ -1118,12 +1118,12 @@ c_equal_range(Sequence& sequence, T&& value) {
// Overload of c_equal_range() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
+template <typename Sequence, typename T, typename LessThan>
container_algorithm_internal::ContainerIterPairType<Sequence, Sequence>
-c_equal_range(Sequence& sequence, T&& value, Compare&& comp) {
+c_equal_range(Sequence& sequence, T&& value, LessThan&& comp) {
return std::equal_range(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<T>(value), std::forward<Compare>(comp));
+ std::forward<T>(value), std::forward<LessThan>(comp));
}
// c_binary_search()
@@ -1140,12 +1140,12 @@ bool c_binary_search(Sequence&& sequence, T&& value) {
// Overload of c_binary_search() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
-bool c_binary_search(Sequence&& sequence, T&& value, Compare&& comp) {
+template <typename Sequence, typename T, typename LessThan>
+bool c_binary_search(Sequence&& sequence, T&& value, LessThan&& comp) {
return std::binary_search(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
std::forward<T>(value),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1166,14 +1166,14 @@ OutputIterator c_merge(const C1& c1, const C2& c2, OutputIterator result) {
// Overload of c_merge() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare>
+template <typename C1, typename C2, typename OutputIterator, typename LessThan>
OutputIterator c_merge(const C1& c1, const C2& c2, OutputIterator result,
- Compare&& comp) {
+ LessThan&& comp) {
return std::merge(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), result,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_inplace_merge()
@@ -1189,13 +1189,13 @@ void c_inplace_merge(C& c,
// Overload of c_inplace_merge() for performing a merge using a `comp` other
// than `operator<`.
-template <typename C, typename Compare>
+template <typename C, typename LessThan>
void c_inplace_merge(C& c,
container_algorithm_internal::ContainerIter<C> middle,
- Compare&& comp) {
+ LessThan&& comp) {
std::inplace_merge(container_algorithm_internal::c_begin(c), middle,
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_includes()
@@ -1213,13 +1213,13 @@ bool c_includes(const C1& c1, const C2& c2) {
// Overload of c_includes() for performing a merge using a `comp` other than
// `operator<`.
-template <typename C1, typename C2, typename Compare>
-bool c_includes(const C1& c1, const C2& c2, Compare&& comp) {
+template <typename C1, typename C2, typename LessThan>
+bool c_includes(const C1& c1, const C2& c2, LessThan&& comp) {
return std::includes(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_union()
@@ -1243,7 +1243,7 @@ OutputIterator c_set_union(const C1& c1, const C2& c2, OutputIterator output) {
// Overload of c_set_union() for performing a merge using a `comp` other than
// `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1251,12 +1251,12 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
!container_algorithm_internal::IsUnorderedContainer<C2>::value,
void>::type>
OutputIterator c_set_union(const C1& c1, const C2& c2, OutputIterator output,
- Compare&& comp) {
+ LessThan&& comp) {
return std::set_union(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_intersection()
@@ -1280,7 +1280,7 @@ OutputIterator c_set_intersection(const C1& c1, const C2& c2,
// Overload of c_set_intersection() for performing a merge using a `comp` other
// than `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1288,12 +1288,12 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
!container_algorithm_internal::IsUnorderedContainer<C2>::value,
void>::type>
OutputIterator c_set_intersection(const C1& c1, const C2& c2,
- OutputIterator output, Compare&& comp) {
+ OutputIterator output, LessThan&& comp) {
return std::set_intersection(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_difference()
@@ -1318,7 +1318,7 @@ OutputIterator c_set_difference(const C1& c1, const C2& c2,
// Overload of c_set_difference() for performing a merge using a `comp` other
// than `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1326,12 +1326,12 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
!container_algorithm_internal::IsUnorderedContainer<C2>::value,
void>::type>
OutputIterator c_set_difference(const C1& c1, const C2& c2,
- OutputIterator output, Compare&& comp) {
+ OutputIterator output, LessThan&& comp) {
return std::set_difference(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_symmetric_difference()
@@ -1357,7 +1357,7 @@ OutputIterator c_set_symmetric_difference(const C1& c1, const C2& c2,
// Overload of c_set_symmetric_difference() for performing a merge using a
// `comp` other than `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1366,13 +1366,13 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
void>::type>
OutputIterator c_set_symmetric_difference(const C1& c1, const C2& c2,
OutputIterator output,
- Compare&& comp) {
+ LessThan&& comp) {
return std::set_symmetric_difference(
container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1391,11 +1391,11 @@ void c_push_heap(RandomAccessContainer& sequence) {
// Overload of c_push_heap() for performing a push operation on a heap using a
// `comp` other than `operator<`.
-template <typename RandomAccessContainer, typename Compare>
-void c_push_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_push_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::push_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_pop_heap()
@@ -1410,11 +1410,11 @@ void c_pop_heap(RandomAccessContainer& sequence) {
// Overload of c_pop_heap() for performing a pop operation on a heap using a
// `comp` other than `operator<`.
-template <typename RandomAccessContainer, typename Compare>
-void c_pop_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_pop_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::pop_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_make_heap()
@@ -1429,11 +1429,11 @@ void c_make_heap(RandomAccessContainer& sequence) {
// Overload of c_make_heap() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
-void c_make_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_make_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::make_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_sort_heap()
@@ -1448,11 +1448,11 @@ void c_sort_heap(RandomAccessContainer& sequence) {
// Overload of c_sort_heap() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
-void c_sort_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_sort_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::sort_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_heap()
@@ -1467,11 +1467,11 @@ bool c_is_heap(const RandomAccessContainer& sequence) {
// Overload of c_is_heap() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
-bool c_is_heap(const RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+bool c_is_heap(const RandomAccessContainer& sequence, LessThan&& comp) {
return std::is_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_heap_until()
@@ -1487,12 +1487,12 @@ c_is_heap_until(RandomAccessContainer& sequence) {
// Overload of c_is_heap_until() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
+template <typename RandomAccessContainer, typename LessThan>
container_algorithm_internal::ContainerIter<RandomAccessContainer>
-c_is_heap_until(RandomAccessContainer& sequence, Compare&& comp) {
+c_is_heap_until(RandomAccessContainer& sequence, LessThan&& comp) {
return std::is_heap_until(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1513,12 +1513,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_min_element(
// Overload of c_min_element() for performing a `comp` comparison other than
// `operator<`.
-template <typename Sequence, typename Compare>
+template <typename Sequence, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_min_element(
- Sequence& sequence, Compare&& comp) {
+ Sequence& sequence, LessThan&& comp) {
return std::min_element(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_max_element()
@@ -1535,12 +1535,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_max_element(
// Overload of c_max_element() for performing a `comp` comparison other than
// `operator<`.
-template <typename Sequence, typename Compare>
+template <typename Sequence, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_max_element(
- Sequence& sequence, Compare&& comp) {
+ Sequence& sequence, LessThan&& comp) {
return std::max_element(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_minmax_element()
@@ -1558,12 +1558,12 @@ c_minmax_element(C& c) {
// Overload of c_minmax_element() for performing `comp` comparisons other than
// `operator<`.
-template <typename C, typename Compare>
+template <typename C, typename LessThan>
container_algorithm_internal::ContainerIterPairType<C, C>
-c_minmax_element(C& c, Compare&& comp) {
+c_minmax_element(C& c, LessThan&& comp) {
return std::minmax_element(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1588,15 +1588,15 @@ bool c_lexicographical_compare(Sequence1&& sequence1, Sequence2&& sequence2) {
// Overload of c_lexicographical_compare() for performing a lexicographical
// comparison using a `comp` operator instead of `operator<`.
-template <typename Sequence1, typename Sequence2, typename Compare>
+template <typename Sequence1, typename Sequence2, typename LessThan>
bool c_lexicographical_compare(Sequence1&& sequence1, Sequence2&& sequence2,
- Compare&& comp) {
+ LessThan&& comp) {
return std::lexicographical_compare(
container_algorithm_internal::c_begin(sequence1),
container_algorithm_internal::c_end(sequence1),
container_algorithm_internal::c_begin(sequence2),
container_algorithm_internal::c_end(sequence2),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_next_permutation()
@@ -1612,11 +1612,11 @@ bool c_next_permutation(C& c) {
// Overload of c_next_permutation() for performing a lexicographical
// comparison using a `comp` operator instead of `operator<`.
-template <typename C, typename Compare>
-bool c_next_permutation(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+bool c_next_permutation(C& c, LessThan&& comp) {
return std::next_permutation(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_prev_permutation()
@@ -1632,11 +1632,11 @@ bool c_prev_permutation(C& c) {
// Overload of c_prev_permutation() for performing a lexicographical
// comparison using a `comp` operator instead of `operator<`.
-template <typename C, typename Compare>
-bool c_prev_permutation(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+bool c_prev_permutation(C& c, LessThan&& comp) {
return std::prev_permutation(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
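The hunks above only rename the comparator template parameter from `Compare` to `LessThan`, documenting that the callable must behave like a strict `operator<`; call sites are unchanged. A minimal sketch of the calling convention, assuming the usual absl/algorithm/container.h include path:

#include <functional>
#include <vector>

#include "absl/algorithm/container.h"

int main() {
  std::vector<int> v = {3, 1, 4, 1, 5};
  // Passing std::greater<int> as the `LessThan` argument builds a min-heap;
  // the rename only documents that the callable must model a strict `<`.
  absl::c_make_heap(v, std::greater<int>());
  absl::c_sort_heap(v, std::greater<int>());  // sorted descending: 5 4 3 1 1
  return v.front() == 5 ? 0 : 1;
}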
diff --git a/third_party/abseil-cpp/absl/base/CMakeLists.txt b/third_party/abseil-cpp/absl/base/CMakeLists.txt
index 981b8cc008..7d56aa1346 100644
--- a/third_party/abseil-cpp/absl/base/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/base/CMakeLists.txt
@@ -230,7 +230,7 @@ absl_cc_library(
${ABSL_DEFAULT_COPTS}
DEPS
absl::config
- gtest
+ GTest::gtest
TESTONLY
)
@@ -259,7 +259,7 @@ absl_cc_library(
absl::meta
absl::strings
absl::utility
- gtest
+ GTest::gtest
TESTONLY
)
@@ -273,7 +273,7 @@ absl_cc_test(
DEPS
absl::exception_safety_testing
absl::memory
- gtest_main
+ GTest::gtest_main
)
absl_cc_library(
@@ -300,8 +300,8 @@ absl_cc_test(
absl::atomic_hook_test_helper
absl::atomic_hook
absl::core_headers
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -314,7 +314,7 @@ absl_cc_test(
DEPS
absl::base
absl::core_headers
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -327,8 +327,8 @@ absl_cc_test(
DEPS
absl::errno_saver
absl::strerror
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -342,7 +342,7 @@ absl_cc_test(
absl::base
absl::config
absl::throw_delegate
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -357,7 +357,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::base_internal
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -371,8 +371,8 @@ absl_cc_test(
absl::base_internal
absl::memory
absl::strings
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_library(
@@ -388,7 +388,7 @@ absl_cc_library(
absl::base_internal
absl::core_headers
absl::synchronization
- gtest
+ GTest::gtest
TESTONLY
)
@@ -406,7 +406,7 @@ absl_cc_test(
absl::config
absl::core_headers
absl::synchronization
- gtest_main
+ GTest::gtest_main
)
absl_cc_library(
@@ -435,7 +435,7 @@ absl_cc_test(
absl::base
absl::config
absl::endian
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -448,7 +448,7 @@ absl_cc_test(
DEPS
absl::config
absl::synchronization
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -462,7 +462,7 @@ absl_cc_test(
absl::base
absl::core_headers
absl::synchronization
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -475,7 +475,7 @@ absl_cc_test(
DEPS
absl::raw_logging_internal
absl::strings
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -488,7 +488,7 @@ absl_cc_test(
DEPS
absl::base
absl::synchronization
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -516,7 +516,7 @@ absl_cc_test(
absl::core_headers
absl::synchronization
Threads::Threads
- gtest_main
+ GTest::gtest_main
)
absl_cc_library(
@@ -543,7 +543,7 @@ absl_cc_test(
DEPS
absl::exponential_biased
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -570,7 +570,7 @@ absl_cc_test(
DEPS
absl::core_headers
absl::periodic_sampler
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -596,7 +596,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::scoped_set_env
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -620,8 +620,8 @@ absl_cc_test(
absl::flags_marshalling
absl::log_severity
absl::strings
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_library(
@@ -651,8 +651,8 @@ absl_cc_test(
DEPS
absl::strerror
absl::strings
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_library(
@@ -677,7 +677,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::fast_type_id
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -690,5 +690,5 @@ absl_cc_test(
DEPS
absl::core_headers
absl::optional
- gtest_main
+ GTest::gtest_main
)
diff --git a/third_party/abseil-cpp/absl/base/attributes.h b/third_party/abseil-cpp/absl/base/attributes.h
index cf2cb5501e..52139556f2 100644
--- a/third_party/abseil-cpp/absl/base/attributes.h
+++ b/third_party/abseil-cpp/absl/base/attributes.h
@@ -131,14 +131,14 @@
// ABSL_ATTRIBUTE_WEAK
//
// Tags a function as weak for the purposes of compilation and linking.
-// Weak attributes currently do not work properly in LLVM's Windows backend,
-// so disable them there. See https://bugs.llvm.org/show_bug.cgi?id=37598
+// Weak attributes did not work properly in LLVM's Windows backend before
+// 9.0.0, so disable them there. See https://bugs.llvm.org/show_bug.cgi?id=37598
// for further information.
// The MinGW compiler doesn't complain about the weak attribute until the link
// step, presumably because Windows doesn't use ELF binaries.
#if (ABSL_HAVE_ATTRIBUTE(weak) || \
(defined(__GNUC__) && !defined(__clang__))) && \
- !(defined(__llvm__) && defined(_WIN32)) && !defined(__MINGW32__)
+ (!defined(_WIN32) || __clang_major__ < 9) && !defined(__MINGW32__)
#undef ABSL_ATTRIBUTE_WEAK
#define ABSL_ATTRIBUTE_WEAK __attribute__((weak))
#define ABSL_HAVE_ATTRIBUTE_WEAK 1
@@ -281,10 +281,7 @@
// ABSL_ATTRIBUTE_RETURNS_NONNULL
//
// Tells the compiler that a particular function never returns a null pointer.
-#if ABSL_HAVE_ATTRIBUTE(returns_nonnull) || \
- (defined(__GNUC__) && \
- (__GNUC__ > 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)) && \
- !defined(__clang__))
+#if ABSL_HAVE_ATTRIBUTE(returns_nonnull)
#define ABSL_ATTRIBUTE_RETURNS_NONNULL __attribute__((returns_nonnull))
#else
#define ABSL_ATTRIBUTE_RETURNS_NONNULL
@@ -524,6 +521,13 @@
// ABSL_ATTRIBUTE_UNUSED
//
// Prevents the compiler from complaining about variables that appear unused.
+//
+// For code or headers that are assured to only build with C++17 and up, prefer
+// just using the standard '[[maybe_unused]]' directly over this macro.
+//
+// Due to differences in positioning requirements between the old, compiler
+// specific __attribute__ syntax and the now standard [[maybe_unused]], this
+// macro does not attempt to take advantage of '[[maybe_unused]]'.
#if ABSL_HAVE_ATTRIBUTE(unused) || (defined(__GNUC__) && !defined(__clang__))
#undef ABSL_ATTRIBUTE_UNUSED
#define ABSL_ATTRIBUTE_UNUSED __attribute__((__unused__))
@@ -595,31 +599,24 @@
// case 42:
// ...
//
-// Notes: when compiled with clang in C++11 mode, the ABSL_FALLTHROUGH_INTENDED
-// macro is expanded to the [[clang::fallthrough]] attribute, which is analysed
-// when performing switch labels fall-through diagnostic
-// (`-Wimplicit-fallthrough`). See clang documentation on language extensions
-// for details:
+// Notes: When supported, GCC and Clang can issue a warning on switch labels
+// with unannotated fallthrough using the warning `-Wimplicit-fallthrough`. See
+// clang documentation on language extensions for details:
// https://clang.llvm.org/docs/AttributeReference.html#fallthrough-clang-fallthrough
//
-// When used with unsupported compilers, the ABSL_FALLTHROUGH_INTENDED macro
-// has no effect on diagnostics. In any case this macro has no effect on runtime
+// When used with unsupported compilers, the ABSL_FALLTHROUGH_INTENDED macro has
+// no effect on diagnostics. In any case this macro has no effect on runtime
// behavior and performance of code.
#ifdef ABSL_FALLTHROUGH_INTENDED
#error "ABSL_FALLTHROUGH_INTENDED should not be defined."
-#endif
-
-// TODO(zhangxy): Use c++17 standard [[fallthrough]] macro, when supported.
-#if defined(__clang__) && defined(__has_warning)
-#if __has_feature(cxx_attributes) && __has_warning("-Wimplicit-fallthrough")
+#elif ABSL_HAVE_CPP_ATTRIBUTE(fallthrough)
+#define ABSL_FALLTHROUGH_INTENDED [[fallthrough]]
+#elif ABSL_HAVE_CPP_ATTRIBUTE(clang::fallthrough)
#define ABSL_FALLTHROUGH_INTENDED [[clang::fallthrough]]
-#endif
-#elif defined(__GNUC__) && __GNUC__ >= 7
+#elif ABSL_HAVE_CPP_ATTRIBUTE(gnu::fallthrough)
#define ABSL_FALLTHROUGH_INTENDED [[gnu::fallthrough]]
-#endif
-
-#ifndef ABSL_FALLTHROUGH_INTENDED
+#else
#define ABSL_FALLTHROUGH_INTENDED \
do { \
} while (0)
@@ -699,4 +696,26 @@
#define ABSL_ATTRIBUTE_PURE_FUNCTION
#endif
+// ABSL_ATTRIBUTE_LIFETIME_BOUND indicates that a resource owned by a function
+// parameter or implicit object parameter is retained by the return value of the
+// annotated function (or, for a parameter of a constructor, in the value of the
+// constructed object). This attribute causes warnings to be produced if a
+// temporary object does not live long enough.
+//
+// When applied to a reference parameter, the referenced object is assumed to be
+// retained by the return value of the function. When applied to a non-reference
+// parameter (for example, a pointer or a class type), all temporaries
+// referenced by the parameter are assumed to be retained by the return value of
+// the function.
+//
+// See also the upstream documentation:
+// https://clang.llvm.org/docs/AttributeReference.html#lifetimebound
+#if ABSL_HAVE_CPP_ATTRIBUTE(clang::lifetimebound)
+#define ABSL_ATTRIBUTE_LIFETIME_BOUND [[clang::lifetimebound]]
+#elif ABSL_HAVE_ATTRIBUTE(lifetimebound)
+#define ABSL_ATTRIBUTE_LIFETIME_BOUND __attribute__((lifetimebound))
+#else
+#define ABSL_ATTRIBUTE_LIFETIME_BOUND
+#endif
+
#endif // ABSL_BASE_ATTRIBUTES_H_
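A short sketch of the two additions documented above; `Self` and `Classify` are illustrative names rather than Abseil API, and the lifetime diagnostic only fires on compilers that understand clang::lifetimebound:

#include <string>

#include "absl/base/attributes.h"

// With the annotation, Clang warns when the returned reference is bound to
// something that outlives the temporary passed as `s`.
const std::string& Self(const std::string& s ABSL_ATTRIBUTE_LIFETIME_BOUND) {
  return s;
}

int Classify(int x) {
  switch (x) {
    case 41:
      ++x;
      ABSL_FALLTHROUGH_INTENDED;  // [[fallthrough]] where the compiler has it
    case 42:
      return x;
    default:
      return -1;
  }
}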
diff --git a/third_party/abseil-cpp/absl/base/config.h b/third_party/abseil-cpp/absl/base/config.h
index 95449969e7..0524196d56 100644
--- a/third_party/abseil-cpp/absl/base/config.h
+++ b/third_party/abseil-cpp/absl/base/config.h
@@ -166,6 +166,22 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#define ABSL_HAVE_FEATURE(f) 0
#endif
+// Portable check for GCC minimum version:
+// https://gcc.gnu.org/onlinedocs/cpp/Common-Predefined-Macros.html
+#if defined(__GNUC__) && defined(__GNUC_MINOR__)
+#define ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(x, y) \
+ (__GNUC__ > (x) || __GNUC__ == (x) && __GNUC_MINOR__ >= (y))
+#else
+#define ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(x, y) 0
+#endif
+
+#if defined(__clang__) && defined(__clang_major__) && defined(__clang_minor__)
+#define ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(x, y) \
+ (__clang_major__ > (x) || __clang_major__ == (x) && __clang_minor__ >= (y))
+#else
+#define ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(x, y) 0
+#endif
+
// ABSL_HAVE_TLS is defined to 1 when __thread should be supported.
// We assume __thread is supported on Linux when compiled with Clang or compiled
// against libstdc++ with _GLIBCXX_HAVE_TLS defined.
@@ -183,10 +199,9 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// gcc >= 4.8.1 using libstdc++, and Visual Studio.
#ifdef ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE
#error ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE cannot be directly set
-#elif defined(_LIBCPP_VERSION) || \
- (!defined(__clang__) && defined(__GNUC__) && defined(__GLIBCXX__) && \
- (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8))) || \
- defined(_MSC_VER)
+#elif defined(_LIBCPP_VERSION) || defined(_MSC_VER) || \
+ (!defined(__clang__) && defined(__GLIBCXX__) && \
+ ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(4, 8))
#define ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE 1
#endif
@@ -205,10 +220,9 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#error ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE cannot be directly set
#elif defined(ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE)
#error ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE cannot be directly set
-#elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \
- (!defined(__clang__) && defined(__GNUC__) && \
- (__GNUC__ > 7 || (__GNUC__ == 7 && __GNUC_MINOR__ >= 4)) && \
- (defined(_LIBCPP_VERSION) || defined(__GLIBCXX__))) || \
+#elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \
+ (!defined(__clang__) && ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 4) && \
+ (defined(_LIBCPP_VERSION) || defined(__GLIBCXX__))) || \
(defined(_MSC_VER) && !defined(__NVCC__))
#define ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE 1
#define ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE 1
@@ -222,7 +236,7 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#if ABSL_INTERNAL_HAS_KEYWORD(__builtin_LINE) && \
ABSL_INTERNAL_HAS_KEYWORD(__builtin_FILE)
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
-#elif defined(__GNUC__) && __GNUC__ >= 5
+#elif ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0)
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
#endif
#endif
@@ -319,25 +333,21 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// For further details, consult the compiler's documentation.
#ifdef ABSL_HAVE_EXCEPTIONS
#error ABSL_HAVE_EXCEPTIONS cannot be directly set.
-
-#elif defined(__clang__)
-
-#if __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 6)
+#elif ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(3, 6)
// Clang >= 3.6
#if ABSL_HAVE_FEATURE(cxx_exceptions)
#define ABSL_HAVE_EXCEPTIONS 1
#endif // ABSL_HAVE_FEATURE(cxx_exceptions)
-#else
+#elif defined(__clang__)
// Clang < 3.6
// http://releases.llvm.org/3.6.0/tools/clang/docs/ReleaseNotes.html#the-exceptions-macro
#if defined(__EXCEPTIONS) && ABSL_HAVE_FEATURE(cxx_exceptions)
#define ABSL_HAVE_EXCEPTIONS 1
#endif // defined(__EXCEPTIONS) && ABSL_HAVE_FEATURE(cxx_exceptions)
-#endif // __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 6)
-
// Handle remaining special cases and default to exceptions being supported.
-#elif !(defined(__GNUC__) && (__GNUC__ < 5) && !defined(__EXCEPTIONS)) && \
- !(defined(__GNUC__) && (__GNUC__ >= 5) && !defined(__cpp_exceptions)) && \
+#elif !(defined(__GNUC__) && (__GNUC__ < 5) && !defined(__EXCEPTIONS)) && \
+ !(ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0) && \
+ !defined(__cpp_exceptions)) && \
!(defined(_MSC_VER) && !defined(_CPPUNWIND))
#define ABSL_HAVE_EXCEPTIONS 1
#endif
@@ -690,10 +700,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// a compiler instrumentation module and a run-time library.
#ifdef ABSL_HAVE_MEMORY_SANITIZER
#error "ABSL_HAVE_MEMORY_SANITIZER cannot be directly set."
-#elif defined(MEMORY_SANITIZER)
-// The MEMORY_SANITIZER macro is deprecated but we will continue to honor it
-// for now.
-#define ABSL_HAVE_MEMORY_SANITIZER 1
#elif defined(__SANITIZE_MEMORY__)
#define ABSL_HAVE_MEMORY_SANITIZER 1
#elif !defined(__native_client__) && ABSL_HAVE_FEATURE(memory_sanitizer)
@@ -705,10 +711,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// ThreadSanitizer (TSan) is a fast data race detector.
#ifdef ABSL_HAVE_THREAD_SANITIZER
#error "ABSL_HAVE_THREAD_SANITIZER cannot be directly set."
-#elif defined(THREAD_SANITIZER)
-// The THREAD_SANITIZER macro is deprecated but we will continue to honor it
-// for now.
-#define ABSL_HAVE_THREAD_SANITIZER 1
#elif defined(__SANITIZE_THREAD__)
#define ABSL_HAVE_THREAD_SANITIZER 1
#elif ABSL_HAVE_FEATURE(thread_sanitizer)
@@ -720,10 +722,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// AddressSanitizer (ASan) is a fast memory error detector.
#ifdef ABSL_HAVE_ADDRESS_SANITIZER
#error "ABSL_HAVE_ADDRESS_SANITIZER cannot be directly set."
-#elif defined(ADDRESS_SANITIZER)
-// The ADDRESS_SANITIZER macro is deprecated but we will continue to honor it
-// for now.
-#define ABSL_HAVE_ADDRESS_SANITIZER 1
#elif defined(__SANITIZE_ADDRESS__)
#define ABSL_HAVE_ADDRESS_SANITIZER 1
#elif ABSL_HAVE_FEATURE(address_sanitizer)
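A sketch of how the new version-check helpers read in practice. They are ABSL_INTERNAL_ macros, so code outside Abseil should not depend on them; MY_HAVE_BUILTIN_MUL_OVERFLOW is a hypothetical feature gate used only for illustration:

#include "absl/base/config.h"

// The helpers expand to 0 on non-GCC / non-Clang toolchains, so the check
// below never misfires there.
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0) || \
    ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(3, 8)
#define MY_HAVE_BUILTIN_MUL_OVERFLOW 1
#else
#define MY_HAVE_BUILTIN_MUL_OVERFLOW 0
#endif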
diff --git a/third_party/abseil-cpp/absl/base/dynamic_annotations.h b/third_party/abseil-cpp/absl/base/dynamic_annotations.h
index bf874db990..065bd5be09 100644
--- a/third_party/abseil-cpp/absl/base/dynamic_annotations.h
+++ b/third_party/abseil-cpp/absl/base/dynamic_annotations.h
@@ -468,7 +468,7 @@ using absl::base_internal::ValgrindSlowdown;
__sanitizer_annotate_contiguous_container(beg, end, old_mid, new_mid)
#define ABSL_ADDRESS_SANITIZER_REDZONE(name) \
struct { \
- char x[8] __attribute__((aligned(8))); \
+ alignas(8) char x[8]; \
} name
#else
diff --git a/third_party/abseil-cpp/absl/base/internal/exception_safety_testing.h b/third_party/abseil-cpp/absl/base/internal/exception_safety_testing.h
index 6ba89d05df..77a5aec642 100644
--- a/third_party/abseil-cpp/absl/base/internal/exception_safety_testing.h
+++ b/third_party/abseil-cpp/absl/base/internal/exception_safety_testing.h
@@ -536,7 +536,22 @@ class ThrowingValue : private exceptions_internal::TrackedObject {
}
// Memory management operators
- // Args.. allows us to overload regular and placement new in one shot
+ static void* operator new(size_t s) noexcept(
+ IsSpecified(TypeSpec::kNoThrowNew)) {
+ if (!IsSpecified(TypeSpec::kNoThrowNew)) {
+ exceptions_internal::MaybeThrow(ABSL_PRETTY_FUNCTION, true);
+ }
+ return ::operator new(s);
+ }
+
+ static void* operator new[](size_t s) noexcept(
+ IsSpecified(TypeSpec::kNoThrowNew)) {
+ if (!IsSpecified(TypeSpec::kNoThrowNew)) {
+ exceptions_internal::MaybeThrow(ABSL_PRETTY_FUNCTION, true);
+ }
+ return ::operator new[](s);
+ }
+
template <typename... Args>
static void* operator new(size_t s, Args&&... args) noexcept(
IsSpecified(TypeSpec::kNoThrowNew)) {
@@ -557,12 +572,6 @@ class ThrowingValue : private exceptions_internal::TrackedObject {
// Abseil doesn't support throwing overloaded operator delete. These are
// provided so a throwing operator-new can clean up after itself.
- //
- // We provide both regular and templated operator delete because if only the
- // templated version is provided as we did with operator new, the compiler has
- // no way of knowing which overload of operator delete to call. See
- // https://en.cppreference.com/w/cpp/memory/new/operator_delete and
- // https://en.cppreference.com/w/cpp/language/delete for the gory details.
void operator delete(void* p) noexcept { ::operator delete(p); }
template <typename... Args>
@@ -726,9 +735,8 @@ class ThrowingAllocator : private exceptions_internal::TrackedObject {
ThrowingAllocator select_on_container_copy_construction() noexcept(
IsSpecified(AllocSpec::kNoThrowAllocate)) {
- auto& out = *this;
ReadStateAndMaybeThrow(ABSL_PRETTY_FUNCTION);
- return out;
+ return *this;
}
template <typename U>
diff --git a/third_party/abseil-cpp/absl/base/internal/sysinfo.cc b/third_party/abseil-cpp/absl/base/internal/sysinfo.cc
index 4a3b205034..08a1e28894 100644
--- a/third_party/abseil-cpp/absl/base/internal/sysinfo.cc
+++ b/third_party/abseil-cpp/absl/base/internal/sysinfo.cc
@@ -61,9 +61,76 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace base_internal {
+namespace {
+
+#if defined(_WIN32)
+
+// Returns number of bits set in `bitMask`
+DWORD Win32CountSetBits(ULONG_PTR bitMask) {
+ for (DWORD bitSetCount = 0; ; ++bitSetCount) {
+ if (bitMask == 0) return bitSetCount;
+ bitMask &= bitMask - 1;
+ }
+}
+
+// Returns the number of logical CPUs using GetLogicalProcessorInformation(), or
+// 0 if the number of processors is not available or can not be computed.
+// https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getlogicalprocessorinformation
+int Win32NumCPUs() {
+#pragma comment(lib, "kernel32.lib")
+ using Info = SYSTEM_LOGICAL_PROCESSOR_INFORMATION;
+
+ DWORD info_size = sizeof(Info);
+ Info* info(static_cast<Info*>(malloc(info_size)));
+ if (info == nullptr) return 0;
+
+ bool success = GetLogicalProcessorInformation(info, &info_size);
+ if (!success && GetLastError() == ERROR_INSUFFICIENT_BUFFER) {
+ free(info);
+ info = static_cast<Info*>(malloc(info_size));
+ if (info == nullptr) return 0;
+ success = GetLogicalProcessorInformation(info, &info_size);
+ }
+
+ DWORD logicalProcessorCount = 0;
+ if (success) {
+ Info* ptr = info;
+ DWORD byteOffset = 0;
+ while (byteOffset + sizeof(Info) <= info_size) {
+ switch (ptr->Relationship) {
+ case RelationProcessorCore:
+ logicalProcessorCount += Win32CountSetBits(ptr->ProcessorMask);
+ break;
+
+ case RelationNumaNode:
+ case RelationCache:
+ case RelationProcessorPackage:
+ // Ignore other entries
+ break;
+
+ default:
+ // Ignore unknown entries
+ break;
+ }
+ byteOffset += sizeof(Info);
+ ptr++;
+ }
+ }
+ free(info);
+ return logicalProcessorCount;
+}
+
+#endif
+
+} // namespace
+
+
static int GetNumCPUs() {
#if defined(__myriad2__)
return 1;
+#elif defined(_WIN32)
+ const unsigned hardware_concurrency = Win32NumCPUs();
+ return hardware_concurrency ? hardware_concurrency : 1;
#else
// Other possibilities:
// - Read /sys/devices/system/cpu/online and use cpumask_parse()
diff --git a/third_party/abseil-cpp/absl/base/internal/thread_identity.cc b/third_party/abseil-cpp/absl/base/internal/thread_identity.cc
index 6ea010ed0d..9950e63a79 100644
--- a/third_party/abseil-cpp/absl/base/internal/thread_identity.cc
+++ b/third_party/abseil-cpp/absl/base/internal/thread_identity.cc
@@ -120,10 +120,10 @@ void SetCurrentThreadIdentity(
ABSL_THREAD_IDENTITY_MODE == ABSL_THREAD_IDENTITY_MODE_USE_CPP11
// Please see the comment on `CurrentThreadIdentityIfPresent` in
-// thread_identity.h. Because DLLs cannot expose thread_local variables in
-// headers, we opt for the correct-but-slower option of placing the definition
-// of this function only in a translation unit inside DLL.
-#if defined(ABSL_BUILD_DLL) || defined(ABSL_CONSUME_DLL)
+// thread_identity.h. When we cannot expose thread_local variables in
+// headers, we opt for the correct-but-slower option of not inlining this
+// function.
+#ifndef ABSL_INTERNAL_INLINE_CURRENT_THREAD_IDENTITY_IF_PRESENT
ThreadIdentity* CurrentThreadIdentityIfPresent() { return thread_identity_ptr; }
#endif
#endif
diff --git a/third_party/abseil-cpp/absl/base/internal/thread_identity.h b/third_party/abseil-cpp/absl/base/internal/thread_identity.h
index 9ee651a3a6..6e25b92fa2 100644
--- a/third_party/abseil-cpp/absl/base/internal/thread_identity.h
+++ b/third_party/abseil-cpp/absl/base/internal/thread_identity.h
@@ -236,13 +236,18 @@ ABSL_CONST_INIT extern thread_local ThreadIdentity* thread_identity_ptr;
#error Thread-local storage not detected on this platform
#endif
-// thread_local variables cannot be in headers exposed by DLLs. However, it is
-// important for performance reasons in general that
-// `CurrentThreadIdentityIfPresent` be inlined. This is not possible across a
-// DLL boundary so, with DLLs, we opt to have the function not be inlined. Note
+// thread_local variables cannot be in headers exposed by DLLs or in certain
+// build configurations on Apple platforms. However, it is important for
+// performance reasons in general that `CurrentThreadIdentityIfPresent` be
+// inlined. In the other cases we opt to have the function not be inlined. Note
// that `CurrentThreadIdentityIfPresent` is declared above so we can exclude
-// this entire inline definition when compiling as a DLL.
-#if !defined(ABSL_BUILD_DLL) && !defined(ABSL_CONSUME_DLL)
+// this entire inline definition.
+#if !defined(__APPLE__) && !defined(ABSL_BUILD_DLL) && \
+ !defined(ABSL_CONSUME_DLL)
+#define ABSL_INTERNAL_INLINE_CURRENT_THREAD_IDENTITY_IF_PRESENT 1
+#endif
+
+#ifdef ABSL_INTERNAL_INLINE_CURRENT_THREAD_IDENTITY_IF_PRESENT
inline ThreadIdentity* CurrentThreadIdentityIfPresent() {
return thread_identity_ptr;
}
diff --git a/third_party/abseil-cpp/absl/base/optimization.h b/third_party/abseil-cpp/absl/base/optimization.h
index 6332b62584..d090be1286 100644
--- a/third_party/abseil-cpp/absl/base/optimization.h
+++ b/third_party/abseil-cpp/absl/base/optimization.h
@@ -106,9 +106,10 @@
// Cacheline aligning objects properly allows constructive memory sharing and
// prevents destructive (or "false") memory sharing.
//
-// NOTE: this macro should be replaced with usage of `alignas()` using
+// NOTE: callers should replace uses of this macro with `alignas()` using
// `std::hardware_constructive_interference_size` and/or
-// `std::hardware_destructive_interference_size` when available within C++17.
+// `std::hardware_destructive_interference_size` when C++17 becomes available to
+// them.
//
// See http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2016/p0154r1.html
// for more information.
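A hedged sketch of the C++17 replacement the note recommends, guarded by the feature-test macro because several standard libraries still do not ship the interference-size constants; `PaddedCounter` is an illustrative type:

#include <atomic>
#include <cstdint>
#include <new>

#include "absl/base/optimization.h"

#if defined(__cpp_lib_hardware_interference_size)
// The standard constant the note points to, where the library provides it.
struct alignas(std::hardware_destructive_interference_size) PaddedCounter {
  std::atomic<int64_t> value{0};
};
#else
struct ABSL_CACHELINE_ALIGNED PaddedCounter {
  std::atomic<int64_t> value{0};
};
#endif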
diff --git a/third_party/abseil-cpp/absl/cleanup/CMakeLists.txt b/third_party/abseil-cpp/absl/cleanup/CMakeLists.txt
index a2dd78a84a..26a6d0dce3 100644
--- a/third_party/abseil-cpp/absl/cleanup/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/cleanup/CMakeLists.txt
@@ -51,5 +51,5 @@ absl_cc_test(
absl::cleanup
absl::config
absl::utility
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/container/CMakeLists.txt b/third_party/abseil-cpp/absl/container/CMakeLists.txt
index 2d7d0e65f2..91c4015437 100644
--- a/third_party/abseil-cpp/absl/container/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/container/CMakeLists.txt
@@ -80,7 +80,7 @@ absl_cc_test(
absl::strings
absl::test_instance_tracker
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -109,7 +109,7 @@ absl_cc_test(
absl::optional
absl::test_instance_tracker
absl::utility
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -144,7 +144,7 @@ absl_cc_test(
absl::exception_testing
absl::hash_testing
absl::memory
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -158,7 +158,7 @@ absl_cc_test(
absl::fixed_array
absl::config
absl::exception_safety_testing
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -222,7 +222,7 @@ absl_cc_test(
absl::memory
absl::raw_logging_internal
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -236,7 +236,7 @@ absl_cc_test(
absl::inlined_vector
absl::config
absl::exception_safety_testing
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -262,7 +262,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::test_instance_tracker
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -297,7 +297,7 @@ absl_cc_test(
absl::unordered_map_modifiers_test
absl::any
absl::raw_logging_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -335,7 +335,7 @@ absl_cc_test(
absl::memory
absl::raw_logging_internal
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -370,7 +370,7 @@ absl_cc_test(
absl::unordered_map_lookup_test
absl::unordered_map_members_test
absl::unordered_map_modifiers_test
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -404,7 +404,7 @@ absl_cc_test(
absl::unordered_set_lookup_test
absl::unordered_set_members_test
absl::unordered_set_modifiers_test
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -433,7 +433,7 @@ absl_cc_test(
absl::container_memory
absl::strings
absl::test_instance_tracker
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -465,7 +465,7 @@ absl_cc_test(
absl::hash
absl::random_random
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -507,7 +507,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::hash_policy_testing
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -531,7 +531,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::hash_policy_traits
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -561,7 +561,7 @@ absl_cc_test(
DEPS
absl::hashtablez_sampler
absl::have_sse
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -618,7 +618,7 @@ absl_cc_test(
DEPS
absl::hash_policy_traits
absl::node_hash_policy
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -693,7 +693,7 @@ absl_cc_test(
absl::core_headers
absl::raw_logging_internal
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -707,7 +707,7 @@ absl_cc_test(
absl::raw_hash_set
absl::tracked
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -740,7 +740,7 @@ absl_cc_test(
absl::core_headers
absl::raw_logging_internal
absl::span
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -765,7 +765,7 @@ absl_cc_library(
DEPS
absl::hash_generator_testing
absl::hash_policy_testing
- gmock
+ GTest::gmock
TESTONLY
)
@@ -779,7 +779,7 @@ absl_cc_library(
DEPS
absl::hash_generator_testing
absl::hash_policy_testing
- gmock
+ GTest::gmock
TESTONLY
)
@@ -792,7 +792,7 @@ absl_cc_library(
${ABSL_TEST_COPTS}
DEPS
absl::type_traits
- gmock
+ GTest::gmock
TESTONLY
)
@@ -806,7 +806,7 @@ absl_cc_library(
DEPS
absl::hash_generator_testing
absl::hash_policy_testing
- gmock
+ GTest::gmock
TESTONLY
)
@@ -820,7 +820,7 @@ absl_cc_library(
DEPS
absl::hash_generator_testing
absl::hash_policy_testing
- gmock
+ GTest::gmock
TESTONLY
)
@@ -834,7 +834,7 @@ absl_cc_library(
DEPS
absl::hash_generator_testing
absl::hash_policy_testing
- gmock
+ GTest::gmock
TESTONLY
)
@@ -847,7 +847,7 @@ absl_cc_library(
${ABSL_TEST_COPTS}
DEPS
absl::type_traits
- gmock
+ GTest::gmock
TESTONLY
)
@@ -861,7 +861,7 @@ absl_cc_library(
DEPS
absl::hash_generator_testing
absl::hash_policy_testing
- gmock
+ GTest::gmock
TESTONLY
)
@@ -877,7 +877,7 @@ absl_cc_test(
absl::unordered_set_lookup_test
absl::unordered_set_members_test
absl::unordered_set_modifiers_test
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -892,5 +892,5 @@ absl_cc_test(
absl::unordered_map_lookup_test
absl::unordered_map_members_test
absl::unordered_map_modifiers_test
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/container/btree_test.cc b/third_party/abseil-cpp/absl/container/btree_test.cc
index 74337df2c1..d5d79151aa 100644
--- a/third_party/abseil-cpp/absl/container/btree_test.cc
+++ b/third_party/abseil-cpp/absl/container/btree_test.cc
@@ -1708,10 +1708,25 @@ TEST(Btree, StrSplitCompatible) {
EXPECT_EQ(split_set, expected_set);
}
-// We can't use EXPECT_EQ/etc. to compare absl::weak_ordering because they
-// convert literal 0 to int and absl::weak_ordering can only be compared with
-// literal 0. Defining this function allows for avoiding ClangTidy warnings.
-bool Identity(const bool b) { return b; }
+TEST(Btree, KeyComp) {
+ absl::btree_set<int> s;
+ EXPECT_TRUE(s.key_comp()(1, 2));
+ EXPECT_FALSE(s.key_comp()(2, 2));
+ EXPECT_FALSE(s.key_comp()(2, 1));
+
+ absl::btree_map<int, int> m1;
+ EXPECT_TRUE(m1.key_comp()(1, 2));
+ EXPECT_FALSE(m1.key_comp()(2, 2));
+ EXPECT_FALSE(m1.key_comp()(2, 1));
+
+ // Even though we internally adapt the comparator of `m2` to be three-way and
+ // heterogeneous, the comparator we expose through key_comp() is the original
+ // unadapted comparator.
+ absl::btree_map<std::string, int> m2;
+ EXPECT_TRUE(m2.key_comp()("a", "b"));
+ EXPECT_FALSE(m2.key_comp()("b", "b"));
+ EXPECT_FALSE(m2.key_comp()("b", "a"));
+}
TEST(Btree, ValueComp) {
absl::btree_set<int> s;
@@ -1724,13 +1739,13 @@ TEST(Btree, ValueComp) {
EXPECT_FALSE(m1.value_comp()(std::make_pair(2, 0), std::make_pair(2, 0)));
EXPECT_FALSE(m1.value_comp()(std::make_pair(2, 0), std::make_pair(1, 0)));
+ // Even though we internally adapt the comparator of `m2` to be three-way and
+ // heterogeneous, the comparator we expose through value_comp() is based on
+ // the original unadapted comparator.
absl::btree_map<std::string, int> m2;
- EXPECT_TRUE(Identity(
- m2.value_comp()(std::make_pair("a", 0), std::make_pair("b", 0)) < 0));
- EXPECT_TRUE(Identity(
- m2.value_comp()(std::make_pair("b", 0), std::make_pair("b", 0)) == 0));
- EXPECT_TRUE(Identity(
- m2.value_comp()(std::make_pair("b", 0), std::make_pair("a", 0)) > 0));
+ EXPECT_TRUE(m2.value_comp()(std::make_pair("a", 0), std::make_pair("b", 0)));
+ EXPECT_FALSE(m2.value_comp()(std::make_pair("b", 0), std::make_pair("b", 0)));
+ EXPECT_FALSE(m2.value_comp()(std::make_pair("b", 0), std::make_pair("a", 0)));
}
TEST(Btree, DefaultConstruction) {
@@ -2893,6 +2908,46 @@ TEST(Btree, AllocMoveConstructor_DifferentAlloc) {
EXPECT_EQ(bytes_used2, original_bytes_used);
}
+bool IntCmp(const int a, const int b) { return a < b; }
+
+TEST(Btree, SupportsFunctionPtrComparator) {
+ absl::btree_set<int, decltype(IntCmp) *> set(IntCmp);
+ set.insert({1, 2, 3});
+ EXPECT_THAT(set, ElementsAre(1, 2, 3));
+ EXPECT_TRUE(set.key_comp()(1, 2));
+ EXPECT_TRUE(set.value_comp()(1, 2));
+
+ absl::btree_map<int, int, decltype(IntCmp) *> map(&IntCmp);
+ map[1] = 1;
+ EXPECT_THAT(map, ElementsAre(Pair(1, 1)));
+ EXPECT_TRUE(map.key_comp()(1, 2));
+ EXPECT_TRUE(map.value_comp()(std::make_pair(1, 1), std::make_pair(2, 2)));
+}
+
+template <typename Compare>
+struct TransparentPassThroughComp {
+ using is_transparent = void;
+
+ // This will fail compilation if we attempt a comparison that Compare does not
+ // support, and the failure will happen inside the function implementation so
+ // it can't be avoided by using SFINAE on this comparator.
+ template <typename T, typename U>
+ bool operator()(const T &lhs, const U &rhs) const {
+ return Compare()(lhs, rhs);
+ }
+};
+
+TEST(Btree,
+ SupportsTransparentComparatorThatDoesNotImplementAllVisibleOperators) {
+ absl::btree_set<MultiKey, TransparentPassThroughComp<MultiKeyComp>> set;
+ set.insert(MultiKey{1, 2});
+ EXPECT_TRUE(set.contains(1));
+}
+
+TEST(Btree, ConstructImplicitlyWithUnadaptedComparator) {
+ absl::btree_set<MultiKey, MultiKeyComp> set = {{}, MultiKeyComp{}};
+}
+
} // namespace
} // namespace container_internal
ABSL_NAMESPACE_END
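A minimal sketch mirroring the new tests above: function-pointer comparators are accepted, and key_comp() now hands back the original bool-returning comparator rather than the internally adapted three-way one. `ShorterThan` is an illustrative comparator, not Abseil API:

#include <string>

#include "absl/container/btree_map.h"

bool ShorterThan(const std::string& a, const std::string& b) {
  return a.size() < b.size();
}

int main() {
  absl::btree_map<std::string, int, decltype(&ShorterThan)> m(&ShorterThan);
  m["abc"] = 3;
  m["a"] = 1;
  // key_comp() returns the function pointer itself, so the call yields bool.
  return m.key_comp()("a", "abc") ? 0 : 1;
}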
diff --git a/third_party/abseil-cpp/absl/container/flat_hash_map_test.cc b/third_party/abseil-cpp/absl/container/flat_hash_map_test.cc
index 89ec60c916..8dda1d3539 100644
--- a/third_party/abseil-cpp/absl/container/flat_hash_map_test.cc
+++ b/third_party/abseil-cpp/absl/container/flat_hash_map_test.cc
@@ -282,6 +282,32 @@ TEST(FlatHashMap, NodeHandleMutableKeyAccess) {
}
#endif
+TEST(FlatHashMap, Reserve) {
+ // Verify that if we reserve(size() + n) then we can perform n insertions
+ // without a rehash, i.e., without invalidating any references.
+ for (size_t trial = 0; trial < 20; ++trial) {
+ for (size_t initial = 3; initial < 100; ++initial) {
+ // Fill in `initial` entries, then erase 2 of them, then reserve space for
+ // two inserts and check for reference stability while doing the inserts.
+ flat_hash_map<size_t, size_t> map;
+ for (size_t i = 0; i < initial; ++i) {
+ map[i] = i;
+ }
+ map.erase(0);
+ map.erase(1);
+ map.reserve(map.size() + 2);
+ size_t& a2 = map[2];
+ // In the event of a failure, asan will complain in one of these two
+ // assignments.
+ map[initial] = a2;
+ map[initial + 1] = a2;
+ // Fail even when not under asan:
+ size_t& a2new = map[2];
+ EXPECT_EQ(&a2, &a2new);
+ }
+ }
+}
+
} // namespace
} // namespace container_internal
ABSL_NAMESPACE_END
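The user-visible guarantee the new Reserve test pins down, in a stand-alone sketch that assumes only absl::flat_hash_map:

#include <cassert>

#include "absl/container/flat_hash_map.h"

int main() {
  absl::flat_hash_map<int, int> m = {{1, 10}, {2, 20}};
  m.reserve(m.size() + 2);  // headroom for exactly two more insertions
  int& ref = m[1];
  m[3] = 30;  // neither insertion may rehash now,
  m[4] = 40;  // so `ref` must remain valid
  assert(&ref == &m[1]);
  return 0;
}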
diff --git a/third_party/abseil-cpp/absl/container/internal/btree.h b/third_party/abseil-cpp/absl/container/internal/btree.h
index 00444a5397..f636c5fc73 100644
--- a/third_party/abseil-cpp/absl/container/internal/btree.h
+++ b/third_party/abseil-cpp/absl/container/internal/btree.h
@@ -88,7 +88,12 @@ struct StringBtreeDefaultLess {
// Compatibility constructor.
StringBtreeDefaultLess(std::less<std::string>) {} // NOLINT
- StringBtreeDefaultLess(std::less<string_view>) {} // NOLINT
+ StringBtreeDefaultLess(std::less<absl::string_view>) {} // NOLINT
+
+ // Allow converting to std::less for use in key_comp()/value_comp().
+ explicit operator std::less<std::string>() const { return {}; }
+ explicit operator std::less<absl::string_view>() const { return {}; }
+ explicit operator std::less<absl::Cord>() const { return {}; }
absl::weak_ordering operator()(absl::string_view lhs,
absl::string_view rhs) const {
@@ -115,7 +120,12 @@ struct StringBtreeDefaultGreater {
StringBtreeDefaultGreater() = default;
StringBtreeDefaultGreater(std::greater<std::string>) {} // NOLINT
- StringBtreeDefaultGreater(std::greater<string_view>) {} // NOLINT
+ StringBtreeDefaultGreater(std::greater<absl::string_view>) {} // NOLINT
+
+ // Allow converting to std::greater for use in key_comp()/value_comp().
+ explicit operator std::greater<std::string>() const { return {}; }
+ explicit operator std::greater<absl::string_view>() const { return {}; }
+ explicit operator std::greater<absl::Cord>() const { return {}; }
absl::weak_ordering operator()(absl::string_view lhs,
absl::string_view rhs) const {
@@ -217,6 +227,8 @@ struct prefers_linear_node_search<
template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
bool Multi, typename SlotPolicy>
struct common_params {
+ using original_key_compare = Compare;
+
// If Compare is a common comparator for a string-like type, then we adapt it
// to use heterogeneous lookup and to be a key-compare-to comparator.
using key_compare = typename key_compare_to_adapter<Compare>::type;
@@ -317,16 +329,21 @@ struct map_params : common_params<Key, Compare, Alloc, TargetNodeSize, Multi,
using value_type = typename super_type::value_type;
using init_type = typename super_type::init_type;
- using key_compare = typename super_type::key_compare;
- // Inherit from key_compare for empty base class optimization.
- struct value_compare : private key_compare {
- value_compare() = default;
- explicit value_compare(const key_compare &cmp) : key_compare(cmp) {}
+ using original_key_compare = typename super_type::original_key_compare;
+ // Reference: https://en.cppreference.com/w/cpp/container/map/value_compare
+ class value_compare {
+ template <typename Params>
+ friend class btree;
- template <typename T, typename U>
- auto operator()(const T &left, const U &right) const
- -> decltype(std::declval<key_compare>()(left.first, right.first)) {
- return key_compare::operator()(left.first, right.first);
+ protected:
+ explicit value_compare(original_key_compare c) : comp(std::move(c)) {}
+
+ original_key_compare comp; // NOLINT
+
+ public:
+ auto operator()(const value_type &lhs, const value_type &rhs) const
+ -> decltype(comp(lhs.first, rhs.first)) {
+ return comp(lhs.first, rhs.first);
}
};
using is_map_container = std::true_type;
@@ -392,7 +409,8 @@ struct set_params : common_params<Key, Compare, Alloc, TargetNodeSize, Multi,
set_slot_policy<Key>> {
using value_type = Key;
using slot_type = typename set_params::common_params::slot_type;
- using value_compare = typename set_params::common_params::key_compare;
+ using value_compare =
+ typename set_params::common_params::original_key_compare;
using is_map_container = std::false_type;
template <typename V>
@@ -484,8 +502,8 @@ class btree_node {
std::is_same<std::greater<key_type>,
key_compare>::value)>;
- // This class is organized by gtl::Layout as if it had the following
- // structure:
+ // This class is organized by absl::container_internal::Layout as if it had
+ // the following structure:
// // A pointer to the node's parent.
// btree_node *parent;
//
@@ -579,10 +597,10 @@ class btree_node {
};
// Leaves can have less than kNodeSlots values.
- constexpr static layout_type LeafLayout(const int slots = kNodeSlots) {
+ constexpr static layout_type LeafLayout(const int slot_count = kNodeSlots) {
return layout_type(/*parent*/ 1,
/*position, start, finish, max_count*/ 4,
- /*slots*/ slots,
+ /*slots*/ slot_count,
/*children*/ 0);
}
constexpr static layout_type InternalLayout() {
@@ -591,8 +609,8 @@ class btree_node {
/*slots*/ kNodeSlots,
/*children*/ kNodeSlots + 1);
}
- constexpr static size_type LeafSize(const int slots = kNodeSlots) {
- return LeafLayout(slots).AllocSize();
+ constexpr static size_type LeafSize(const int slot_count = kNodeSlots) {
+ return LeafLayout(slot_count).AllocSize();
}
constexpr static size_type InternalSize() {
return InternalLayout().AllocSize();
@@ -1129,6 +1147,7 @@ class btree {
using size_type = typename Params::size_type;
using difference_type = typename Params::difference_type;
using key_compare = typename Params::key_compare;
+ using original_key_compare = typename Params::original_key_compare;
using value_compare = typename Params::value_compare;
using allocator_type = typename Params::allocator_type;
using reference = typename Params::reference;
@@ -1338,7 +1357,9 @@ class btree {
return compare_internal::compare_result_as_less_than(key_comp()(a, b));
}
- value_compare value_comp() const { return value_compare(key_comp()); }
+ value_compare value_comp() const {
+ return value_compare(original_key_compare(key_comp()));
+ }
// Verifies the structure of the btree.
void verify() const;
diff --git a/third_party/abseil-cpp/absl/container/internal/btree_container.h b/third_party/abseil-cpp/absl/container/internal/btree_container.h
index 03be708e4f..a99668c713 100644
--- a/third_party/abseil-cpp/absl/container/internal/btree_container.h
+++ b/third_party/abseil-cpp/absl/container/internal/btree_container.h
@@ -20,6 +20,7 @@
#include <iterator>
#include <utility>
+#include "absl/base/attributes.h"
#include "absl/base/internal/throw_delegate.h"
#include "absl/container/internal/btree.h" // IWYU pragma: export
#include "absl/container/internal/common.h"
@@ -51,7 +52,7 @@ class btree_container {
using value_type = typename Tree::value_type;
using size_type = typename Tree::size_type;
using difference_type = typename Tree::difference_type;
- using key_compare = typename Tree::key_compare;
+ using key_compare = typename Tree::original_key_compare;
using value_compare = typename Tree::value_compare;
using allocator_type = typename Tree::allocator_type;
using reference = typename Tree::reference;
@@ -176,7 +177,7 @@ class btree_container {
}
// Utility routines.
- void clear() { tree_.clear(); }
+ ABSL_ATTRIBUTE_REINITIALIZES void clear() { tree_.clear(); }
void swap(btree_container &other) { tree_.swap(other.tree_); }
void verify() const { tree_.verify(); }
@@ -214,7 +215,7 @@ class btree_container {
allocator_type get_allocator() const { return tree_.get_allocator(); }
// The key comparator used by the btree.
- key_compare key_comp() const { return tree_.key_comp(); }
+ key_compare key_comp() const { return key_compare(tree_.key_comp()); }
value_compare value_comp() const { return tree_.value_comp(); }
// Support absl::Hash.
@@ -247,7 +248,7 @@ class btree_set_container : public btree_container<Tree> {
using key_type = typename Tree::key_type;
using value_type = typename Tree::value_type;
using size_type = typename Tree::size_type;
- using key_compare = typename Tree::key_compare;
+ using key_compare = typename Tree::original_key_compare;
using allocator_type = typename Tree::allocator_type;
using iterator = typename Tree::iterator;
using const_iterator = typename Tree::const_iterator;
@@ -398,7 +399,7 @@ class btree_map_container : public btree_set_container<Tree> {
using key_type = typename Tree::key_type;
using mapped_type = typename params_type::mapped_type;
using value_type = typename Tree::value_type;
- using key_compare = typename Tree::key_compare;
+ using key_compare = typename Tree::original_key_compare;
using allocator_type = typename Tree::allocator_type;
using iterator = typename Tree::iterator;
using const_iterator = typename Tree::const_iterator;
@@ -543,7 +544,7 @@ class btree_multiset_container : public btree_container<Tree> {
using key_type = typename Tree::key_type;
using value_type = typename Tree::value_type;
using size_type = typename Tree::size_type;
- using key_compare = typename Tree::key_compare;
+ using key_compare = typename Tree::original_key_compare;
using allocator_type = typename Tree::allocator_type;
using iterator = typename Tree::iterator;
using const_iterator = typename Tree::const_iterator;
diff --git a/third_party/abseil-cpp/absl/container/internal/hash_generator_testing.h b/third_party/abseil-cpp/absl/container/internal/hash_generator_testing.h
index 6869fe45e8..f1f555a5c1 100644
--- a/third_party/abseil-cpp/absl/container/internal/hash_generator_testing.h
+++ b/third_party/abseil-cpp/absl/container/internal/hash_generator_testing.h
@@ -21,11 +21,13 @@
#include <stdint.h>
#include <algorithm>
+#include <cassert>
#include <iosfwd>
#include <random>
#include <tuple>
#include <type_traits>
#include <utility>
+#include <vector>
#include "absl/container/internal/hash_policy_testing.h"
#include "absl/memory/memory.h"
@@ -153,6 +155,25 @@ using GeneratedType = decltype(
typename Container::value_type,
typename Container::key_type>::type>&>()());
+// Naive wrapper that performs a linear search of previous values.
+// Beware this is O(SQR), which is reasonable for smaller kMaxValues.
+template <class T, size_t kMaxValues = 64, class E = void>
+struct UniqueGenerator {
+ Generator<T, E> gen;
+ std::vector<T> values;
+
+ T operator()() {
+ assert(values.size() < kMaxValues);
+ for (;;) {
+ T value = gen();
+ if (std::find(values.begin(), values.end(), value) == values.end()) {
+ values.push_back(value);
+ return value;
+ }
+ }
+ }
+};
+
} // namespace hash_internal
} // namespace container_internal
ABSL_NAMESPACE_END
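A hypothetical test-side use of the new helper; it is internal to Abseil's test code, so this only illustrates the contract that each call returns a value distinct from all previous ones:

#include "absl/container/internal/hash_generator_testing.h"

// `FillDistinctKeys` is illustrative; n must stay within the generator's
// kMaxValues limit (64 by default) or the internal assert fires.
void FillDistinctKeys(int* out, int n) {
  absl::container_internal::hash_internal::UniqueGenerator<int> gen;
  for (int i = 0; i < n; ++i) out[i] = gen();  // pairwise-distinct values
}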
diff --git a/third_party/abseil-cpp/absl/container/internal/inlined_vector.h b/third_party/abseil-cpp/absl/container/internal/inlined_vector.h
index b8aec45b79..49822af0b7 100644
--- a/third_party/abseil-cpp/absl/container/internal/inlined_vector.h
+++ b/third_party/abseil-cpp/absl/container/internal/inlined_vector.h
@@ -36,6 +36,7 @@ namespace inlined_vector_internal {
// GCC does not deal very well with the below code
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Warray-bounds"
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
@@ -955,7 +956,7 @@ auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
swap(*GetAllocPtr(), *other_storage_ptr->GetAllocPtr());
}
-// End ignore "maybe-uninitialized"
+// End ignore "array-bounds" and "maybe-uninitialized"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
diff --git a/third_party/abseil-cpp/absl/container/internal/layout.h b/third_party/abseil-cpp/absl/container/internal/layout.h
index 2336783315..a59a243059 100644
--- a/third_party/abseil-cpp/absl/container/internal/layout.h
+++ b/third_party/abseil-cpp/absl/container/internal/layout.h
@@ -404,7 +404,7 @@ class LayoutImpl<std::tuple<Elements...>, absl::index_sequence<SizeSeq...>,
constexpr size_t Offset() const {
static_assert(N < NumOffsets, "Index out of bounds");
return adl_barrier::Align(
- Offset<N - 1>() + SizeOf<ElementType<N - 1>>() * size_[N - 1],
+ Offset<N - 1>() + SizeOf<ElementType<N - 1>>::value * size_[N - 1],
ElementAlignment<N>::value);
}
@@ -597,7 +597,7 @@ class LayoutImpl<std::tuple<Elements...>, absl::index_sequence<SizeSeq...>,
constexpr size_t AllocSize() const {
static_assert(NumTypes == NumSizes, "You must specify sizes of all fields");
return Offset<NumTypes - 1>() +
- SizeOf<ElementType<NumTypes - 1>>() * size_[NumTypes - 1];
+ SizeOf<ElementType<NumTypes - 1>>::value * size_[NumTypes - 1];
}
// If built with --config=asan, poisons padding bytes (if any) in the
@@ -621,7 +621,7 @@ class LayoutImpl<std::tuple<Elements...>, absl::index_sequence<SizeSeq...>,
// The `if` is an optimization. It doesn't affect the observable behaviour.
if (ElementAlignment<N - 1>::value % ElementAlignment<N>::value) {
size_t start =
- Offset<N - 1>() + SizeOf<ElementType<N - 1>>() * size_[N - 1];
+ Offset<N - 1>() + SizeOf<ElementType<N - 1>>::value * size_[N - 1];
ASAN_POISON_MEMORY_REGION(p + start, Offset<N>() - start);
}
#endif
@@ -645,7 +645,7 @@ class LayoutImpl<std::tuple<Elements...>, absl::index_sequence<SizeSeq...>,
// produce "unsigned*" where another produces "unsigned int *".
std::string DebugString() const {
const auto offsets = Offsets();
- const size_t sizes[] = {SizeOf<ElementType<OffsetSeq>>()...};
+ const size_t sizes[] = {SizeOf<ElementType<OffsetSeq>>::value...};
const std::string types[] = {
adl_barrier::TypeName<ElementType<OffsetSeq>>()...};
std::string res = absl::StrCat("@0", types[0], "(", sizes[0], ")");
diff --git a/third_party/abseil-cpp/absl/container/internal/raw_hash_map.h b/third_party/abseil-cpp/absl/container/internal/raw_hash_map.h
index 0a02757ddf..c7df2efc62 100644
--- a/third_party/abseil-cpp/absl/container/internal/raw_hash_map.h
+++ b/third_party/abseil-cpp/absl/container/internal/raw_hash_map.h
@@ -51,8 +51,9 @@ class raw_hash_map : public raw_hash_set<Policy, Hash, Eq, Alloc> {
using key_arg = typename KeyArgImpl::template type<K, key_type>;
static_assert(!std::is_reference<key_type>::value, "");
- // TODO(alkis): remove this assertion and verify that reference mapped_type is
- // supported.
+
+ // TODO(b/187807849): Evaluate whether to support reference mapped_type and
+ // remove this assertion if/when it is supported.
static_assert(!std::is_reference<mapped_type>::value, "");
using iterator = typename raw_hash_map::raw_hash_set::iterator;
diff --git a/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h b/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h
index 80fc2cba3f..aa78265ca1 100644
--- a/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h
+++ b/third_party/abseil-cpp/absl/container/internal/raw_hash_set.h
@@ -628,7 +628,9 @@ class raw_hash_set {
static Layout MakeLayout(size_t capacity) {
assert(IsValidCapacity(capacity));
- return Layout(capacity + Group::kWidth + 1, capacity);
+ // The extra control bytes are for 1 sentinel byte followed by
+ // `Group::kWidth - 1` bytes that are cloned from the beginning.
+ return Layout(capacity + Group::kWidth, capacity);
}
using AllocTraits = absl::allocator_traits<allocator_type>;
@@ -792,7 +794,8 @@ class raw_hash_set {
explicit raw_hash_set(size_t bucket_count, const hasher& hash = hasher(),
const key_equal& eq = key_equal(),
const allocator_type& alloc = allocator_type())
- : ctrl_(EmptyGroup()), settings_(0, hash, eq, alloc) {
+ : ctrl_(EmptyGroup()),
+ settings_(0, HashtablezInfoHandle(), hash, eq, alloc) {
if (bucket_count) {
capacity_ = NormalizeCapacity(bucket_count);
initialize_slots();
@@ -903,7 +906,7 @@ class raw_hash_set {
auto target = find_first_non_full(ctrl_, hash, capacity_);
set_ctrl(target.offset, H2(hash));
emplace_at(target.offset, v);
- infoz_.RecordInsert(hash, target.probe_length);
+ infoz().RecordInsert(hash, target.probe_length);
}
size_ = that.size();
growth_left() -= that.size();
@@ -917,28 +920,27 @@ class raw_hash_set {
slots_(absl::exchange(that.slots_, nullptr)),
size_(absl::exchange(that.size_, 0)),
capacity_(absl::exchange(that.capacity_, 0)),
- infoz_(absl::exchange(that.infoz_, HashtablezInfoHandle())),
// Hash, equality and allocator are copied instead of moved because
// `that` must be left valid. If Hash is std::function<Key>, moving it
// would create a nullptr functor that cannot be called.
- settings_(that.settings_) {
- // growth_left was copied above, reset the one from `that`.
- that.growth_left() = 0;
- }
+ settings_(absl::exchange(that.growth_left(), 0),
+ absl::exchange(that.infoz(), HashtablezInfoHandle()),
+ that.hash_ref(), that.eq_ref(), that.alloc_ref()) {}
raw_hash_set(raw_hash_set&& that, const allocator_type& a)
: ctrl_(EmptyGroup()),
slots_(nullptr),
size_(0),
capacity_(0),
- settings_(0, that.hash_ref(), that.eq_ref(), a) {
+ settings_(0, HashtablezInfoHandle(), that.hash_ref(), that.eq_ref(),
+ a) {
if (a == that.alloc_ref()) {
std::swap(ctrl_, that.ctrl_);
std::swap(slots_, that.slots_);
std::swap(size_, that.size_);
std::swap(capacity_, that.capacity_);
std::swap(growth_left(), that.growth_left());
- std::swap(infoz_, that.infoz_);
+ std::swap(infoz(), that.infoz());
} else {
reserve(that.size());
// Note: this will copy elements of dense_set and unordered_set instead of
@@ -1009,7 +1011,7 @@ class raw_hash_set {
reset_growth_left();
}
assert(empty());
- infoz_.RecordStorageChanged(0, capacity_);
+ infoz().RecordStorageChanged(0, capacity_);
}
// This overload kicks in when the argument is an rvalue of insertable and
@@ -1301,7 +1303,7 @@ class raw_hash_set {
swap(growth_left(), that.growth_left());
swap(hash_ref(), that.hash_ref());
swap(eq_ref(), that.eq_ref());
- swap(infoz_, that.infoz_);
+ swap(infoz(), that.infoz());
SwapAlloc(alloc_ref(), that.alloc_ref(),
typename AllocTraits::propagate_on_container_swap{});
}
@@ -1310,7 +1312,7 @@ class raw_hash_set {
if (n == 0 && capacity_ == 0) return;
if (n == 0 && size_ == 0) {
destroy_slots();
- infoz_.RecordStorageChanged(0, 0);
+ infoz().RecordStorageChanged(0, 0);
return;
}
// bitor is a faster way of doing `max` here. We will round up to the next
@@ -1323,8 +1325,8 @@ class raw_hash_set {
}
void reserve(size_t n) {
- size_t m = GrowthToLowerboundCapacity(n);
- if (m > capacity_) {
+ if (n > size() + growth_left()) {
+ size_t m = GrowthToLowerboundCapacity(n);
resize(NormalizeCapacity(m));
}
}
@@ -1528,7 +1530,7 @@ class raw_hash_set {
set_ctrl(index, was_never_full ? kEmpty : kDeleted);
growth_left() += was_never_full;
- infoz_.RecordErase();
+ infoz().RecordErase();
}
void initialize_slots() {
@@ -1545,17 +1547,17 @@ class raw_hash_set {
// bound more carefully.
if (std::is_same<SlotAlloc, std::allocator<slot_type>>::value &&
slots_ == nullptr) {
- infoz_ = Sample();
+ infoz() = Sample();
}
auto layout = MakeLayout(capacity_);
char* mem = static_cast<char*>(
Allocate<Layout::Alignment()>(&alloc_ref(), layout.AllocSize()));
- ctrl_ = reinterpret_cast<ctrl_t*>(layout.template Pointer<0>(mem));
+ ctrl_ = layout.template Pointer<0>(mem);
slots_ = layout.template Pointer<1>(mem);
reset_ctrl();
reset_growth_left();
- infoz_.RecordStorageChanged(size_, capacity_);
+ infoz().RecordStorageChanged(size_, capacity_);
}
void destroy_slots() {
@@ -1603,7 +1605,7 @@ class raw_hash_set {
Deallocate<Layout::Alignment()>(&alloc_ref(), old_ctrl,
layout.AllocSize());
}
- infoz_.RecordRehash(total_probe_length);
+ infoz().RecordRehash(total_probe_length);
}
void drop_deletes_without_resize() ABSL_ATTRIBUTE_NOINLINE {
@@ -1669,7 +1671,7 @@ class raw_hash_set {
}
}
reset_growth_left();
- infoz_.RecordRehash(total_probe_length);
+ infoz().RecordRehash(total_probe_length);
}
void rehash_and_grow_if_necessary() {
@@ -1743,7 +1745,7 @@ class raw_hash_set {
++size_;
growth_left() -= IsEmpty(ctrl_[target.offset]);
set_ctrl(target.offset, H2(hash));
- infoz_.RecordInsert(hash, target.probe_length);
+ infoz().RecordInsert(hash, target.probe_length);
return target.offset;
}
@@ -1782,8 +1784,8 @@ class raw_hash_set {
growth_left() = CapacityToGrowth(capacity()) - size_;
}
- // Sets the control byte, and if `i < Group::kWidth`, set the cloned byte at
- // the end too.
+ // Sets the control byte, and if `i < Group::kWidth - 1`, set the cloned byte
+ // at the end too.
void set_ctrl(size_t i, ctrl_t h) {
assert(i < capacity_);
@@ -1794,32 +1796,35 @@ class raw_hash_set {
}
ctrl_[i] = h;
- ctrl_[((i - Group::kWidth) & capacity_) + 1 +
- ((Group::kWidth - 1) & capacity_)] = h;
+ constexpr size_t kClonedBytes = Group::kWidth - 1;
+ ctrl_[((i - kClonedBytes) & capacity_) + (kClonedBytes & capacity_)] = h;
}
size_t& growth_left() { return settings_.template get<0>(); }
- hasher& hash_ref() { return settings_.template get<1>(); }
- const hasher& hash_ref() const { return settings_.template get<1>(); }
- key_equal& eq_ref() { return settings_.template get<2>(); }
- const key_equal& eq_ref() const { return settings_.template get<2>(); }
- allocator_type& alloc_ref() { return settings_.template get<3>(); }
+ HashtablezInfoHandle& infoz() { return settings_.template get<1>(); }
+
+ hasher& hash_ref() { return settings_.template get<2>(); }
+ const hasher& hash_ref() const { return settings_.template get<2>(); }
+ key_equal& eq_ref() { return settings_.template get<3>(); }
+ const key_equal& eq_ref() const { return settings_.template get<3>(); }
+ allocator_type& alloc_ref() { return settings_.template get<4>(); }
const allocator_type& alloc_ref() const {
- return settings_.template get<3>();
+ return settings_.template get<4>();
}
// TODO(alkis): Investigate removing some of these fields:
// - ctrl/slots can be derived from each other
// - size can be moved into the slot array
- ctrl_t* ctrl_ = EmptyGroup(); // [(capacity + 1) * ctrl_t]
+ ctrl_t* ctrl_ = EmptyGroup(); // [(capacity + Group::kWidth) * ctrl_t]
slot_type* slots_ = nullptr; // [capacity * slot_type]
size_t size_ = 0; // number of full slots
size_t capacity_ = 0; // total number of slots
- HashtablezInfoHandle infoz_;
- absl::container_internal::CompressedTuple<size_t /* growth_left */, hasher,
+ absl::container_internal::CompressedTuple<size_t /* growth_left */,
+ HashtablezInfoHandle, hasher,
key_equal, allocator_type>
- settings_{0, hasher{}, key_equal{}, allocator_type{}};
+ settings_{0, HashtablezInfoHandle{}, hasher{}, key_equal{},
+ allocator_type{}};
};
// Erases all elements that satisfy the predicate `pred` from the container `c`.
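
Replacing the dedicated infoz_ field with an infoz() accessor into the CompressedTuple means a HashtablezInfoHandle that compiles down to an empty class (the common case when hashtablez sampling is disabled) contributes no bytes to each table instance; the new MockTableInfozDisabled sizeof checks in the test below verify exactly that. A simplified, standalone sketch of the empty-base trick that CompressedTuple relies on (the type names here are illustrative, not Abseil's):

    #include <cstddef>
    #include <iostream>

    struct EmptyHandle {};  // stands in for a compiled-out HashtablezInfoHandle

    // Naive layout: even an empty member costs at least one byte plus padding.
    struct WithMember {
      EmptyHandle infoz;
      std::size_t growth_left;
    };

    // Compressed layout: deriving from the empty type lets the compiler apply
    // the empty-base optimization, so the handle adds no size. CompressedTuple
    // does the same thing generically for each empty element.
    struct WithBase : private EmptyHandle {
      std::size_t growth_left;
    };

    int main() {
      std::cout << sizeof(WithMember) << " vs " << sizeof(WithBase) << "\n";
      // Typically prints "16 vs 8" on a 64-bit target.
      return 0;
    }
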
diff --git a/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc b/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc
index 81c4b47c04..af882ef49f 100644
--- a/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc
+++ b/third_party/abseil-cpp/absl/container/internal/raw_hash_set_test.cc
@@ -419,6 +419,13 @@ TEST(Table, EmptyFunctorOptimization) {
size_t growth_left;
void* infoz;
};
+ struct MockTableInfozDisabled {
+ void* ctrl;
+ void* slots;
+ size_t size;
+ size_t capacity;
+ size_t growth_left;
+ };
struct StatelessHash {
size_t operator()(absl::string_view) const { return 0; }
};
@@ -426,17 +433,27 @@ TEST(Table, EmptyFunctorOptimization) {
size_t dummy;
};
- EXPECT_EQ(
- sizeof(MockTable),
- sizeof(
- raw_hash_set<StringPolicy, StatelessHash,
- std::equal_to<absl::string_view>, std::allocator<int>>));
+ if (std::is_empty<HashtablezInfoHandle>::value) {
+ EXPECT_EQ(sizeof(MockTableInfozDisabled),
+ sizeof(raw_hash_set<StringPolicy, StatelessHash,
+ std::equal_to<absl::string_view>,
+ std::allocator<int>>));
+
+ EXPECT_EQ(sizeof(MockTableInfozDisabled) + sizeof(StatefulHash),
+ sizeof(raw_hash_set<StringPolicy, StatefulHash,
+ std::equal_to<absl::string_view>,
+ std::allocator<int>>));
+ } else {
+ EXPECT_EQ(sizeof(MockTable),
+ sizeof(raw_hash_set<StringPolicy, StatelessHash,
+ std::equal_to<absl::string_view>,
+ std::allocator<int>>));
- EXPECT_EQ(
- sizeof(MockTable) + sizeof(StatefulHash),
- sizeof(
- raw_hash_set<StringPolicy, StatefulHash,
- std::equal_to<absl::string_view>, std::allocator<int>>));
+ EXPECT_EQ(sizeof(MockTable) + sizeof(StatefulHash),
+ sizeof(raw_hash_set<StringPolicy, StatefulHash,
+ std::equal_to<absl::string_view>,
+ std::allocator<int>>));
+ }
}
TEST(Table, Empty) {
@@ -524,6 +541,37 @@ TEST(Table, InsertCollisionAndFindAfterDelete) {
EXPECT_TRUE(t.empty());
}
+TEST(Table, InsertWithinCapacity) {
+ IntTable t;
+ t.reserve(10);
+ const size_t original_capacity = t.capacity();
+ const auto addr = [&](int i) {
+ return reinterpret_cast<uintptr_t>(&*t.find(i));
+ };
+ // Inserting an element does not change capacity.
+ t.insert(0);
+ EXPECT_THAT(t.capacity(), original_capacity);
+ const uintptr_t original_addr_0 = addr(0);
+ // Inserting another element does not rehash.
+ t.insert(1);
+ EXPECT_THAT(t.capacity(), original_capacity);
+ EXPECT_THAT(addr(0), original_addr_0);
+ // Inserting lots of duplicate elements does not rehash.
+ for (int i = 0; i < 100; ++i) {
+ t.insert(i % 10);
+ }
+ EXPECT_THAT(t.capacity(), original_capacity);
+ EXPECT_THAT(addr(0), original_addr_0);
+ // Inserting a range of duplicate elements does not rehash.
+ std::vector<int> dup_range;
+ for (int i = 0; i < 100; ++i) {
+ dup_range.push_back(i % 10);
+ }
+ t.insert(dup_range.begin(), dup_range.end());
+ EXPECT_THAT(t.capacity(), original_capacity);
+ EXPECT_THAT(addr(0), original_addr_0);
+}
+
TEST(Table, LazyEmplace) {
StringTable t;
bool called = false;
diff --git a/third_party/abseil-cpp/absl/container/internal/unordered_map_constructor_test.h b/third_party/abseil-cpp/absl/container/internal/unordered_map_constructor_test.h
index 3f90ad7ca8..c1d20f3c52 100644
--- a/third_party/abseil-cpp/absl/container/internal/unordered_map_constructor_test.h
+++ b/third_party/abseil-cpp/absl/container/internal/unordered_map_constructor_test.h
@@ -179,7 +179,7 @@ TYPED_TEST_P(ConstructorTest, InputIteratorBucketHashEqualAlloc) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
- hash_internal::Generator<T>());
+ hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, hasher, equal, alloc);
EXPECT_EQ(m.hash_function(), hasher);
EXPECT_EQ(m.key_eq(), equal);
@@ -198,7 +198,7 @@ void InputIteratorBucketAllocTest(std::true_type) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
- hash_internal::Generator<T>());
+ hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, alloc);
EXPECT_EQ(m.get_allocator(), alloc);
EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
@@ -221,7 +221,7 @@ void InputIteratorBucketHashAllocTest(std::true_type) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
- hash_internal::Generator<T>());
+ hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, hasher, alloc);
EXPECT_EQ(m.hash_function(), hasher);
EXPECT_EQ(m.get_allocator(), alloc);
@@ -241,8 +241,9 @@ TYPED_TEST_P(ConstructorTest, CopyConstructor) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam n(m);
EXPECT_EQ(m.hash_function(), n.hash_function());
EXPECT_EQ(m.key_eq(), n.key_eq());
@@ -262,8 +263,9 @@ void CopyConstructorAllocTest(std::true_type) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam n(m, A(11));
EXPECT_EQ(m.hash_function(), n.hash_function());
EXPECT_EQ(m.key_eq(), n.key_eq());
@@ -285,8 +287,9 @@ TYPED_TEST_P(ConstructorTest, MoveConstructor) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam t(m);
TypeParam n(std::move(t));
EXPECT_EQ(m.hash_function(), n.hash_function());
@@ -307,8 +310,9 @@ void MoveConstructorAllocTest(std::true_type) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam t(m);
TypeParam n(std::move(t), A(1));
EXPECT_EQ(m.hash_function(), n.hash_function());
@@ -325,7 +329,7 @@ TYPED_TEST_P(ConstructorTest, MoveConstructorAlloc) {
TYPED_TEST_P(ConstructorTest, InitializerListBucketHashEqualAlloc) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
using H = typename TypeParam::hasher;
using E = typename TypeParam::key_equal;
@@ -348,7 +352,7 @@ template <typename TypeParam>
void InitializerListBucketAllocTest(std::true_type) {
using T = hash_internal::GeneratedType<TypeParam>;
using A = typename TypeParam::allocator_type;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
A alloc(0);
TypeParam m(values, 123, alloc);
@@ -371,7 +375,7 @@ void InitializerListBucketHashAllocTest(std::true_type) {
using A = typename TypeParam::allocator_type;
H hasher;
A alloc(0);
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m(values, 123, hasher, alloc);
EXPECT_EQ(m.hash_function(), hasher);
@@ -392,7 +396,7 @@ TYPED_TEST_P(ConstructorTest, Assignment) {
H hasher;
E equal;
A alloc(0);
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
TypeParam n;
n = m;
@@ -412,7 +416,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignment) {
H hasher;
E equal;
A alloc(0);
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
TypeParam t(m);
TypeParam n;
@@ -424,7 +428,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignment) {
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m;
m = values;
@@ -433,7 +437,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()});
TypeParam n({gen()});
n = m;
@@ -442,7 +446,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()});
TypeParam t(m);
TypeParam n({gen()});
@@ -452,7 +456,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m;
m = values;
@@ -461,7 +465,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, AssignmentOnSelf) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m(values);
m = *&m; // Avoid -Wself-assign
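
All of these constructor tests switch from Generator<T> to UniqueGenerator<T> because a generator that may repeat keys makes element-count expectations unreliable for unique-key containers: duplicate keys silently collapse on insert rather than failing. A small illustration of that failure mode with a plain std::unordered_map (the keys and values are arbitrary):

    #include <cassert>
    #include <string>
    #include <unordered_map>

    int main() {
      std::unordered_map<int, std::string> m;
      m.insert({1, "a"});
      m.insert({2, "b"});
      m.insert({1, "c"});     // duplicate key: silently ignored, not an error
      assert(m.size() == 2);  // a test expecting 3 generated entries would fail
      return 0;
    }
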
diff --git a/third_party/abseil-cpp/absl/container/internal/unordered_map_modifiers_test.h b/third_party/abseil-cpp/absl/container/internal/unordered_map_modifiers_test.h
index 8c9ca779a4..d3543936f7 100644
--- a/third_party/abseil-cpp/absl/container/internal/unordered_map_modifiers_test.h
+++ b/third_party/abseil-cpp/absl/container/internal/unordered_map_modifiers_test.h
@@ -81,6 +81,38 @@ TYPED_TEST_P(ModifiersTest, InsertRange) {
ASSERT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
}
+TYPED_TEST_P(ModifiersTest, InsertWithinCapacity) {
+ using T = hash_internal::GeneratedType<TypeParam>;
+ using V = typename TypeParam::mapped_type;
+ T val = hash_internal::Generator<T>()();
+ TypeParam m;
+ m.reserve(10);
+ const size_t original_capacity = m.bucket_count();
+ m.insert(val);
+ EXPECT_EQ(m.bucket_count(), original_capacity);
+ T val2 = {val.first, hash_internal::Generator<V>()()};
+ m.insert(val2);
+ EXPECT_EQ(m.bucket_count(), original_capacity);
+}
+
+TYPED_TEST_P(ModifiersTest, InsertRangeWithinCapacity) {
+#if !defined(__GLIBCXX__)
+ using T = hash_internal::GeneratedType<TypeParam>;
+ std::vector<T> base_values;
+ std::generate_n(std::back_inserter(base_values), 10,
+ hash_internal::Generator<T>());
+ std::vector<T> values;
+ while (values.size() != 100) {
+ std::copy_n(base_values.begin(), 10, std::back_inserter(values));
+ }
+ TypeParam m;
+ m.reserve(10);
+ const size_t original_capacity = m.bucket_count();
+ m.insert(values.begin(), values.end());
+ EXPECT_EQ(m.bucket_count(), original_capacity);
+#endif
+}
+
TYPED_TEST_P(ModifiersTest, InsertOrAssign) {
#ifdef UNORDERED_MAP_CXX17
using std::get;
@@ -266,9 +298,10 @@ TYPED_TEST_P(ModifiersTest, Swap) {
// TODO(alkis): Write tests for merge.
REGISTER_TYPED_TEST_CASE_P(ModifiersTest, Clear, Insert, InsertHint,
- InsertRange, InsertOrAssign, InsertOrAssignHint,
- Emplace, EmplaceHint, TryEmplace, TryEmplaceHint,
- Erase, EraseRange, EraseKey, Swap);
+ InsertRange, InsertWithinCapacity,
+ InsertRangeWithinCapacity, InsertOrAssign,
+ InsertOrAssignHint, Emplace, EmplaceHint, TryEmplace,
+ TryEmplaceHint, Erase, EraseRange, EraseKey, Swap);
template <typename Type>
struct is_unique_ptr : std::false_type {};
diff --git a/third_party/abseil-cpp/absl/container/internal/unordered_set_modifiers_test.h b/third_party/abseil-cpp/absl/container/internal/unordered_set_modifiers_test.h
index 26be58d99f..6e473e45da 100644
--- a/third_party/abseil-cpp/absl/container/internal/unordered_set_modifiers_test.h
+++ b/third_party/abseil-cpp/absl/container/internal/unordered_set_modifiers_test.h
@@ -74,6 +74,36 @@ TYPED_TEST_P(ModifiersTest, InsertRange) {
ASSERT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
}
+TYPED_TEST_P(ModifiersTest, InsertWithinCapacity) {
+ using T = hash_internal::GeneratedType<TypeParam>;
+ T val = hash_internal::Generator<T>()();
+ TypeParam m;
+ m.reserve(10);
+ const size_t original_capacity = m.bucket_count();
+ m.insert(val);
+ EXPECT_EQ(m.bucket_count(), original_capacity);
+ m.insert(val);
+ EXPECT_EQ(m.bucket_count(), original_capacity);
+}
+
+TYPED_TEST_P(ModifiersTest, InsertRangeWithinCapacity) {
+#if !defined(__GLIBCXX__)
+ using T = hash_internal::GeneratedType<TypeParam>;
+ std::vector<T> base_values;
+ std::generate_n(std::back_inserter(base_values), 10,
+ hash_internal::Generator<T>());
+ std::vector<T> values;
+ while (values.size() != 100) {
+ values.insert(values.end(), base_values.begin(), base_values.end());
+ }
+ TypeParam m;
+ m.reserve(10);
+ const size_t original_capacity = m.bucket_count();
+ m.insert(values.begin(), values.end());
+ EXPECT_EQ(m.bucket_count(), original_capacity);
+#endif
+}
+
TYPED_TEST_P(ModifiersTest, Emplace) {
using T = hash_internal::GeneratedType<TypeParam>;
T val = hash_internal::Generator<T>()();
@@ -180,8 +210,9 @@ TYPED_TEST_P(ModifiersTest, Swap) {
// TODO(alkis): Write tests for merge.
REGISTER_TYPED_TEST_CASE_P(ModifiersTest, Clear, Insert, InsertHint,
- InsertRange, Emplace, EmplaceHint, Erase, EraseRange,
- EraseKey, Swap);
+ InsertRange, InsertWithinCapacity,
+ InsertRangeWithinCapacity, Emplace, EmplaceHint,
+ Erase, EraseRange, EraseKey, Swap);
} // namespace container_internal
ABSL_NAMESPACE_END
diff --git a/third_party/abseil-cpp/absl/copts/AbseilConfigureCopts.cmake b/third_party/abseil-cpp/absl/copts/AbseilConfigureCopts.cmake
index 9cd6fd1b2a..942ce90a4d 100644
--- a/third_party/abseil-cpp/absl/copts/AbseilConfigureCopts.cmake
+++ b/third_party/abseil-cpp/absl/copts/AbseilConfigureCopts.cmake
@@ -35,8 +35,7 @@ endif()
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set(ABSL_DEFAULT_COPTS "${ABSL_GCC_FLAGS}")
set(ABSL_TEST_COPTS "${ABSL_GCC_FLAGS};${ABSL_GCC_TEST_FLAGS}")
-elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
- # MATCHES so we get both Clang and AppleClang
+elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang") # MATCHES so we get both Clang and AppleClang
if(MSVC)
# clang-cl is half MSVC, half LLVM
set(ABSL_DEFAULT_COPTS "${ABSL_CLANG_CL_FLAGS}")
diff --git a/third_party/abseil-cpp/absl/copts/GENERATED_AbseilCopts.cmake b/third_party/abseil-cpp/absl/copts/GENERATED_AbseilCopts.cmake
index 51742c9b6b..22a25eba7f 100644
--- a/third_party/abseil-cpp/absl/copts/GENERATED_AbseilCopts.cmake
+++ b/third_party/abseil-cpp/absl/copts/GENERATED_AbseilCopts.cmake
@@ -71,12 +71,13 @@ list(APPEND ABSL_LLVM_FLAGS
"-Wformat-security"
"-Wgnu-redeclared-enum"
"-Winfinite-recursion"
+ "-Winvalid-constexpr"
"-Wliteral-conversion"
"-Wmissing-declarations"
"-Woverlength-strings"
"-Wpointer-arith"
"-Wself-assign"
- "-Wshadow"
+ "-Wshadow-all"
"-Wstring-conversion"
"-Wtautological-overlap-compare"
"-Wundef"
diff --git a/third_party/abseil-cpp/absl/debugging/CMakeLists.txt b/third_party/abseil-cpp/absl/debugging/CMakeLists.txt
index 074b44cf17..bb4d4c92da 100644
--- a/third_party/abseil-cpp/absl/debugging/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/debugging/CMakeLists.txt
@@ -87,7 +87,7 @@ absl_cc_test(
absl::memory
absl::raw_logging_internal
absl::strings
- gmock
+ GTest::gmock
)
absl_cc_library(
@@ -141,7 +141,7 @@ absl_cc_test(
absl::strings
absl::raw_logging_internal
Threads::Threads
- gmock
+ GTest::gmock
)
absl_cc_library(
@@ -194,7 +194,7 @@ absl_cc_test(
absl::core_headers
absl::memory
absl::raw_logging_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -261,7 +261,7 @@ absl_cc_test(
DEPS
absl::leak_check_api_enabled_for_testing
absl::base
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -275,7 +275,7 @@ absl_cc_test(
DEPS
absl::leak_check_api_disabled_for_testing
absl::base
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -292,7 +292,7 @@ absl_cc_test(
absl::leak_check_disable
absl::base
absl::raw_logging_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -322,7 +322,7 @@ absl_cc_test(
absl::stack_consumption
absl::core_headers
absl::raw_logging_internal
- gmock_main
+ GTest::gmock_main
)
# component target
diff --git a/third_party/abseil-cpp/absl/debugging/failure_signal_handler.cc b/third_party/abseil-cpp/absl/debugging/failure_signal_handler.cc
index a9ed6ef964..689e5979e7 100644
--- a/third_party/abseil-cpp/absl/debugging/failure_signal_handler.cc
+++ b/third_party/abseil-cpp/absl/debugging/failure_signal_handler.cc
@@ -136,7 +136,8 @@ static bool SetupAlternateStackOnce() {
#else
const size_t page_mask = sysconf(_SC_PAGESIZE) - 1;
#endif
- size_t stack_size = (std::max(SIGSTKSZ, 65536) + page_mask) & ~page_mask;
+ size_t stack_size =
+ (std::max<size_t>(SIGSTKSZ, 65536) + page_mask) & ~page_mask;
#if defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
defined(ABSL_HAVE_MEMORY_SANITIZER) || defined(ABSL_HAVE_THREAD_SANITIZER)
// Account for sanitizer instrumentation requiring additional stack space.
@@ -366,6 +367,7 @@ static void AbslFailureSignalHandler(int signo, siginfo_t*, void* ucontext) {
// goes after this point.
if (fsh_options.writerfn != nullptr) {
WriteFailureInfo(signo, ucontext, my_cpu, fsh_options.writerfn);
+ fsh_options.writerfn(nullptr);
}
if (fsh_options.call_previous_handler) {
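
Two things change in this handler: std::max gets an explicit size_t template argument because SIGSTKSZ is no longer guaranteed to be an int constant (on recent glibc the macro expands to a sysconf() call returning long, and std::max will not deduce a common type from mixed long/int arguments), and writerfn is now invoked once more with nullptr so the callback receives the documented flush hint (see the header comment updated below). A reduced sketch of the type-deduction issue; GetSigStkSz() is a stand-in for the new SIGSTKSZ, not a real API:

    #include <algorithm>
    #include <cstddef>
    #include <iostream>

    // Stand-in for SIGSTKSZ on newer glibc, where the macro expands to a
    // sysconf() call returning long instead of an int constant.
    long GetSigStkSz() { return 8192; }

    std::size_t ComputeStackSize(std::size_t page_mask) {
      // std::max(GetSigStkSz(), 65536) would not compile: the arguments deduce
      // to different types (long vs int). Forcing the type resolves it.
      return (std::max<std::size_t>(GetSigStkSz(), 65536) + page_mask) & ~page_mask;
    }

    int main() {
      std::cout << ComputeStackSize(4095) << "\n";  // rounds up to a page multiple
      return 0;
    }
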
diff --git a/third_party/abseil-cpp/absl/debugging/failure_signal_handler.h b/third_party/abseil-cpp/absl/debugging/failure_signal_handler.h
index 0c0f585d0f..500115c0ab 100644
--- a/third_party/abseil-cpp/absl/debugging/failure_signal_handler.h
+++ b/third_party/abseil-cpp/absl/debugging/failure_signal_handler.h
@@ -90,7 +90,7 @@ struct FailureSignalHandlerOptions {
// If non-null, indicates a pointer to a callback function that will be called
// upon failure, with a string argument containing failure data. This function
// may be used as a hook to write failure data to a secondary location, such
- // as a log file. This function may also be called with null data, as a hint
+ // as a log file. This function will also be called with null data, as a hint
// to flush any buffered data before the program may be terminated. Consider
// flushing any buffered data in all calls to this function.
//
diff --git a/third_party/abseil-cpp/absl/debugging/internal/demangle.cc b/third_party/abseil-cpp/absl/debugging/internal/demangle.cc
index 46cdb67b1f..5cd563208e 100644
--- a/third_party/abseil-cpp/absl/debugging/internal/demangle.cc
+++ b/third_party/abseil-cpp/absl/debugging/internal/demangle.cc
@@ -386,24 +386,28 @@ static bool IsDigit(char c) { return c >= '0' && c <= '9'; }
// by GCC 4.5.x and later versions (and our locally-modified version of GCC
// 4.4.x) to indicate functions which have been cloned during optimization.
// We treat any sequence (.<alpha>+.<digit>+)+ as a function clone suffix.
+// Additionally, '_' is allowed along with the alphanumeric sequence.
static bool IsFunctionCloneSuffix(const char *str) {
size_t i = 0;
while (str[i] != '\0') {
- // Consume a single .<alpha>+.<digit>+ sequence.
- if (str[i] != '.' || !IsAlpha(str[i + 1])) {
- return false;
+ bool parsed = false;
+ // Consume a single [.<alpha> | _]*[.<digit>]* sequence.
+ if (str[i] == '.' && (IsAlpha(str[i + 1]) || str[i + 1] == '_')) {
+ parsed = true;
+ i += 2;
+ while (IsAlpha(str[i]) || str[i] == '_') {
+ ++i;
+ }
}
- i += 2;
- while (IsAlpha(str[i])) {
- ++i;
+ if (str[i] == '.' && IsDigit(str[i + 1])) {
+ parsed = true;
+ i += 2;
+ while (IsDigit(str[i])) {
+ ++i;
+ }
}
- if (str[i] != '.' || !IsDigit(str[i + 1])) {
+ if (!parsed)
return false;
- }
- i += 2;
- while (IsDigit(str[i])) {
- ++i;
- }
}
return true; // Consumed everything in "str".
}
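
The rewritten IsFunctionCloneSuffix accepts any sequence of ".<alpha or '_'>+" and ".<digit>+" segments in either order instead of demanding strictly alternating ".<alpha>+.<digit>+" pairs, which is what lets suffixes such as ".__uniq.12345" (emitted by -funique-internal-linkage-names), ".clone.foo", or a bare ".123" demangle while still rejecting "." and ".abc123". A standalone sketch of the same grammar using <cctype>; the function name is made up, and the real code uses its own IsAlpha/IsDigit helpers:

    #include <cassert>
    #include <cctype>
    #include <cstddef>

    // Returns true if str consists solely of [.<alpha|_>+][.<digit>+] segments.
    bool LooksLikeCloneSuffix(const char* str) {
      std::size_t i = 0;
      while (str[i] != '\0') {
        bool parsed = false;
        if (str[i] == '.' && (std::isalpha(static_cast<unsigned char>(str[i + 1])) ||
                              str[i + 1] == '_')) {
          parsed = true;
          i += 2;
          while (std::isalpha(static_cast<unsigned char>(str[i])) || str[i] == '_') ++i;
        }
        if (str[i] == '.' && std::isdigit(static_cast<unsigned char>(str[i + 1]))) {
          parsed = true;
          i += 2;
          while (std::isdigit(static_cast<unsigned char>(str[i]))) ++i;
        }
        if (!parsed) return false;
      }
      return true;
    }

    int main() {
      assert(LooksLikeCloneSuffix(".__uniq.12345.isra.2"));  // newly accepted
      assert(LooksLikeCloneSuffix(".clone.foo"));            // newly accepted
      assert(LooksLikeCloneSuffix(".123"));                  // digits-only segment
      assert(!LooksLikeCloneSuffix("."));                    // bare dot rejected
      assert(!LooksLikeCloneSuffix(".abc123"));              // mixed segment rejected
      return 0;
    }

The demangle_test.cc changes that follow cover the same accepted and rejected forms against the real Demangle().
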
diff --git a/third_party/abseil-cpp/absl/debugging/internal/demangle_test.cc b/third_party/abseil-cpp/absl/debugging/internal/demangle_test.cc
index 0bed7359d8..6b142902ca 100644
--- a/third_party/abseil-cpp/absl/debugging/internal/demangle_test.cc
+++ b/third_party/abseil-cpp/absl/debugging/internal/demangle_test.cc
@@ -70,12 +70,34 @@ TEST(Demangle, Clones) {
EXPECT_STREQ("Foo()", tmp);
EXPECT_TRUE(Demangle("_ZL3Foov.isra.2.constprop.18", tmp, sizeof(tmp)));
EXPECT_STREQ("Foo()", tmp);
- // Invalid (truncated), should not demangle.
- EXPECT_FALSE(Demangle("_ZL3Foov.clo", tmp, sizeof(tmp)));
+ // Demangle suffixes produced by -funique-internal-linkage-names.
+ EXPECT_TRUE(Demangle("_ZL3Foov.__uniq.12345", tmp, sizeof(tmp)));
+ EXPECT_STREQ("Foo()", tmp);
+ EXPECT_TRUE(Demangle("_ZL3Foov.__uniq.12345.isra.2.constprop.18", tmp,
+ sizeof(tmp)));
+ EXPECT_STREQ("Foo()", tmp);
+ // Suffixes without the number should also demangle.
+ EXPECT_TRUE(Demangle("_ZL3Foov.clo", tmp, sizeof(tmp)));
+ EXPECT_STREQ("Foo()", tmp);
+ // Suffixes with just the number should also demangle.
+ EXPECT_TRUE(Demangle("_ZL3Foov.123", tmp, sizeof(tmp)));
+ EXPECT_STREQ("Foo()", tmp);
+ // (.clone. followed by non-number), should also demangle.
+ EXPECT_TRUE(Demangle("_ZL3Foov.clone.foo", tmp, sizeof(tmp)));
+ EXPECT_STREQ("Foo()", tmp);
+ // (.clone. followed by multiple numbers), should also demangle.
+ EXPECT_TRUE(Demangle("_ZL3Foov.clone.123.456", tmp, sizeof(tmp)));
+ EXPECT_STREQ("Foo()", tmp);
+ // (a long valid suffix), should demangle.
+ EXPECT_TRUE(Demangle("_ZL3Foov.part.9.165493.constprop.775.31805", tmp,
+ sizeof(tmp)));
+ EXPECT_STREQ("Foo()", tmp);
+ // Invalid (. without anything else), should not demangle.
+ EXPECT_FALSE(Demangle("_ZL3Foov.", tmp, sizeof(tmp)));
+ // Invalid (. with mix of alpha and digits), should not demangle.
+ EXPECT_FALSE(Demangle("_ZL3Foov.abc123", tmp, sizeof(tmp)));
// Invalid (.clone. not followed by number), should not demangle.
EXPECT_FALSE(Demangle("_ZL3Foov.clone.", tmp, sizeof(tmp)));
- // Invalid (.clone. followed by non-number), should not demangle.
- EXPECT_FALSE(Demangle("_ZL3Foov.clone.foo", tmp, sizeof(tmp)));
// Invalid (.constprop. not followed by number), should not demangle.
EXPECT_FALSE(Demangle("_ZL3Foov.isra.2.constprop.", tmp, sizeof(tmp)));
}
diff --git a/third_party/abseil-cpp/absl/debugging/internal/stacktrace_x86-inl.inc b/third_party/abseil-cpp/absl/debugging/internal/stacktrace_x86-inl.inc
index bc320ff75b..70f79dfcb8 100644
--- a/third_party/abseil-cpp/absl/debugging/internal/stacktrace_x86-inl.inc
+++ b/third_party/abseil-cpp/absl/debugging/internal/stacktrace_x86-inl.inc
@@ -132,9 +132,8 @@ static uintptr_t GetFP(const void *vuc) {
const uintptr_t bp = 0;
const uintptr_t sp = 0;
#endif
- // Sanity-check that the base pointer is valid. It should be as long as
- // SHRINK_WRAP_FRAME_POINTER is not set, but it's possible that some code in
- // the process is compiled with --copt=-fomit-frame-pointer or
+ // Sanity-check that the base pointer is valid. It's possible that some
+ // code in the process is compiled with --copt=-fomit-frame-pointer or
// --copt=-momit-leaf-frame-pointer.
//
// TODO(bcmills): -momit-leaf-frame-pointer is currently the default
@@ -247,7 +246,7 @@ static void **NextStackFrame(void **old_fp, const void *uc) {
// using an alternate signal stack.
//
// TODO(bcmills): The GetFP call should be completely unnecessary when
- // SHRINK_WRAP_FRAME_POINTER is set (because we should be back in the thread's
+ // ENABLE_COMBINED_UNWINDER is set (because we should be back in the thread's
// stack by this point), but it is empirically still needed (e.g. when the
// stack includes a call to abort). unw_get_reg returns UNW_EBADREG for some
// frames. Figure out why GetValidFrameAddr and/or libunwind isn't doing what
diff --git a/third_party/abseil-cpp/absl/debugging/leak_check.cc b/third_party/abseil-cpp/absl/debugging/leak_check.cc
index ff9049559d..764ca0ad00 100644
--- a/third_party/abseil-cpp/absl/debugging/leak_check.cc
+++ b/third_party/abseil-cpp/absl/debugging/leak_check.cc
@@ -16,6 +16,7 @@
// When lsan is not linked in, these functions are not available,
// therefore Abseil code which depends on these functions is conditioned on the
// definition of LEAK_SANITIZER.
+#include "absl/base/attributes.h"
#include "absl/debugging/leak_check.h"
#ifndef LEAK_SANITIZER
@@ -23,6 +24,7 @@
namespace absl {
ABSL_NAMESPACE_BEGIN
bool HaveLeakSanitizer() { return false; }
+bool LeakCheckerIsActive() { return false; }
void DoIgnoreLeak(const void*) { }
void RegisterLivePointers(const void*, size_t) { }
void UnRegisterLivePointers(const void*, size_t) { }
@@ -35,9 +37,23 @@ ABSL_NAMESPACE_END
#include <sanitizer/lsan_interface.h>
+#if ABSL_HAVE_ATTRIBUTE_WEAK
+extern "C" ABSL_ATTRIBUTE_WEAK int __lsan_is_turned_off();
+#endif
+
namespace absl {
ABSL_NAMESPACE_BEGIN
bool HaveLeakSanitizer() { return true; }
+
+#if ABSL_HAVE_ATTRIBUTE_WEAK
+bool LeakCheckerIsActive() {
+ return !(&__lsan_is_turned_off && __lsan_is_turned_off());
+}
+#else
+bool LeakCheckerIsActive() { return true; }
+#endif
+
+bool FindAndReportLeaks() { return __lsan_do_recoverable_leak_check(); }
void DoIgnoreLeak(const void* ptr) { __lsan_ignore_object(ptr); }
void RegisterLivePointers(const void* ptr, size_t size) {
__lsan_register_root_region(ptr, size);
diff --git a/third_party/abseil-cpp/absl/debugging/leak_check.h b/third_party/abseil-cpp/absl/debugging/leak_check.h
index b66a81c3bc..5fc2b052e4 100644
--- a/third_party/abseil-cpp/absl/debugging/leak_check.h
+++ b/third_party/abseil-cpp/absl/debugging/leak_check.h
@@ -43,6 +43,12 @@ ABSL_NAMESPACE_BEGIN
// currently built into this target.
bool HaveLeakSanitizer();
+// LeakCheckerIsActive()
+//
+// Returns true if a leak-checking sanitizer (either ASan or standalone LSan) is
+// currently built into this target and is turned on.
+bool LeakCheckerIsActive();
+
// DoIgnoreLeak()
//
// Implements `IgnoreLeak()` below. This function should usually
@@ -71,6 +77,19 @@ T* IgnoreLeak(T* ptr) {
return ptr;
}
+// FindAndReportLeaks()
+//
+// If any leaks are detected, prints a leak report and returns true. This
+// function may be called repeatedly, and does not affect end-of-process leak
+// checking.
+//
+// Example:
+// if (FindAndReportLeaks()) {
+// ... diagnostic already printed. Exit with failure code.
+// exit(1)
+// }
+bool FindAndReportLeaks();
+
// LeakCheckDisabler
//
// This helper class indicates that any heap allocations done in the code block
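
The leak-check API gains two entry points: LeakCheckerIsActive(), which reports whether LSan is linked in and not switched off (probed through the weak __lsan_is_turned_off symbol above), and FindAndReportLeaks(), which wraps __lsan_do_recoverable_leak_check() for mid-run leak reports. A hedged usage sketch; note that in this snapshot FindAndReportLeaks() is only defined when building with a leak sanitizer, so the sketch sticks to the query functions that exist in both configurations:

    #include <iostream>
    #include "absl/debugging/leak_check.h"

    int main() {
      std::cout << "LSan linked in: " << absl::HaveLeakSanitizer() << "\n";
      std::cout << "LSan active:    " << absl::LeakCheckerIsActive() << "\n";
      // In an LSan-instrumented build, absl::FindAndReportLeaks() can also be
      // called here to print a recoverable leak report; see the header comment
      // above for the intended pattern.
      return 0;
    }
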
diff --git a/third_party/abseil-cpp/absl/debugging/leak_check_test.cc b/third_party/abseil-cpp/absl/debugging/leak_check_test.cc
index b5cc487488..9fcfc8e50b 100644
--- a/third_party/abseil-cpp/absl/debugging/leak_check_test.cc
+++ b/third_party/abseil-cpp/absl/debugging/leak_check_test.cc
@@ -23,8 +23,10 @@ namespace {
TEST(LeakCheckTest, DetectLeakSanitizer) {
#ifdef ABSL_EXPECT_LEAK_SANITIZER
EXPECT_TRUE(absl::HaveLeakSanitizer());
+ EXPECT_TRUE(absl::LeakCheckerIsActive());
#else
EXPECT_FALSE(absl::HaveLeakSanitizer());
+ EXPECT_FALSE(absl::LeakCheckerIsActive());
#endif
}
diff --git a/third_party/abseil-cpp/absl/debugging/symbolize_elf.inc b/third_party/abseil-cpp/absl/debugging/symbolize_elf.inc
index f4d5727bde..87dbd078b9 100644
--- a/third_party/abseil-cpp/absl/debugging/symbolize_elf.inc
+++ b/third_party/abseil-cpp/absl/debugging/symbolize_elf.inc
@@ -701,6 +701,16 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol(
const char *start_address =
ComputeOffset(original_start_address, relocation);
+#ifdef __arm__
+ // ARM functions are always aligned to multiples of two bytes; the
+ // lowest-order bit in start_address is ignored by the CPU and indicates
+ // whether the function contains ARM (0) or Thumb (1) code. We don't care
+ // about what encoding is being used; we just want the real start address
+ // of the function.
+ start_address = reinterpret_cast<const char *>(
+ reinterpret_cast<uintptr_t>(start_address) & ~1);
+#endif
+
if (deref_function_descriptor_pointer &&
InSection(original_start_address, opd)) {
// The opd section is mapped into memory. Just dereference
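
On ARM, the ELF symbol value for a Thumb function has its lowest bit set to flag the instruction encoding; the CPU ignores that bit when branching, but it skews address-range arithmetic during symbolization, so the lookup now clears it before computing function bounds (the new TestArmThumbOverlap in symbolize_test.cc below exercises this). A minimal sketch of the masking; the addresses are invented for illustration:

    #include <cassert>
    #include <cstdint>

    // Clears the Thumb interworking bit from a raw ELF symbol value so that
    // range comparisons use the real start address of the function.
    std::uintptr_t RealFunctionStart(std::uintptr_t raw_symbol_value) {
      return raw_symbol_value & ~static_cast<std::uintptr_t>(1);
    }

    int main() {
      assert(RealFunctionStart(0x10001) == 0x10000);  // Thumb symbol: bit 0 set
      assert(RealFunctionStart(0x20000) == 0x20000);  // ARM symbol: unchanged
      return 0;
    }
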
diff --git a/third_party/abseil-cpp/absl/debugging/symbolize_test.cc b/third_party/abseil-cpp/absl/debugging/symbolize_test.cc
index a2dd4956c4..35de02e24b 100644
--- a/third_party/abseil-cpp/absl/debugging/symbolize_test.cc
+++ b/third_party/abseil-cpp/absl/debugging/symbolize_test.cc
@@ -477,6 +477,46 @@ void ABSL_ATTRIBUTE_NOINLINE TestWithReturnAddress() {
#endif
}
+#if defined(__arm__) && ABSL_HAVE_ATTRIBUTE(target)
+// Test that we correctly identify bounds of Thumb functions on ARM.
+//
+// Thumb functions have the lowest-order bit set in their addresses in the ELF
+// symbol table. This requires some extra logic to properly compute function
+// bounds. To test this logic, nudge a Thumb function right up against an ARM
+// function and try to symbolize the ARM function.
+//
+// A naive implementation will simply use the Thumb function's entry point as
+// written in the symbol table and will therefore treat the Thumb function as
+// extending one byte further in the instruction stream than it actually does.
+// When asked to symbolize the start of the ARM function, it will identify an
+// overlap between the Thumb and ARM functions, and it will return the name of
+// the Thumb function.
+//
+// A correct implementation, on the other hand, will null out the lowest-order
+// bit in the Thumb function's entry point. It will correctly compute the end of
+// the Thumb function, it will find no overlap between the Thumb and ARM
+// functions, and it will return the name of the ARM function.
+
+__attribute__((target("thumb"))) int ArmThumbOverlapThumb(int x) {
+ return x * x * x;
+}
+
+__attribute__((target("arm"))) int ArmThumbOverlapArm(int x) {
+ return x * x * x;
+}
+
+void ABSL_ATTRIBUTE_NOINLINE TestArmThumbOverlap() {
+#if defined(ABSL_HAVE_ATTRIBUTE_NOINLINE)
+ const char *symbol = TrySymbolize((void *)&ArmThumbOverlapArm);
+ ABSL_RAW_CHECK(symbol != nullptr, "TestArmThumbOverlap failed");
+ ABSL_RAW_CHECK(strcmp("ArmThumbOverlapArm()", symbol) == 0,
+ "TestArmThumbOverlap failed");
+ std::cout << "TestArmThumbOverlap passed" << std::endl;
+#endif
+}
+
+#endif // defined(__arm__) && ABSL_HAVE_ATTRIBUTE(target)
+
#elif defined(_WIN32)
#if !defined(ABSL_CONSUME_DLL)
@@ -551,6 +591,9 @@ int main(int argc, char **argv) {
TestWithPCInsideInlineFunction();
TestWithPCInsideNonInlineFunction();
TestWithReturnAddress();
+#if defined(__arm__) && ABSL_HAVE_ATTRIBUTE(target)
+ TestArmThumbOverlap();
+#endif
#endif
return RUN_ALL_TESTS();
diff --git a/third_party/abseil-cpp/absl/flags/CMakeLists.txt b/third_party/abseil-cpp/absl/flags/CMakeLists.txt
index caac69cf89..956f70f868 100644
--- a/third_party/abseil-cpp/absl/flags/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/flags/CMakeLists.txt
@@ -239,6 +239,7 @@ absl_cc_library(
absl::flags_private_handle_accessor
absl::flags_program_name
absl::flags_reflection
+ absl::flat_hash_map
absl::strings
absl::synchronization
)
@@ -309,7 +310,7 @@ absl_cc_test(
absl::flags_reflection
absl::memory
absl::strings
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -321,7 +322,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::flags_config
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -341,7 +342,7 @@ absl_cc_test(
absl::flags_reflection
absl::strings
absl::time
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -353,7 +354,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::flags_marshalling
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -372,7 +373,7 @@ absl_cc_test(
absl::scoped_set_env
absl::span
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -384,7 +385,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::flags_path_util
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -397,7 +398,7 @@ absl_cc_test(
DEPS
absl::flags_program_name
absl::strings
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -414,7 +415,7 @@ absl_cc_test(
absl::flags_usage
absl::memory
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -428,7 +429,7 @@ absl_cc_test(
absl::base
absl::flags_internal
absl::time
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -443,7 +444,7 @@ absl_cc_test(
absl::flags_path_util
absl::flags_program_name
absl::strings
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -462,5 +463,5 @@ absl_cc_test(
absl::flags_reflection
absl::flags_usage
absl::strings
- gtest
+ GTest::gtest
)
diff --git a/third_party/abseil-cpp/absl/flags/flag.h b/third_party/abseil-cpp/absl/flags/flag.h
index f09580b06a..14209e7ba7 100644
--- a/third_party/abseil-cpp/absl/flags/flag.h
+++ b/third_party/abseil-cpp/absl/flags/flag.h
@@ -265,6 +265,8 @@ ABSL_NAMESPACE_END
//
// ABSL_FLAG(T, name, default_value, help).OnUpdate(callback);
//
+// `callback` should be convertible to `void (*)()`.
+//
// After any setting of the flag value, the callback will be called at least
// once. A rapid sequence of changes may be merged together into the same
// callback. No concurrent calls to the callback will be made for the same
@@ -279,7 +281,6 @@ ABSL_NAMESPACE_END
// Note: ABSL_FLAG.OnUpdate() does not have a public definition. Hence, this
// comment serves as its API documentation.
-
// -----------------------------------------------------------------------------
// Implementation details below this section
// -----------------------------------------------------------------------------
diff --git a/third_party/abseil-cpp/absl/flags/internal/usage.cc b/third_party/abseil-cpp/absl/flags/internal/usage.cc
index a588c7f73a..949709e883 100644
--- a/third_party/abseil-cpp/absl/flags/internal/usage.cc
+++ b/third_party/abseil-cpp/absl/flags/internal/usage.cc
@@ -245,7 +245,7 @@ void FlagsHelpImpl(std::ostream& out, PerFlagFilter filter_cb,
<< XMLElement("usage", program_usage_message) << '\n';
}
- // Map of package name to
+ // Ordered map of package name to
// map of file name to
// vector of flags in the file.
// This map is used to output matching flags grouped by package and file
@@ -273,20 +273,26 @@ void FlagsHelpImpl(std::ostream& out, PerFlagFilter filter_cb,
absl::string_view package_separator; // controls blank lines between packages
absl::string_view file_separator; // controls blank lines between files
- for (const auto& package : matching_flags) {
+ for (auto& package : matching_flags) {
if (format == HelpFormat::kHumanReadable) {
out << package_separator;
package_separator = "\n\n";
}
file_separator = "";
- for (const auto& flags_in_file : package.second) {
+ for (auto& flags_in_file : package.second) {
if (format == HelpFormat::kHumanReadable) {
out << file_separator << " Flags from " << flags_in_file.first
<< ":\n";
file_separator = "\n";
}
+ std::sort(std::begin(flags_in_file.second),
+ std::end(flags_in_file.second),
+ [](const CommandLineFlag* lhs, const CommandLineFlag* rhs) {
+ return lhs->Name() < rhs->Name();
+ });
+
for (const auto* flag : flags_in_file.second) {
flags_internal::FlagHelp(out, *flag, format);
}
diff --git a/third_party/abseil-cpp/absl/flags/reflection.cc b/third_party/abseil-cpp/absl/flags/reflection.cc
index 0c76110163..dbce4032ab 100644
--- a/third_party/abseil-cpp/absl/flags/reflection.cc
+++ b/third_party/abseil-cpp/absl/flags/reflection.cc
@@ -18,11 +18,11 @@
#include <assert.h>
#include <atomic>
-#include <map>
#include <string>
#include "absl/base/config.h"
#include "absl/base/thread_annotations.h"
+#include "absl/container/flat_hash_map.h"
#include "absl/flags/commandlineflag.h"
#include "absl/flags/internal/private_handle_accessor.h"
#include "absl/flags/internal/registry.h"
@@ -68,7 +68,7 @@ class FlagRegistry {
friend void FinalizeRegistry();
// The map from name to flag, for FindFlag().
- using FlagMap = std::map<absl::string_view, CommandLineFlag*>;
+ using FlagMap = absl::flat_hash_map<absl::string_view, CommandLineFlag*>;
using FlagIterator = FlagMap::iterator;
using FlagConstIterator = FlagMap::const_iterator;
FlagMap flags_;
@@ -204,6 +204,10 @@ void FinalizeRegistry() {
for (const auto& f : registry.flags_) {
registry.flat_flags_.push_back(f.second);
}
+ std::sort(std::begin(registry.flat_flags_), std::end(registry.flat_flags_),
+ [](const CommandLineFlag* lhs, const CommandLineFlag* rhs) {
+ return lhs->Name() < rhs->Name();
+ });
registry.flags_.clear();
registry.finalized_flags_.store(true, std::memory_order_release);
}
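
Switching the registry's FlagMap from std::map to absl::flat_hash_map drops the implicit ordering by name, so the code paths that relied on sorted iteration (the finalized flat list built here and the per-file help listing in usage.cc above) now sort explicitly by flag name. A small illustration of why the explicit sort became necessary (the flag names are arbitrary examples):

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>
    #include "absl/container/flat_hash_map.h"

    int main() {
      absl::flat_hash_map<std::string, int> flags = {
          {"verbose", 1}, {"alsologtostderr", 0}, {"port", 8080}};

      // flat_hash_map iteration order is unspecified, so collect the keys and
      // sort them before printing, mirroring what FinalizeRegistry() now does.
      std::vector<std::string> names;
      for (const auto& kv : flags) names.push_back(kv.first);
      std::sort(names.begin(), names.end());
      for (const auto& name : names) std::cout << name << "\n";
      return 0;
    }
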
diff --git a/third_party/abseil-cpp/absl/functional/CMakeLists.txt b/third_party/abseil-cpp/absl/functional/CMakeLists.txt
index cda914f2cd..3919e9a1de 100644
--- a/third_party/abseil-cpp/absl/functional/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/functional/CMakeLists.txt
@@ -39,7 +39,7 @@ absl_cc_test(
DEPS
absl::bind_front
absl::memory
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -68,5 +68,5 @@ absl_cc_test(
absl::function_ref
absl::memory
absl::test_instance_tracker
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/functional/function_ref.h b/third_party/abseil-cpp/absl/functional/function_ref.h
index 6e03ac2e04..5790a65251 100644
--- a/third_party/abseil-cpp/absl/functional/function_ref.h
+++ b/third_party/abseil-cpp/absl/functional/function_ref.h
@@ -122,6 +122,7 @@ class FunctionRef<R(Args...)> {
// To help prevent subtle lifetime bugs, FunctionRef is not assignable.
// Typically, it should only be used as an argument type.
FunctionRef& operator=(const FunctionRef& rhs) = delete;
+ FunctionRef(const FunctionRef& rhs) = default;
// Call the underlying object.
R operator()(Args... args) const {
diff --git a/third_party/abseil-cpp/absl/hash/CMakeLists.txt b/third_party/abseil-cpp/absl/hash/CMakeLists.txt
index b43bfa542f..c82f66f02c 100644
--- a/third_party/abseil-cpp/absl/hash/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/hash/CMakeLists.txt
@@ -52,7 +52,7 @@ absl_cc_library(
absl::meta
absl::strings
absl::variant
- gmock
+ GTest::gmock
TESTONLY
)
@@ -72,7 +72,7 @@ absl_cc_test(
absl::spy_hash_state
absl::meta
absl::int128
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -113,7 +113,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::city
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -141,5 +141,5 @@ absl_cc_test(
DEPS
absl::wyhash
absl::strings
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/hash/hash.h b/third_party/abseil-cpp/absl/hash/hash.h
index 5de132cac8..8282ea53c6 100644
--- a/third_party/abseil-cpp/absl/hash/hash.h
+++ b/third_party/abseil-cpp/absl/hash/hash.h
@@ -73,6 +73,8 @@
#ifndef ABSL_HASH_HASH_H_
#define ABSL_HASH_HASH_H_
+#include <tuple>
+
#include "absl/hash/internal/hash.h"
namespace absl {
@@ -214,6 +216,26 @@ ABSL_NAMESPACE_BEGIN
template <typename T>
using Hash = absl::hash_internal::Hash<T>;
+// HashOf
+//
+// absl::HashOf() is a helper that generates a hash from the values of its
+// arguments. It dispatches to absl::Hash directly, as follows:
+// * HashOf(t) == absl::Hash<T>{}(t)
+// * HashOf(a, b, c) == HashOf(std::make_tuple(a, b, c))
+//
+// HashOf(a1, a2, ...) == HashOf(b1, b2, ...) is guaranteed when
+// * The argument lists have pairwise identical C++ types
+// * a1 == b1 && a2 == b2 && ...
+//
+// The requirement that the arguments match in both type and value is critical.
+// It means that `a == b` does not necessarily imply `HashOf(a) == HashOf(b)` if
+// `a` and `b` have different types. For example, `HashOf(2) != HashOf(2.0)`.
+template <int&... ExplicitArgumentBarrier, typename... Types>
+size_t HashOf(const Types&... values) {
+ auto tuple = std::tie(values...);
+ return absl::Hash<decltype(tuple)>{}(tuple);
+}
+
// HashState
//
// A type erased version of the hash state concept, for use in user-defined
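
absl::HashOf() is a convenience for hashing one value, or several values combined, without spelling out absl::Hash<std::tuple<...>> by hand. A short usage sketch; the Endpoint struct and its fields are invented for the example:

    #include <cstddef>
    #include <iostream>
    #include <string>
    #include "absl/hash/hash.h"

    struct Endpoint {
      std::string host;
      int port;
    };

    // Combines both fields into one hash, equivalent to hashing the tied tuple.
    std::size_t EndpointHash(const Endpoint& e) {
      return absl::HashOf(e.host, e.port);
    }

    int main() {
      std::cout << EndpointHash({"localhost", 8080}) << "\n";
      // As the documentation above notes, argument types matter:
      // HashOf(2) is not required to equal HashOf(2.0).
      return 0;
    }
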
diff --git a/third_party/abseil-cpp/absl/hash/hash_test.cc b/third_party/abseil-cpp/absl/hash/hash_test.cc
index 1d2e6cf0df..b3ddebdd42 100644
--- a/third_party/abseil-cpp/absl/hash/hash_test.cc
+++ b/third_party/abseil-cpp/absl/hash/hash_test.cc
@@ -973,4 +973,39 @@ TEST(HashTest, DoesNotUseImplicitConversionsToBool) {
absl::Hash<ValueWithBoolConversion>()(ValueWithBoolConversion{1}));
}
+TEST(HashOf, MatchesHashForSingleArgument) {
+ std::string s = "forty two";
+ int i = 42;
+ double d = 42.0;
+ std::tuple<int, int> t{4, 2};
+
+ EXPECT_EQ(absl::HashOf(s), absl::Hash<std::string>{}(s));
+ EXPECT_EQ(absl::HashOf(i), absl::Hash<int>{}(i));
+ EXPECT_EQ(absl::HashOf(d), absl::Hash<double>{}(d));
+ EXPECT_EQ(absl::HashOf(t), (absl::Hash<std::tuple<int, int>>{}(t)));
+}
+
+TEST(HashOf, MatchesHashOfTupleForMultipleArguments) {
+ std::string hello = "hello";
+ std::string world = "world";
+
+ EXPECT_EQ(absl::HashOf(), absl::HashOf(std::make_tuple()));
+ EXPECT_EQ(absl::HashOf(hello), absl::HashOf(std::make_tuple(hello)));
+ EXPECT_EQ(absl::HashOf(hello, world),
+ absl::HashOf(std::make_tuple(hello, world)));
+}
+
+template <typename T>
+std::true_type HashOfExplicitParameter(decltype(absl::HashOf<T>(0))) {
+ return {};
+}
+template <typename T>
+std::false_type HashOfExplicitParameter(size_t) {
+ return {};
+}
+
+TEST(HashOf, CantPassExplicitTemplateParameters) {
+ EXPECT_FALSE(HashOfExplicitParameter<int>(0));
+}
+
} // namespace
diff --git a/third_party/abseil-cpp/absl/hash/internal/hash.cc b/third_party/abseil-cpp/absl/hash/internal/hash.cc
index 1433eb9db3..06f53a59c5 100644
--- a/third_party/abseil-cpp/absl/hash/internal/hash.cc
+++ b/third_party/abseil-cpp/absl/hash/internal/hash.cc
@@ -18,9 +18,8 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace hash_internal {
-uint64_t HashState::CombineLargeContiguousImpl32(uint64_t state,
- const unsigned char* first,
- size_t len) {
+uint64_t MixingHashState::CombineLargeContiguousImpl32(
+ uint64_t state, const unsigned char* first, size_t len) {
while (len >= PiecewiseChunkSize()) {
state =
Mix(state, absl::hash_internal::CityHash32(reinterpret_cast<const char*>(first),
@@ -33,9 +32,8 @@ uint64_t HashState::CombineLargeContiguousImpl32(uint64_t state,
std::integral_constant<int, 4>{});
}
-uint64_t HashState::CombineLargeContiguousImpl64(uint64_t state,
- const unsigned char* first,
- size_t len) {
+uint64_t MixingHashState::CombineLargeContiguousImpl64(
+ uint64_t state, const unsigned char* first, size_t len) {
while (len >= PiecewiseChunkSize()) {
state = Mix(state, Hash64(first, PiecewiseChunkSize()));
len -= PiecewiseChunkSize();
@@ -46,7 +44,7 @@ uint64_t HashState::CombineLargeContiguousImpl64(uint64_t state,
std::integral_constant<int, 8>{});
}
-ABSL_CONST_INIT const void* const HashState::kSeed = &kSeed;
+ABSL_CONST_INIT const void* const MixingHashState::kSeed = &kSeed;
// The salt array used by Wyhash. This array is NOT the mechanism used to make
// absl::Hash non-deterministic between program invocations. See `Seed()` for
@@ -61,7 +59,7 @@ constexpr uint64_t kWyhashSalt[5] = {
uint64_t{0x452821E638D01377},
};
-uint64_t HashState::WyhashImpl(const unsigned char* data, size_t len) {
+uint64_t MixingHashState::WyhashImpl(const unsigned char* data, size_t len) {
return Wyhash(data, len, Seed(), kWyhashSalt);
}
diff --git a/third_party/abseil-cpp/absl/hash/internal/hash.h b/third_party/abseil-cpp/absl/hash/internal/hash.h
index 7fb0af0b96..69dbbc6ba0 100644
--- a/third_party/abseil-cpp/absl/hash/internal/hash.h
+++ b/third_party/abseil-cpp/absl/hash/internal/hash.h
@@ -379,7 +379,7 @@ template <typename H, typename... Ts>
// This SFINAE gets MSVC confused under some conditions. Let's just disable it
// for now.
H
-#else // _MSC_VER
+#else // _MSC_VER
typename std::enable_if<absl::conjunction<is_hashable<Ts>...>::value, H>::type
#endif // _MSC_VER
AbslHashValue(H hash_state, const std::tuple<Ts...>& t) {
@@ -714,8 +714,8 @@ template <typename T>
struct is_hashable
: std::integral_constant<bool, HashSelect::template Apply<T>::value> {};
-// HashState
-class ABSL_DLL HashState : public HashStateBase<HashState> {
+// MixingHashState
+class ABSL_DLL MixingHashState : public HashStateBase<MixingHashState> {
// absl::uint128 is not an alias or a thin wrapper around the intrinsic.
// We use the intrinsic when available to improve performance.
#ifdef ABSL_HAVE_INTRINSIC_INT128
@@ -734,22 +734,23 @@ class ABSL_DLL HashState : public HashStateBase<HashState> {
public:
// Move only
- HashState(HashState&&) = default;
- HashState& operator=(HashState&&) = default;
+ MixingHashState(MixingHashState&&) = default;
+ MixingHashState& operator=(MixingHashState&&) = default;
- // HashState::combine_contiguous()
+ // MixingHashState::combine_contiguous()
//
// Fundamental base case for hash recursion: mixes the given range of bytes
// into the hash state.
- static HashState combine_contiguous(HashState hash_state,
- const unsigned char* first, size_t size) {
- return HashState(
+ static MixingHashState combine_contiguous(MixingHashState hash_state,
+ const unsigned char* first,
+ size_t size) {
+ return MixingHashState(
CombineContiguousImpl(hash_state.state_, first, size,
std::integral_constant<int, sizeof(size_t)>{}));
}
- using HashState::HashStateBase::combine_contiguous;
+ using MixingHashState::HashStateBase::combine_contiguous;
- // HashState::hash()
+ // MixingHashState::hash()
//
// For performance reasons in non-opt mode, we specialize this for
// integral types.
@@ -761,24 +762,24 @@ class ABSL_DLL HashState : public HashStateBase<HashState> {
return static_cast<size_t>(Mix(Seed(), static_cast<uint64_t>(value)));
}
- // Overload of HashState::hash()
+ // Overload of MixingHashState::hash()
template <typename T, absl::enable_if_t<!IntegralFastPath<T>::value, int> = 0>
static size_t hash(const T& value) {
- return static_cast<size_t>(combine(HashState{}, value).state_);
+ return static_cast<size_t>(combine(MixingHashState{}, value).state_);
}
private:
// Invoked only once for a given argument; that plus the fact that this is
// move-only ensures that there is only one non-moved-from object.
- HashState() : state_(Seed()) {}
+ MixingHashState() : state_(Seed()) {}
// Workaround for MSVC bug.
// We make the type copyable to fix the calling convention, even though we
// never actually copy it. Keep it private to not affect the public API of the
// type.
- HashState(const HashState&) = default;
+ MixingHashState(const MixingHashState&) = default;
- explicit HashState(uint64_t state) : state_(state) {}
+ explicit MixingHashState(uint64_t state) : state_(state) {}
// Implementation of the base case for combine_contiguous where we actually
// mix the bytes into the state.
@@ -793,7 +794,6 @@ class ABSL_DLL HashState : public HashStateBase<HashState> {
std::integral_constant<int, 8>
/* sizeof_size_t */);
-
// Slow dispatch path for calls to CombineContiguousImpl with a size argument
// larger than PiecewiseChunkSize(). Has the same effect as calling
// CombineContiguousImpl() repeatedly with the chunk stride size.
@@ -911,8 +911,8 @@ class ABSL_DLL HashState : public HashStateBase<HashState> {
uint64_t state_;
};
-// HashState::CombineContiguousImpl()
-inline uint64_t HashState::CombineContiguousImpl(
+// MixingHashState::CombineContiguousImpl()
+inline uint64_t MixingHashState::CombineContiguousImpl(
uint64_t state, const unsigned char* first, size_t len,
std::integral_constant<int, 4> /* sizeof_size_t */) {
// For large values we use CityHash, for small ones we just use a
@@ -934,8 +934,8 @@ inline uint64_t HashState::CombineContiguousImpl(
return Mix(state, v);
}
-// Overload of HashState::CombineContiguousImpl()
-inline uint64_t HashState::CombineContiguousImpl(
+// Overload of MixingHashState::CombineContiguousImpl()
+inline uint64_t MixingHashState::CombineContiguousImpl(
uint64_t state, const unsigned char* first, size_t len,
std::integral_constant<int, 8> /* sizeof_size_t */) {
// For large values we use Wyhash or CityHash depending on the platform, for
@@ -976,7 +976,9 @@ struct PoisonedHash : private AggregateBarrier {
template <typename T>
struct HashImpl {
- size_t operator()(const T& value) const { return HashState::hash(value); }
+ size_t operator()(const T& value) const {
+ return MixingHashState::hash(value);
+ }
};
template <typename T>
diff --git a/third_party/abseil-cpp/absl/hash/internal/wyhash.h b/third_party/abseil-cpp/absl/hash/internal/wyhash.h
index 4aff4e931a..2b534b4706 100644
--- a/third_party/abseil-cpp/absl/hash/internal/wyhash.h
+++ b/third_party/abseil-cpp/absl/hash/internal/wyhash.h
@@ -36,7 +36,7 @@ namespace hash_internal {
// integers are hashed into the result.
//
// To allow all hashable types (including string_view and Span) to depend on
-// this algoritm, we keep the API low-level, with as few dependencies as
+// this algorithm, we keep the API low-level, with as few dependencies as
// possible.
uint64_t Wyhash(const void* data, size_t len, uint64_t seed,
const uint64_t salt[5]);
diff --git a/third_party/abseil-cpp/absl/memory/CMakeLists.txt b/third_party/abseil-cpp/absl/memory/CMakeLists.txt
index 78fb7e1b31..9d50e1dcd4 100644
--- a/third_party/abseil-cpp/absl/memory/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/memory/CMakeLists.txt
@@ -37,7 +37,7 @@ absl_cc_test(
DEPS
absl::memory
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -51,5 +51,5 @@ absl_cc_test(
absl::memory
absl::config
absl::exception_safety_testing
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/memory/memory.h b/third_party/abseil-cpp/absl/memory/memory.h
index 2b5ff623d4..d63326068f 100644
--- a/third_party/abseil-cpp/absl/memory/memory.h
+++ b/third_party/abseil-cpp/absl/memory/memory.h
@@ -420,7 +420,7 @@ struct pointer_traits<T*> {
//
// A C++11 compatible implementation of C++17's std::allocator_traits.
//
-#if __cplusplus >= 201703L
+#if __cplusplus >= 201703L || (defined(_MSVC_LANG) && _MSVC_LANG >= 201703L)
using std::allocator_traits;
#else // __cplusplus >= 201703L
template <typename Alloc>
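
The __cplusplus check here (and the matching one in type_traits.h below) now also accepts _MSVC_LANG, because MSVC keeps __cplusplus at its legacy value unless /Zc:__cplusplus is passed, while _MSVC_LANG always reflects the selected /std: level. A compact sketch of the guard pattern; the ABSL_SKETCH_HAS_CXX17 macro name is made up for the example:

    // Detect C++17 language mode portably across GCC/Clang and MSVC, where
    // __cplusplus may still report 199711L without /Zc:__cplusplus.
    #if __cplusplus >= 201703L || (defined(_MSVC_LANG) && _MSVC_LANG >= 201703L)
    #define ABSL_SKETCH_HAS_CXX17 1
    #else
    #define ABSL_SKETCH_HAS_CXX17 0
    #endif

    #include <iostream>

    int main() {
      std::cout << "C++17 mode: " << ABSL_SKETCH_HAS_CXX17 << "\n";
      return 0;
    }
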
diff --git a/third_party/abseil-cpp/absl/meta/CMakeLists.txt b/third_party/abseil-cpp/absl/meta/CMakeLists.txt
index 672ead2fd0..9de4bd3751 100644
--- a/third_party/abseil-cpp/absl/meta/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/meta/CMakeLists.txt
@@ -35,7 +35,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
# component target
diff --git a/third_party/abseil-cpp/absl/meta/type_traits.h b/third_party/abseil-cpp/absl/meta/type_traits.h
index d5cb5f3be3..e7c123936d 100644
--- a/third_party/abseil-cpp/absl/meta/type_traits.h
+++ b/third_party/abseil-cpp/absl/meta/type_traits.h
@@ -499,6 +499,27 @@ struct is_trivially_copy_assignable
#endif // ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE
};
+#if defined(__cpp_lib_remove_cvref) && __cpp_lib_remove_cvref >= 201711L
+template <typename T>
+using remove_cvref = std::remove_cvref<T>;
+
+template <typename T>
+using remove_cvref_t = typename std::remove_cvref<T>::type;
+#else
+// remove_cvref()
+//
+// C++11 compatible implementation of std::remove_cvref which was added in
+// C++20.
+template <typename T>
+struct remove_cvref {
+ using type =
+ typename std::remove_cv<typename std::remove_reference<T>::type>::type;
+};
+
+template <typename T>
+using remove_cvref_t = typename remove_cvref<T>::type;
+#endif
+
namespace type_traits_internal {
// is_trivially_copyable()
//
@@ -613,7 +634,7 @@ using underlying_type_t = typename std::underlying_type<T>::type;
namespace type_traits_internal {
-#if __cplusplus >= 201703L
+#if __cplusplus >= 201703L || (defined(_MSVC_LANG) && _MSVC_LANG >= 201703L)
// std::result_of is deprecated (C++17) or removed (C++20)
template<typename> struct result_of;
template<typename F, typename... Args>
diff --git a/third_party/abseil-cpp/absl/meta/type_traits_test.cc b/third_party/abseil-cpp/absl/meta/type_traits_test.cc
index 1aafd0d49a..0ef5b66558 100644
--- a/third_party/abseil-cpp/absl/meta/type_traits_test.cc
+++ b/third_party/abseil-cpp/absl/meta/type_traits_test.cc
@@ -942,6 +942,34 @@ TEST(TypeTraitsTest, TestTriviallyCopyable) {
absl::type_traits_internal::is_trivially_copyable<Trivial&>::value);
}
+TEST(TypeTraitsTest, TestRemoveCVRef) {
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int>::type, int>::value));
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int&>::type, int>::value));
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int&&>::type, int>::value));
+ EXPECT_TRUE((
+ std::is_same<typename absl::remove_cvref<const int&>::type, int>::value));
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int*>::type, int*>::value));
+ // Does not remove const in this case.
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int*>::type,
+ const int*>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<int[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<int(&)[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<int(&&)[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int(&)[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int(&&)[2]>::type,
+ int[2]>::value));
+}
+
#define ABSL_INTERNAL_EXPECT_ALIAS_EQUIVALENCE(trait_name, ...) \
EXPECT_TRUE((std::is_same<typename std::trait_name<__VA_ARGS__>::type, \
absl::trait_name##_t<__VA_ARGS__>>::value))
diff --git a/third_party/abseil-cpp/absl/numeric/CMakeLists.txt b/third_party/abseil-cpp/absl/numeric/CMakeLists.txt
index 781987dc88..26df5cf703 100644
--- a/third_party/abseil-cpp/absl/numeric/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/numeric/CMakeLists.txt
@@ -38,7 +38,7 @@ absl_cc_test(
absl::bits
absl::core_headers
absl::random_random
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -73,7 +73,7 @@ absl_cc_test(
absl::core_headers
absl::hash_testing
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
# component target
diff --git a/third_party/abseil-cpp/absl/numeric/int128.h b/third_party/abseil-cpp/absl/numeric/int128.h
index 0dd814a890..198aa19504 100644
--- a/third_party/abseil-cpp/absl/numeric/int128.h
+++ b/third_party/abseil-cpp/absl/numeric/int128.h
@@ -810,6 +810,14 @@ inline bool operator>=(uint128 lhs, uint128 rhs) { return !(lhs < rhs); }
// Unary operators.
+constexpr inline uint128 operator+(uint128 val) {
+ return val;
+}
+
+constexpr inline int128 operator+(int128 val) {
+ return val;
+}
+
inline uint128 operator-(uint128 val) {
uint64_t hi = ~Uint128High64(val);
uint64_t lo = ~Uint128Low64(val) + 1;
@@ -817,27 +825,27 @@ inline uint128 operator-(uint128 val) {
return MakeUint128(hi, lo);
}
-inline bool operator!(uint128 val) {
+constexpr inline bool operator!(uint128 val) {
return !Uint128High64(val) && !Uint128Low64(val);
}
// Logical operators.
-inline uint128 operator~(uint128 val) {
+constexpr inline uint128 operator~(uint128 val) {
return MakeUint128(~Uint128High64(val), ~Uint128Low64(val));
}
-inline uint128 operator|(uint128 lhs, uint128 rhs) {
+constexpr inline uint128 operator|(uint128 lhs, uint128 rhs) {
return MakeUint128(Uint128High64(lhs) | Uint128High64(rhs),
Uint128Low64(lhs) | Uint128Low64(rhs));
}
-inline uint128 operator&(uint128 lhs, uint128 rhs) {
+constexpr inline uint128 operator&(uint128 lhs, uint128 rhs) {
return MakeUint128(Uint128High64(lhs) & Uint128High64(rhs),
Uint128Low64(lhs) & Uint128Low64(rhs));
}
-inline uint128 operator^(uint128 lhs, uint128 rhs) {
+constexpr inline uint128 operator^(uint128 lhs, uint128 rhs) {
return MakeUint128(Uint128High64(lhs) ^ Uint128High64(rhs),
Uint128Low64(lhs) ^ Uint128Low64(rhs));
}
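
With the operators above marked constexpr, bitwise manipulation of uint128 values becomes usable in constant expressions. A small usage sketch (it assumes only the constexpr qualifiers shown in this hunk plus the already-constexpr MakeUint128/Uint128High64/Uint128Low64 helpers):

    #include <cstdint>

    #include "absl/numeric/int128.h"

    // Compile-time check that ~ and & behave as expected on the two halves.
    constexpr absl::uint128 kLowMask = ~absl::MakeUint128(~uint64_t{0}, 0);
    static_assert(absl::Uint128High64(kLowMask) == 0, "high half cleared");
    static_assert(absl::Uint128Low64(kLowMask) == ~uint64_t{0}, "low half set");
    static_assert(!(kLowMask & absl::MakeUint128(uint64_t{1}, 0)),
                  "no overlap with a bit in the high half");
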
diff --git a/third_party/abseil-cpp/absl/numeric/int128_test.cc b/third_party/abseil-cpp/absl/numeric/int128_test.cc
index bc86c714ac..c445d89a99 100644
--- a/third_party/abseil-cpp/absl/numeric/int128_test.cc
+++ b/third_party/abseil-cpp/absl/numeric/int128_test.cc
@@ -226,6 +226,11 @@ TEST(Uint128, AllTests) {
EXPECT_EQ(test >>= 1, one);
EXPECT_EQ(test <<= 1, two);
+ EXPECT_EQ(big, +big);
+ EXPECT_EQ(two, +two);
+ EXPECT_EQ(absl::Uint128Max(), +absl::Uint128Max());
+ EXPECT_EQ(zero, +zero);
+
EXPECT_EQ(big, -(-big));
EXPECT_EQ(two, -((-one) - 1));
EXPECT_EQ(absl::Uint128Max(), -one);
@@ -769,6 +774,19 @@ TEST(Int128, ComparisonTest) {
}
}
+TEST(Int128, UnaryPlusTest) {
+ int64_t values64[] = {0, 1, 12345, 0x4000000000000000,
+ std::numeric_limits<int64_t>::max()};
+ for (int64_t value : values64) {
+ SCOPED_TRACE(::testing::Message() << "value = " << value);
+
+ EXPECT_EQ(absl::int128(value), +absl::int128(value));
+ EXPECT_EQ(absl::int128(-value), +absl::int128(-value));
+ EXPECT_EQ(absl::MakeInt128(value, 0), +absl::MakeInt128(value, 0));
+ EXPECT_EQ(absl::MakeInt128(-value, 0), +absl::MakeInt128(-value, 0));
+ }
+}
+
TEST(Int128, UnaryNegationTest) {
int64_t values64[] = {0, 1, 12345, 0x4000000000000000,
std::numeric_limits<int64_t>::max()};
diff --git a/third_party/abseil-cpp/absl/random/CMakeLists.txt b/third_party/abseil-cpp/absl/random/CMakeLists.txt
index 3009a0348a..9d1c67fb33 100644
--- a/third_party/abseil-cpp/absl/random/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/random/CMakeLists.txt
@@ -62,8 +62,8 @@ absl_cc_test(
absl::random_random
absl::random_internal_sequence_urbg
absl::fast_type_id
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -119,8 +119,8 @@ absl_cc_library(
absl::type_traits
absl::utility
absl::variant
- gmock
- gtest
+ GTest::gmock
+ GTest::gtest
TESTONLY
)
@@ -136,8 +136,8 @@ absl_cc_test(
DEPS
absl::random_mocking_bit_gen
absl::random_random
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -153,8 +153,8 @@ absl_cc_test(
absl::random_bit_gen_ref
absl::random_mocking_bit_gen
absl::random_random
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_library(
@@ -245,8 +245,8 @@ absl_cc_test(
absl::random_random
absl::random_internal_sequence_urbg
absl::random_internal_pcg_engine
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -268,8 +268,8 @@ absl_cc_test(
absl::raw_logging_internal
absl::strings
absl::str_format
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -285,8 +285,8 @@ absl_cc_test(
absl::random_distributions
absl::random_random
absl::random_internal_distribution_test_util
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -301,8 +301,8 @@ absl_cc_test(
absl::random_distributions
absl::random_random
absl::raw_logging_internal
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -322,8 +322,8 @@ absl_cc_test(
absl::raw_logging_internal
absl::strings
absl::str_format
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -343,8 +343,8 @@ absl_cc_test(
absl::random_random
absl::raw_logging_internal
absl::strings
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -367,8 +367,8 @@ absl_cc_test(
absl::raw_logging_internal
absl::strings
absl::str_format
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -391,8 +391,8 @@ absl_cc_test(
absl::raw_logging_internal
absl::strings
absl::str_format
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -414,8 +414,8 @@ absl_cc_test(
absl::raw_logging_internal
absl::strings
absl::str_format
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -435,8 +435,8 @@ absl_cc_test(
absl::random_random
absl::raw_logging_internal
absl::strings
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -456,8 +456,8 @@ absl_cc_test(
absl::random_internal_sequence_urbg
absl::random_random
absl::strings
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -477,8 +477,8 @@ absl_cc_test(
absl::random_random
absl::raw_logging_internal
absl::strings
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
absl_cc_test(
@@ -492,7 +492,7 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_random
- gtest_main
+ GTest::gtest_main
)
absl_cc_test(
@@ -508,8 +508,8 @@ absl_cc_test(
absl::random_seed_sequences
absl::random_internal_nonsecure_base
absl::random_random
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -894,7 +894,7 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_traits
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -911,7 +911,7 @@ absl_cc_test(
absl::bits
absl::flags
absl::random_internal_generate_real
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -926,7 +926,7 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_distribution_test_util
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -941,7 +941,7 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_fastmath
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -957,8 +957,8 @@ absl_cc_test(
DEPS
absl::random_internal_explicit_seed_seq
absl::random_seed_sequences
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -973,8 +973,8 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_salted_seed_seq
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -990,7 +990,7 @@ absl_cc_test(
DEPS
absl::core_headers
absl::random_internal_distribution_test_util
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1005,7 +1005,7 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_fast_uniform_bits
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1024,7 +1024,7 @@ absl_cc_test(
absl::random_distributions
absl::random_seed_sequences
absl::strings
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1039,8 +1039,8 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_seed_material
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1057,7 +1057,7 @@ absl_cc_test(
absl::random_internal_pool_urbg
absl::span
absl::type_traits
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1074,8 +1074,8 @@ absl_cc_test(
absl::random_internal_explicit_seed_seq
absl::random_internal_pcg_engine
absl::time
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1094,8 +1094,8 @@ absl_cc_test(
absl::raw_logging_internal
absl::strings
absl::time
- gmock
- gtest_main
+ GTest::gmock
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1111,7 +1111,7 @@ absl_cc_test(
DEPS
absl::random_internal_randen
absl::type_traits
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1127,7 +1127,7 @@ absl_cc_test(
DEPS
absl::endian
absl::random_internal_randen_slow
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1146,8 +1146,8 @@ absl_cc_test(
absl::random_internal_randen_hwaes_impl
absl::raw_logging_internal
absl::str_format
- gmock
- gtest
+ GTest::gmock
+ GTest::gtest
)
# Internal-only target, do not depend on directly.
@@ -1178,7 +1178,7 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_uniform_helper
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1193,7 +1193,7 @@ absl_cc_test(
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::random_internal_iostream_state_saver
- gtest_main
+ GTest::gtest_main
)
# Internal-only target, do not depend on directly.
@@ -1210,5 +1210,5 @@ absl_cc_test(
absl::random_internal_wide_multiply
absl::bits
absl::int128
- gtest_main
+ GTest::gtest_main
)
diff --git a/third_party/abseil-cpp/absl/random/beta_distribution_test.cc b/third_party/abseil-cpp/absl/random/beta_distribution_test.cc
index 44cdfdd049..d980c969f7 100644
--- a/third_party/abseil-cpp/absl/random/beta_distribution_test.cc
+++ b/third_party/abseil-cpp/absl/random/beta_distribution_test.cc
@@ -15,6 +15,7 @@
#include "absl/random/beta_distribution.h"
#include <algorithm>
+#include <cfloat>
#include <cstddef>
#include <cstdint>
#include <iterator>
@@ -558,6 +559,14 @@ TEST(BetaDistributionTest, StabilityTest) {
// dependencies of the distribution change, such as RandU64ToDouble, then this
// is also likely to change.
TEST(BetaDistributionTest, AlgorithmBounds) {
+#if (defined(__i386__) || defined(_M_IX86)) && FLT_EVAL_METHOD != 0
+ // We're using an x87-compatible FPU, and intermediate operations are
+ // performed with 80-bit floats. This produces slightly different results from
+ // what we expect below.
+ GTEST_SKIP()
+ << "Skipping the test because we detected x87 floating-point semantics";
+#endif
+
{
absl::random_internal::sequence_urbg urbg(
{0x7fbe76c8b4395800ull, 0x8000000000000000ull});
diff --git a/third_party/abseil-cpp/absl/random/discrete_distribution_test.cc b/third_party/abseil-cpp/absl/random/discrete_distribution_test.cc
index 6d007006ef..415b14cc76 100644
--- a/third_party/abseil-cpp/absl/random/discrete_distribution_test.cc
+++ b/third_party/abseil-cpp/absl/random/discrete_distribution_test.cc
@@ -99,6 +99,7 @@ TYPED_TEST(DiscreteDistributionTypeTest, Constructor) {
}
TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
+ using testing::_;
using testing::Pair;
{
@@ -111,8 +112,8 @@ TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
// Each bucket is p=1/3, so bucket 0 will send half its traffic
// to bucket 2, while the rest will retain all of their traffic.
EXPECT_THAT(q, testing::ElementsAre(Pair(0.5, 2), //
- Pair(1.0, 1), //
- Pair(1.0, 2)));
+ Pair(1.0, _), //
+ Pair(1.0, _)));
}
{
@@ -135,7 +136,7 @@ TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
EXPECT_THAT(q, testing::ElementsAre(Pair(b0, 3), //
Pair(b1, 3), //
- Pair(1.0, 2), //
+ Pair(1.0, _), //
Pair(b3, 2), //
Pair(b1, 3)));
}
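
The exact alternate bucket stored for entries whose probability is exactly 1.0 is an implementation detail, so the expectations above switch to gMock's testing::_ wildcard for that element. A standalone sketch of the matcher pattern (test name and data are made up):

    #include <utility>
    #include <vector>

    #include "gmock/gmock.h"
    #include "gtest/gtest.h"

    // Pair(1.0, _) matches any pair whose first element equals 1.0, regardless
    // of the second element; this is the pattern used above for indices that
    // the implementation does not pin down.
    TEST(MatcherSketch, WildcardSecondElement) {
      using ::testing::_;
      using ::testing::ElementsAre;
      using ::testing::Pair;

      std::vector<std::pair<double, int>> q = {{0.5, 2}, {1.0, 1}, {1.0, 2}};
      EXPECT_THAT(q, ElementsAre(Pair(0.5, 2), Pair(1.0, _), Pair(1.0, _)));
    }
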
diff --git a/third_party/abseil-cpp/absl/random/distributions_test.cc b/third_party/abseil-cpp/absl/random/distributions_test.cc
index 5866a07257..d3a5dd75e5 100644
--- a/third_party/abseil-cpp/absl/random/distributions_test.cc
+++ b/third_party/abseil-cpp/absl/random/distributions_test.cc
@@ -14,6 +14,7 @@
#include "absl/random/distributions.h"
+#include <cfloat>
#include <cmath>
#include <cstdint>
#include <random>
@@ -224,6 +225,15 @@ TEST_F(RandomDistributionsTest, UniformNoBounds) {
TEST_F(RandomDistributionsTest, UniformNonsenseRanges) {
// The ranges used in this test are undefined behavior.
// The results are arbitrary and subject to future changes.
+
+#if (defined(__i386__) || defined(_M_IX86)) && FLT_EVAL_METHOD != 0
+ // We're using an x87-compatible FPU, and intermediate operations can be
+ // performed with 80-bit floats. This produces slightly different results from
+ // what we expect below.
+ GTEST_SKIP()
+ << "Skipping the test because we detected x87 floating-point semantics";
+#endif
+
absl::InsecureBitGen gen;
// <uint>
diff --git a/third_party/abseil-cpp/absl/random/exponential_distribution_test.cc b/third_party/abseil-cpp/absl/random/exponential_distribution_test.cc
index af11d61c15..81a5d17bac 100644
--- a/third_party/abseil-cpp/absl/random/exponential_distribution_test.cc
+++ b/third_party/abseil-cpp/absl/random/exponential_distribution_test.cc
@@ -15,6 +15,7 @@
#include "absl/random/exponential_distribution.h"
#include <algorithm>
+#include <cfloat>
#include <cmath>
#include <cstddef>
#include <cstdint>
@@ -384,6 +385,15 @@ TEST(ExponentialDistributionTest, StabilityTest) {
TEST(ExponentialDistributionTest, AlgorithmBounds) {
// Relies on absl::uniform_real_distribution, so some of these comments
// reference that.
+
+#if (defined(__i386__) || defined(_M_IX86)) && FLT_EVAL_METHOD != 0
+ // We're using an x87-compatible FPU, and intermediate operations can be
+ // performed with 80-bit floats. This produces slightly different results from
+ // what we expect below.
+ GTEST_SKIP()
+ << "Skipping the test because we detected x87 floating-point semantics";
+#endif
+
absl::exponential_distribution<double> dist;
{
diff --git a/third_party/abseil-cpp/absl/random/internal/pool_urbg.cc b/third_party/abseil-cpp/absl/random/internal/pool_urbg.cc
index 5bee530770..725100a415 100644
--- a/third_party/abseil-cpp/absl/random/internal/pool_urbg.cc
+++ b/third_party/abseil-cpp/absl/random/internal/pool_urbg.cc
@@ -194,11 +194,10 @@ RandenPoolEntry* PoolAlignedAlloc() {
// Not all the platforms that we build for have std::aligned_alloc, however
// since we never free these objects, we can over allocate and munge the
// pointers to the correct alignment.
- void* memory = std::malloc(sizeof(RandenPoolEntry) + kAlignment);
- auto x = reinterpret_cast<intptr_t>(memory);
+ intptr_t x = reinterpret_cast<intptr_t>(
+ new char[sizeof(RandenPoolEntry) + kAlignment]);
auto y = x % kAlignment;
- void* aligned =
- (y == 0) ? memory : reinterpret_cast<void*>(x + kAlignment - y);
+ void* aligned = reinterpret_cast<void*>(y == 0 ? x : (x + kAlignment - y));
return new (aligned) RandenPoolEntry();
}
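
The hunk above swaps std::malloc for new char[] but keeps the same alignment scheme: over-allocate by kAlignment and round the pointer up to the next multiple. A standalone sketch of that scheme (the function name is illustrative, not the library's; the block is deliberately leaked, as in the original, so the unaligned base pointer never needs to be recovered):

    #include <cstddef>
    #include <cstdint>

    // Returns a pointer aligned to `alignment` inside a block that is
    // `alignment` bytes larger than requested. The block is never freed.
    void* AlignedAllocSketch(std::size_t size, std::size_t alignment) {
      intptr_t x = reinterpret_cast<intptr_t>(new char[size + alignment]);
      intptr_t align = static_cast<intptr_t>(alignment);
      intptr_t rem = x % align;
      return reinterpret_cast<void*>(rem == 0 ? x : x + align - rem);
    }
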
diff --git a/third_party/abseil-cpp/absl/random/internal/seed_material.cc b/third_party/abseil-cpp/absl/random/internal/seed_material.cc
index 4d38a57419..7c1d9efa42 100644
--- a/third_party/abseil-cpp/absl/random/internal/seed_material.cc
+++ b/third_party/abseil-cpp/absl/random/internal/seed_material.cc
@@ -28,6 +28,7 @@
#include <cstdlib>
#include <cstring>
+#include "absl/base/dynamic_annotations.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/strings/ascii.h"
#include "absl/strings/escaping.h"
@@ -50,6 +51,12 @@
#endif
+#if defined(__GLIBC__) && \
+ (__GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 25))
+// glibc >= 2.25 has getentropy()
+#define ABSL_RANDOM_USE_GET_ENTROPY 1
+#endif
+
#if defined(ABSL_RANDOM_USE_BCRYPT)
#include <bcrypt.h>
@@ -122,8 +129,32 @@ bool ReadSeedMaterialFromOSEntropyImpl(absl::Span<uint32_t> values) {
#else
+#if defined(ABSL_RANDOM_USE_GET_ENTROPY)
+// On *nix, use getentropy() if supported. Note that libc may support
+// getentropy(), but the kernel may not, in which case this function will return
+// false.
+bool ReadSeedMaterialFromGetEntropy(absl::Span<uint32_t> values) {
+ auto buffer = reinterpret_cast<uint8_t*>(values.data());
+ size_t buffer_size = sizeof(uint32_t) * values.size();
+ while (buffer_size > 0) {
+ // getentropy() has a maximum permitted length of 256.
+ size_t to_read = std::min<size_t>(buffer_size, 256);
+ int result = getentropy(buffer, to_read);
+ if (result < 0) {
+ return false;
+ }
+ // https://github.com/google/sanitizers/issues/1173
+ // MemorySanitizer can't see through getentropy().
+ ABSL_ANNOTATE_MEMORY_IS_INITIALIZED(buffer, to_read);
+ buffer += to_read;
+ buffer_size -= to_read;
+ }
+ return true;
+}
+#endif // defined(ABSL_RANDOM_USE_GET_ENTROPY)
+
// On *nix, read entropy from /dev/urandom.
-bool ReadSeedMaterialFromOSEntropyImpl(absl::Span<uint32_t> values) {
+bool ReadSeedMaterialFromDevURandom(absl::Span<uint32_t> values) {
const char kEntropyFile[] = "/dev/urandom";
auto buffer = reinterpret_cast<uint8_t*>(values.data());
@@ -150,6 +181,17 @@ bool ReadSeedMaterialFromOSEntropyImpl(absl::Span<uint32_t> values) {
return success;
}
+bool ReadSeedMaterialFromOSEntropyImpl(absl::Span<uint32_t> values) {
+#if defined(ABSL_RANDOM_USE_GET_ENTROPY)
+ if (ReadSeedMaterialFromGetEntropy(values)) {
+ return true;
+ }
+#endif
+ // Libc may support getentropy, but the kernel may not, so we still have
+ // to fall back to ReadSeedMaterialFromDevURandom().
+ return ReadSeedMaterialFromDevURandom(values);
+}
+
#endif
} // namespace
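
The new path above prefers getentropy() where glibc is new enough, reading in chunks because getentropy() caps each call at 256 bytes, and falls back to /dev/urandom when the kernel lacks the syscall. A trimmed sketch of just the chunking loop (assumes glibc >= 2.25 and omits the MemorySanitizer annotation the real code adds):

    #include <unistd.h>  // getentropy(), glibc >= 2.25

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    // Fills `buffer` with `size` bytes of OS entropy. Returns false if
    // getentropy() fails (e.g. the kernel does not support it), in which case
    // the caller is expected to fall back to reading /dev/urandom.
    bool FillFromGetEntropy(uint8_t* buffer, size_t size) {
      while (size > 0) {
        size_t to_read = std::min<size_t>(size, 256);
        if (getentropy(buffer, to_read) < 0) {
          return false;
        }
        buffer += to_read;
        size -= to_read;
      }
      return true;
    }
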
diff --git a/third_party/abseil-cpp/absl/random/uniform_real_distribution_test.cc b/third_party/abseil-cpp/absl/random/uniform_real_distribution_test.cc
index 18bcd3bce8..035bd284d1 100644
--- a/third_party/abseil-cpp/absl/random/uniform_real_distribution_test.cc
+++ b/third_party/abseil-cpp/absl/random/uniform_real_distribution_test.cc
@@ -14,6 +14,7 @@
#include "absl/random/uniform_real_distribution.h"
+#include <cfloat>
#include <cmath>
#include <cstdint>
#include <iterator>
@@ -70,6 +71,14 @@ using RealTypes =
TYPED_TEST_SUITE(UniformRealDistributionTest, RealTypes);
TYPED_TEST(UniformRealDistributionTest, ParamSerializeTest) {
+#if (defined(__i386__) || defined(_M_IX86)) && FLT_EVAL_METHOD != 0
+ // We're using an x87-compatible FPU, and intermediate operations are
+ // performed with 80-bit floats. This produces slightly different results from
+ // what we expect below.
+ GTEST_SKIP()
+ << "Skipping the test because we detected x87 floating-point semantics";
+#endif
+
using param_type =
typename absl::uniform_real_distribution<TypeParam>::param_type;
diff --git a/third_party/abseil-cpp/absl/status/CMakeLists.txt b/third_party/abseil-cpp/absl/status/CMakeLists.txt
index f0d798a373..1248dff03e 100644
--- a/third_party/abseil-cpp/absl/status/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/status/CMakeLists.txt
@@ -50,7 +50,7 @@ absl_cc_test(
DEPS
absl::status
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -84,5 +84,5 @@ absl_cc_test(
DEPS
absl::status
absl::statusor
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/status/internal/status_internal.h b/third_party/abseil-cpp/absl/status/internal/status_internal.h
index 279f8f55be..ac12940a6d 100644
--- a/third_party/abseil-cpp/absl/status/internal/status_internal.h
+++ b/third_party/abseil-cpp/absl/status/internal/status_internal.h
@@ -19,6 +19,17 @@
#include "absl/container/inlined_vector.h"
#include "absl/strings/cord.h"
+#ifndef SWIG
+// Disabled for SWIG as it doesn't parse attributes correctly.
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+// Returned Status objects may not be ignored. Codesearch doesn't handle ifdefs
+// as part of a class definition (b/6995610), so we use a forward declaration.
+class ABSL_MUST_USE_RESULT Status;
+ABSL_NAMESPACE_END
+} // namespace absl
+#endif // !SWIG
+
namespace absl {
ABSL_NAMESPACE_BEGIN
@@ -36,12 +47,12 @@ using Payloads = absl::InlinedVector<Payload, 1>;
// Reference-counted representation of Status data.
struct StatusRep {
- StatusRep(absl::StatusCode code, std::string message,
- std::unique_ptr<status_internal::Payloads> payloads)
+ StatusRep(absl::StatusCode code_arg, absl::string_view message_arg,
+ std::unique_ptr<status_internal::Payloads> payloads_arg)
: ref(int32_t{1}),
- code(code),
- message(std::move(message)),
- payloads(std::move(payloads)) {}
+ code(code_arg),
+ message(message_arg),
+ payloads(std::move(payloads_arg)) {}
std::atomic<int32_t> ref;
absl::StatusCode code;
diff --git a/third_party/abseil-cpp/absl/status/status.cc b/third_party/abseil-cpp/absl/status/status.cc
index 51a0d26897..5a5cd5c239 100644
--- a/third_party/abseil-cpp/absl/status/status.cc
+++ b/third_party/abseil-cpp/absl/status/status.cc
@@ -207,19 +207,10 @@ void Status::UnrefNonInlined(uintptr_t rep) {
}
}
-uintptr_t Status::NewRep(
- absl::StatusCode code, absl::string_view msg,
- std::unique_ptr<status_internal::Payloads> payloads) {
- status_internal::StatusRep* rep = new status_internal::StatusRep(
- code, std::string(msg.data(), msg.size()),
- std::move(payloads));
- return PointerToRep(rep);
-}
-
Status::Status(absl::StatusCode code, absl::string_view msg)
: rep_(CodeToInlinedRep(code)) {
if (code != absl::StatusCode::kOk && !msg.empty()) {
- rep_ = NewRep(code, msg, nullptr);
+ rep_ = PointerToRep(new status_internal::StatusRep(code, msg, nullptr));
}
}
@@ -238,9 +229,9 @@ absl::StatusCode Status::code() const {
void Status::PrepareToModify() {
ABSL_RAW_CHECK(!ok(), "PrepareToModify shouldn't be called on OK status.");
if (IsInlined(rep_)) {
- rep_ =
- NewRep(static_cast<absl::StatusCode>(raw_code()), absl::string_view(),
- nullptr);
+ rep_ = PointerToRep(new status_internal::StatusRep(
+ static_cast<absl::StatusCode>(raw_code()), absl::string_view(),
+ nullptr));
return;
}
@@ -251,8 +242,9 @@ void Status::PrepareToModify() {
if (rep->payloads) {
payloads = absl::make_unique<status_internal::Payloads>(*rep->payloads);
}
- rep_ = NewRep(rep->code, message(),
- std::move(payloads));
+ status_internal::StatusRep* const new_rep = new status_internal::StatusRep(
+ rep->code, message(), std::move(payloads));
+ rep_ = PointerToRep(new_rep);
UnrefNonInlined(rep_i);
}
}
diff --git a/third_party/abseil-cpp/absl/status/status.h b/third_party/abseil-cpp/absl/status/status.h
index df9e330c00..2e05f46e87 100644
--- a/third_party/abseil-cpp/absl/status/status.h
+++ b/third_party/abseil-cpp/absl/status/status.h
@@ -291,6 +291,10 @@ enum class StatusToStringMode : int {
kWithNoExtraData = 0,
// ToString will contain the payloads.
kWithPayload = 1 << 0,
+ // ToString will include all the extra data this Status has.
+ kWithEverything = ~kWithNoExtraData,
+ // Default mode used by ToString. Its exact value might change in the future.
+ kDefault = kWithPayload,
};
// absl::StatusToStringMode is specified as a bitmask type, which means the
@@ -410,7 +414,12 @@ inline StatusToStringMode& operator^=(StatusToStringMode& lhs,
// return result;
// }
//
-class ABSL_MUST_USE_RESULT Status final {
+// For documentation see https://abseil.io/docs/cpp/guides/status.
+//
+// Returned Status objects may not be ignored. status_internal.h has a forward
+// declaration of the form
+// class ABSL_MUST_USE_RESULT Status;
+class Status final {
public:
// Constructors
@@ -502,7 +511,7 @@ class ABSL_MUST_USE_RESULT Status final {
// result, and the payloads to be printed use the status payload printer
// mechanism (which is internal).
std::string ToString(
- StatusToStringMode mode = StatusToStringMode::kWithPayload) const;
+ StatusToStringMode mode = StatusToStringMode::kDefault) const;
// Status::IgnoreError()
//
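
The new kDefault and kWithEverything values slot into the existing StatusToStringMode bitmask, so ToString() callers can keep combining or negating modes as before. A brief usage sketch (payload keys are made up):

    #include <iostream>

    #include "absl/status/status.h"
    #include "absl/strings/cord.h"

    int main() {
      absl::Status s = absl::InternalError("fail");
      s.SetPayload("foo", absl::Cord("bar"));

      // Default mode (currently kWithPayload) includes payload data.
      std::cout << s.ToString() << "\n";
      // Modes remain a bitmask: everything, or everything except payloads.
      std::cout << s.ToString(absl::StatusToStringMode::kWithEverything) << "\n";
      std::cout << s.ToString(~absl::StatusToStringMode::kWithPayload) << "\n";
      return 0;
    }
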
diff --git a/third_party/abseil-cpp/absl/status/status_test.cc b/third_party/abseil-cpp/absl/status/status_test.cc
index 7116ba671f..1b038f6d98 100644
--- a/third_party/abseil-cpp/absl/status/status_test.cc
+++ b/third_party/abseil-cpp/absl/status/status_test.cc
@@ -36,7 +36,9 @@ TEST(StatusCode, InsertionOperator) {
// its creator, and its classifier.
struct ErrorTest {
absl::StatusCode code;
- using Creator = absl::Status (*)(absl::string_view);
+ using Creator = absl::Status (*)(
+ absl::string_view
+ );
using Classifier = bool (*)(const absl::Status&);
Creator creator;
Classifier classifier;
@@ -78,7 +80,9 @@ TEST(Status, CreateAndClassify) {
// expected error code and message.
std::string message =
absl::StrCat("error code ", test.code, " test message");
- absl::Status status = test.creator(message);
+ absl::Status status = test.creator(
+ message
+ );
EXPECT_EQ(test.code, status.code());
EXPECT_EQ(message, status.message());
@@ -292,6 +296,10 @@ TEST(Status, ToStringMode) {
AllOf(HasSubstr("INTERNAL: fail"), HasSubstr("[foo='bar']"),
HasSubstr("[bar='\\xff']")));
+ EXPECT_THAT(s.ToString(absl::StatusToStringMode::kWithEverything),
+ AllOf(HasSubstr("INTERNAL: fail"), HasSubstr("[foo='bar']"),
+ HasSubstr("[bar='\\xff']")));
+
EXPECT_THAT(s.ToString(~absl::StatusToStringMode::kWithPayload),
AllOf(HasSubstr("INTERNAL: fail"), Not(HasSubstr("[foo='bar']")),
Not(HasSubstr("[bar='\\xff']"))));
diff --git a/third_party/abseil-cpp/absl/status/statusor.h b/third_party/abseil-cpp/absl/status/statusor.h
index 469d486fdd..b7c55cc8ac 100644
--- a/third_party/abseil-cpp/absl/status/statusor.h
+++ b/third_party/abseil-cpp/absl/status/statusor.h
@@ -135,7 +135,7 @@ class ABSL_MUST_USE_RESULT StatusOr;
//
// NOTE: using `absl::StatusOr<T>::value()` when no valid value is present will
// throw an exception if exceptions are enabled or terminate the process when
-// execeptions are not enabled.
+// exceptions are not enabled.
//
// Example:
//
diff --git a/third_party/abseil-cpp/absl/strings/CMakeLists.txt b/third_party/abseil-cpp/absl/strings/CMakeLists.txt
index 3b7ae639f5..0246dc3851 100644
--- a/third_party/abseil-cpp/absl/strings/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/strings/CMakeLists.txt
@@ -101,7 +101,7 @@ absl_cc_test(
DEPS
absl::strings
absl::base
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -115,7 +115,7 @@ absl_cc_test(
absl::strings
absl::core_headers
absl::fixed_array
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -128,7 +128,7 @@ absl_cc_test(
DEPS
absl::strings
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -142,7 +142,7 @@ absl_cc_test(
DEPS
absl::strings
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -156,7 +156,7 @@ absl_cc_test(
absl::strings_internal
absl::base
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -169,7 +169,7 @@ absl_cc_test(
DEPS
absl::strings
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -184,7 +184,7 @@ absl_cc_test(
absl::config
absl::core_headers
absl::dynamic_annotations
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -197,7 +197,7 @@ absl_cc_test(
DEPS
absl::strings
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -209,7 +209,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -221,12 +221,12 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::strings
- absl::base
absl::core_headers
absl::dynamic_annotations
+ absl::btree
absl::flat_hash_map
absl::node_hash_map
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -238,7 +238,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::strings_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -253,7 +253,7 @@ absl_cc_test(
absl::base
absl::core_headers
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -268,7 +268,7 @@ absl_cc_test(
absl::base
absl::core_headers
absl::memory
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -281,7 +281,7 @@ absl_cc_test(
DEPS
absl::strings
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -301,7 +301,7 @@ absl_cc_test(
absl::random_random
absl::random_distributions
absl::strings_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -314,7 +314,7 @@ absl_cc_test(
DEPS
absl::strings
absl::base
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -326,7 +326,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::strings_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -340,7 +340,7 @@ absl_cc_test(
absl::strings
absl::str_format
absl::pow10_helper
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -355,7 +355,7 @@ absl_cc_test(
absl::strings
absl::config
absl::raw_logging_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -370,7 +370,7 @@ absl_cc_test(
DEPS
absl::strings
absl::config
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -428,7 +428,7 @@ absl_cc_test(
absl::cord
absl::strings
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -442,7 +442,7 @@ absl_cc_test(
absl::str_format
absl::str_format_internal
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -455,7 +455,7 @@ absl_cc_test(
DEPS
absl::str_format
absl::str_format_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -467,7 +467,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::str_format_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -479,7 +479,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::str_format
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -494,7 +494,7 @@ absl_cc_test(
absl::str_format_internal
absl::raw_logging_internal
absl::int128
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -507,7 +507,7 @@ absl_cc_test(
DEPS
absl::str_format_internal
absl::cord
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -520,7 +520,7 @@ absl_cc_test(
DEPS
absl::str_format_internal
absl::core_headers
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -547,39 +547,300 @@ absl_cc_test(
DEPS
absl::pow10_helper
absl::str_format
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
NAME
- cord
+ cord_internal
HDRS
- "cord.h"
- SRCS
- "cord.cc"
- "internal/cord_internal.cc"
"internal/cord_internal.h"
+ "internal/cord_rep_flat.h"
"internal/cord_rep_ring.h"
- "internal/cord_rep_ring.cc"
"internal/cord_rep_ring_reader.h"
- "internal/cord_rep_flat.h"
+ SRCS
+ "internal/cord_internal.cc"
+ "internal/cord_rep_ring.cc"
COPTS
${ABSL_DEFAULT_COPTS}
DEPS
- absl::base
absl::base_internal
absl::compressed_tuple
absl::config
absl::core_headers
absl::endian
+ absl::inlined_vector
+ absl::layout
+ absl::raw_logging_internal
+ absl::strings
+ absl::throw_delegate
+ absl::type_traits
+)
+
+absl_cc_library(
+ NAME
+ cordz_update_tracker
+ HDRS
+ "internal/cordz_update_tracker.h"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+)
+
+absl_cc_test(
+ NAME
+ cordz_update_tracker_test
+ SRCS
+ "internal/cordz_update_tracker_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::synchronization
+ GTest::gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_functions
+ HDRS
+ "internal/cordz_functions.h"
+ SRCS
+ "internal/cordz_functions.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::core_headers
+ absl::exponential_biased
+ absl::raw_logging_internal
+)
+
+absl_cc_test(
+ NAME
+ cordz_functions_test
+ SRCS
+ "internal/cordz_functions_test.cc"
+ DEPS
+ absl::config
+ absl::cordz_functions
+ absl::cordz_test_helpers
+ GTest::gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_statistics
+ HDRS
+ "internal/cordz_statistics.h"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::core_headers
+ absl::cordz_update_tracker
+ absl::synchronization
+)
+
+absl_cc_library(
+ NAME
+ cordz_handle
+ HDRS
+ "internal/cordz_handle.h"
+ SRCS
+ "internal/cordz_handle.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::base
+ absl::config
+ absl::raw_logging_internal
+ absl::synchronization
+)
+
+absl_cc_test(
+ NAME
+ cordz_handle_test
+ SRCS
+ "internal/cordz_handle_test.cc"
+ DEPS
+ absl::config
+ absl::cordz_handle
+ absl::cordz_test_helpers
+ absl::memory
+ absl::random_random
+ absl::random_distributions
+ absl::synchronization
+ absl::time
+ GTest::gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_info
+ HDRS
+ "internal/cordz_info.h"
+ SRCS
+ "internal/cordz_info.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::base
+ absl::config
+ absl::cord_internal
+ absl::cordz_functions
+ absl::cordz_handle
+ absl::cordz_statistics
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::inlined_vector
+ absl::span
+ absl::raw_logging_internal
+ absl::stacktrace
+ absl::synchronization
+)
+
+absl_cc_test(
+ NAME
+ cordz_info_test
+ SRCS
+ "internal/cordz_info_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_test_helpers
+ absl::cordz_handle
+ absl::cordz_info
+ absl::cordz_statistics
+ absl::cordz_test_helpers
+ absl::cordz_update_tracker
+ absl::span
+ absl::stacktrace
+ absl::symbolize
+ GTest::gmock_main
+)
+
+absl_cc_test(
+ NAME
+ cordz_info_statistics_test
+ SRCS
+ "internal/cordz_info_statistics_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_statistics
+ absl::cordz_update_scope
+ absl::cordz_update_tracker
+ absl::thread_pool
+ GTest::gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_sample_token
+ HDRS
+ "internal/cordz_sample_token.h"
+ SRCS
+ "internal/cordz_sample_token.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::cordz_handle
+ absl::cordz_info
+)
+
+absl_cc_test(
+ NAME
+ cordz_sample_token_test
+ SRCS
+ "internal/cordz_sample_token_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_handle
+ absl::cordz_info
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_test_helpers
+ absl::memory
+ absl::random_random
+ absl::synchronization
+ absl::thread_pool
+ absl::time
+ GTest::gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_update_scope
+ HDRS
+ "internal/cordz_update_scope.h"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_update_tracker
+ absl::core_headers
+)
+
+absl_cc_test(
+ NAME
+ cordz_update_scope_test
+ SRCS
+ "internal/cordz_update_scope_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_test_helpers
+ absl::cordz_update_scope
+ absl::cordz_update_tracker
+ absl::core_headers
+ GTest::gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cord
+ HDRS
+ "cord.h"
+ SRCS
+ "cord.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::base
+ absl::config
+ absl::cord_internal
+ absl::cordz_functions
+ absl::cordz_info
+ absl::cordz_update_scope
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::endian
absl::fixed_array
absl::function_ref
absl::inlined_vector
absl::optional
absl::raw_logging_internal
absl::strings
- absl::strings_internal
- absl::throw_delegate
absl::type_traits
PUBLIC
)
@@ -592,7 +853,30 @@ absl_cc_library(
COPTS
${ABSL_TEST_COPTS}
DEPS
+ absl::config
+ absl::cord
+ absl::cord_internal
+ absl::strings
+ TESTONLY
+)
+
+absl_cc_library(
+ NAME
+ cordz_test_helpers
+ HDRS
+ "cordz_test_helpers.h"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
absl::cord
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_statistics
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::strings
TESTONLY
)
@@ -609,11 +893,13 @@ absl_cc_test(
absl::strings
absl::base
absl::config
+ absl::cord_test_helpers
+ absl::cordz_test_helpers
absl::core_headers
absl::endian
absl::raw_logging_internal
absl::fixed_array
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -624,13 +910,13 @@ absl_cc_test(
COPTS
${ABSL_TEST_COPTS}
DEPS
- absl::config
- absl::cord
- absl::strings
absl::base
+ absl::config
+ absl::cord_internal
absl::core_headers
absl::raw_logging_internal
- gmock_main
+ absl::strings
+ GTest::gmock_main
)
absl_cc_test(
@@ -641,9 +927,33 @@ absl_cc_test(
COPTS
${ABSL_TEST_COPTS}
DEPS
- absl::cord
+ absl::base
+ absl::cord_internal
+ absl::core_headers
absl::strings
+ GTest::gmock_main
+)
+
+absl_cc_test(
+ NAME
+ cordz_test
+ SRCS
+ "cordz_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::cord
+ absl::cord_test_helpers
+ absl::cordz_test_helpers
+ absl::cordz_functions
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_statistics
+ absl::cordz_update_tracker
absl::base
+ absl::config
absl::core_headers
- gmock_main
+ absl::raw_logging_internal
+ absl::strings
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/strings/charconv.cc b/third_party/abseil-cpp/absl/strings/charconv.cc
index b8674c2802..fefcfc90a5 100644
--- a/third_party/abseil-cpp/absl/strings/charconv.cc
+++ b/third_party/abseil-cpp/absl/strings/charconv.cc
@@ -111,7 +111,7 @@ struct FloatTraits<double> {
return sign ? -ldexp(mantissa, exponent) : ldexp(mantissa, exponent);
#else
constexpr uint64_t kMantissaMask =
- (uint64_t(1) << (kTargetMantissaBits - 1)) - 1;
+ (uint64_t{1} << (kTargetMantissaBits - 1)) - 1;
uint64_t dbl = static_cast<uint64_t>(sign) << 63;
if (mantissa > kMantissaMask) {
// Normal value.
@@ -151,7 +151,7 @@ struct FloatTraits<float> {
return sign ? -ldexpf(mantissa, exponent) : ldexpf(mantissa, exponent);
#else
constexpr uint32_t kMantissaMask =
- (uint32_t(1) << (kTargetMantissaBits - 1)) - 1;
+ (uint32_t{1} << (kTargetMantissaBits - 1)) - 1;
uint32_t flt = static_cast<uint32_t>(sign) << 31;
if (mantissa > kMantissaMask) {
// Normal value.
@@ -499,7 +499,7 @@ bool MustRoundUp(uint64_t guess_mantissa, int guess_exponent,
template <typename FloatType>
CalculatedFloat CalculatedFloatFromRawValues(uint64_t mantissa, int exponent) {
CalculatedFloat result;
- if (mantissa == uint64_t(1) << FloatTraits<FloatType>::kTargetMantissaBits) {
+ if (mantissa == uint64_t{1} << FloatTraits<FloatType>::kTargetMantissaBits) {
mantissa >>= 1;
exponent += 1;
}
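
The uint64_t(1) to uint64_t{1} changes above replace function-style casts with braced initialization, which rejects narrowing at compile time instead of silently wrapping. A tiny self-contained illustration (the constant values mirror the IEEE double mantissa width used in the surrounding code, stated here as an assumption):

    #include <cstdint>

    // Braced init is checked for narrowing: uint64_t{-1} is ill-formed,
    // whereas uint64_t(-1) silently wraps to the maximum value.
    constexpr int kTargetMantissaBits = 53;
    constexpr uint64_t kMantissaMask =
        (uint64_t{1} << (kTargetMantissaBits - 1)) - 1;
    static_assert(kMantissaMask == 0xFFFFFFFFFFFFFull, "52 low bits set");
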
diff --git a/third_party/abseil-cpp/absl/strings/cord.cc b/third_party/abseil-cpp/absl/strings/cord.cc
index 93533757f5..f5aa6e4788 100644
--- a/third_party/abseil-cpp/absl/strings/cord.cc
+++ b/third_party/abseil-cpp/absl/strings/cord.cc
@@ -38,6 +38,9 @@
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cord_rep_flat.h"
#include "absl/strings/internal/cord_rep_ring.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_scope.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
#include "absl/strings/internal/resize_uninitialized.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
@@ -53,8 +56,10 @@ using ::absl::cord_internal::CordRepExternal;
using ::absl::cord_internal::CordRepFlat;
using ::absl::cord_internal::CordRepRing;
using ::absl::cord_internal::CordRepSubstring;
-using ::absl::cord_internal::kMinFlatLength;
+using ::absl::cord_internal::CordzUpdateTracker;
+using ::absl::cord_internal::InlineData;
using ::absl::cord_internal::kMaxFlatLength;
+using ::absl::cord_internal::kMinFlatLength;
using ::absl::cord_internal::CONCAT;
using ::absl::cord_internal::EXTERNAL;
@@ -206,7 +211,7 @@ static CordRep* MakeBalancedTree(CordRep** reps, size_t n) {
}
static CordRepFlat* CreateFlat(const char* data, size_t length,
- size_t alloc_hint) {
+ size_t alloc_hint) {
CordRepFlat* flat = CordRepFlat::New(length + alloc_hint);
flat->length = length;
memcpy(flat->Data(), data, length);
@@ -230,9 +235,7 @@ static CordRep* RingNewTree(const char* data, size_t length,
// Create a new tree out of the specified array.
// The returned node has a refcount of 1.
-static CordRep* NewTree(const char* data,
- size_t length,
- size_t alloc_hint) {
+static CordRep* NewTree(const char* data, size_t length, size_t alloc_hint) {
if (length == 0) return nullptr;
if (cord_ring_enabled()) {
return RingNewTree(data, length, alloc_hint);
@@ -279,6 +282,35 @@ static CordRep* NewSubstring(CordRep* child, size_t offset, size_t length) {
}
}
+// Creates a CordRep from the provided string. If the string is large enough,
+// and not wasteful, we move the string into an external cord rep, preserving
+// the already allocated string contents.
+// Requires the provided string length to be larger than `kMaxInline`.
+static CordRep* CordRepFromString(std::string&& src) {
+ assert(src.length() > cord_internal::kMaxInline);
+ if (
+ // String is short: copy data to avoid external block overhead.
+ src.size() <= kMaxBytesToCopy ||
+ // String is wasteful: copy data to avoid pinning too much unused memory.
+ src.size() < src.capacity() / 2
+ ) {
+ return NewTree(src.data(), src.size(), 0);
+ }
+
+ struct StringReleaser {
+ void operator()(absl::string_view /* data */) {}
+ std::string data;
+ };
+ const absl::string_view original_data = src;
+ auto* rep =
+ static_cast<::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
+ absl::cord_internal::NewExternalRep(original_data,
+ StringReleaser{std::move(src)}));
+ // Moving src may have invalidated its data pointer, so adjust it.
+ rep->base = rep->template get<0>().data.data();
+ return rep;
+}
+
// --------------------------------------------------------------------
// Cord::InlineRep functions
@@ -299,20 +331,6 @@ inline char* Cord::InlineRep::set_data(size_t n) {
return data_.as_chars();
}
-inline CordRep* Cord::InlineRep::force_tree(size_t extra_hint) {
- if (data_.is_tree()) {
- return data_.as_tree();
- }
-
- size_t len = inline_size();
- CordRepFlat* result = CordRepFlat::New(len + extra_hint);
- result->length = len;
- static_assert(kMinFlatLength >= sizeof(data_), "");
- memcpy(result->Data(), data_.as_chars(), sizeof(data_));
- set_tree(result);
- return result;
-}
-
inline void Cord::InlineRep::reduce_size(size_t n) {
size_t tag = inline_size();
assert(tag <= kMaxInline);
@@ -334,25 +352,72 @@ static CordRepRing* ForceRing(CordRep* rep, size_t extra) {
return (rep->tag == RING) ? rep->ring() : CordRepRing::Create(rep, extra);
}
-void Cord::InlineRep::AppendTree(CordRep* tree) {
+void Cord::InlineRep::AppendTreeToInlined(CordRep* tree,
+ MethodIdentifier method) {
+ assert(!is_tree());
+ if (!data_.is_empty()) {
+ CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Append(CordRepRing::Create(flat, 1), tree);
+ } else {
+ tree = Concat(flat, tree);
+ }
+ }
+ EmplaceTree(tree, method);
+}
+
+void Cord::InlineRep::AppendTreeToTree(CordRep* tree, MethodIdentifier method) {
+ assert(is_tree());
+ const CordzUpdateScope scope(data_.cordz_info(), method);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Append(ForceRing(data_.as_tree(), 1), tree);
+ } else {
+ tree = Concat(data_.as_tree(), tree);
+ }
+ SetTree(tree, scope);
+}
+
+void Cord::InlineRep::AppendTree(CordRep* tree, MethodIdentifier method) {
if (tree == nullptr) return;
- if (data_.is_empty()) {
- set_tree(tree);
- } else if (cord_ring_enabled()) {
- set_tree(CordRepRing::Append(ForceRing(force_tree(0), 1), tree));
+ if (data_.is_tree()) {
+ AppendTreeToTree(tree, method);
} else {
- set_tree(Concat(force_tree(0), tree));
+ AppendTreeToInlined(tree, method);
}
}
-void Cord::InlineRep::PrependTree(CordRep* tree) {
+void Cord::InlineRep::PrependTreeToInlined(CordRep* tree,
+ MethodIdentifier method) {
+ assert(!is_tree());
+ if (!data_.is_empty()) {
+ CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Prepend(CordRepRing::Create(flat, 1), tree);
+ } else {
+ tree = Concat(tree, flat);
+ }
+ }
+ EmplaceTree(tree, method);
+}
+
+void Cord::InlineRep::PrependTreeToTree(CordRep* tree,
+ MethodIdentifier method) {
+ assert(is_tree());
+ const CordzUpdateScope scope(data_.cordz_info(), method);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Prepend(ForceRing(data_.as_tree(), 1), tree);
+ } else {
+ tree = Concat(tree, data_.as_tree());
+ }
+ SetTree(tree, scope);
+}
+
+void Cord::InlineRep::PrependTree(CordRep* tree, MethodIdentifier method) {
assert(tree != nullptr);
- if (data_.is_empty()) {
- set_tree(tree);
- } else if (cord_ring_enabled()) {
- set_tree(CordRepRing::Prepend(ForceRing(force_tree(0), 1), tree));
+ if (data_.is_tree()) {
+ PrependTreeToTree(tree, method);
} else {
- set_tree(Concat(tree, force_tree(0)));
+ PrependTreeToInlined(tree, method);
}
}
@@ -404,76 +469,43 @@ static inline bool PrepareAppendRegion(CordRep* root, char** region,
return true;
}
+template <bool has_length>
void Cord::InlineRep::GetAppendRegion(char** region, size_t* size,
- size_t max_length) {
- if (max_length == 0) {
- *region = nullptr;
- *size = 0;
- return;
- }
-
- // Try to fit in the inline buffer if possible.
- if (!is_tree()) {
- size_t inline_length = inline_size();
- if (max_length <= kMaxInline - inline_length) {
- *region = data_.as_chars() + inline_length;
- *size = max_length;
- set_inline_size(inline_length + max_length);
- return;
- }
- }
-
- CordRep* root = force_tree(max_length);
-
- if (PrepareAppendRegion(root, region, size, max_length)) {
- return;
- }
-
- // Allocate new node.
- CordRepFlat* new_node =
- CordRepFlat::New(std::max(static_cast<size_t>(root->length), max_length));
- new_node->length = std::min(new_node->Capacity(), max_length);
- *region = new_node->Data();
- *size = new_node->length;
-
- if (cord_ring_enabled()) {
- replace_tree(CordRepRing::Append(ForceRing(root, 1), new_node));
- return;
- }
- replace_tree(Concat(root, new_node));
-}
-
-void Cord::InlineRep::GetAppendRegion(char** region, size_t* size) {
- const size_t max_length = std::numeric_limits<size_t>::max();
-
- // Try to fit in the inline buffer if possible.
- if (!data_.is_tree()) {
- size_t inline_length = inline_size();
- if (inline_length < kMaxInline) {
- *region = data_.as_chars() + inline_length;
- *size = kMaxInline - inline_length;
- set_inline_size(kMaxInline);
+ size_t length) {
+ auto constexpr method = CordzUpdateTracker::kGetAppendRegion;
+
+ CordRep* root = tree();
+ size_t sz = root ? root->length : inline_size();
+ if (root == nullptr) {
+ size_t available = kMaxInline - sz;
+ if (available >= (has_length ? length : 1)) {
+ *region = data_.as_chars() + sz;
+ *size = has_length ? length : available;
+ set_inline_size(has_length ? sz + length : kMaxInline);
return;
}
}
- CordRep* root = force_tree(max_length);
-
- if (PrepareAppendRegion(root, region, size, max_length)) {
+ size_t extra = has_length ? length : (std::max)(sz, kMinFlatLength);
+ CordRep* rep = root ? root : MakeFlatWithExtraCapacity(extra);
+ CordzUpdateScope scope(root ? data_.cordz_info() : nullptr, method);
+ if (PrepareAppendRegion(rep, region, size, length)) {
+ CommitTree(root, rep, scope, method);
return;
}
// Allocate new node.
- CordRepFlat* new_node = CordRepFlat::New(root->length);
- new_node->length = new_node->Capacity();
+ CordRepFlat* new_node = CordRepFlat::New(extra);
+ new_node->length = std::min(new_node->Capacity(), length);
*region = new_node->Data();
*size = new_node->length;
if (cord_ring_enabled()) {
- replace_tree(CordRepRing::Append(ForceRing(root, 1), new_node));
- return;
+ rep = CordRepRing::Append(ForceRing(rep, 1), new_node);
+ } else {
+ rep = Concat(rep, new_node);
}
- replace_tree(Concat(root, new_node));
+ CommitTree(root, rep, scope, method);
}
// If the rep is a leaf, this will increment the value at total_mem_usage and
@@ -484,68 +516,67 @@ static bool RepMemoryUsageLeaf(const CordRep* rep, size_t* total_mem_usage) {
return true;
}
if (rep->tag == EXTERNAL) {
- *total_mem_usage += sizeof(CordRepConcat) + rep->length;
+ // We don't know anything about the embedded / bound data, but we can safely
+ // assume it is 'at least' a word / pointer to data. In the future we may
+ // choose to use the 'data' byte as a tag to identify the types of some
+ // well-known externals, such as a std::string instance.
+ *total_mem_usage +=
+ sizeof(cord_internal::CordRepExternalImpl<intptr_t>) + rep->length;
return true;
}
return false;
}
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
- ClearSlow();
+ assert(&src != this);
+ assert(is_tree() || src.is_tree());
+ auto constexpr method = CordzUpdateTracker::kAssignCord;
+ if (ABSL_PREDICT_TRUE(!is_tree())) {
+ EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
+ return;
+ }
- data_ = src.data_;
- if (is_tree()) {
- data_.set_profiled(false);
- CordRep::Ref(tree());
- clear_cordz_info();
+ CordRep* tree = as_tree();
+ if (CordRep* src_tree = src.tree()) {
+ // Leave any existing `cordz_info` in place, and let MaybeTrackCord()
+ // decide whether this cord should be (or should remain) sampled.
+ data_.set_tree(CordRep::Ref(src_tree));
+ CordzInfo::MaybeTrackCord(data_, src.data_, method);
+ } else {
+ CordzInfo::MaybeUntrackCord(data_.cordz_info());
+ data_ = src.data_;
}
+ CordRep::Unref(tree);
}
-void Cord::InlineRep::ClearSlow() {
+void Cord::InlineRep::UnrefTree() {
if (is_tree()) {
+ CordzInfo::MaybeUntrackCord(data_.cordz_info());
CordRep::Unref(tree());
}
- ResetToEmpty();
}
// --------------------------------------------------------------------
// Constructors and destructors
-Cord::Cord(absl::string_view src) {
+Cord::Cord(absl::string_view src, MethodIdentifier method)
+ : contents_(InlineData::kDefaultInit) {
const size_t n = src.size();
if (n <= InlineRep::kMaxInline) {
- contents_.set_data(src.data(), n, false);
+ contents_.set_data(src.data(), n, true);
} else {
- contents_.set_tree(NewTree(src.data(), n, 0));
+ CordRep* rep = NewTree(src.data(), n, 0);
+ contents_.EmplaceTree(rep, method);
}
}
template <typename T, Cord::EnableIfString<T>>
-Cord::Cord(T&& src) {
- if (
- // String is short: copy data to avoid external block overhead.
- src.size() <= kMaxBytesToCopy ||
- // String is wasteful: copy data to avoid pinning too much unused memory.
- src.size() < src.capacity() / 2
- ) {
- if (src.size() <= InlineRep::kMaxInline) {
- contents_.set_data(src.data(), src.size(), false);
- } else {
- contents_.set_tree(NewTree(src.data(), src.size(), 0));
- }
+Cord::Cord(T&& src) : contents_(InlineData::kDefaultInit) {
+ if (src.size() <= InlineRep::kMaxInline) {
+ contents_.set_data(src.data(), src.size(), true);
} else {
- struct StringReleaser {
- void operator()(absl::string_view /* data */) {}
- std::string data;
- };
- const absl::string_view original_data = src;
- auto* rep = static_cast<
- ::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
- absl::cord_internal::NewExternalRep(
- original_data, StringReleaser{std::forward<T>(src)}));
- // Moving src may have invalidated its data pointer, so adjust it.
- rep->base = rep->template get<0>().data.data();
- contents_.set_tree(rep);
+ CordRep* rep = CordRepFromString(std::forward<T>(src));
+ contents_.EmplaceTree(rep, CordzUpdateTracker::kConstructorString);
}
}
@@ -554,9 +585,9 @@ template Cord::Cord(std::string&& src);
// The destruction code is separate so that the compiler can determine
// that it does not need to call the destructor on a moved-from Cord.
void Cord::DestroyCordSlow() {
- if (CordRep* tree = contents_.tree()) {
- CordRep::Unref(VerifyTree(tree));
- }
+ assert(contents_.is_tree());
+ CordzInfo::MaybeUntrackCord(contents_.cordz_info());
+ CordRep::Unref(VerifyTree(contents_.as_tree()));
}
// --------------------------------------------------------------------
@@ -568,109 +599,117 @@ void Cord::Clear() {
}
}
-Cord& Cord::operator=(absl::string_view src) {
+Cord& Cord::AssignLargeString(std::string&& src) {
+ auto constexpr method = CordzUpdateTracker::kAssignString;
+ assert(src.size() > kMaxBytesToCopy);
+ CordRep* rep = CordRepFromString(std::move(src));
+ if (CordRep* tree = contents_.tree()) {
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ contents_.SetTree(rep, scope);
+ CordRep::Unref(tree);
+ } else {
+ contents_.EmplaceTree(rep, method);
+ }
+ return *this;
+}
+Cord& Cord::operator=(absl::string_view src) {
+ auto constexpr method = CordzUpdateTracker::kAssignString;
const char* data = src.data();
size_t length = src.size();
CordRep* tree = contents_.tree();
if (length <= InlineRep::kMaxInline) {
- // Embed into this->contents_
+ // Embed into this->contents_, which is somewhat subtle:
+ // - MaybeUntrackCord must be called before Unref(tree).
+ // - MaybeUntrackCord must be called before set_data() clobbers cordz_info.
+ // - set_data() must be called before Unref(tree) as it may reference tree.
+ if (tree != nullptr) CordzInfo::MaybeUntrackCord(contents_.cordz_info());
contents_.set_data(data, length, true);
- if (tree) CordRep::Unref(tree);
- return *this;
- }
- if (tree != nullptr && tree->tag >= FLAT &&
- tree->flat()->Capacity() >= length &&
- tree->refcount.IsOne()) {
- // Copy in place if the existing FLAT node is reusable.
- memmove(tree->flat()->Data(), data, length);
- tree->length = length;
- VerifyTree(tree);
+ if (tree != nullptr) CordRep::Unref(tree);
return *this;
}
- contents_.set_tree(NewTree(data, length, 0));
- if (tree) CordRep::Unref(tree);
- return *this;
-}
-
-template <typename T, Cord::EnableIfString<T>>
-Cord& Cord::operator=(T&& src) {
- if (src.size() <= kMaxBytesToCopy) {
- *this = absl::string_view(src);
+ if (tree != nullptr) {
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ if (tree->tag >= FLAT && tree->flat()->Capacity() >= length &&
+ tree->refcount.IsOne()) {
+ // Copy in place if the existing FLAT node is reusable.
+ memmove(tree->flat()->Data(), data, length);
+ tree->length = length;
+ VerifyTree(tree);
+ return *this;
+ }
+ contents_.SetTree(NewTree(data, length, 0), scope);
+ CordRep::Unref(tree);
} else {
- *this = Cord(std::forward<T>(src));
+ contents_.EmplaceTree(NewTree(data, length, 0), method);
}
return *this;
}
-template Cord& Cord::operator=(std::string&& src);
-
// TODO(sanjay): Move to Cord::InlineRep section of file. For now,
// we keep it here to make diffs easier.
-void Cord::InlineRep::AppendArray(const char* src_data, size_t src_size) {
- if (src_size == 0) return; // memcpy(_, nullptr, 0) is undefined.
+void Cord::InlineRep::AppendArray(absl::string_view src,
+ MethodIdentifier method) {
+ if (src.empty()) return; // memcpy(_, nullptr, 0) is undefined.
size_t appended = 0;
- CordRep* root = nullptr;
- if (is_tree()) {
- root = data_.as_tree();
+ CordRep* rep = tree();
+ const CordRep* const root = rep;
+ CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
+ if (root != nullptr) {
char* region;
- if (PrepareAppendRegion(root, &region, &appended, src_size)) {
- memcpy(region, src_data, appended);
+ if (PrepareAppendRegion(rep, &region, &appended, src.size())) {
+ memcpy(region, src.data(), appended);
}
} else {
// Try to fit in the inline buffer if possible.
size_t inline_length = inline_size();
- if (src_size <= kMaxInline - inline_length) {
+ if (src.size() <= kMaxInline - inline_length) {
// Append new data to embedded array
- memcpy(data_.as_chars() + inline_length, src_data, src_size);
- set_inline_size(inline_length + src_size);
+ memcpy(data_.as_chars() + inline_length, src.data(), src.size());
+ set_inline_size(inline_length + src.size());
return;
}
- // It is possible that src_data == data_, but when we transition from an
- // InlineRep to a tree we need to assign data_ = root via set_tree. To
- // avoid corrupting the source data before we copy it, delay calling
- // set_tree until after we've copied data.
+ // Note: we don't need to worry about src aliasing data stored in the
+ // inlined data of 'this', as we update the InlineData only at the end.
// We are going from an inline size to beyond inline size. Make the new size
// either double the inlined size, or the added size + 10%.
- const size_t size1 = inline_length * 2 + src_size;
- const size_t size2 = inline_length + src_size / 10;
- root = CordRepFlat::New(std::max<size_t>(size1, size2));
- appended = std::min(
- src_size, root->flat()->Capacity() - inline_length);
- memcpy(root->flat()->Data(), data_.as_chars(), inline_length);
- memcpy(root->flat()->Data() + inline_length, src_data, appended);
- root->length = inline_length + appended;
- set_tree(root);
- }
-
- src_data += appended;
- src_size -= appended;
- if (src_size == 0) {
+ const size_t size1 = inline_length * 2 + src.size();
+ const size_t size2 = inline_length + src.size() / 10;
+ rep = CordRepFlat::New(std::max<size_t>(size1, size2));
+ appended = std::min(src.size(), rep->flat()->Capacity() - inline_length);
+ memcpy(rep->flat()->Data(), data_.as_chars(), inline_length);
+ memcpy(rep->flat()->Data() + inline_length, src.data(), appended);
+ rep->length = inline_length + appended;
+ }
+
+ src.remove_prefix(appended);
+ if (src.empty()) {
+ CommitTree(root, rep, scope, method);
return;
}
if (cord_ring_enabled()) {
- absl::string_view data(src_data, src_size);
- root = ForceRing(root, (data.size() - 1) / kMaxFlatLength + 1);
- replace_tree(CordRepRing::Append(root->ring(), data));
- return;
- }
-
- // Use new block(s) for any remaining bytes that were not handled above.
- // Alloc extra memory only if the right child of the root of the new tree is
- // going to be a FLAT node, which will permit further inplace appends.
- size_t length = src_size;
- if (src_size < kMaxFlatLength) {
- // The new length is either
- // - old size + 10%
- // - old_size + src_size
- // This will cause a reasonable conservative step-up in size that is still
- // large enough to avoid excessive amounts of small fragments being added.
- length = std::max<size_t>(root->length / 10, src_size);
+ rep = ForceRing(rep, (src.size() - 1) / kMaxFlatLength + 1);
+ rep = CordRepRing::Append(rep->ring(), src);
+ } else {
+ // Use new block(s) for any remaining bytes that were not handled above.
+ // Alloc extra memory only if the right child of the root of the new tree
+ // is going to be a FLAT node, which will permit further inplace appends.
+ size_t length = src.size();
+ if (src.size() < kMaxFlatLength) {
+ // The new length is either
+ // - old size + 10%
+ // - old_size + src.size()
+ // This will cause a reasonably conservative step-up in size that is
+ // still large enough to avoid excessive amounts of small fragments
+ // being added.
+ length = std::max<size_t>(rep->length / 10, src.size());
+ }
+ rep = Concat(rep, NewTree(src.data(), src.size(), length - src.size()));
}
- set_tree(Concat(root, NewTree(src_data, src_size, length - src_size)));
+ CommitTree(root, rep, scope, method);
}
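
For illustration, the size computation above reduces to the following stand-alone sketch; NewFlatSizeHint is a hypothetical helper name used only here, not part of this patch:

#include <algorithm>
#include <cstddef>

size_t NewFlatSizeHint(size_t inline_length, size_t src_size) {
  // Request either twice the inlined length plus the appended bytes, or the
  // inlined length plus 10% of the appended bytes, whichever is larger. The
  // real code then clamps the bytes actually copied to the capacity returned
  // by CordRepFlat::New().
  const size_t size1 = inline_length * 2 + src_size;
  const size_t size2 = inline_length + src_size / 10;
  return std::max(size1, size2);
}
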
inline CordRep* Cord::TakeRep() const& {
@@ -685,10 +724,17 @@ inline CordRep* Cord::TakeRep() && {
template <typename C>
inline void Cord::AppendImpl(C&& src) {
+ auto constexpr method = CordzUpdateTracker::kAppendCord;
if (empty()) {
- // In case of an empty destination avoid allocating a new node, do not copy
- // data.
- *this = std::forward<C>(src);
+ // Since destination is empty, we can avoid allocating a node,
+ if (src.contents_.is_tree()) {
+ // by taking the tree directly
+ CordRep* rep = std::forward<C>(src).TakeRep();
+ contents_.EmplaceTree(rep, method);
+ } else {
+ // or copying over inline data
+ contents_.data_ = src.contents_.data_;
+ }
return;
}
@@ -698,12 +744,12 @@ inline void Cord::AppendImpl(C&& src) {
CordRep* src_tree = src.contents_.tree();
if (src_tree == nullptr) {
// src has embedded data.
- contents_.AppendArray(src.contents_.data(), src_size);
+ contents_.AppendArray({src.contents_.data(), src_size}, method);
return;
}
if (src_tree->tag >= FLAT) {
// src tree just has one flat node.
- contents_.AppendArray(src_tree->flat()->Data(), src_size);
+ contents_.AppendArray({src_tree->flat()->Data(), src_size}, method);
return;
}
if (&src == this) {
@@ -719,7 +765,8 @@ inline void Cord::AppendImpl(C&& src) {
}
// Guaranteed to be a tree (kMaxBytesToCopy > kInlinedSize)
- contents_.AppendTree(std::forward<C>(src).TakeRep());
+ CordRep* rep = std::forward<C>(src).TakeRep();
+ contents_.AppendTree(rep, CordzUpdateTracker::kAppendCord);
}
void Cord::Append(const Cord& src) { AppendImpl(src); }
@@ -731,7 +778,8 @@ void Cord::Append(T&& src) {
if (src.size() <= kMaxBytesToCopy) {
Append(absl::string_view(src));
} else {
- Append(Cord(std::forward<T>(src)));
+ CordRep* rep = CordRepFromString(std::forward<T>(src));
+ contents_.AppendTree(rep, CordzUpdateTracker::kAppendString);
}
}
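
A brief usage sketch of the public Append() overloads whose fast paths are dispatched above; the sizes are illustrative, and the exact copy threshold is kMaxBytesToCopy:

#include <string>
#include "absl/strings/cord.h"

void AppendExamples() {
  absl::Cord cord("hello ");
  // Small literal: copied via AppendArray() into inline/flat storage.
  cord.Append("world");
  // Another Cord above the copy threshold: the tree is typically shared
  // (referenced) rather than copied.
  absl::Cord big(std::string(4096, 'x'));
  cord.Append(big);
  // Large std::string rvalue: converted to a tree rep and appended directly.
  cord.Append(std::string(4096, 'y'));
}
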
@@ -741,7 +789,7 @@ void Cord::Prepend(const Cord& src) {
CordRep* src_tree = src.contents_.tree();
if (src_tree != nullptr) {
CordRep::Ref(src_tree);
- contents_.PrependTree(src_tree);
+ contents_.PrependTree(src_tree, CordzUpdateTracker::kPrependCord);
return;
}
@@ -764,7 +812,8 @@ void Cord::Prepend(absl::string_view src) {
return;
}
}
- contents_.PrependTree(NewTree(src.data(), src.size(), 0));
+ CordRep* rep = NewTree(src.data(), src.size(), 0);
+ contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
}
template <typename T, Cord::EnableIfString<T>>
@@ -772,7 +821,8 @@ inline void Cord::Prepend(T&& src) {
if (src.size() <= kMaxBytesToCopy) {
Prepend(absl::string_view(src));
} else {
- Prepend(Cord(std::forward<T>(src)));
+ CordRep* rep = CordRepFromString(std::forward<T>(src));
+ contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
}
}
@@ -870,12 +920,17 @@ void Cord::RemovePrefix(size_t n) {
CordRep* tree = contents_.tree();
if (tree == nullptr) {
contents_.remove_prefix(n);
- } else if (tree->tag == RING) {
- contents_.replace_tree(CordRepRing::RemovePrefix(tree->ring(), n));
} else {
- CordRep* newrep = RemovePrefixFrom(tree, n);
- CordRep::Unref(tree);
- contents_.replace_tree(VerifyTree(newrep));
+ auto constexpr method = CordzUpdateTracker::kRemovePrefix;
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ if (tree->tag == RING) {
+ tree = CordRepRing::RemovePrefix(tree->ring(), n);
+ } else {
+ CordRep* newrep = RemovePrefixFrom(tree, n);
+ CordRep::Unref(tree);
+ tree = VerifyTree(newrep);
+ }
+ contents_.SetTreeOrEmpty(tree, scope);
}
}
@@ -886,12 +941,17 @@ void Cord::RemoveSuffix(size_t n) {
CordRep* tree = contents_.tree();
if (tree == nullptr) {
contents_.reduce_size(n);
- } else if (tree->tag == RING) {
- contents_.replace_tree(CordRepRing::RemoveSuffix(tree->ring(), n));
} else {
- CordRep* newrep = RemoveSuffixFrom(tree, n);
- CordRep::Unref(tree);
- contents_.replace_tree(VerifyTree(newrep));
+ auto constexpr method = CordzUpdateTracker::kRemoveSuffix;
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ if (tree->tag == RING) {
+ tree = CordRepRing::RemoveSuffix(tree->ring(), n);
+ } else {
+ CordRep* newrep = RemoveSuffixFrom(tree, n);
+ CordRep::Unref(tree);
+ tree = VerifyTree(newrep);
+ }
+ contents_.SetTreeOrEmpty(tree, scope);
}
}
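
For context, the RemovePrefix()/RemoveSuffix() entry points updated above are used as follows (values illustrative):

#include <cassert>
#include "absl/strings/cord.h"

void TrimExample() {
  absl::Cord cord("the quick brown fox");
  cord.RemovePrefix(4);  // "quick brown fox"
  cord.RemoveSuffix(4);  // "quick brown"
  assert(cord == "quick brown");
}
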
@@ -951,17 +1011,20 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const {
size_t length = size();
if (pos > length) pos = length;
if (new_size > length - pos) new_size = length - pos;
+ if (new_size == 0) return sub_cord;
+
CordRep* tree = contents_.tree();
if (tree == nullptr) {
// sub_cord is newly constructed, no need to re-zero-out the tail of
// contents_ memory.
sub_cord.contents_.set_data(contents_.data() + pos, new_size, false);
- } else if (new_size == 0) {
- // We want to return empty subcord, so nothing to do.
- } else if (new_size <= InlineRep::kMaxInline) {
+ return sub_cord;
+ }
+
+ if (new_size <= InlineRep::kMaxInline) {
+ char* dest = sub_cord.contents_.data_.as_chars();
Cord::ChunkIterator it = chunk_begin();
it.AdvanceBytes(pos);
- char* dest = sub_cord.contents_.data_.as_chars();
size_t remaining_size = new_size;
while (remaining_size > it->size()) {
cord_internal::SmallMemmove(dest, it->data(), it->size());
@@ -971,12 +1034,17 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const {
}
cord_internal::SmallMemmove(dest, it->data(), remaining_size);
sub_cord.contents_.set_inline_size(new_size);
- } else if (tree->tag == RING) {
- tree = CordRepRing::SubRing(CordRep::Ref(tree)->ring(), pos, new_size);
- sub_cord.contents_.set_tree(tree);
+ return sub_cord;
+ }
+
+ if (tree->tag == RING) {
+ CordRepRing* ring = CordRep::Ref(tree)->ring();
+ tree = CordRepRing::SubRing(ring, pos, new_size);
} else {
- sub_cord.contents_.set_tree(NewSubRange(tree, pos, new_size));
+ tree = NewSubRange(tree, pos, new_size);
}
+ sub_cord.contents_.EmplaceTree(tree, contents_.data_,
+ CordzUpdateTracker::kSubCord);
return sub_cord;
}
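
A plain usage sketch of Subcord() as reworked above, independent of the ring/tree internals:

#include "absl/strings/cord.h"

absl::Cord SubcordExample() {
  absl::Cord cord("a moderately long test string for Subcord");
  // Results of kMaxInline bytes or fewer are flattened into inline storage;
  // larger results share the underlying tree / ring representation.
  return cord.Subcord(2, 10);  // "moderately"
}
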
@@ -1418,6 +1486,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
"Attempted to iterate past `end()`");
Cord subcord;
+ auto constexpr method = CordzUpdateTracker::kCordReader;
if (n <= InlineRep::kMaxInline) {
// Range to read fits in inline data. Flatten it.
@@ -1440,11 +1509,12 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
if (ring_reader_) {
size_t chunk_size = current_chunk_.size();
if (n <= chunk_size && n <= kMaxBytesToCopy) {
- subcord = Cord(current_chunk_.substr(0, n));
+ subcord = Cord(current_chunk_.substr(0, n), method);
} else {
auto* ring = CordRep::Ref(ring_reader_.ring())->ring();
size_t offset = ring_reader_.length() - bytes_remaining_;
- subcord.contents_.set_tree(CordRepRing::SubRing(ring, offset, n));
+ CordRep* rep = CordRepRing::SubRing(ring, offset, n);
+ subcord.contents_.EmplaceTree(rep, method);
}
if (n < chunk_size) {
bytes_remaining_ -= n;
@@ -1463,7 +1533,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
const char* data = subnode->tag == EXTERNAL ? subnode->external()->base
: subnode->flat()->Data();
subnode = NewSubstring(subnode, current_chunk_.data() - data, n);
- subcord.contents_.set_tree(VerifyTree(subnode));
+ subcord.contents_.EmplaceTree(VerifyTree(subnode), method);
RemoveChunkPrefix(n);
return subcord;
}
@@ -1506,7 +1576,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
if (node == nullptr) {
// We have reached the end of the Cord.
assert(bytes_remaining_ == 0);
- subcord.contents_.set_tree(VerifyTree(subnode));
+ subcord.contents_.EmplaceTree(VerifyTree(subnode), method);
return subcord;
}
@@ -1546,7 +1616,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
current_chunk_ = absl::string_view(data + offset + n, length - n);
current_leaf_ = node;
bytes_remaining_ -= n;
- subcord.contents_.set_tree(VerifyTree(subnode));
+ subcord.contents_.EmplaceTree(VerifyTree(subnode), method);
return subcord;
}
@@ -1653,6 +1723,7 @@ char Cord::operator[](size_t i) const {
}
absl::string_view Cord::FlattenSlowPath() {
+ assert(contents_.is_tree());
size_t total_size = size();
CordRep* new_rep;
char* new_buffer;
@@ -1673,10 +1744,9 @@ absl::string_view Cord::FlattenSlowPath() {
s.size());
});
}
- if (CordRep* tree = contents_.tree()) {
- CordRep::Unref(tree);
- }
- contents_.set_tree(new_rep);
+ CordzUpdateScope scope(contents_.cordz_info(), CordzUpdateTracker::kFlatten);
+ CordRep::Unref(contents_.as_tree());
+ contents_.SetTree(new_rep, scope);
return absl::string_view(new_buffer, total_size);
}
@@ -1688,6 +1758,8 @@ absl::string_view Cord::FlattenSlowPath() {
} else if (rep->tag == EXTERNAL) {
*fragment = absl::string_view(rep->external()->base, rep->length);
return true;
+ } else if (rep->tag == RING) {
+ return rep->ring()->IsFlat(fragment);
} else if (rep->tag == SUBSTRING) {
CordRep* child = rep->substring()->child;
if (child->tag >= FLAT) {
@@ -1698,6 +1770,9 @@ absl::string_view Cord::FlattenSlowPath() {
*fragment = absl::string_view(
child->external()->base + rep->substring()->start, rep->length);
return true;
+ } else if (child->tag == RING) {
+ return child->ring()->IsFlat(rep->substring()->start, rep->length,
+ fragment);
}
}
return false;
@@ -1786,8 +1861,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
*os << absl::CEscape(std::string(rep->external()->base, rep->length));
*os << "]\n";
} else if (rep->tag >= FLAT) {
- *os << "FLAT cap=" << rep->flat()->Capacity()
- << " [";
+ *os << "FLAT cap=" << rep->flat()->Capacity() << " [";
if (include_data)
*os << absl::CEscape(std::string(rep->flat()->Data(), rep->length));
*os << "]\n";
@@ -1799,7 +1873,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
do {
DumpNode(ring->entry_child(head), include_data, os,
indent + kIndentStep);
- head = ring->advance(head);;
+ head = ring->advance(head);
} while (head != ring->tail());
}
if (stack.empty()) break;
@@ -1845,9 +1919,8 @@ static bool VerifyNode(CordRep* root, CordRep* start_node,
worklist.push_back(node->concat()->left);
}
} else if (node->tag >= FLAT) {
- ABSL_INTERNAL_CHECK(
- node->length <= node->flat()->Capacity(),
- ReportError(root, node));
+ ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
+ ReportError(root, node));
} else if (node->tag == EXTERNAL) {
ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
ReportError(root, node));
diff --git a/third_party/abseil-cpp/absl/strings/cord.h b/third_party/abseil-cpp/absl/strings/cord.h
index fa9cb913fd..e758f1cdfb 100644
--- a/third_party/abseil-cpp/absl/strings/cord.h
+++ b/third_party/abseil-cpp/absl/strings/cord.h
@@ -70,6 +70,7 @@
#include <string>
#include <type_traits>
+#include "absl/base/config.h"
#include "absl/base/internal/endian.h"
#include "absl/base/internal/per_thread_tls.h"
#include "absl/base/macros.h"
@@ -80,6 +81,11 @@
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cord_rep_ring.h"
#include "absl/strings/internal/cord_rep_ring_reader.h"
+#include "absl/strings/internal/cordz_functions.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_scope.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
#include "absl/strings/internal/resize_uninitialized.h"
#include "absl/strings/internal/string_constant.h"
#include "absl/strings/string_view.h"
@@ -664,10 +670,24 @@ class Cord {
explicit constexpr Cord(strings_internal::StringConstant<T>);
private:
+ using CordRep = absl::cord_internal::CordRep;
+ using CordRepFlat = absl::cord_internal::CordRepFlat;
+ using CordzInfo = cord_internal::CordzInfo;
+ using CordzUpdateScope = cord_internal::CordzUpdateScope;
+ using CordzUpdateTracker = cord_internal::CordzUpdateTracker;
+ using InlineData = cord_internal::InlineData;
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+ // Creates a cord instance with `method` representing the originating
+ // public API call causing the cord to be created.
+ explicit Cord(absl::string_view src, MethodIdentifier method);
+
friend class CordTestPeer;
friend bool operator==(const Cord& lhs, const Cord& rhs);
friend bool operator==(const Cord& lhs, absl::string_view rhs);
+ friend const CordzInfo* GetCordzInfoForTesting(const Cord& cord);
+
// Calls the provided function once for each cord chunk, in order. Unlike
// Chunks(), this API will not allocate memory.
void ForEachChunk(absl::FunctionRef<void(absl::string_view)>) const;
@@ -687,6 +707,7 @@ class Cord {
static_assert(kMaxInline >= sizeof(absl::cord_internal::CordRep*), "");
constexpr InlineRep() : data_() {}
+ explicit InlineRep(InlineData::DefaultInitType init) : data_(init) {}
InlineRep(const InlineRep& src);
InlineRep(InlineRep&& src);
InlineRep& operator=(const InlineRep& src);
@@ -704,23 +725,56 @@ class Cord {
// Returns nullptr if holding bytes
absl::cord_internal::CordRep* tree() const;
absl::cord_internal::CordRep* as_tree() const;
- // Discards old pointer, if any
- void set_tree(absl::cord_internal::CordRep* rep);
- // Replaces a tree with a new root. This is faster than set_tree, but it
- // should only be used when it's clear that the old rep was a tree.
- void replace_tree(absl::cord_internal::CordRep* rep);
// Returns non-null iff was holding a pointer
absl::cord_internal::CordRep* clear();
// Converts to pointer if necessary.
- absl::cord_internal::CordRep* force_tree(size_t extra_hint);
void reduce_size(size_t n); // REQUIRES: holding data
void remove_prefix(size_t n); // REQUIRES: holding data
- void AppendArray(const char* src_data, size_t src_size);
+ void AppendArray(absl::string_view src, MethodIdentifier method);
absl::string_view FindFlatStartPiece() const;
- void AppendTree(absl::cord_internal::CordRep* tree);
- void PrependTree(absl::cord_internal::CordRep* tree);
- void GetAppendRegion(char** region, size_t* size, size_t max_length);
- void GetAppendRegion(char** region, size_t* size);
+
+ // Creates a CordRepFlat instance from the current inlined data with `extra`
+ // bytes of desired additional capacity.
+ CordRepFlat* MakeFlatWithExtraCapacity(size_t extra);
+
+ // Sets the tree value for this instance. `rep` must not be null.
+ // Requires the current instance to hold a tree, and a lock to be held on
+ // any CordzInfo referenced by this instance. The latter is enforced through
+ // the CordzUpdateScope argument. If the current instance is sampled, then
+ // the CordzInfo instance is updated to reference the new `rep` value.
+ void SetTree(CordRep* rep, const CordzUpdateScope& scope);
+
+ // Identical to SetTree(), except that `rep` is allowed to be null, in
+ // which case the current instance is reset to an empty value.
+ void SetTreeOrEmpty(CordRep* rep, const CordzUpdateScope& scope);
+
+ // Sets the tree value for this instance, and randomly samples this cord.
+ // This function disregards existing contents in `data_`, and should be
+ // called when a Cord is 'promoted' from an 'uninitialized' or 'inlined'
+ // value to a non-inlined (tree / ring) value.
+ void EmplaceTree(CordRep* rep, MethodIdentifier method);
+
+ // Identical to EmplaceTree, except that it copies the parent stack from
+ // the provided `parent` data if the parent is sampled.
+ void EmplaceTree(CordRep* rep, const InlineData& parent,
+ MethodIdentifier method);
+
+ // Commits a newly created or updated `rep` root value into this cord.
+ // `old_rep` indicates the old (inlined or tree) value of the cord, and
+ // determines whether the commit invokes SetTree() or EmplaceTree().
+ void CommitTree(const CordRep* old_rep, CordRep* rep,
+ const CordzUpdateScope& scope, MethodIdentifier method);
+
+ void AppendTreeToInlined(CordRep* tree, MethodIdentifier method);
+ void AppendTreeToTree(CordRep* tree, MethodIdentifier method);
+ void AppendTree(CordRep* tree, MethodIdentifier method);
+ void PrependTreeToInlined(CordRep* tree, MethodIdentifier method);
+ void PrependTreeToTree(CordRep* tree, MethodIdentifier method);
+ void PrependTree(CordRep* tree, MethodIdentifier method);
+
+ template <bool has_length>
+ void GetAppendRegion(char** region, size_t* size, size_t length);
+
bool IsSame(const InlineRep& other) const {
return memcmp(&data_, &other.data_, sizeof(data_)) == 0;
}
@@ -776,8 +830,8 @@ class Cord {
friend class Cord;
void AssignSlow(const InlineRep& src);
- // Unrefs the tree, stops profiling, and zeroes the contents
- void ClearSlow();
+ // Unrefs the tree and stops profiling.
+ void UnrefTree();
void ResetToEmpty() { data_ = {}; }
@@ -828,6 +882,10 @@ class Cord {
template <typename C>
void AppendImpl(C&& src);
+ // Assigns the value in 'src' to this instance, 'stealing' its contents.
+ // Requires src.length() > kMaxBytesToCopy.
+ Cord& AssignLargeString(std::string&& src);
+
// Helper for AbslHashValue().
template <typename H>
H HashFragmented(H hash_state) const {
@@ -930,8 +988,11 @@ inline CordRep* NewExternalRep(absl::string_view data,
template <typename Releaser>
Cord MakeCordFromExternal(absl::string_view data, Releaser&& releaser) {
Cord cord;
- cord.contents_.set_tree(::absl::cord_internal::NewExternalRep(
- data, std::forward<Releaser>(releaser)));
+ if (auto* rep = ::absl::cord_internal::NewExternalRep(
+ data, std::forward<Releaser>(releaser))) {
+ cord.contents_.EmplaceTree(rep,
+ Cord::MethodIdentifier::kMakeCordFromExternal);
+ }
return cord;
}
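
A usage sketch for MakeCordFromExternal() as updated above; the buffer ownership and releaser shown are the caller's choice, not mandated by the API:

#include <cstdlib>
#include <cstring>
#include "absl/strings/cord.h"
#include "absl/strings/string_view.h"

absl::Cord WrapHeapBuffer(const char* src, size_t len) {
  char* buffer = static_cast<char*>(malloc(len));
  memcpy(buffer, src, len);
  // The returned cord takes ownership; the releaser runs once the last
  // reference to the external rep is dropped. An empty `data` produces an
  // empty cord (note the null-rep check above).
  return absl::MakeCordFromExternal(
      absl::string_view(buffer, len),
      [buffer](absl::string_view) { free(buffer); });
}
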
@@ -939,15 +1000,16 @@ constexpr Cord::InlineRep::InlineRep(cord_internal::InlineData data)
: data_(data) {}
inline Cord::InlineRep::InlineRep(const Cord::InlineRep& src)
- : data_(src.data_) {
- if (is_tree()) {
- data_.clear_cordz_info();
- absl::cord_internal::CordRep::Ref(as_tree());
+ : data_(InlineData::kDefaultInit) {
+ if (CordRep* tree = src.tree()) {
+ EmplaceTree(CordRep::Ref(tree), src.data_,
+ CordzUpdateTracker::kConstructorCord);
+ } else {
+ data_ = src.data_;
}
}
-inline Cord::InlineRep::InlineRep(Cord::InlineRep&& src) {
- data_ = src.data_;
+inline Cord::InlineRep::InlineRep(Cord::InlineRep&& src) : data_(src.data_) {
src.ResetToEmpty();
}
@@ -966,7 +1028,7 @@ inline Cord::InlineRep& Cord::InlineRep::operator=(const Cord::InlineRep& src) {
inline Cord::InlineRep& Cord::InlineRep::operator=(
Cord::InlineRep&& src) noexcept {
if (is_tree()) {
- ClearSlow();
+ UnrefTree();
}
data_ = src.data_;
src.ResetToEmpty();
@@ -1003,31 +1065,62 @@ inline size_t Cord::InlineRep::size() const {
return is_tree() ? as_tree()->length : inline_size();
}
-inline void Cord::InlineRep::set_tree(absl::cord_internal::CordRep* rep) {
- if (rep == nullptr) {
- ResetToEmpty();
+inline cord_internal::CordRepFlat* Cord::InlineRep::MakeFlatWithExtraCapacity(
+ size_t extra) {
+ static_assert(cord_internal::kMinFlatLength >= sizeof(data_), "");
+ size_t len = data_.inline_size();
+ auto* result = CordRepFlat::New(len + extra);
+ result->length = len;
+ memcpy(result->Data(), data_.as_chars(), sizeof(data_));
+ return result;
+}
+
+inline void Cord::InlineRep::EmplaceTree(CordRep* rep,
+ MethodIdentifier method) {
+ assert(rep);
+ data_.make_tree(rep);
+ CordzInfo::MaybeTrackCord(data_, method);
+}
+
+inline void Cord::InlineRep::EmplaceTree(CordRep* rep, const InlineData& parent,
+ MethodIdentifier method) {
+ data_.make_tree(rep);
+ CordzInfo::MaybeTrackCord(data_, parent, method);
+}
+
+inline void Cord::InlineRep::SetTree(CordRep* rep,
+ const CordzUpdateScope& scope) {
+ assert(rep);
+ assert(data_.is_tree());
+ data_.set_tree(rep);
+ scope.SetCordRep(rep);
+}
+
+inline void Cord::InlineRep::SetTreeOrEmpty(CordRep* rep,
+ const CordzUpdateScope& scope) {
+ assert(data_.is_tree());
+ if (rep) {
+ data_.set_tree(rep);
} else {
- if (data_.is_tree()) {
- // `data_` already holds a 'tree' value and an optional cordz_info value.
- // Replace the tree value only, leaving the cordz_info value unchanged.
- data_.set_tree(rep);
- } else {
- // `data_` contains inlined data: initialize data_ to tree value `rep`.
- data_.make_tree(rep);
- }
+ data_ = {};
}
+ scope.SetCordRep(rep);
}
-inline void Cord::InlineRep::replace_tree(absl::cord_internal::CordRep* rep) {
- ABSL_ASSERT(is_tree());
- if (ABSL_PREDICT_FALSE(rep == nullptr)) {
- set_tree(rep);
- return;
+inline void Cord::InlineRep::CommitTree(const CordRep* old_rep, CordRep* rep,
+ const CordzUpdateScope& scope,
+ MethodIdentifier method) {
+ if (old_rep) {
+ SetTree(rep, scope);
+ } else {
+ EmplaceTree(rep, method);
}
- data_.set_tree(rep);
}
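
The CommitTree() helper above only chooses between the two update paths; the following is a simplified stand-alone model with hypothetical types, not the real cord_internal API:

#include <cassert>

struct Rep { int id; };  // stand-in for CordRep
enum class CommitPath { kSetTree, kEmplaceTree };

// Models the dispatch above: an existing tree root is swapped in place under
// the already-held Cordz update scope, while an inlined value (null old_rep)
// is promoted to a tree, which may also start Cordz sampling.
CommitPath CommitTreeModel(const Rep* old_rep, Rep* /*rep*/) {
  return old_rep != nullptr ? CommitPath::kSetTree : CommitPath::kEmplaceTree;
}

void CommitTreeModelTest() {
  Rep a{1}, b{2};
  assert(CommitTreeModel(&a, &b) == CommitPath::kSetTree);
  assert(CommitTreeModel(nullptr, &b) == CommitPath::kEmplaceTree);
}
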
inline absl::cord_internal::CordRep* Cord::InlineRep::clear() {
+ if (is_tree()) {
+ CordzInfo::MaybeUntrackCord(cordz_info());
+ }
absl::cord_internal::CordRep* result = tree();
ResetToEmpty();
return result;
@@ -1042,6 +1135,9 @@ inline void Cord::InlineRep::CopyToArray(char* dst) const {
constexpr inline Cord::Cord() noexcept {}
+inline Cord::Cord(absl::string_view src)
+ : Cord(src, CordzUpdateTracker::kConstructorString) {}
+
template <typename T>
constexpr Cord::Cord(strings_internal::StringConstant<T>)
: contents_(strings_internal::StringConstant<T>::value.size() <=
@@ -1057,6 +1153,15 @@ inline Cord& Cord::operator=(const Cord& x) {
return *this;
}
+template <typename T, Cord::EnableIfString<T>>
+Cord& Cord::operator=(T&& src) {
+ if (src.size() <= cord_internal::kMaxBytesToCopy) {
+ return operator=(absl::string_view(src));
+ } else {
+ return AssignLargeString(std::forward<T>(src));
+ }
+}
+
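
A usage note for the assignment overload declared above; the 4096-byte value is illustrative and the real cutoff is kMaxBytesToCopy:

#include <string>
#include "absl/strings/cord.h"

void AssignExamples() {
  absl::Cord cord;
  // At or below kMaxBytesToCopy the string is copied as a view.
  cord = std::string("short");
  // Above the threshold, AssignLargeString() builds a tree rep from the
  // string instead of copying it into inline/flat storage.
  cord = std::string(4096, 'x');
}
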
inline Cord::Cord(const Cord& src) : contents_(src.contents_) {}
inline Cord::Cord(Cord&& src) noexcept : contents_(std::move(src.contents_)) {}
@@ -1071,7 +1176,6 @@ inline Cord& Cord::operator=(Cord&& x) noexcept {
}
extern template Cord::Cord(std::string&& src);
-extern template Cord& Cord::operator=(std::string&& src);
inline size_t Cord::size() const {
// Length is 1st field in str.rep_
@@ -1114,7 +1218,7 @@ inline absl::string_view Cord::Flatten() {
}
inline void Cord::Append(absl::string_view src) {
- contents_.AppendArray(src.data(), src.size());
+ contents_.AppendArray(src, CordzUpdateTracker::kAppendString);
}
extern template void Cord::Append(std::string&& src);
diff --git a/third_party/abseil-cpp/absl/strings/cord_ring_reader_test.cc b/third_party/abseil-cpp/absl/strings/cord_ring_reader_test.cc
index 585616f3c0..d9a9a76d1e 100644
--- a/third_party/abseil-cpp/absl/strings/cord_ring_reader_test.cc
+++ b/third_party/abseil-cpp/absl/strings/cord_ring_reader_test.cc
@@ -78,6 +78,7 @@ TEST(CordRingReaderTest, Reset) {
EXPECT_TRUE(static_cast<bool>(reader));
EXPECT_THAT(reader.ring(), Eq(ring));
EXPECT_THAT(reader.index(), Eq(ring->head()));
+ EXPECT_THAT(reader.node(), Eq(ring->entry_child(ring->head())));
EXPECT_THAT(reader.length(), Eq(ring->length));
EXPECT_THAT(reader.consumed(), Eq(flats[0].length()));
EXPECT_THAT(reader.remaining(), Eq(ring->length - reader.consumed()));
@@ -99,11 +100,13 @@ TEST(CordRingReaderTest, Next) {
size_t consumed = reader.consumed();
size_t remaining = reader.remaining();
for (int i = 1; i < flats.size(); ++i) {
+ CordRepRing::index_type index = ring->advance(head, i);
consumed += flats[i].length();
remaining -= flats[i].length();
absl::string_view next = reader.Next();
ASSERT_THAT(next, Eq(flats[i]));
- ASSERT_THAT(reader.index(), Eq(ring->advance(head, i)));
+ ASSERT_THAT(reader.index(), Eq(index));
+ ASSERT_THAT(reader.node(), Eq(ring->entry_child(index)));
ASSERT_THAT(reader.consumed(), Eq(consumed));
ASSERT_THAT(reader.remaining(), Eq(remaining));
}
@@ -125,13 +128,15 @@ TEST(CordRingReaderTest, SeekForward) {
size_t consumed = 0;
size_t remaining = ring->length;;
for (int i = 0; i < flats.size(); ++i) {
+ CordRepRing::index_type index = ring->advance(head, i);
size_t offset = consumed;
consumed += flats[i].length();
remaining -= flats[i].length();
for (int off = 0; off < flats[i].length(); ++off) {
absl::string_view chunk = reader.Seek(offset + off);
ASSERT_THAT(chunk, Eq(flats[i].substr(off)));
- ASSERT_THAT(reader.index(), Eq(ring->advance(head, i)));
+ ASSERT_THAT(reader.index(), Eq(index));
+ ASSERT_THAT(reader.node(), Eq(ring->entry_child(index)));
ASSERT_THAT(reader.consumed(), Eq(consumed));
ASSERT_THAT(reader.remaining(), Eq(remaining));
}
@@ -150,11 +155,13 @@ TEST(CordRingReaderTest, SeekBackward) {
size_t consumed = ring->length;
size_t remaining = 0;
for (int i = flats.size() - 1; i >= 0; --i) {
+ CordRepRing::index_type index = ring->advance(head, i);
size_t offset = consumed - flats[i].length();
for (int off = 0; off < flats[i].length(); ++off) {
absl::string_view chunk = reader.Seek(offset + off);
ASSERT_THAT(chunk, Eq(flats[i].substr(off)));
- ASSERT_THAT(reader.index(), Eq(ring->advance(head, i)));
+ ASSERT_THAT(reader.index(), Eq(index));
+ ASSERT_THAT(reader.node(), Eq(ring->entry_child(index)));
ASSERT_THAT(reader.consumed(), Eq(consumed));
ASSERT_THAT(reader.remaining(), Eq(remaining));
}
diff --git a/third_party/abseil-cpp/absl/strings/cord_ring_test.cc b/third_party/abseil-cpp/absl/strings/cord_ring_test.cc
index 7d75e106e7..cc8fbaf995 100644
--- a/third_party/abseil-cpp/absl/strings/cord_ring_test.cc
+++ b/third_party/abseil-cpp/absl/strings/cord_ring_test.cc
@@ -31,9 +31,6 @@
extern thread_local bool cord_ring;
-// TOOD(b/177688959): weird things happened with the original test
-#define ASAN_BUG_177688959_FIXED false
-
namespace absl {
ABSL_NAMESPACE_BEGIN
namespace {
@@ -101,15 +98,22 @@ using TestParams = std::vector<TestParam>;
// Matcher validating when mutable copies are required / performed.
MATCHER_P2(EqIfPrivate, param, rep,
absl::StrCat("Equal 0x", absl::Hex(rep), " if private")) {
- return param.refcount_is_one ? arg == rep : arg != rep;
+ return param.refcount_is_one ? arg == rep : true;
}
// Matcher validating when mutable copies are required / performed.
MATCHER_P2(EqIfPrivateAndCapacity, param, rep,
absl::StrCat("Equal 0x", absl::Hex(rep),
" if private and capacity")) {
- return (param.refcount_is_one && param.with_capacity) ? arg == rep
- : arg != rep;
+ return (param.refcount_is_one && param.with_capacity) ? arg == rep : true;
+}
+
+// Matcher validating that a shared ring was re-allocated. Should only be used
+// for tests doing exactly one update, as subsequent updates could return the
+// original (freed and re-used) pointer.
+MATCHER_P2(NeIfShared, param, rep,
+ absl::StrCat("Not equal 0x", absl::Hex(rep), " if shared")) {
+ return param.refcount_is_one ? true : arg != rep;
}
MATCHER_P2(EqIfInputPrivate, param, rep, "Equal if input is private") {
@@ -340,19 +344,15 @@ std::string TestParamToString(const testing::TestParamInfo<TestParam>& info) {
class CordRingTest : public testing::Test {
public:
~CordRingTest() override {
-#if ASAN_BUG_177688959_FIXED
for (CordRep* rep : unrefs_) {
CordRep::Unref(rep);
}
-#endif
}
template <typename CordRepType>
CordRepType* NeedsUnref(CordRepType* rep) {
assert(rep);
-#if ASAN_BUG_177688959_FIXED
unrefs_.push_back(rep);
-#endif
return rep;
}
@@ -362,26 +362,16 @@ class CordRingTest : public testing::Test {
return NeedsUnref(rep);
}
- void Unref(CordRep* rep) {
-#if !ASAN_BUG_177688959_FIXED
- CordRep::Unref(rep);
-#endif
- }
-
private:
-#if ASAN_BUG_177688959_FIXED
std::vector<CordRep*> unrefs_;
-#endif
};
class CordRingTestWithParam : public testing::TestWithParam<TestParam> {
public:
~CordRingTestWithParam() override {
-#if ASAN_BUG_177688959_FIXED
for (CordRep* rep : unrefs_) {
CordRep::Unref(rep);
}
-#endif
}
CordRepRing* CreateWithCapacity(CordRep* child, size_t extra_capacity) {
@@ -400,9 +390,7 @@ class CordRingTestWithParam : public testing::TestWithParam<TestParam> {
template <typename CordRepType>
CordRepType* NeedsUnref(CordRepType* rep) {
assert(rep);
-#if ASAN_BUG_177688959_FIXED
unrefs_.push_back(rep);
-#endif
return rep;
}
@@ -412,43 +400,23 @@ class CordRingTestWithParam : public testing::TestWithParam<TestParam> {
return NeedsUnref(rep);
}
- void Unref(CordRep* rep) {
-#if !ASAN_BUG_177688959_FIXED
- CordRep::Unref(rep);
-#endif
- }
-
template <typename CordRepType>
CordRepType* RefIfShared(CordRepType* rep) {
return Shared() ? Ref(rep) : rep;
}
- void UnrefIfShared(CordRep* rep) {
- if (Shared()) Unref(rep);
- }
-
template <typename CordRepType>
CordRepType* RefIfInputShared(CordRepType* rep) {
return InputShared() ? Ref(rep) : rep;
}
- void UnrefIfInputShared(CordRep* rep) {
- if (InputShared()) Unref(rep);
- }
-
template <typename CordRepType>
CordRepType* RefIfInputSharedIndirect(CordRepType* rep) {
return InputSharedIndirect() ? Ref(rep) : rep;
}
- void UnrefIfInputSharedIndirect(CordRep* rep) {
- if (InputSharedIndirect()) Unref(rep);
- }
-
private:
-#if ASAN_BUG_177688959_FIXED
std::vector<CordRep*> unrefs_;
-#endif
};
class CordRingCreateTest : public CordRingTestWithParam {
@@ -520,26 +488,26 @@ class CordRingBuildInputTest : public CordRingTestWithParam {
}
};
-INSTANTIATE_TEST_CASE_P(WithParam, CordRingSubTest,
- testing::ValuesIn(CordRingSubTest::CreateTestParams()),
- TestParamToString);
+INSTANTIATE_TEST_SUITE_P(WithParam, CordRingSubTest,
+ testing::ValuesIn(CordRingSubTest::CreateTestParams()),
+ TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingCreateTest,
testing::ValuesIn(CordRingCreateTest::CreateTestParams()),
TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingCreateFromTreeTest,
testing::ValuesIn(CordRingCreateFromTreeTest::CreateTestParams()),
TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingBuildTest,
testing::ValuesIn(CordRingBuildTest::CreateTestParams()),
TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingBuildInputTest,
testing::ValuesIn(CordRingBuildInputTest::CreateTestParams()),
TestParamToString);
@@ -550,7 +518,6 @@ TEST_P(CordRingCreateTest, CreateFromFlat) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1));
- Unref(result);
}
TEST_P(CordRingCreateTest, CreateFromRing) {
@@ -558,9 +525,8 @@ TEST_P(CordRingCreateTest, CreateFromRing) {
CordRepRing* result = NeedsUnref(CordRepRing::Create(ring));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAreArray(kFoxFlats));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringRing) {
@@ -570,23 +536,20 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringRing) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfInputPrivate(GetParam(), ring));
EXPECT_THAT(ToString(result), string_view(kFox).substr(2, 11));
- UnrefIfInputSharedIndirect(ring);
- UnrefIfInputShared(sub);
- Unref(result);
}
TEST_F(CordRingTest, CreateWithIllegalExtraCapacity) {
- CordRep* flat = NeedsUnref(MakeFlat("Hello world"));
#if defined(ABSL_HAVE_EXCEPTIONS)
+ CordRep* flat = NeedsUnref(MakeFlat("Hello world"));
try {
CordRepRing::Create(flat, CordRepRing::kMaxCapacity);
GTEST_FAIL() << "expected std::length_error exception";
} catch (const std::length_error&) {
}
#elif defined(GTEST_HAS_DEATH_TEST)
+ CordRep* flat = NeedsUnref(MakeFlat("Hello world"));
EXPECT_DEATH(CordRepRing::Create(flat, CordRepRing::kMaxCapacity), ".*");
#endif
- Unref(flat);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfFlat) {
@@ -597,9 +560,6 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfFlat) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(20));
EXPECT_THAT(ToFlats(result), ElementsAre(str1.substr(4, 20)));
- Unref(result);
- UnrefIfInputShared(flat);
- UnrefIfInputSharedIndirect(child);
}
TEST_P(CordRingCreateTest, CreateFromExternal) {
@@ -609,8 +569,6 @@ TEST_P(CordRingCreateTest, CreateFromExternal) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1));
- Unref(result);
- UnrefIfInputShared(child);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfExternal) {
@@ -621,9 +579,6 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfExternal) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(24));
EXPECT_THAT(ToFlats(result), ElementsAre(str1.substr(1, 24)));
- Unref(result);
- UnrefIfInputShared(external);
- UnrefIfInputSharedIndirect(child);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfLargeExternal) {
@@ -637,9 +592,6 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfLargeExternal) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str.size()));
EXPECT_THAT(ToRawFlats(result), ElementsAre(str));
- Unref(result);
- UnrefIfInputShared(external);
- UnrefIfInputSharedIndirect(child);
}
TEST_P(CordRingBuildInputTest, CreateFromConcat) {
@@ -652,10 +604,6 @@ TEST_P(CordRingBuildInputTest, CreateFromConcat) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(26));
EXPECT_THAT(ToString(result), Eq(kAlphabet));
- UnrefIfInputSharedIndirect(flats[0]);
- UnrefIfInputSharedIndirect(flats[3]);
- UnrefIfInputShared(concat);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, CreateFromSubstringConcat) {
@@ -671,10 +619,6 @@ TEST_P(CordRingBuildInputTest, CreateFromSubstringConcat) {
ASSERT_THAT(result, IsValidRingBuffer());
ASSERT_THAT(result->length, Eq(len));
ASSERT_THAT(ToString(result), string_view(kAlphabet).substr(off, len));
- UnrefIfInputSharedIndirect(flats[0]);
- UnrefIfInputSharedIndirect(flats[3]);
- UnrefIfInputShared(child);
- Unref(result);
}
}
}
@@ -689,7 +633,6 @@ TEST_P(CordRingCreateTest, Properties) {
EXPECT_THAT(result->capacity(), Le(2 * 120 + 1));
EXPECT_THAT(result->entries(), Eq(1));
EXPECT_THAT(result->begin_pos(), Eq(0));
- Unref(result);
}
TEST_P(CordRingCreateTest, EntryForNewFlat) {
@@ -700,7 +643,6 @@ TEST_P(CordRingCreateTest, EntryForNewFlat) {
EXPECT_THAT(result->entry_child(0), Eq(child));
EXPECT_THAT(result->entry_end_pos(0), Eq(str1.length()));
EXPECT_THAT(result->entry_data_offset(0), Eq(0));
- Unref(result);
}
TEST_P(CordRingCreateTest, EntryForNewFlatSubstring) {
@@ -712,7 +654,6 @@ TEST_P(CordRingCreateTest, EntryForNewFlatSubstring) {
EXPECT_THAT(result->entry_child(0), Eq(child));
EXPECT_THAT(result->entry_end_pos(0), Eq(26));
EXPECT_THAT(result->entry_data_offset(0), Eq(10));
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendFlat) {
@@ -722,10 +663,9 @@ TEST_P(CordRingBuildTest, AppendFlat) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, MakeFlat(str2)));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1, str2));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependFlat) {
@@ -735,10 +675,9 @@ TEST_P(CordRingBuildTest, PrependFlat) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, MakeFlat(str2)));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str2, str1));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendString) {
@@ -748,10 +687,9 @@ TEST_P(CordRingBuildTest, AppendString) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1, str2));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendStringHavingExtra) {
@@ -762,8 +700,7 @@ TEST_P(CordRingBuildTest, AppendStringHavingExtra) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
- UnrefIfShared(ring);
- Unref(result);
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
}
TEST_P(CordRingBuildTest, AppendStringHavingPartialExtra) {
@@ -785,13 +722,12 @@ TEST_P(CordRingBuildTest, AppendStringHavingPartialExtra) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
if (GetParam().refcount_is_one) {
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat(str1, str1a), str2a));
} else {
EXPECT_THAT(ToFlats(result), ElementsAre(str1, str2));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendStringHavingExtraInSubstring) {
@@ -802,14 +738,13 @@ TEST_P(CordRingBuildTest, AppendStringHavingExtraInSubstring) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(4 + str2.size()));
if (GetParam().refcount_is_one) {
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat("1234", str2)));
} else {
EXPECT_THAT(ToFlats(result), ElementsAre("1234", str2));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendStringHavingSharedExtra) {
@@ -837,10 +772,9 @@ TEST_P(CordRingBuildTest, AppendStringHavingSharedExtra) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(4 + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre("1234", str2));
- UnrefIfShared(ring);
- Unref(result);
CordRep::Unref(shared_type == 1 ? flat1 : flat);
}
@@ -857,8 +791,6 @@ TEST_P(CordRingBuildTest, AppendStringWithExtra) {
EXPECT_THAT(result->length, Eq(str1.size() + str2.size() + str3.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre(str1, StrCat(str2, str3)));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependString) {
@@ -875,8 +807,6 @@ TEST_P(CordRingBuildTest, PrependString) {
}
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str2, str1));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependStringHavingExtra) {
@@ -887,14 +817,13 @@ TEST_P(CordRingBuildTest, PrependStringHavingExtra) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(4 + str2.size()));
if (GetParam().refcount_is_one) {
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat(str2, "1234")));
} else {
EXPECT_THAT(ToFlats(result), ElementsAre(str2, "1234"));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependStringHavingSharedExtra) {
@@ -920,9 +849,8 @@ TEST_P(CordRingBuildTest, PrependStringHavingSharedExtra) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1a.size() + str2.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre(str2, str1a));
- UnrefIfShared(ring);
- Unref(result);
CordRep::Unref(shared_type == 1 ? flat1 : flat);
}
}
@@ -938,8 +866,6 @@ TEST_P(CordRingBuildTest, PrependStringWithExtra) {
EXPECT_THAT(result->length, Eq(str1.size() + str2.size() + str3.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat(str3, str2), str1));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendPrependStringMix) {
@@ -950,12 +876,10 @@ TEST_P(CordRingBuildTest, AppendPrependStringMix) {
result = CordRepRing::Prepend(result, flats[4 - i]);
result = CordRepRing::Append(result, flats[4 + i]);
}
- UnrefIfShared(ring);
NeedsUnref(result);
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
EXPECT_THAT(ToString(result), kFox);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendPrependStringMixWithExtra) {
@@ -976,8 +900,6 @@ TEST_P(CordRingBuildTest, AppendPrependStringMixWithExtra) {
EXPECT_THAT(ToFlats(result), ElementsAre("The quick brown fox ", "jumps ",
"over the lazy dog"));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendPrependStringMixWithPrependedExtra) {
@@ -998,8 +920,6 @@ TEST_P(CordRingBuildTest, AppendPrependStringMixWithPrependedExtra) {
EXPECT_THAT(ToFlats(result), ElementsAre("The quick brown fox ", "jumps ",
"over the lazy dog"));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingSubTest, SubRing) {
@@ -1011,16 +931,14 @@ TEST_P(CordRingSubTest, SubRing) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::SubRing(ring, offset, 0);
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = 1; len < all.size() - offset; ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::SubRing(ring, offset, len));
ASSERT_THAT(result, IsValidRingBuffer());
ASSERT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
ASSERT_THAT(ToString(result), Eq(all.substr(offset, len)));
- UnrefIfShared(ring);
- Unref(result);
}
}
}
@@ -1039,18 +957,16 @@ TEST_P(CordRingSubTest, SubRingFromLargeExternal) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::SubRing(ring, offset, 0);
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = all.size() - 30; len < all.size() - offset; ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::SubRing(ring, offset, len));
ASSERT_THAT(result, IsValidRingBuffer());
ASSERT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
auto str = ToString(result);
ASSERT_THAT(str, SizeIs(len));
ASSERT_THAT(str, Eq(all.substr(offset, len)));
- UnrefIfShared(ring);
- Unref(result);
}
}
}
@@ -1063,16 +979,14 @@ TEST_P(CordRingSubTest, RemovePrefix) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::RemovePrefix(ring, all.size());
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = 1; len < all.size(); ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::RemovePrefix(ring, len));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToString(result), Eq(all.substr(len)));
- UnrefIfShared(ring);
- Unref(result);
}
}
@@ -1087,7 +1001,6 @@ TEST_P(CordRingSubTest, RemovePrefixFromLargeExternal) {
ElementsAre(
not_a_string_view(external1->base, 1 << 20).remove_prefix(1 << 16),
not_a_string_view(external2->base, 1 << 20)));
- Unref(result);
}
TEST_P(CordRingSubTest, RemoveSuffix) {
@@ -1098,16 +1011,14 @@ TEST_P(CordRingSubTest, RemoveSuffix) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::RemoveSuffix(ring, all.size());
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = 1; len < all.size(); ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::RemoveSuffix(ring, len));
ASSERT_THAT(result, IsValidRingBuffer());
- EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
- EXPECT_THAT(ToString(result), Eq(all.substr(0, all.size() - len)));
- UnrefIfShared(ring);
- Unref(result);
+ ASSERT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
+ ASSERT_THAT(ToString(result), Eq(all.substr(0, all.size() - len)));
}
}
@@ -1120,9 +1031,8 @@ TEST_P(CordRingSubTest, AppendRing) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, child));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAreArray(kFoxFlats));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingWithFlatOffset) {
@@ -1135,11 +1045,9 @@ TEST_P(CordRingBuildInputTest, AppendRingWithFlatOffset) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "brown ", "fox ", "jumps ",
"over ", "the ", "lazy ", "dog"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingWithBrokenOffset) {
@@ -1152,11 +1060,9 @@ TEST_P(CordRingBuildInputTest, AppendRingWithBrokenOffset) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("Head", "umps ", "over ", "the ", "lazy ", "dog"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingWithFlatLength) {
@@ -1169,11 +1075,9 @@ TEST_P(CordRingBuildInputTest, AppendRingWithFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "The ", "quick ", "brown ",
"fox ", "jumps ", "over ", "the "));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendRingWithBrokenFlatLength) {
@@ -1186,11 +1090,9 @@ TEST_P(CordRingBuildTest, AppendRingWithBrokenFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "The ", "quick ", "brown ",
"fox ", "jumps ", "ov"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendRingMiddlePiece) {
@@ -1203,11 +1105,9 @@ TEST_P(CordRingBuildTest, AppendRingMiddlePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("Head", "ck ", "brown ", "fox ", "jum"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendRingSinglePiece) {
@@ -1220,11 +1120,8 @@ TEST_P(CordRingBuildTest, AppendRingSinglePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "row"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfInputShared(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingSinglePieceWithPrefix) {
@@ -1241,11 +1138,8 @@ TEST_P(CordRingBuildInputTest, AppendRingSinglePieceWithPrefix) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Prepend", "Head", "row"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfInputShared(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRing) {
@@ -1258,10 +1152,8 @@ TEST_P(CordRingBuildInputTest, PrependRing) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, child));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAreArray(kFoxFlats));
- UnrefIfInputShared(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithFlatOffset) {
@@ -1274,12 +1166,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithFlatOffset) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("brown ", "fox ", "jumps ", "over ",
"the ", "lazy ", "dog", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithBrokenOffset) {
@@ -1291,12 +1180,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithBrokenOffset) {
CordRep* stripped = RefIfInputSharedIndirect(RemovePrefix(21, child));
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("umps ", "over ", "the ", "lazy ", "dog", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithFlatLength) {
@@ -1309,12 +1195,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("The ", "quick ", "brown ", "fox ",
"jumps ", "over ", "the ", "Tail"));
- UnrefIfShared(ring);
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithBrokenFlatLength) {
@@ -1327,12 +1210,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithBrokenFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("The ", "quick ", "brown ", "fox ",
"jumps ", "ov", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingMiddlePiece) {
@@ -1346,12 +1226,9 @@ TEST_P(CordRingBuildInputTest, PrependRingMiddlePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("ck ", "brown ", "fox ", "jum", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingSinglePiece) {
@@ -1364,11 +1241,8 @@ TEST_P(CordRingBuildInputTest, PrependRingSinglePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("row", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingSinglePieceWithPrefix) {
@@ -1384,11 +1258,8 @@ TEST_P(CordRingBuildInputTest, PrependRingSinglePieceWithPrefix) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("row", "Prepend", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_F(CordRingTest, Find) {
@@ -1406,7 +1277,6 @@ TEST_F(CordRingTest, Find) {
ASSERT_THAT(found.offset, Lt(data.length()));
ASSERT_THAT(data[found.offset], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindWithHint) {
@@ -1442,7 +1312,6 @@ TEST_F(CordRingTest, FindWithHint) {
++flat_pos;
flat_offset += flat.length();
}
- Unref(ring);
}
TEST_F(CordRingTest, FindInLargeRing) {
@@ -1464,7 +1333,6 @@ TEST_F(CordRingTest, FindInLargeRing) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[pos.offset], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindTail) {
@@ -1483,7 +1351,6 @@ TEST_F(CordRingTest, FindTail) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[data.length() - pos.offset - 1], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindTailWithHint) {
@@ -1510,7 +1377,6 @@ TEST_F(CordRingTest, FindTailWithHint) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[data.length() - pos.offset - 1], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindTailInLargeRing) {
@@ -1532,7 +1398,6 @@ TEST_F(CordRingTest, FindTailInLargeRing) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[data.length() - pos.offset - 1], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, GetCharacter) {
@@ -1544,7 +1409,6 @@ TEST_F(CordRingTest, GetCharacter) {
for (int i = 0; i < value.length(); ++i) {
ASSERT_THAT(result->GetCharacter(i), Eq(value[i]));
}
- Unref(result);
}
TEST_F(CordRingTest, GetCharacterWithSubstring) {
@@ -1556,7 +1420,67 @@ TEST_F(CordRingTest, GetCharacterWithSubstring) {
for (int i = 0; i < value.length(); ++i) {
ASSERT_THAT(result->GetCharacter(i), Eq(value[i]));
}
- Unref(result);
+}
+
+TEST_F(CordRingTest, IsFlatSingleFlat) {
+ for (bool external : {false, true}) {
+ SCOPED_TRACE(external ? "With External" : "With Flat");
+ absl::string_view str = "Hello world";
+ CordRep* rep = external ? MakeExternal(str) : MakeFlat(str);
+ CordRepRing* ring = NeedsUnref(CordRepRing::Create(rep));
+
+ // The ring is a single non-fragmented flat:
+ absl::string_view fragment;
+ EXPECT_TRUE(ring->IsFlat(nullptr));
+ EXPECT_TRUE(ring->IsFlat(&fragment));
+ EXPECT_THAT(fragment, Eq("Hello world"));
+ fragment = "";
+ EXPECT_TRUE(ring->IsFlat(0, 11, nullptr));
+ EXPECT_TRUE(ring->IsFlat(0, 11, &fragment));
+ EXPECT_THAT(fragment, Eq("Hello world"));
+
+ // Arbitrary sub-ranges must report true as well.
+ EXPECT_TRUE(ring->IsFlat(1, 4, &fragment));
+ EXPECT_THAT(fragment, Eq("ello"));
+ EXPECT_TRUE(ring->IsFlat(6, 5, &fragment));
+ EXPECT_THAT(fragment, Eq("world"));
+ }
+}
+
+TEST_F(CordRingTest, IsFlatMultiFlat) {
+ for (bool external : {false, true}) {
+ SCOPED_TRACE(external ? "With External" : "With Flat");
+ absl::string_view str1 = "Hello world";
+ absl::string_view str2 = "Halt and catch fire";
+ CordRep* rep1 = external ? MakeExternal(str1) : MakeFlat(str1);
+ CordRep* rep2 = external ? MakeExternal(str2) : MakeFlat(str2);
+ CordRepRing* ring = CordRepRing::Append(CordRepRing::Create(rep1), rep2);
+ NeedsUnref(ring);
+
+ // The ring is fragmented, so IsFlat() on the entire cord must be false.
+ EXPECT_FALSE(ring->IsFlat(nullptr));
+ absl::string_view fragment = "Don't touch this";
+ EXPECT_FALSE(ring->IsFlat(&fragment));
+ EXPECT_THAT(fragment, Eq("Don't touch this"));
+
+ // Check ranges that exactly cover each individual flat.
+ EXPECT_TRUE(ring->IsFlat(0, 11, &fragment));
+ EXPECT_THAT(fragment, Eq("Hello world"));
+ EXPECT_TRUE(ring->IsFlat(11, 19, &fragment));
+ EXPECT_THAT(fragment, Eq("Halt and catch fire"));
+
+ // Check an arbitrary partial range inside each flat.
+ EXPECT_TRUE(ring->IsFlat(1, 4, &fragment));
+ EXPECT_THAT(fragment, "ello");
+ EXPECT_TRUE(ring->IsFlat(26, 4, &fragment));
+ EXPECT_THAT(fragment, "fire");
+
+ // Check ranges spanning both flats; these must not report flat.
+ fragment = "Don't touch this";
+ EXPECT_FALSE(ring->IsFlat(1, 18, &fragment));
+ EXPECT_FALSE(ring->IsFlat(10, 2, &fragment));
+ EXPECT_THAT(fragment, Eq("Don't touch this"));
+ }
}
TEST_F(CordRingTest, Dump) {
@@ -1564,7 +1488,6 @@ TEST_F(CordRingTest, Dump) {
auto flats = MakeSpan(kFoxFlats);
CordRepRing* ring = NeedsUnref(FromFlats(flats, kPrepend));
ss << *ring;
- Unref(ring);
}
} // namespace
diff --git a/third_party/abseil-cpp/absl/strings/cord_test.cc b/third_party/abseil-cpp/absl/strings/cord_test.cc
index f9982428b3..14eca15573 100644
--- a/third_party/abseil-cpp/absl/strings/cord_test.cc
+++ b/third_party/abseil-cpp/absl/strings/cord_test.cc
@@ -35,6 +35,7 @@
#include "absl/base/macros.h"
#include "absl/container/fixed_array.h"
#include "absl/strings/cord_test_helpers.h"
+#include "absl/strings/cordz_test_helpers.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
@@ -187,6 +188,19 @@ class CordTestPeer {
static cord_internal::CordzInfo* GetCordzInfo(const Cord& c) {
return c.contents_.cordz_info();
}
+
+ static Cord MakeSubstring(Cord src, size_t offset, size_t length) {
+ ABSL_RAW_CHECK(src.contents_.is_tree(), "Can not be inlined");
+ Cord cord;
+ auto* rep = new cord_internal::CordRepSubstring;
+ rep->tag = cord_internal::SUBSTRING;
+ rep->child = cord_internal::CordRep::Ref(src.contents_.tree());
+ rep->start = offset;
+ rep->length = length;
+ cord.contents_.EmplaceTree(rep,
+ cord_internal::CordzUpdateTracker::kSubCord);
+ return cord;
+ }
};
ABSL_NAMESPACE_END
@@ -227,7 +241,6 @@ TEST(GigabyteCord, FromExternal) {
// caused crashes in production. We grow exponentially so that the code will
// execute in a reasonable amount of time.
absl::Cord c;
- ABSL_RAW_LOG(INFO, "Made a Cord with %zu bytes!", c.size());
c.Append(from);
while (c.size() < max_size) {
c.Append(c);
@@ -466,8 +479,8 @@ TEST(TryFlat, SubstrInlined) {
TEST(TryFlat, SubstrFlat) {
absl::Cord c("longer than 15 bytes");
- c.RemovePrefix(1);
- EXPECT_EQ(c.TryFlat(), "onger than 15 bytes");
+ absl::Cord sub = absl::CordTestPeer::MakeSubstring(c, 1, c.size() - 1);
+ EXPECT_EQ(sub.TryFlat(), "onger than 15 bytes");
}
TEST(TryFlat, Concat) {
@@ -482,16 +495,46 @@ TEST(TryFlat, External) {
TEST(TryFlat, SubstrExternal) {
absl::Cord c = absl::MakeCordFromExternal("hell", [](absl::string_view) {});
- c.RemovePrefix(1);
- EXPECT_EQ(c.TryFlat(), "ell");
+ absl::Cord sub = absl::CordTestPeer::MakeSubstring(c, 1, c.size() - 1);
+ EXPECT_EQ(sub.TryFlat(), "ell");
}
TEST(TryFlat, SubstrConcat) {
absl::Cord c = absl::MakeFragmentedCord({"hello", " world"});
+ absl::Cord sub = absl::CordTestPeer::MakeSubstring(c, 1, c.size() - 1);
+ EXPECT_EQ(sub.TryFlat(), absl::nullopt);
c.RemovePrefix(1);
EXPECT_EQ(c.TryFlat(), absl::nullopt);
}
+TEST(TryFlat, CommonlyAssumedInvariants) {
+ // The behavior tested below is not part of the API contract of Cord, but it's
+ // something we intend to be true in our current implementation. This test
+ // exists to detect and prevent accidental breakage of the implementation.
+ absl::string_view fragments[] = {"A fragmented test",
+ " cord",
+ " to test subcords",
+ " of ",
+ "a",
+ " cord for",
+ " each chunk "
+ "returned by the ",
+ "iterator"};
+ absl::Cord c = absl::MakeFragmentedCord(fragments);
+ int fragment = 0;
+ int offset = 0;
+ absl::Cord::CharIterator itc = c.char_begin();
+ for (absl::string_view sv : c.Chunks()) {
+ absl::string_view expected = fragments[fragment];
+ absl::Cord subcord1 = c.Subcord(offset, sv.length());
+ absl::Cord subcord2 = absl::Cord::AdvanceAndRead(&itc, sv.size());
+ EXPECT_EQ(subcord1.TryFlat(), expected);
+ EXPECT_EQ(subcord2.TryFlat(), expected);
+ ++fragment;
+ offset += sv.length();
+ }
+}
+
static bool IsFlat(const absl::Cord& c) {
return c.chunk_begin() == c.chunk_end() || ++c.chunk_begin() == c.chunk_end();
}
@@ -1274,6 +1317,26 @@ TEST(Cord, Concat_Append) {
EXPECT_EQ(s2.size(), size + 1);
}
+TEST(Cord, DiabolicalGrowth) {
+ // This test exercises a diabolical Append(<one char>) on a cord, making the
+ // cord shared before each Append call, which results in a badly fragmented
+ // cord.
+ // TODO(b/183983616): Apply some minimum compaction when copying a shared
+ // source cord into a mutable copy for updates in CordRepRing.
+ RandomEngine rng(testing::GTEST_FLAG(random_seed));
+ const std::string expected = RandomLowercaseString(&rng, 5000);
+ absl::Cord cord;
+ for (char c : expected) {
+ absl::Cord shared(cord);
+ cord.Append(absl::string_view(&c, 1));
+ }
+ std::string value;
+ absl::CopyCordToString(cord, &value);
+ EXPECT_EQ(value, expected);
+ ABSL_RAW_LOG(INFO, "Diabolical size allocated = %zu",
+ cord.EstimatedMemoryUsage());
+}
+
TEST(MakeFragmentedCord, MakeFragmentedCordFromInitializerList) {
absl::Cord fragmented =
absl::MakeFragmentedCord({"A ", "fragmented ", "Cord"});
diff --git a/third_party/abseil-cpp/absl/strings/cord_test_helpers.h b/third_party/abseil-cpp/absl/strings/cord_test_helpers.h
index f1036e3b13..31a1dc8980 100644
--- a/third_party/abseil-cpp/absl/strings/cord_test_helpers.h
+++ b/third_party/abseil-cpp/absl/strings/cord_test_helpers.h
@@ -17,11 +17,73 @@
#ifndef ABSL_STRINGS_CORD_TEST_HELPERS_H_
#define ABSL_STRINGS_CORD_TEST_HELPERS_H_
+#include <cstdint>
+#include <iostream>
+#include <string>
+
+#include "absl/base/config.h"
#include "absl/strings/cord.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/string_view.h"
namespace absl {
ABSL_NAMESPACE_BEGIN
+// Cord sizes relevant for testing
+enum class TestCordSize {
+ // An empty value
+ kEmpty = 0,
+
+ // An inlined string value
+ kInlined = cord_internal::kMaxInline / 2 + 1,
+
+ // 'Well known' SSO lengths (excluding terminating zero).
+ // libstdcxx has a maximum SSO of 15, libc++ has a maximum SSO of 22.
+ kStringSso1 = 15,
+ kStringSso2 = 22,
+
+ // A string value which is too large to fit in inlined data, but small enough
+ // such that Cord prefers copying the value if possible, i.e.: not stealing
+ // std::string inputs, or referencing existing CordReps on Append, etc.
+ kSmall = cord_internal::kMaxBytesToCopy / 2 + 1,
+
+ // A string value large enough that Cord prefers to reference or steal from
+ // existing inputs rather than copying contents of the input.
+ kMedium = cord_internal::kMaxFlatLength / 2 + 1,
+
+ // A string value large enough to cause it to be stored in multiple flats.
+ kLarge = cord_internal::kMaxFlatLength * 4
+};
+
+// To string helper
+inline absl::string_view ToString(TestCordSize size) {
+ switch (size) {
+ case TestCordSize::kEmpty:
+ return "Empty";
+ case TestCordSize::kInlined:
+ return "Inlined";
+ case TestCordSize::kSmall:
+ return "Small";
+ case TestCordSize::kStringSso1:
+ return "StringSso1";
+ case TestCordSize::kStringSso2:
+ return "StringSso2";
+ case TestCordSize::kMedium:
+ return "Medium";
+ case TestCordSize::kLarge:
+ return "Large";
+ }
+ return "???";
+}
+
+// Returns the length matching the specified size
+inline size_t Length(TestCordSize size) { return static_cast<size_t>(size); }
+
+// Stream output helper
+inline std::ostream& operator<<(std::ostream& stream, TestCordSize size) {
+ return stream << ToString(size);
+}
+
// Creates a multi-segment Cord from an iterable container of strings. The
// resulting Cord is guaranteed to have one segment for every string in the
// container. This allows code to be unit tested with multi-segment Cord
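The TestCordSize helpers added above are meant to drive size-parameterized tests; below is a minimal sketch of that pattern (the fixture, suite, and test names are illustrative and not part of this patch), assuming gtest and this header are available:

class CordSizeTest : public testing::TestWithParam<absl::TestCordSize> {};

INSTANTIATE_TEST_SUITE_P(
    Sizes, CordSizeTest,
    testing::Values(absl::TestCordSize::kInlined, absl::TestCordSize::kSmall,
                    absl::TestCordSize::kLarge),
    [](const testing::TestParamInfo<absl::TestCordSize>& info) {
      // Labels the instances "Inlined", "Small" and "Large".
      return std::string(absl::ToString(info.param));
    });

TEST_P(CordSizeTest, HasRequestedLength) {
  const size_t len = absl::Length(GetParam());
  absl::Cord cord(std::string(len, 'x'));
  EXPECT_EQ(cord.size(), len);
}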
diff --git a/third_party/abseil-cpp/absl/strings/cordz_test.cc b/third_party/abseil-cpp/absl/strings/cordz_test.cc
new file mode 100644
index 0000000000..2b7d30b0e0
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/cordz_test.cc
@@ -0,0 +1,466 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cstdint>
+#include <string>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/base/internal/raw_logging.h"
+#include "absl/base/macros.h"
+#include "absl/strings/cord.h"
+#include "absl/strings/cord_test_helpers.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cordz_functions.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_sample_token.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/string_view.h"
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+using testing::Eq;
+using testing::AnyOf;
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+
+using cord_internal::CordzInfo;
+using cord_internal::CordzSampleToken;
+using cord_internal::CordzStatistics;
+using cord_internal::CordzUpdateTracker;
+using Method = CordzUpdateTracker::MethodIdentifier;
+
+// Do not print cord contents; we only care about the 'size', if anything.
+// Note that this method must be inside the named namespace.
+inline void PrintTo(const Cord& cord, std::ostream* s) {
+ if (s) *s << "Cord[" << cord.size() << "]";
+}
+
+namespace {
+
+auto constexpr kMaxInline = cord_internal::kMaxInline;
+
+// Returns a string_view value of the specified length
+// We do this to avoid 'consuming' large strings in Cord by default.
+absl::string_view MakeString(size_t size) {
+ thread_local std::string str;
+ str = std::string(size, '.');
+ return str;
+}
+
+absl::string_view MakeString(TestCordSize size) {
+ return MakeString(Length(size));
+}
+
+// Returns a cord with a sampled method of kAppendString.
+absl::Cord MakeAppendStringCord(TestCordSize size) {
+ CordzSamplingIntervalHelper always(1);
+ absl::Cord cord;
+ cord.Append(MakeString(size));
+ return cord;
+}
+
+std::string TestParamToString(::testing::TestParamInfo<TestCordSize> size) {
+ return absl::StrCat("On", ToString(size.param), "Cord");
+}
+
+class CordzUpdateTest : public testing::TestWithParam<TestCordSize> {
+ public:
+ Cord& cord() { return cord_; }
+
+ Method InitialOr(Method method) const {
+ return (GetParam() > TestCordSize::kInlined) ? Method::kConstructorString
+ : method;
+ }
+
+ private:
+ CordzSamplingIntervalHelper sample_every_{1};
+ Cord cord_{MakeString(GetParam())};
+};
+
+template <typename T>
+std::string ParamToString(::testing::TestParamInfo<T> param) {
+ return std::string(ToString(param.param));
+}
+
+INSTANTIATE_TEST_SUITE_P(WithParam, CordzUpdateTest,
+ testing::Values(TestCordSize::kEmpty,
+ TestCordSize::kInlined,
+ TestCordSize::kLarge),
+ TestParamToString);
+
+class CordzStringTest : public testing::TestWithParam<TestCordSize> {
+ private:
+ CordzSamplingIntervalHelper sample_every_{1};
+};
+
+INSTANTIATE_TEST_SUITE_P(WithParam, CordzStringTest,
+ testing::Values(TestCordSize::kInlined,
+ TestCordSize::kStringSso1,
+ TestCordSize::kStringSso2,
+ TestCordSize::kSmall,
+ TestCordSize::kLarge),
+ ParamToString<TestCordSize>);
+
+TEST(CordzTest, ConstructSmallArray) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(MakeString(TestCordSize::kSmall));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST(CordzTest, ConstructLargeArray) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(MakeString(TestCordSize::kLarge));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST_P(CordzStringTest, ConstructString) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(std::string(Length(GetParam()), '.'));
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ }
+}
+
+TEST(CordzTest, CopyConstructFromUnsampled) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ Cord cord(src);
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST(CordzTest, CopyConstructFromSampled) {
+ CordzSamplingIntervalHelper sample_never{99999};
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ Cord cord(src);
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+}
+
+TEST(CordzTest, MoveConstruct) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src(MakeString(TestCordSize::kLarge));
+ Cord cord(std::move(src));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST_P(CordzUpdateTest, AssignUnsampledCord) {
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ const CordzInfo* info = GetCordzInfoForTesting(cord());
+ cord() = src;
+ EXPECT_THAT(GetCordzInfoForTesting(cord()), Eq(nullptr));
+ EXPECT_FALSE(CordzInfoIsListed(info));
+}
+
+TEST_P(CordzUpdateTest, AssignSampledCord) {
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ cord() = src;
+ ASSERT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord())->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzUpdateTest, AssignSampledCordToInlined) {
+ CordzSamplingIntervalHelper sample_never{99999};
+ Cord cord;
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ cord = src;
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzUpdateTest, AssignSampledCordToUnsampledCord) {
+ CordzSamplingIntervalHelper sample_never{99999};
+ Cord cord = UnsampledCord(MakeString(TestCordSize::kLarge));
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ cord = src;
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzUpdateTest, AssignUnsampledCordToSampledCordWithoutSampling) {
+ CordzSamplingIntervalHelper sample_never{99999};
+ Cord cord = MakeAppendStringCord(TestCordSize::kLarge);
+ const CordzInfo* info = GetCordzInfoForTesting(cord);
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord = src;
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+ EXPECT_FALSE(CordzInfoIsListed(info));
+}
+
+TEST(CordzUpdateTest, AssignUnsampledCordToSampledCordWithSampling) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord = MakeAppendStringCord(TestCordSize::kLarge);
+ const CordzInfo* info = GetCordzInfoForTesting(cord);
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord = src;
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+ EXPECT_FALSE(CordzInfoIsListed(info));
+}
+
+TEST(CordzUpdateTest, AssignSampledCordToSampledCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord = src;
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzUpdateTest, AssignUnsampledCordToSampledCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord = src;
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzTest, AssignInlinedCordToSampledCord) {
+ CordzSampleToken token;
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(MakeString(TestCordSize::kLarge));
+ const CordzInfo* info = GetCordzInfoForTesting(cord);
+ Cord src = UnsampledCord(MakeString(TestCordSize::kInlined));
+ cord = src;
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+ EXPECT_FALSE(CordzInfoIsListed(info));
+}
+
+TEST(CordzUpdateTest, MoveAssignCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord;
+ Cord src(MakeString(TestCordSize::kLarge));
+ cord = std::move(src);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST_P(CordzUpdateTest, AssignLargeArray) {
+ cord() = MakeString(TestCordSize::kSmall);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignString));
+}
+
+TEST_P(CordzUpdateTest, AssignSmallArray) {
+ cord() = MakeString(TestCordSize::kSmall);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignString));
+}
+
+TEST_P(CordzUpdateTest, AssignInlinedArray) {
+ cord() = MakeString(TestCordSize::kInlined);
+ EXPECT_THAT(GetCordzInfoForTesting(cord()), Eq(nullptr));
+}
+
+TEST_P(CordzStringTest, AssignStringToInlined) {
+ Cord cord;
+ cord = std::string(Length(GetParam()), '.');
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAssignString));
+ }
+}
+
+TEST_P(CordzStringTest, AssignStringToCord) {
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord = std::string(Length(GetParam()), '.');
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kAssignString, 1));
+ }
+}
+
+TEST_P(CordzUpdateTest, AssignInlinedString) {
+ cord() = std::string(Length(TestCordSize::kInlined), '.');
+ EXPECT_THAT(GetCordzInfoForTesting(cord()), Eq(nullptr));
+}
+
+TEST_P(CordzUpdateTest, AppendCord) {
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord().Append(src);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendCord)));
+}
+
+TEST_P(CordzUpdateTest, MoveAppendCord) {
+ cord().Append(UnsampledCord(MakeString(TestCordSize::kLarge)));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendCord)));
+}
+
+TEST_P(CordzUpdateTest, AppendSmallArray) {
+ cord().Append(MakeString(TestCordSize::kSmall));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendString)));
+}
+
+TEST_P(CordzUpdateTest, AppendLargeArray) {
+ cord().Append(MakeString(TestCordSize::kLarge));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendString)));
+}
+
+TEST_P(CordzStringTest, AppendStringToEmpty) {
+ Cord cord;
+ cord.Append(std::string(Length(GetParam()), '.'));
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAppendString));
+ }
+}
+
+TEST_P(CordzStringTest, AppendStringToInlined) {
+ Cord cord(MakeString(TestCordSize::kInlined));
+ cord.Append(std::string(Length(GetParam()), '.'));
+ if (Length(TestCordSize::kInlined) + Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAppendString));
+ }
+}
+
+TEST_P(CordzStringTest, AppendStringToCord) {
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord.Append(std::string(Length(GetParam()), '.'));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kAppendString, 1));
+}
+
+TEST(CordzTest, MakeCordFromExternal) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord = MakeCordFromExternal("Hello world", [](absl::string_view) {});
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kMakeCordFromExternal));
+}
+
+TEST(CordzTest, MakeCordFromEmptyExternal) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord = MakeCordFromExternal({}, [](absl::string_view) {});
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST_P(CordzUpdateTest, PrependCord) {
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord().Prepend(src);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kPrependCord)));
+}
+
+TEST_P(CordzUpdateTest, PrependSmallArray) {
+ cord().Prepend(MakeString(TestCordSize::kSmall));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kPrependString)));
+}
+
+TEST_P(CordzUpdateTest, PrependLargeArray) {
+ cord().Prepend(MakeString(TestCordSize::kLarge));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kPrependString)));
+}
+
+TEST_P(CordzStringTest, PrependStringToEmpty) {
+ Cord cord;
+ cord.Prepend(std::string(Length(GetParam()), '.'));
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kPrependString));
+ }
+}
+
+TEST_P(CordzStringTest, PrependStringToInlined) {
+ Cord cord(MakeString(TestCordSize::kInlined));
+ cord.Prepend(std::string(Length(GetParam()), '.'));
+ if (Length(TestCordSize::kInlined) + Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kPrependString));
+ }
+}
+
+TEST_P(CordzStringTest, PrependStringToCord) {
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord.Prepend(std::string(Length(GetParam()), '.'));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kPrependString, 1));
+}
+
+TEST(CordzTest, RemovePrefix) {
+ CordzSamplingIntervalHelper sample_every(1);
+ Cord cord(MakeString(TestCordSize::kLarge));
+
+ // Half the cord
+ cord.RemovePrefix(cord.size() / 2);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemovePrefix, 1));
+
+ // TODO(mvels): RemovePrefix does not reset to inlined, except if empty?
+ cord.RemovePrefix(cord.size() - kMaxInline);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemovePrefix, 2));
+
+ cord.RemovePrefix(cord.size());
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST(CordzTest, RemoveSuffix) {
+ CordzSamplingIntervalHelper sample_every(1);
+ Cord cord(MakeString(TestCordSize::kLarge));
+
+ // Half the cord
+ cord.RemoveSuffix(cord.size() / 2);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemoveSuffix, 1));
+
+ // TODO(mvels): RemoveSuffix does not reset to inlined, except if empty?
+ cord.RemoveSuffix(cord.size() - kMaxInline);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemoveSuffix, 2));
+
+ cord.RemoveSuffix(cord.size());
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST(CordzTest, SubCordFromUnsampledCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ Cord cord = src.Subcord(10, src.size() / 2);
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST(CordzTest, SubCordFromSampledCord) {
+ CordzSamplingIntervalHelper sample_never{99999};
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ Cord cord = src.Subcord(10, src.size() / 2);
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+}
+
+TEST(CordzTest, SmallSubCord) {
+ CordzSamplingIntervalHelper sample_never{99999};
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ Cord cord = src.Subcord(10, kMaxInline + 1);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
+}
+
+} // namespace
+
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
diff --git a/third_party/abseil-cpp/absl/strings/cordz_test_helpers.h b/third_party/abseil-cpp/absl/strings/cordz_test_helpers.h
new file mode 100644
index 0000000000..e410eecf7f
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/cordz_test_helpers.h
@@ -0,0 +1,151 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
+#define ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
+
+#include <utility>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/base/macros.h"
+#include "absl/strings/cord.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_sample_token.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/strings/str_cat.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+
+// Returns the CordzInfo for the cord, or nullptr if the cord is not sampled.
+inline const cord_internal::CordzInfo* GetCordzInfoForTesting(
+ const Cord& cord) {
+ if (!cord.contents_.is_tree()) return nullptr;
+ return cord.contents_.cordz_info();
+}
+
+// Returns true if the provided cordz_info is in the list of sampled cords.
+inline bool CordzInfoIsListed(const cord_internal::CordzInfo* cordz_info,
+ cord_internal::CordzSampleToken token = {}) {
+ for (const cord_internal::CordzInfo& info : token) {
+ if (cordz_info == &info) return true;
+ }
+ return false;
+}
+
+// Matcher on Cord that verifies all of:
+// - the cord is sampled
+// - the CordzInfo of the cord is listed / discoverable.
+// - the reported CordzStatistics match the cord's actual properties
+// - the cord has an (initial) UpdateTracker count of 1 for `method`
+MATCHER_P(HasValidCordzInfoOf, method, "CordzInfo matches cord") {
+ const cord_internal::CordzInfo* cord_info = GetCordzInfoForTesting(arg);
+ if (cord_info == nullptr) {
+ *result_listener << "cord is not sampled";
+ return false;
+ }
+ if (!CordzInfoIsListed(cord_info)) {
+ *result_listener << "cord is sampled, but not listed";
+ return false;
+ }
+ cord_internal::CordzStatistics stat = cord_info->GetCordzStatistics();
+ if (stat.size != arg.size()) {
+ *result_listener << "cordz size " << stat.size
+ << " does not match cord size " << arg.size();
+ return false;
+ }
+ if (stat.update_tracker.Value(method) != 1) {
+ *result_listener << "Expected method count 1 for " << method << ", found "
+ << stat.update_tracker.Value(method);
+ return false;
+ }
+ return true;
+}
+
+// Matcher on Cord that verifies that the cord is sampled and that the CordzInfo
+// update tracker has 'method' with a call count of 'n'
+MATCHER_P2(CordzMethodCountEq, method, n,
+ absl::StrCat("CordzInfo method count equals ", n)) {
+ const cord_internal::CordzInfo* cord_info = GetCordzInfoForTesting(arg);
+ if (cord_info == nullptr) {
+ *result_listener << "cord is not sampled";
+ return false;
+ }
+ cord_internal::CordzStatistics stat = cord_info->GetCordzStatistics();
+ if (stat.update_tracker.Value(method) != n) {
+ *result_listener << "Expected method count " << n << " for " << method
+ << ", found " << stat.update_tracker.Value(method);
+ return false;
+ }
+ return true;
+}
+
+// Cordz only picks up a new sampling rate once the previously scheduled event
+// has fired. When Cordz is disabled, a long delay takes place during which no
+// new Cords are considered for profiling. CordzSamplingIntervalHelper burns
+// through that interval and allows tests to assume that the average sampling
+// interval is a particular value.
+class CordzSamplingIntervalHelper {
+ public:
+ explicit CordzSamplingIntervalHelper(int32_t interval)
+ : orig_mean_interval_(absl::cord_internal::get_cordz_mean_interval()) {
+ absl::cord_internal::set_cordz_mean_interval(interval);
+ absl::cord_internal::cordz_set_next_sample_for_testing(interval);
+ }
+
+ ~CordzSamplingIntervalHelper() {
+ absl::cord_internal::set_cordz_mean_interval(orig_mean_interval_);
+ absl::cord_internal::cordz_set_next_sample_for_testing(orig_mean_interval_);
+ }
+
+ private:
+ int32_t orig_mean_interval_;
+};
+
+// Wrapper struct managing a small CordRep `rep`
+struct TestCordRep {
+ cord_internal::CordRepFlat* rep;
+
+ TestCordRep() {
+ rep = cord_internal::CordRepFlat::New(100);
+ rep->length = 100;
+ memset(rep->Data(), 1, 100);
+ }
+ ~TestCordRep() { cord_internal::CordRep::Unref(rep); }
+};
+
+// Wrapper struct managing a small CordRep `rep`, and
+// an InlineData `data` initialized with that CordRep.
+struct TestCordData {
+ TestCordRep rep;
+ cord_internal::InlineData data{rep.rep};
+};
+
+// Creates a Cord that is not sampled
+template <typename... Args>
+Cord UnsampledCord(Args... args) {
+ CordzSamplingIntervalHelper never(9999);
+ Cord cord(std::forward<Args>(args)...);
+ ABSL_ASSERT(GetCordzInfoForTesting(cord) == nullptr);
+ return cord;
+}
+
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
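A short sketch of how these helpers are intended to compose in a test, mirroring the pattern used in cordz_test.cc above. The test name is illustrative, the includes are the ones used by that file, and as in cordz_test.cc this assumes a build with ABSL_INTERNAL_CORDZ_ENABLED:

using Method = absl::cord_internal::CordzUpdateTracker::MethodIdentifier;

TEST(CordzExampleTest, SampledAppendIsTracked) {
  absl::CordzSamplingIntervalHelper sample_every{1};   // sample every cord
  absl::Cord cord;
  cord.Append(std::string(500, 'x'));  // large enough to leave inlined storage
  // The cord should be sampled, discoverable via the sample token, and have an
  // update tracker count of 1 for kAppendString.
  EXPECT_THAT(cord, absl::HasValidCordzInfoOf(Method::kAppendString));
  EXPECT_THAT(cord, absl::CordzMethodCountEq(Method::kAppendString, 1));
}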
diff --git a/third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc b/third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc
index 8b11868c88..d29acaf462 100644
--- a/third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc
@@ -52,7 +52,7 @@ static_assert(std::numeric_limits<double>::digits == 53, "IEEE double fact");
// The lowest valued 19-digit decimal mantissa we can read still contains
// sufficient information to reconstruct a binary mantissa.
-static_assert(1000000000000000000u > (uint64_t(1) << (53 + 3)), "(b) above");
+static_assert(1000000000000000000u > (uint64_t{1} << (53 + 3)), "(b) above");
// ParseFloat<16> will read the first 15 significant digits of the mantissa.
//
diff --git a/third_party/abseil-cpp/absl/strings/internal/cord_internal.h b/third_party/abseil-cpp/absl/strings/internal/cord_internal.h
index a1ba67fec3..813b3f3527 100644
--- a/third_party/abseil-cpp/absl/strings/internal/cord_internal.h
+++ b/third_party/abseil-cpp/absl/strings/internal/cord_internal.h
@@ -329,18 +329,17 @@ static constexpr cordz_info_t BigEndianByte(unsigned char value) {
class InlineData {
public:
+ // DefaultInitType forces the use of the default initialization constructor.
+ enum DefaultInitType { kDefaultInit };
+
// kNullCordzInfo holds the big endian representation of intptr_t(1)
// This is the 'null' / initial value of 'cordz_info'. The null value
// is specifically big endian 1 as with 64-bit pointers, the last
// byte of cordz_info overlaps with the last byte holding the tag.
static constexpr cordz_info_t kNullCordzInfo = BigEndianByte(1);
- // kFakeCordzInfo holds a 'fake', non-null cordz-info value we use to
- // emulate the previous 'kProfiled' tag logic in 'set_profiled' until
- // cord code is changed to store cordz_info values in InlineData.
- static constexpr cordz_info_t kFakeCordzInfo = BigEndianByte(9);
-
constexpr InlineData() : as_chars_{0} {}
+ explicit InlineData(DefaultInitType) {}
explicit constexpr InlineData(CordRep* rep) : as_tree_(rep) {}
explicit constexpr InlineData(absl::string_view chars)
: as_chars_{
@@ -367,6 +366,16 @@ class InlineData {
return as_tree_.cordz_info != kNullCordzInfo;
}
+ // Returns true if either of the provided instances holds a cordz_info value.
+ // This method is more efficient than the equivalent `data1.is_profiled() ||
+ // data2.is_profiled()`. Requires both arguments to hold a tree.
+ static bool is_either_profiled(const InlineData& data1,
+ const InlineData& data2) {
+ assert(data1.is_tree() && data2.is_tree());
+ return (data1.as_tree_.cordz_info | data2.as_tree_.cordz_info) !=
+ kNullCordzInfo;
+ }
+
// Returns the cordz_info sampling instance for this instance, or nullptr
// if the current instance is not sampled and does not have CordzInfo data.
// Requires the current instance to hold a tree value.
@@ -454,13 +463,6 @@ class InlineData {
tag() = static_cast<char>(size << 1);
}
- // Sets or unsets the 'is_profiled' state of this instance.
- // Requires the current instance to hold a tree value.
- void set_profiled(bool profiled) {
- assert(is_tree());
- as_tree_.cordz_info = profiled ? kFakeCordzInfo : kNullCordzInfo;
- }
-
private:
// See cordz_info_t for forced alignment and size of `cordz_info` details.
struct AsTree {
diff --git a/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.cc b/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.cc
index 4d31d1d97c..f78c94e19b 100644
--- a/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.cc
@@ -32,15 +32,6 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {
-// See https://bugs.llvm.org/show_bug.cgi?id=48477
-#ifdef __clang__
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wshadow"
-#if __has_warning("-Wshadow-field")
-#pragma clang diagnostic ignored "-Wshadow-field"
-#endif
-#endif
-
namespace {
using index_type = CordRepRing::index_type;
@@ -301,7 +292,7 @@ bool CordRepRing::IsValid(std::ostream& output) const {
if (offset >= child->length || entry_length > child->length - offset) {
output << "entry[" << head << "] has offset " << offset
<< " and entry length " << entry_length
- << " which are outside of the childs length of " << child->length;
+ << " which are outside of the child's length of " << child->length;
return false;
}
@@ -400,10 +391,11 @@ CordRepRing* CordRepRing::Mutable(CordRepRing* rep, size_t extra) {
// Get current number of entries, and check for max capacity.
size_t entries = rep->entries();
- size_t min_extra = (std::max)(extra, rep->capacity() * 2 - entries);
if (!rep->refcount.IsOne()) {
- return Copy(rep, rep->head(), rep->tail(), min_extra);
+ return Copy(rep, rep->head(), rep->tail(), extra);
} else if (entries + extra > rep->capacity()) {
+ const size_t min_grow = rep->capacity() + rep->capacity() / 2;
+ const size_t min_extra = (std::max)(extra, min_grow - entries);
CordRepRing* newrep = CordRepRing::New(entries, min_extra);
newrep->Fill<false>(rep, rep->head(), rep->tail());
CordRepRing::Delete(rep);
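The hunk above changes the growth policy in CordRepRing::Mutable(): when a privately owned ring runs out of capacity it now grows by at least half of its current capacity rather than roughly doubling, and a shared ring is copied with only the requested extra capacity. A minimal sketch of the new sizing arithmetic, with illustrative names that are not part of the patch:

#include <algorithm>
#include <cstddef>

// Mirrors the `min_extra` computation in the patched Mutable() for the
// "owned but full" case: grow by ~1.5x, but never by less than `extra`.
size_t MinExtraEntries(size_t entries, size_t capacity, size_t extra) {
  const size_t min_grow = capacity + capacity / 2;
  return std::max(extra, min_grow - entries);
}
// Example: entries == capacity == 64 and extra == 1 gives max(1, 96 - 64) == 32.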
@@ -449,12 +441,12 @@ Span<char> CordRepRing::GetPrependBuffer(size_t size) {
}
CordRepRing* CordRepRing::CreateFromLeaf(CordRep* child, size_t offset,
- size_t length, size_t extra) {
+ size_t len, size_t extra) {
CordRepRing* rep = CordRepRing::New(1, extra);
rep->head_ = 0;
rep->tail_ = rep->advance(0);
- rep->length = length;
- rep->entry_end_pos()[0] = length;
+ rep->length = len;
+ rep->entry_end_pos()[0] = len;
rep->entry_child()[0] = child;
rep->entry_data_offset()[0] = static_cast<offset_type>(offset);
return Validate(rep);
@@ -462,16 +454,16 @@ CordRepRing* CordRepRing::CreateFromLeaf(CordRep* child, size_t offset,
CordRepRing* CordRepRing::CreateSlow(CordRep* child, size_t extra) {
CordRepRing* rep = nullptr;
- Consume(child, [&](CordRep* child, size_t offset, size_t length) {
- if (IsFlatOrExternal(child)) {
- rep = rep ? AppendLeaf(rep, child, offset, length)
- : CreateFromLeaf(child, offset, length, extra);
+ Consume(child, [&](CordRep* child_arg, size_t offset, size_t len) {
+ if (IsFlatOrExternal(child_arg)) {
+ rep = rep ? AppendLeaf(rep, child_arg, offset, len)
+ : CreateFromLeaf(child_arg, offset, len, extra);
} else if (rep) {
- rep = AddRing<AddMode::kAppend>(rep, child->ring(), offset, length);
- } else if (offset == 0 && child->length == length) {
- rep = Mutable(child->ring(), extra);
+ rep = AddRing<AddMode::kAppend>(rep, child_arg->ring(), offset, len);
+ } else if (offset == 0 && child_arg->length == len) {
+ rep = Mutable(child_arg->ring(), extra);
} else {
- rep = SubRing(child->ring(), offset, length, extra);
+ rep = SubRing(child_arg->ring(), offset, len, extra);
}
});
return Validate(rep, nullptr, __LINE__);
@@ -490,18 +482,18 @@ CordRepRing* CordRepRing::Create(CordRep* child, size_t extra) {
template <CordRepRing::AddMode mode>
CordRepRing* CordRepRing::AddRing(CordRepRing* rep, CordRepRing* ring,
- size_t offset, size_t length) {
+ size_t offset, size_t len) {
assert(offset < ring->length);
constexpr bool append = mode == AddMode::kAppend;
Position head = ring->Find(offset);
- Position tail = ring->FindTail(head.index, offset + length);
+ Position tail = ring->FindTail(head.index, offset + len);
const index_type entries = ring->entries(head.index, tail.index);
rep = Mutable(rep, entries);
// The delta for making ring[head].end_pos into 'len - offset'
const pos_type delta_length =
- (append ? rep->begin_pos_ + rep->length : rep->begin_pos_ - length) -
+ (append ? rep->begin_pos_ + rep->length : rep->begin_pos_ - len) -
ring->entry_begin_pos(head.index) - head.offset;
// Start filling at `tail`, or `entries` before `head`
@@ -542,36 +534,36 @@ CordRepRing* CordRepRing::AddRing(CordRepRing* rep, CordRepRing* ring,
}
// Commit changes
- rep->length += length;
+ rep->length += len;
if (append) {
rep->tail_ = filler.pos();
} else {
rep->head_ = filler.head();
- rep->begin_pos_ -= length;
+ rep->begin_pos_ -= len;
}
return Validate(rep);
}
CordRepRing* CordRepRing::AppendSlow(CordRepRing* rep, CordRep* child) {
- Consume(child, [&rep](CordRep* child, size_t offset, size_t length) {
- if (child->tag == RING) {
- rep = AddRing<AddMode::kAppend>(rep, child->ring(), offset, length);
+ Consume(child, [&rep](CordRep* child_arg, size_t offset, size_t len) {
+ if (child_arg->tag == RING) {
+ rep = AddRing<AddMode::kAppend>(rep, child_arg->ring(), offset, len);
} else {
- rep = AppendLeaf(rep, child, offset, length);
+ rep = AppendLeaf(rep, child_arg, offset, len);
}
});
return rep;
}
CordRepRing* CordRepRing::AppendLeaf(CordRepRing* rep, CordRep* child,
- size_t offset, size_t length) {
+ size_t offset, size_t len) {
rep = Mutable(rep, 1);
index_type back = rep->tail_;
const pos_type begin_pos = rep->begin_pos_ + rep->length;
rep->tail_ = rep->advance(rep->tail_);
- rep->length += length;
- rep->entry_end_pos()[back] = begin_pos + length;
+ rep->length += len;
+ rep->entry_end_pos()[back] = begin_pos + len;
rep->entry_child()[back] = child;
rep->entry_data_offset()[back] = static_cast<offset_type>(offset);
return Validate(rep, nullptr, __LINE__);
@@ -589,24 +581,24 @@ CordRepRing* CordRepRing::Append(CordRepRing* rep, CordRep* child) {
}
CordRepRing* CordRepRing::PrependSlow(CordRepRing* rep, CordRep* child) {
- RConsume(child, [&](CordRep* child, size_t offset, size_t length) {
- if (IsFlatOrExternal(child)) {
- rep = PrependLeaf(rep, child, offset, length);
+ RConsume(child, [&](CordRep* child_arg, size_t offset, size_t len) {
+ if (IsFlatOrExternal(child_arg)) {
+ rep = PrependLeaf(rep, child_arg, offset, len);
} else {
- rep = AddRing<AddMode::kPrepend>(rep, child->ring(), offset, length);
+ rep = AddRing<AddMode::kPrepend>(rep, child_arg->ring(), offset, len);
}
});
return Validate(rep);
}
CordRepRing* CordRepRing::PrependLeaf(CordRepRing* rep, CordRep* child,
- size_t offset, size_t length) {
+ size_t offset, size_t len) {
rep = Mutable(rep, 1);
index_type head = rep->retreat(rep->head_);
pos_type end_pos = rep->begin_pos_;
rep->head_ = head;
- rep->length += length;
- rep->begin_pos_ -= length;
+ rep->length += len;
+ rep->begin_pos_ -= len;
rep->entry_end_pos()[head] = end_pos;
rep->entry_child()[head] = child;
rep->entry_data_offset()[head] = static_cast<offset_type>(offset);
@@ -786,18 +778,18 @@ char CordRepRing::GetCharacter(size_t offset) const {
}
CordRepRing* CordRepRing::SubRing(CordRepRing* rep, size_t offset,
- size_t length, size_t extra) {
+ size_t len, size_t extra) {
assert(offset <= rep->length);
- assert(offset <= rep->length - length);
+ assert(offset <= rep->length - len);
- if (length == 0) {
+ if (len == 0) {
CordRep::Unref(rep);
return nullptr;
}
// Find position of first byte
Position head = rep->Find(offset);
- Position tail = rep->FindTail(head.index, offset + length);
+ Position tail = rep->FindTail(head.index, offset + len);
const size_t new_entries = rep->entries(head.index, tail.index);
if (rep->refcount.IsOne() && extra <= (rep->capacity() - new_entries)) {
@@ -814,7 +806,7 @@ CordRepRing* CordRepRing::SubRing(CordRepRing* rep, size_t offset,
}
// Adjust begin_pos and length
- rep->length = length;
+ rep->length = len;
rep->begin_pos_ += offset;
// Adjust head and tail blocks
@@ -888,10 +880,6 @@ CordRepRing* CordRepRing::RemoveSuffix(CordRepRing* rep, size_t len,
return Validate(rep);
}
-#ifdef __clang__
-#pragma clang diagnostic pop
-#endif
-
} // namespace cord_internal
ABSL_NAMESPACE_END
} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.h b/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.h
index c74d3353ff..2082a5653f 100644
--- a/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.h
+++ b/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring.h
@@ -30,15 +30,6 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {
-// See https://bugs.llvm.org/show_bug.cgi?id=48477
-#ifdef __clang__
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wshadow"
-#if __has_warning("-Wshadow-field")
-#pragma clang diagnostic ignored "-Wshadow-field"
-#endif
-#endif
-
// All operations modifying a ring buffer are implemented as static methods
// requiring a CordRepRing instance with a reference adopted by the method.
//
@@ -210,23 +201,23 @@ class CordRepRing : public CordRep {
// referencing up to `size` capacity directly before the existing data.
Span<char> GetPrependBuffer(size_t size);
- // Returns a cord ring buffer containing `length` bytes of data starting at
+ // Returns a cord ring buffer containing `len` bytes of data starting at
// `offset`. If the input is not shared, this function will remove all head
// and tail child nodes outside of the requested range, and adjust the new
// head and tail nodes as required. If the input is shared, this function
// returns a new instance sharing some or all of the nodes from the input.
- static CordRepRing* SubRing(CordRepRing* r, size_t offset, size_t length,
+ static CordRepRing* SubRing(CordRepRing* r, size_t offset, size_t len,
size_t extra = 0);
- // Returns a cord ring buffer with the first `length` bytes removed.
+ // Returns a cord ring buffer with the first `len` bytes removed.
// If the input is not shared, this function will remove all head child nodes
// fully inside the first `len` bytes, and adjust the new head as required.
// If the input is shared, this function returns a new instance sharing some
// or all of the nodes from the input.
- static CordRepRing* RemoveSuffix(CordRepRing* r, size_t length,
+ static CordRepRing* RemoveSuffix(CordRepRing* r, size_t len,
size_t extra = 0);
- // Returns a cord ring buffer with the last `length` bytes removed.
+ // Returns a cord ring buffer with the last `len` bytes removed.
// If the input is not shared, this function will remove all head child nodes
// fully inside the first `len` bytes, and adjust the new head as required.
// If the input is shared, this function returns a new instance sharing some
@@ -237,6 +228,18 @@ class CordRepRing : public CordRep {
// Returns the character at `offset`. Requires that `offset < length`.
char GetCharacter(size_t offset) const;
+ // Returns true if this instance manages a single contiguous buffer, in which
+ // case the (optional) output parameter `fragment` is set. Otherwise, the
+ // function returns false, and `fragment` is left unchanged.
+ bool IsFlat(absl::string_view* fragment) const;
+
+ // Returns true if the data starting at `offset` with length `len` is
+ // managed by this instance inside a single contiguous buffer, in which case
+ // the (optional) output parameter `fragment` is set to the contiguous memory
+ // starting at offset `offset` with length `len`. Otherwise, the function
+ // returns false, and `fragment` is left unchanged.
+ bool IsFlat(size_t offset, size_t len, absl::string_view* fragment) const;
+
// Testing only: set capacity to requested capacity.
void SetCapacityForTesting(size_t capacity);
@@ -461,10 +464,10 @@ class CordRepRing : public CordRep {
size_t length, size_t extra);
// Appends or prepends (depending on AddMode) the ring buffer in `ring` to
- // `rep` starting at `offset` with length `length`.
+ // `rep` starting at `offset` with length `len`.
template <AddMode mode>
static CordRepRing* AddRing(CordRepRing* rep, CordRepRing* ring,
- size_t offset, size_t length);
+ size_t offset, size_t len);
// Increases the data offset for entry `index` by `n`.
void AddDataOffset(index_type index, size_t n);
@@ -576,11 +579,26 @@ inline const CordRepRing* CordRep::ring() const {
return static_cast<const CordRepRing*>(this);
}
-std::ostream& operator<<(std::ostream& s, const CordRepRing& rep);
+inline bool CordRepRing::IsFlat(absl::string_view* fragment) const {
+ if (entries() == 1) {
+ if (fragment) *fragment = entry_data(head());
+ return true;
+ }
+ return false;
+}
-#ifdef __clang__
-#pragma clang diagnostic pop
-#endif
+inline bool CordRepRing::IsFlat(size_t offset, size_t len,
+ absl::string_view* fragment) const {
+ const Position pos = Find(offset);
+ const absl::string_view data = entry_data(pos.index);
+ if (data.length() >= len && data.length() - len >= pos.offset) {
+ if (fragment) *fragment = data.substr(pos.offset, len);
+ return true;
+ }
+ return false;
+}
+
+std::ostream& operator<<(std::ostream& s, const CordRepRing& rep);
} // namespace cord_internal
ABSL_NAMESPACE_END
diff --git a/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring_reader.h b/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring_reader.h
index 396c0e2cd8..7ceeaa000e 100644
--- a/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring_reader.h
+++ b/third_party/abseil-cpp/absl/strings/internal/cord_rep_ring_reader.h
@@ -40,6 +40,10 @@ class CordRepRingReader {
// The returned value is undefined if this instance is empty.
CordRepRing::index_type index() const { return index_; }
+ // Returns the current node inside the ring buffer for this instance.
+ // The returned value is undefined if this instance is empty.
+ CordRep* node() const { return ring_->entry_child(index_); }
+
// Returns the length of the referenced ring buffer.
// Requires the current instance to be non empty.
size_t length() const {
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_functions.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_functions.cc
new file mode 100644
index 0000000000..f30080f8c2
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_functions.cc
@@ -0,0 +1,110 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_functions.h"
+
+#include <atomic>
+#include <cmath>
+#include <limits>
+#include <random>
+
+#include "absl/base/attributes.h"
+#include "absl/base/config.h"
+#include "absl/base/internal/exponential_biased.h"
+#include "absl/base/internal/raw_logging.h"
+
+// TODO(b/162942788): weak 'cordz_disabled' value.
+// A strong version is in the 'cordz_disabled_hack_for_odr' library which can
+// be linked in to disable cordz at compile time.
+extern "C" {
+bool absl_internal_cordz_disabled ABSL_ATTRIBUTE_WEAK = false;
+}
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+// The average interval until the next sample. A value of 0 disables profiling
+// while a value of 1 will profile all Cords.
+std::atomic<int> g_cordz_mean_interval(50000);
+
+} // namespace
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+// Special negative 'not initialized' per thread value for cordz_next_sample.
+static constexpr int64_t kInitCordzNextSample = -1;
+
+ABSL_CONST_INIT thread_local int64_t cordz_next_sample = kInitCordzNextSample;
+
+// kIntervalIfDisabled is the number of profile-eligible events that need to
+// occur before the code will confirm that cordz is still disabled.
+constexpr int64_t kIntervalIfDisabled = 1 << 16;
+
+ABSL_ATTRIBUTE_NOINLINE bool cordz_should_profile_slow() {
+ // TODO(b/162942788): check if profiling is disabled at compile time.
+ if (absl_internal_cordz_disabled) {
+ ABSL_RAW_LOG(WARNING, "Cordz info disabled at compile time");
+ // We are permanently disabled: set counter to highest possible value.
+ cordz_next_sample = std::numeric_limits<int64_t>::max();
+ return false;
+ }
+
+ thread_local absl::base_internal::ExponentialBiased
+ exponential_biased_generator;
+ int32_t mean_interval = get_cordz_mean_interval();
+
+ // Check if we disabled profiling. If so, set the next sample to a "large"
+ // number to minimize the overhead of the should_profile codepath.
+ if (mean_interval <= 0) {
+ cordz_next_sample = kIntervalIfDisabled;
+ return false;
+ }
+
+ // Check if we're always sampling.
+ if (mean_interval == 1) {
+ cordz_next_sample = 1;
+ return true;
+ }
+
+ if (cordz_next_sample <= 0) {
+ // If this is the first check on the current thread, check
+ // cordz_should_profile() again using the newly created (initial) stride in
+ // cordz_next_sample.
+ const bool initialized = cordz_next_sample != kInitCordzNextSample;
+ cordz_next_sample = exponential_biased_generator.GetStride(mean_interval);
+ return initialized || cordz_should_profile();
+ }
+
+ --cordz_next_sample;
+ return false;
+}
+
+void cordz_set_next_sample_for_testing(int64_t next_sample) {
+ cordz_next_sample = next_sample;
+}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+int32_t get_cordz_mean_interval() {
+ return g_cordz_mean_interval.load(std::memory_order_acquire);
+}
+
+void set_cordz_mean_interval(int32_t mean_interval) {
+ g_cordz_mean_interval.store(mean_interval, std::memory_order_release);
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_functions.h b/third_party/abseil-cpp/absl/strings/internal/cordz_functions.h
new file mode 100644
index 0000000000..c9ba14508a
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_functions.h
@@ -0,0 +1,85 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_FUNCTIONS_H_
+#define ABSL_STRINGS_CORDZ_FUNCTIONS_H_
+
+#include <stdint.h>
+
+#include "absl/base/attributes.h"
+#include "absl/base/config.h"
+#include "absl/base/optimization.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// Returns the current sample rate. This represents the average interval
+// between samples.
+int32_t get_cordz_mean_interval();
+
+// Sets the sample rate with the average interval between samples.
+void set_cordz_mean_interval(int32_t mean_interval);
+
+// Enable cordz unless any of the following applies:
+// - no thread local support
+// - MSVC build
+// - Android build
+// - Apple build
+// - DLL build
+// Hashtablez is turned off completely in opensource builds.
+// MSVC's static atomics are dynamically initialized in debug mode, which breaks
+// sampling.
+#if defined(ABSL_HAVE_THREAD_LOCAL) && !defined(_MSC_VER) && \
+ !defined(ABSL_BUILD_DLL) && !defined(ABSL_CONSUME_DLL) && \
+ !defined(__ANDROID__) && !defined(__APPLE__)
+#define ABSL_INTERNAL_CORDZ_ENABLED 1
+#endif
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+// cordz_next_sample is the number of events until the next sample event. If
+// the value is 1 or less, the code will check on the next event if cordz is
+// enabled, and if so, will sample the Cord. cordz is only enabled when we can
+// use thread locals.
+ABSL_CONST_INIT extern thread_local int64_t cordz_next_sample;
+
+// Slow path of cordz_should_profile(): determines whether the next cord should
+// be profiled and updates the thread-local `cordz_next_sample` with the
+// interval until the next sample.
+bool cordz_should_profile_slow();
+
+// Returns true if the next cord should be sampled.
+inline bool cordz_should_profile() {
+ if (ABSL_PREDICT_TRUE(cordz_next_sample > 1)) {
+ cordz_next_sample--;
+ return false;
+ }
+ return cordz_should_profile_slow();
+}
+
+// Sets the interval until the next sample (for testing only)
+void cordz_set_next_sample_for_testing(int64_t next_sample);
+
+#else // ABSL_INTERNAL_CORDZ_ENABLED
+
+inline bool cordz_should_profile() { return false; }
+inline void cordz_set_next_sample_for_testing(int64_t) {}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_FUNCTIONS_H_
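cordz_should_profile() and the thread-local cordz_next_sample above implement a per-thread countdown sampler: the hot path only decrements a counter, and a new exponentially distributed stride is drawn on the slow path. Below is a minimal standalone sketch of that pattern, with illustrative names and a plain <random> stride source; it is not the cordz implementation:

#include <cstdint>
#include <random>

// Illustrative countdown sampler: cheap decrement on the hot path, a new
// geometric stride drawn only when the counter runs out.
thread_local int64_t next_sample = 0;

bool ShouldSample(int mean_interval) {
  if (next_sample > 1) {
    --next_sample;  // fast path: no locks, no RNG
    return false;
  }
  thread_local std::mt19937_64 rng{std::random_device{}()};
  std::geometric_distribution<int64_t> stride(1.0 / mean_interval);
  next_sample = stride(rng) + 1;  // slow path: schedule the next sample
  return true;
}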
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_functions_test.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_functions_test.cc
new file mode 100644
index 0000000000..350623c1f3
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_functions_test.cc
@@ -0,0 +1,149 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_functions.h"
+
+#include <thread> // NOLINT we need real clean new threads
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::Eq;
+using ::testing::Ge;
+using ::testing::Le;
+
+TEST(CordzFunctionsTest, SampleRate) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+ int32_t expected_sample_rate = 123;
+ set_cordz_mean_interval(expected_sample_rate);
+ EXPECT_THAT(get_cordz_mean_interval(), Eq(expected_sample_rate));
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+// Cordz is disabled when we don't have thread_local support. All calls to
+// cordz_should_profile() return false when cordz is disabled, so we skip
+// those tests.
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+TEST(CordzFunctionsTest, ShouldProfileDisable) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(0);
+ cordz_set_next_sample_for_testing(0);
+ EXPECT_FALSE(cordz_should_profile());
+ // 1 << 16 is from kIntervalIfDisabled in cordz_functions.cc.
+ EXPECT_THAT(cordz_next_sample, Eq(1 << 16));
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+TEST(CordzFunctionsTest, ShouldProfileAlways) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(1);
+ cordz_set_next_sample_for_testing(1);
+ EXPECT_TRUE(cordz_should_profile());
+ EXPECT_THAT(cordz_next_sample, Le(1));
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+TEST(CordzFunctionsTest, DoesNotAlwaysSampleFirstCord) {
+ // Set a large enough interval such that the chance of 'tons' of threads
+ // randomly sampling the first call is vanishingly small.
+ set_cordz_mean_interval(10000);
+ int tries = 0;
+ bool sampled = false;
+ do {
+ ++tries;
+ ASSERT_THAT(tries, Le(1000));
+ std::thread thread([&sampled] {
+ sampled = cordz_should_profile();
+ });
+ thread.join();
+ } while (sampled);
+}
+
+TEST(CordzFunctionsTest, ShouldProfileRate) {
+ static constexpr int kDesiredMeanInterval = 1000;
+ static constexpr int kSamples = 10000;
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(kDesiredMeanInterval);
+
+ int64_t sum_of_intervals = 0;
+ for (int i = 0; i < kSamples; i++) {
+ // Setting next_sample to 0 will force cordz_should_profile to generate a
+ // new value for next_sample each iteration.
+ cordz_set_next_sample_for_testing(0);
+ cordz_should_profile();
+ sum_of_intervals += cordz_next_sample;
+ }
+
+ // The sum of independent exponential variables is an Erlang distribution,
+ // which is a gamma distribution where the shape parameter is equal to the
+ // number of summands. The distribution used for cordz_should_profile is
+ // actually floor(Exponential(1/mean)) which introduces bias. However, we can
+ // apply the squint-really-hard correction factor. That is, when mean is
+ // large, then if we squint really hard the shape of the distribution between
+ // N and N+1 looks like a uniform distribution. On average, each value for
+ // next_sample will be about 0.5 lower than we would expect from an
+ // exponential distribution. This squint-really-hard correction approach won't
+ // work when mean is smaller than about 10 but works fine when mean is 1000.
+ //
+ // We can use R to calculate a confidence interval. This
+ // shows how to generate a confidence interval with a false positive rate of
+ // one in a billion.
+ //
+ // $ R -q
+ // > mean = 1000
+ // > kSamples = 10000
+ // > errorRate = 1e-9
+ // > correction = -kSamples / 2
+ // > low = qgamma(errorRate/2, kSamples, 1/mean) + correction
+ // > high = qgamma(1 - errorRate/2, kSamples, 1/mean) + correction
+ // > low
+ // [1] 9396115
+ // > high
+ // [1] 10618100
+ EXPECT_THAT(sum_of_intervals, Ge(9396115));
+ EXPECT_THAT(sum_of_intervals, Le(10618100));
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+#else // ABSL_INTERNAL_CORDZ_ENABLED
+
+TEST(CordzFunctionsTest, ShouldProfileDisabled) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(1);
+ cordz_set_next_sample_for_testing(0);
+ EXPECT_FALSE(cordz_should_profile());
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
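
For reference, the bounds asserted in ShouldProfileRate above correspond to the gamma quantiles described in that comment. Under the stated assumptions (n = 10000 samples, mean interval mu = 1000, error rate alpha = 1e-9):

\[
  S \;=\; \sum_{i=1}^{n} \lfloor X_i \rfloor \;\approx\; \sum_{i=1}^{n} X_i - \frac{n}{2},
  \qquad X_i \sim \mathrm{Exp}(1/\mu), \quad \sum_{i=1}^{n} X_i \sim \mathrm{Gamma}(n, \mu),
\]
\[
  \Pr\!\left[\, Q_{\alpha/2} - \tfrac{n}{2} \;\le\; S \;\le\; Q_{1-\alpha/2} - \tfrac{n}{2} \,\right] \;\approx\; 1 - \alpha,
\]

where Q_p is the p-quantile of Gamma(n, mu); evaluating the quantiles (the R snippet in the comment) yields the 9396115 and 10618100 bounds used by the test.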
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_handle.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_handle.cc
new file mode 100644
index 0000000000..a73fefed59
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_handle.cc
@@ -0,0 +1,139 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "absl/strings/internal/cordz_handle.h"
+
+#include <atomic>
+
+#include "absl/base/internal/raw_logging.h" // For ABSL_RAW_CHECK
+#include "absl/base/internal/spinlock.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+using ::absl::base_internal::SpinLockHolder;
+
+ABSL_CONST_INIT CordzHandle::Queue CordzHandle::global_queue_(absl::kConstInit);
+
+CordzHandle::CordzHandle(bool is_snapshot) : is_snapshot_(is_snapshot) {
+ if (is_snapshot) {
+ SpinLockHolder lock(&queue_->mutex);
+ CordzHandle* dq_tail = queue_->dq_tail.load(std::memory_order_acquire);
+ if (dq_tail != nullptr) {
+ dq_prev_ = dq_tail;
+ dq_tail->dq_next_ = this;
+ }
+ queue_->dq_tail.store(this, std::memory_order_release);
+ }
+}
+
+CordzHandle::~CordzHandle() {
+ ODRCheck();
+ if (is_snapshot_) {
+ std::vector<CordzHandle*> to_delete;
+ {
+ SpinLockHolder lock(&queue_->mutex);
+ CordzHandle* next = dq_next_;
+ if (dq_prev_ == nullptr) {
+      // We were the head of the queue; delete every CordzHandle until we
+      // reach either the end of the list or a snapshot handle.
+ while (next && !next->is_snapshot_) {
+ to_delete.push_back(next);
+ next = next->dq_next_;
+ }
+ } else {
+      // Another CordzHandle existed before this one; don't delete anything.
+ dq_prev_->dq_next_ = next;
+ }
+ if (next) {
+ next->dq_prev_ = dq_prev_;
+ } else {
+ queue_->dq_tail.store(dq_prev_, std::memory_order_release);
+ }
+ }
+ for (CordzHandle* handle : to_delete) {
+ delete handle;
+ }
+ }
+}
+
+bool CordzHandle::SafeToDelete() const {
+ return is_snapshot_ || queue_->IsEmpty();
+}
+
+void CordzHandle::Delete(CordzHandle* handle) {
+ assert(handle);
+ if (handle) {
+ handle->ODRCheck();
+ Queue* const queue = handle->queue_;
+ if (!handle->SafeToDelete()) {
+ SpinLockHolder lock(&queue->mutex);
+ CordzHandle* dq_tail = queue->dq_tail.load(std::memory_order_acquire);
+ if (dq_tail != nullptr) {
+ handle->dq_prev_ = dq_tail;
+ dq_tail->dq_next_ = handle;
+ queue->dq_tail.store(handle, std::memory_order_release);
+ return;
+ }
+ }
+ delete handle;
+ }
+}
+
+std::vector<const CordzHandle*> CordzHandle::DiagnosticsGetDeleteQueue() {
+ std::vector<const CordzHandle*> handles;
+ SpinLockHolder lock(&global_queue_.mutex);
+ CordzHandle* dq_tail = global_queue_.dq_tail.load(std::memory_order_acquire);
+ for (const CordzHandle* p = dq_tail; p; p = p->dq_prev_) {
+ handles.push_back(p);
+ }
+ return handles;
+}
+
+bool CordzHandle::DiagnosticsHandleIsSafeToInspect(
+ const CordzHandle* handle) const {
+ ODRCheck();
+ if (!is_snapshot_) return false;
+ if (handle == nullptr) return true;
+ if (handle->is_snapshot_) return false;
+ bool snapshot_found = false;
+ SpinLockHolder lock(&queue_->mutex);
+ for (const CordzHandle* p = queue_->dq_tail; p; p = p->dq_prev_) {
+ if (p == handle) return !snapshot_found;
+ if (p == this) snapshot_found = true;
+ }
+ ABSL_ASSERT(snapshot_found); // Assert that 'this' is in delete queue.
+ return true;
+}
+
+std::vector<const CordzHandle*>
+CordzHandle::DiagnosticsGetSafeToInspectDeletedHandles() {
+ ODRCheck();
+ std::vector<const CordzHandle*> handles;
+ if (!is_snapshot()) {
+ return handles;
+ }
+
+ SpinLockHolder lock(&queue_->mutex);
+ for (const CordzHandle* p = dq_next_; p != nullptr; p = p->dq_next_) {
+ if (!p->is_snapshot()) {
+ handles.push_back(p);
+ }
+ }
+ return handles;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_handle.h b/third_party/abseil-cpp/absl/strings/internal/cordz_handle.h
new file mode 100644
index 0000000000..5df53c782a
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_handle.h
@@ -0,0 +1,131 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_HANDLE_H_
+#define ABSL_STRINGS_CORDZ_HANDLE_H_
+
+#include <atomic>
+#include <vector>
+
+#include "absl/base/config.h"
+#include "absl/base/internal/raw_logging.h"
+#include "absl/base/internal/spinlock.h"
+#include "absl/synchronization/mutex.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// This base class allows multiple types of object (CordzInfo and
+// CordzSampleToken) to exist simultaneously on the delete queue (pointed to by
+// the global queue's dq_tail and traversed using dq_prev_ and dq_next_). The
+// delete queue guarantees that once a profiler creates a CordzSampleToken and
+// has gained visibility into a CordzInfo object, that CordzInfo object will not
+// be deleted prematurely. This allows the profiler to inspect all CordzInfo
+// objects that are alive without needing to hold a global lock.
+class CordzHandle {
+ public:
+ CordzHandle() : CordzHandle(false) {}
+
+ bool is_snapshot() const { return is_snapshot_; }
+
+ // Returns true if this instance is safe to be deleted because it is either a
+ // snapshot, which is always safe to delete, or not included in the global
+ // delete queue and thus not included in any snapshot.
+ // Callers are responsible for making sure this instance can not be newly
+ // discovered by other threads. For example, CordzInfo instances first de-list
+ // themselves from the global CordzInfo list before determining if they are
+ // safe to be deleted directly.
+ // If SafeToDelete returns false, callers MUST use the Delete() method to
+ // safely queue CordzHandle instances for deletion.
+ bool SafeToDelete() const;
+
+ // Deletes the provided instance, or puts it on the delete queue to be deleted
+ // once there are no more sample tokens (snapshot) instances potentially
+ // referencing the instance. `handle` should not be null.
+ static void Delete(CordzHandle* handle);
+
+ // Returns the current entries in the delete queue in LIFO order.
+ static std::vector<const CordzHandle*> DiagnosticsGetDeleteQueue();
+
+ // Returns true if the provided handle is nullptr or guarded by this handle.
+ // Since the CordzSnapshot token is itself a CordzHandle, this method will
+ // allow tests to check if that token is keeping an arbitrary CordzHandle
+ // alive.
+ bool DiagnosticsHandleIsSafeToInspect(const CordzHandle* handle) const;
+
+ // Returns the current entries in the delete queue, in LIFO order, that are
+ // protected by this. CordzHandle objects are only placed on the delete queue
+ // after CordzHandle::Delete is called with them as an argument. Only
+ // CordzHandle objects that are not also CordzSnapshot objects will be
+ // included in the return vector. For each of the handles in the return
+ // vector, the earliest that their memory can be freed is when this
+ // CordzSnapshot object is deleted.
+ std::vector<const CordzHandle*> DiagnosticsGetSafeToInspectDeletedHandles();
+
+ protected:
+ explicit CordzHandle(bool is_snapshot);
+ virtual ~CordzHandle();
+
+ private:
+ // Global queue data. CordzHandle stores a pointer to the global queue
+ // instance to harden against ODR violations.
+ struct Queue {
+ constexpr explicit Queue(absl::ConstInitType)
+ : mutex(absl::kConstInit,
+ absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
+
+ absl::base_internal::SpinLock mutex;
+ std::atomic<CordzHandle*> dq_tail ABSL_GUARDED_BY(mutex){nullptr};
+
+ // Returns true if this delete queue is empty. This method does not acquire
+ // the lock, but does a 'load acquire' observation on the delete queue tail.
+ // It is used inside Delete() to check for the presence of a delete queue
+ // without holding the lock. The assumption is that the caller is in the
+ // state of 'being deleted', and can not be newly discovered by a concurrent
+ // 'being constructed' snapshot instance. Practically, this means that any
+ // such discovery (`find`, 'first' or 'next', etc) must have proper 'happens
+ // before / after' semantics and atomic fences.
+ bool IsEmpty() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ return dq_tail.load(std::memory_order_acquire) == nullptr;
+ }
+ };
+
+ void ODRCheck() const {
+#ifndef NDEBUG
+ ABSL_RAW_CHECK(queue_ == &global_queue_, "ODR violation in Cord");
+#endif
+ }
+
+ ABSL_CONST_INIT static Queue global_queue_;
+ Queue* const queue_ = &global_queue_;
+ const bool is_snapshot_;
+
+ // dq_prev_ and dq_next_ require the global queue mutex to be held.
+ // Unfortunately we can't use thread annotations such that the thread safety
+ // analysis understands that queue_ and global_queue_ are one and the same.
+ CordzHandle* dq_prev_ = nullptr;
+ CordzHandle* dq_next_ = nullptr;
+};
+
+class CordzSnapshot : public CordzHandle {
+ public:
+ CordzSnapshot() : CordzHandle(true) {}
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_HANDLE_H_
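
A short usage sketch of the delete-queue contract documented above: deleting a non-snapshot handle while a CordzSnapshot is alive defers the actual free until the snapshot is destroyed. The free function below is hypothetical; cordz_handle_test.cc, added next, exercises the same behavior in more depth.

#include <cassert>

#include "absl/strings/internal/cordz_handle.h"

void SnapshotKeepsDeletedHandleInspectable() {
  using absl::cord_internal::CordzHandle;
  using absl::cord_internal::CordzSnapshot;

  auto* handle = new CordzHandle();
  CordzSnapshot snapshot;        // joins the global delete queue
  CordzHandle::Delete(handle);   // queued behind the snapshot, not freed yet

  // While `snapshot` is alive, the deleted handle may still be inspected.
  assert(snapshot.DiagnosticsHandleIsSafeToInspect(handle));
}  // ~CordzSnapshot frees the queued non-snapshot handles it guarded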
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_handle_test.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_handle_test.cc
new file mode 100644
index 0000000000..fd68e06b3e
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_handle_test.cc
@@ -0,0 +1,265 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "absl/strings/internal/cordz_handle.h"
+
+#include <random>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/memory/memory.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "absl/synchronization/notification.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::Gt;
+using ::testing::IsEmpty;
+using ::testing::SizeIs;
+
+// Local less verbose helper
+std::vector<const CordzHandle*> DeleteQueue() {
+ return CordzHandle::DiagnosticsGetDeleteQueue();
+}
+
+struct CordzHandleDeleteTracker : public CordzHandle {
+ bool* deleted;
+ explicit CordzHandleDeleteTracker(bool* deleted) : deleted(deleted) {}
+ ~CordzHandleDeleteTracker() override { *deleted = true; }
+};
+
+TEST(CordzHandleTest, DeleteQueueIsEmpty) {
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+}
+
+TEST(CordzHandleTest, CordzHandleCreateDelete) {
+ bool deleted = false;
+ auto* handle = new CordzHandleDeleteTracker(&deleted);
+ EXPECT_FALSE(handle->is_snapshot());
+ EXPECT_TRUE(handle->SafeToDelete());
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+
+ CordzHandle::Delete(handle);
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+ EXPECT_TRUE(deleted);
+}
+
+TEST(CordzHandleTest, CordzSnapshotCreateDelete) {
+ auto* snapshot = new CordzSnapshot();
+ EXPECT_TRUE(snapshot->is_snapshot());
+ EXPECT_TRUE(snapshot->SafeToDelete());
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot));
+ delete snapshot;
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+}
+
+TEST(CordzHandleTest, CordzHandleCreateDeleteWithSnapshot) {
+ bool deleted = false;
+ auto* snapshot = new CordzSnapshot();
+ auto* handle = new CordzHandleDeleteTracker(&deleted);
+ EXPECT_FALSE(handle->SafeToDelete());
+
+ CordzHandle::Delete(handle);
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle, snapshot));
+ EXPECT_FALSE(deleted);
+ EXPECT_FALSE(handle->SafeToDelete());
+
+ delete snapshot;
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+ EXPECT_TRUE(deleted);
+}
+
+TEST(CordzHandleTest, MultiSnapshot) {
+ bool deleted[3] = {false, false, false};
+
+ CordzSnapshot* snapshot[3];
+ CordzHandleDeleteTracker* handle[3];
+ for (int i = 0; i < 3; ++i) {
+ snapshot[i] = new CordzSnapshot();
+ handle[i] = new CordzHandleDeleteTracker(&deleted[i]);
+ CordzHandle::Delete(handle[i]);
+ }
+
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle[2], snapshot[2], handle[1],
+ snapshot[1], handle[0], snapshot[0]));
+ EXPECT_THAT(deleted, ElementsAre(false, false, false));
+
+ delete snapshot[1];
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle[2], snapshot[2], handle[1],
+ handle[0], snapshot[0]));
+ EXPECT_THAT(deleted, ElementsAre(false, false, false));
+
+ delete snapshot[0];
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle[2], snapshot[2]));
+ EXPECT_THAT(deleted, ElementsAre(true, true, false));
+
+ delete snapshot[2];
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+  EXPECT_THAT(deleted, ElementsAre(true, true, true));
+}
+
+TEST(CordzHandleTest, DiagnosticsHandleIsSafeToInspect) {
+ CordzSnapshot snapshot1;
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(nullptr));
+
+ auto* handle1 = new CordzHandle();
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+
+ CordzHandle::Delete(handle1);
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+
+ CordzSnapshot snapshot2;
+ auto* handle2 = new CordzHandle();
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle2));
+ EXPECT_FALSE(snapshot2.DiagnosticsHandleIsSafeToInspect(handle1));
+ EXPECT_TRUE(snapshot2.DiagnosticsHandleIsSafeToInspect(handle2));
+
+ CordzHandle::Delete(handle2);
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+}
+
+TEST(CordzHandleTest, DiagnosticsGetSafeToInspectDeletedHandles) {
+ EXPECT_THAT(DeleteQueue(), IsEmpty());
+
+ auto* handle = new CordzHandle();
+ auto* snapshot1 = new CordzSnapshot();
+
+ // snapshot1 should be able to see handle.
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot1));
+ EXPECT_TRUE(snapshot1->DiagnosticsHandleIsSafeToInspect(handle));
+ EXPECT_THAT(snapshot1->DiagnosticsGetSafeToInspectDeletedHandles(),
+ IsEmpty());
+
+  // This handle will be safe to inspect as long as snapshot1 is alive.
+  // However, since only snapshot1 can prove that the handle is still alive, it
+  // will be hidden from snapshot2.
+ CordzHandle::Delete(handle);
+
+ // This snapshot shouldn't be able to see handle because handle was already
+ // sent to Delete.
+ auto* snapshot2 = new CordzSnapshot();
+
+ // DeleteQueue elements are LIFO order.
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot2, handle, snapshot1));
+
+ EXPECT_TRUE(snapshot1->DiagnosticsHandleIsSafeToInspect(handle));
+ EXPECT_FALSE(snapshot2->DiagnosticsHandleIsSafeToInspect(handle));
+
+ EXPECT_THAT(snapshot1->DiagnosticsGetSafeToInspectDeletedHandles(),
+ ElementsAre(handle));
+ EXPECT_THAT(snapshot2->DiagnosticsGetSafeToInspectDeletedHandles(),
+ IsEmpty());
+
+ CordzHandle::Delete(snapshot1);
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot2));
+
+ CordzHandle::Delete(snapshot2);
+ EXPECT_THAT(DeleteQueue(), IsEmpty());
+}
+
+// Create and delete CordzHandle and CordzSnapshot objects in multiple threads
+// so that tsan has some time to chew on it and look for memory problems.
+TEST(CordzHandleTest, MultiThreaded) {
+ Notification stop;
+ static constexpr int kNumThreads = 4;
+ // Keep the number of handles relatively small so that the test will naturally
+ // transition to an empty delete queue during the test. If there are, say, 100
+ // handles, that will virtually never happen. With 10 handles and around 50k
+ // iterations in each of 4 threads, the delete queue appears to become empty
+ // around 200 times.
+ static constexpr int kNumHandles = 10;
+
+ // Each thread is going to pick a random index and atomically swap its
+ // CordzHandle with one in handles. This way, each thread can avoid
+ // manipulating a CordzHandle that might be operated upon in another thread.
+ std::vector<std::atomic<CordzHandle*>> handles(kNumHandles);
+
+  // Global flag set when any thread has obtained some 'safe to inspect'
+  // handles. On some platforms and OSS tests, pool threads may be starved,
+  // stalled, or simply get a few unlikely random 'handle' coin tosses, so we
+  // satisfy this test by observing that 'some' thread did something
+  // meaningful, which should minimize the potential for flakes.
+ std::atomic<bool> found_safe_to_inspect(false);
+
+ {
+ absl::synchronization_internal::ThreadPool pool(kNumThreads);
+ for (int i = 0; i < kNumThreads; ++i) {
+ pool.Schedule([&stop, &handles, &found_safe_to_inspect]() {
+ std::minstd_rand gen;
+ std::uniform_int_distribution<int> dist_type(0, 2);
+ std::uniform_int_distribution<int> dist_handle(0, kNumHandles - 1);
+
+ while (!stop.HasBeenNotified()) {
+ CordzHandle* handle;
+ switch (dist_type(gen)) {
+ case 0:
+ handle = new CordzHandle();
+ break;
+ case 1:
+ handle = new CordzSnapshot();
+ break;
+ default:
+ handle = nullptr;
+ break;
+ }
+ CordzHandle* old_handle = handles[dist_handle(gen)].exchange(handle);
+ if (old_handle != nullptr) {
+ std::vector<const CordzHandle*> safe_to_inspect =
+ old_handle->DiagnosticsGetSafeToInspectDeletedHandles();
+ for (const CordzHandle* handle : safe_to_inspect) {
+ // We're in a tight loop, so don't generate too many error
+ // messages.
+ ASSERT_FALSE(handle->is_snapshot());
+ }
+ if (!safe_to_inspect.empty()) {
+ found_safe_to_inspect.store(true);
+ }
+ CordzHandle::Delete(old_handle);
+ }
+ }
+
+ // Have each thread attempt to clean up everything. Some thread will be
+ // the last to reach this cleanup code, and it will be guaranteed to
+ // clean up everything because nothing remains to create new handles.
+ for (auto& h : handles) {
+ if (CordzHandle* handle = h.exchange(nullptr)) {
+ CordzHandle::Delete(handle);
+ }
+ }
+ });
+ }
+
+ // The threads will hammer away. Give it a little bit of time for tsan to
+ // spot errors.
+ absl::SleepFor(absl::Seconds(3));
+ stop.Notify();
+ }
+
+ // Confirm that the test did *something*. This check will be satisfied as
+ // long as any thread has deleted a CordzSnapshot object and a non-snapshot
+ // CordzHandle was deleted after the CordzSnapshot was created.
+ // See also comments on `found_safe_to_inspect`
+ EXPECT_TRUE(found_safe_to_inspect.load());
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_info.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_info.cc
new file mode 100644
index 0000000000..a3a0b9c046
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_info.cc
@@ -0,0 +1,436 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_info.h"
+
+#include "absl/base/config.h"
+#include "absl/base/internal/spinlock.h"
+#include "absl/container/inlined_vector.h"
+#include "absl/debugging/stacktrace.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cord_rep_ring.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/synchronization/mutex.h"
+#include "absl/types/span.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+using ::absl::base_internal::SpinLockHolder;
+
+constexpr int CordzInfo::kMaxStackDepth;
+
+ABSL_CONST_INIT CordzInfo::List CordzInfo::global_list_{absl::kConstInit};
+
+namespace {
+
+// CordRepAnalyzer performs the analysis of a cord.
+//
+// It computes absolute node counts and total memory usage, and an 'estimated
+// fair share memory usage' statistic.
+// Conceptually, it divides the 'memory usage' at each location in the 'cord
+// graph' by the cumulative reference count of that location. The cumulative
+// reference count is the factored total of all edges leading into that node.
+//
+// The top level node is treated specially: we assume the current thread
+// (typically called from the CordzHandler) holds a reference purely to
+// perform a safe analysis, and is not part of the application. So we
+// subtract 1 from the reference count of the top node to compute the
+// 'application fair share', excluding the reference of the current thread.
+//
+// An example of fair sharing, and why we multiply reference counts:
+// Assume we have 2 CordReps, both being a Substring referencing a Flat:
+// CordSubstring A (refcount = 5) --> child Flat C (refcount = 2)
+// CordSubstring B (refcount = 9) --> child Flat C (refcount = 2)
+//
+// Flat C has 2 incoming edges from the 2 substrings (refcount = 2) and is not
+// referenced directly anywhere else. Translated into a 'fair share', we then
+// attribute 50% of the memory (memory / refcount = 2) to each incoming edge.
+// Rep A has a refcount of 5, so we attribute each incoming edge 1 / 5th of the
+// memory cost below it, i.e.: the fair share of Rep A of the memory used by C
+// is then 'memory C / (refcount C * refcount A) + (memory A / refcount A)'.
+// It is also easy to see how all incoming edges add up to 100%.
+class CordRepAnalyzer {
+ public:
+ // Creates an analyzer instance binding to `statistics`.
+ explicit CordRepAnalyzer(CordzStatistics& statistics)
+ : statistics_(statistics) {}
+
+ // Analyzes the memory statistics and node counts for the provided `rep`, and
+  // adds the results to `statistics`. Note that node counts and memory sizes
+  // are not initialized; computed values are added to any existing values.
+ void AnalyzeCordRep(const CordRep* rep) {
+ // Process all linear nodes.
+    // As per the class comments, use refcount - 1 on the top level node, as
+    // the top level node is assumed to be referenced only for analysis
+    // purposes.
+ size_t refcount = rep->refcount.Get();
+ RepRef repref{rep, (refcount > 1) ? refcount - 1 : 1};
+
+ // Process all top level linear nodes (substrings and flats).
+ repref = CountLinearReps(repref, memory_usage_);
+
+    // We should have either a concat or ring node if not null.
+ if (repref.rep != nullptr) {
+ assert(repref.rep->tag == RING || repref.rep->tag == CONCAT);
+ if (repref.rep->tag == RING) {
+ AnalyzeRing(repref);
+ } else if (repref.rep->tag == CONCAT) {
+ AnalyzeConcat(repref);
+ }
+ }
+
+ // Adds values to output
+ statistics_.estimated_memory_usage += memory_usage_.total;
+ statistics_.estimated_fair_share_memory_usage += memory_usage_.fair_share;
+ }
+
+ private:
+ // RepRef identifies a CordRep* inside the Cord tree with its cumulative
+ // refcount including itself. For example, a tree consisting of a substring
+ // with a refcount of 3 and a child flat with a refcount of 4 will have RepRef
+ // refcounts of 3 and 12 respectively.
+ struct RepRef {
+ const CordRep* rep;
+ size_t refcount;
+
+ // Returns a 'child' RepRef which contains the cumulative reference count of
+ // this instance multiplied by the child's reference count.
+ RepRef Child(const CordRep* child) const {
+ return RepRef{child, refcount * child->refcount.Get()};
+ }
+ };
+
+ // Memory usage values
+ struct MemoryUsage {
+ size_t total = 0;
+ size_t fair_share = 0;
+
+    // Adds `size` memory usage to this class, with a cumulative (recursive)
+    // reference count of `refcount`.
+ void Add(size_t size, size_t refcount) {
+ total += size;
+ fair_share += size / refcount;
+ }
+ };
+
+  // Returns `repref` if `repref.rep` is not null and of CONCAT type.
+  // Asserts that `repref.rep` is a concat node or null.
+ static RepRef AssertConcat(RepRef repref) {
+ const CordRep* rep = repref.rep;
+ assert(rep == nullptr || rep->tag == CONCAT);
+ return (rep != nullptr && rep->tag == CONCAT) ? repref : RepRef{nullptr, 0};
+ }
+
+  // Counts a flat of the provided allocated size.
+ void CountFlat(size_t size) {
+ statistics_.node_count++;
+ statistics_.node_counts.flat++;
+ if (size <= 64) {
+ statistics_.node_counts.flat_64++;
+ } else if (size <= 128) {
+ statistics_.node_counts.flat_128++;
+ } else if (size <= 256) {
+ statistics_.node_counts.flat_256++;
+ } else if (size <= 512) {
+ statistics_.node_counts.flat_512++;
+ } else if (size <= 1024) {
+ statistics_.node_counts.flat_1k++;
+ }
+ }
+
+  // Processes 'linear' reps (substring, flat, external) not requiring
+  // iteration or recursion. Returns RepRef{nullptr, 0} if all reps were
+  // processed, else returns the top-most non-linear concat or ring cordrep.
+  // Node counts are updated into `statistics_`, and memory usage is updated
+  // into `memory_usage`, which typically references `memory_usage_` except for
+  // ring buffers where we count children unrounded.
+ RepRef CountLinearReps(RepRef rep, MemoryUsage& memory_usage) {
+ // Consume all substrings
+ while (rep.rep->tag == SUBSTRING) {
+ statistics_.node_count++;
+ statistics_.node_counts.substring++;
+ memory_usage.Add(sizeof(CordRepSubstring), rep.refcount);
+ rep = rep.Child(rep.rep->substring()->child);
+ }
+
+ // Consume possible FLAT
+ if (rep.rep->tag >= FLAT) {
+ size_t size = rep.rep->flat()->AllocatedSize();
+ CountFlat(size);
+ memory_usage.Add(size, rep.refcount);
+ return RepRef{nullptr, 0};
+ }
+
+ // Consume possible external
+ if (rep.rep->tag == EXTERNAL) {
+ statistics_.node_count++;
+ statistics_.node_counts.external++;
+ size_t size = rep.rep->length + sizeof(CordRepExternalImpl<intptr_t>);
+ memory_usage.Add(size, rep.refcount);
+ return RepRef{nullptr, 0};
+ }
+
+ return rep;
+ }
+
+ // Analyzes the provided concat node in a flattened recursive way.
+ void AnalyzeConcat(RepRef rep) {
+ absl::InlinedVector<RepRef, 47> pending;
+
+ while (rep.rep != nullptr) {
+ const CordRepConcat* concat = rep.rep->concat();
+ RepRef left = rep.Child(concat->left);
+ RepRef right = rep.Child(concat->right);
+
+ statistics_.node_count++;
+ statistics_.node_counts.concat++;
+ memory_usage_.Add(sizeof(CordRepConcat), rep.refcount);
+
+ right = AssertConcat(CountLinearReps(right, memory_usage_));
+ rep = AssertConcat(CountLinearReps(left, memory_usage_));
+ if (rep.rep != nullptr) {
+ if (right.rep != nullptr) {
+ pending.push_back(right);
+ }
+ } else if (right.rep != nullptr) {
+ rep = right;
+ } else if (!pending.empty()) {
+ rep = pending.back();
+ pending.pop_back();
+ }
+ }
+ }
+
+ // Counts the provided ring buffer child into `child_usage`.
+ void CountRingChild(const CordRep* child, MemoryUsage& child_usage) {
+ RepRef rep{child, static_cast<size_t>(child->refcount.Get())};
+ rep = CountLinearReps(rep, child_usage);
+ assert(rep.rep == nullptr);
+ }
+
+  // Analyzes the provided ring. As ring buffers can have many child nodes, the
+  // effect of rounding errors can become non-trivial, so we first compute the
+  // totals at the ring level, and then divide to obtain the fair share of the
+  // total, including the children's fair share totals.
+ void AnalyzeRing(RepRef rep) {
+ statistics_.node_count++;
+ statistics_.node_counts.ring++;
+ MemoryUsage ring_usage;
+ const CordRepRing* ring = rep.rep->ring();
+ ring_usage.Add(CordRepRing::AllocSize(ring->capacity()), 1);
+ ring->ForEach([&](CordRepRing::index_type pos) {
+ CountRingChild(ring->entry_child(pos), ring_usage);
+ });
+ memory_usage_.total += ring_usage.total;
+ memory_usage_.fair_share += ring_usage.fair_share / rep.refcount;
+ }
+
+ CordzStatistics& statistics_;
+ MemoryUsage memory_usage_;
+};
+
+} // namespace
+
+CordzInfo* CordzInfo::Head(const CordzSnapshot& snapshot) {
+ ABSL_ASSERT(snapshot.is_snapshot());
+
+ // We can do an 'unsafe' load of 'head', as we are guaranteed that the
+ // instance it points to is kept alive by the provided CordzSnapshot, so we
+ // can simply return the current value using an acquire load.
+ // We do enforce in DEBUG builds that the 'head' value is present in the
+ // delete queue: ODR violations may lead to 'snapshot' and 'global_list_'
+ // being in different libraries / modules.
+ CordzInfo* head = global_list_.head.load(std::memory_order_acquire);
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(head));
+ return head;
+}
+
+CordzInfo* CordzInfo::Next(const CordzSnapshot& snapshot) const {
+ ABSL_ASSERT(snapshot.is_snapshot());
+
+ // Similar to the 'Head()' function, we do not need a mutex here.
+ CordzInfo* next = ci_next_.load(std::memory_order_acquire);
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(this));
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(next));
+ return next;
+}
+
+void CordzInfo::TrackCord(InlineData& cord, MethodIdentifier method) {
+ assert(cord.is_tree());
+ assert(!cord.is_profiled());
+ CordzInfo* cordz_info = new CordzInfo(cord.as_tree(), nullptr, method);
+ cord.set_cordz_info(cordz_info);
+ cordz_info->Track();
+}
+
+void CordzInfo::TrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method) {
+ assert(cord.is_tree());
+ assert(src.is_tree());
+
+  // Unsample the current cord, as it is being replaced with 'src', so any
+  // method history is no longer relevant.
+ CordzInfo* cordz_info = cord.cordz_info();
+ if (cordz_info != nullptr) cordz_info->Untrack();
+
+ // Start new cord sample
+ cordz_info = new CordzInfo(cord.as_tree(), src.cordz_info(), method);
+ cord.set_cordz_info(cordz_info);
+ cordz_info->Track();
+}
+
+void CordzInfo::MaybeTrackCordImpl(InlineData& cord, const InlineData& src,
+ MethodIdentifier method) {
+ if (src.is_profiled()) {
+ TrackCord(cord, src, method);
+ } else if (cord.is_profiled()) {
+ cord.cordz_info()->Untrack();
+ cord.clear_cordz_info();
+ }
+}
+
+CordzInfo::MethodIdentifier CordzInfo::GetParentMethod(const CordzInfo* src) {
+ if (src == nullptr) return MethodIdentifier::kUnknown;
+ return src->parent_method_ != MethodIdentifier::kUnknown ? src->parent_method_
+ : src->method_;
+}
+
+int CordzInfo::FillParentStack(const CordzInfo* src, void** stack) {
+ assert(stack);
+ if (src == nullptr) return 0;
+ if (src->parent_stack_depth_) {
+ memcpy(stack, src->parent_stack_, src->parent_stack_depth_ * sizeof(void*));
+ return src->parent_stack_depth_;
+ }
+ memcpy(stack, src->stack_, src->stack_depth_ * sizeof(void*));
+ return src->stack_depth_;
+}
+
+CordzInfo::CordzInfo(CordRep* rep, const CordzInfo* src,
+ MethodIdentifier method)
+ : rep_(rep),
+ stack_depth_(absl::GetStackTrace(stack_, /*max_depth=*/kMaxStackDepth,
+ /*skip_count=*/1)),
+ parent_stack_depth_(FillParentStack(src, parent_stack_)),
+ method_(method),
+ parent_method_(GetParentMethod(src)),
+ create_time_(absl::Now()) {
+ update_tracker_.LossyAdd(method);
+ if (src) {
+ // Copy parent counters.
+ update_tracker_.LossyAdd(src->update_tracker_);
+ }
+}
+
+CordzInfo::~CordzInfo() {
+ // `rep_` is potentially kept alive if CordzInfo is included
+ // in a collection snapshot (which should be rare).
+ if (ABSL_PREDICT_FALSE(rep_)) {
+ CordRep::Unref(rep_);
+ }
+}
+
+void CordzInfo::Track() {
+ SpinLockHolder l(&list_->mutex);
+
+ CordzInfo* const head = list_->head.load(std::memory_order_acquire);
+ if (head != nullptr) {
+ head->ci_prev_.store(this, std::memory_order_release);
+ }
+ ci_next_.store(head, std::memory_order_release);
+ list_->head.store(this, std::memory_order_release);
+}
+
+void CordzInfo::Untrack() {
+ ODRCheck();
+ {
+ SpinLockHolder l(&list_->mutex);
+
+ CordzInfo* const head = list_->head.load(std::memory_order_acquire);
+ CordzInfo* const next = ci_next_.load(std::memory_order_acquire);
+ CordzInfo* const prev = ci_prev_.load(std::memory_order_acquire);
+
+ if (next) {
+ ABSL_ASSERT(next->ci_prev_.load(std::memory_order_acquire) == this);
+ next->ci_prev_.store(prev, std::memory_order_release);
+ }
+ if (prev) {
+ ABSL_ASSERT(head != this);
+ ABSL_ASSERT(prev->ci_next_.load(std::memory_order_acquire) == this);
+ prev->ci_next_.store(next, std::memory_order_release);
+ } else {
+ ABSL_ASSERT(head == this);
+ list_->head.store(next, std::memory_order_release);
+ }
+ }
+
+  // We can no longer be discovered: perform a fast path check to see whether
+  // we are listed on any delete queue; if not, we can delete this instance
+  // directly.
+ if (SafeToDelete()) {
+ UnsafeSetCordRep(nullptr);
+ delete this;
+ return;
+ }
+
+  // We are likely part of a snapshot; extend the life of the CordRep.
+ {
+ absl::MutexLock lock(&mutex_);
+ if (rep_) CordRep::Ref(rep_);
+ }
+ CordzHandle::Delete(this);
+}
+
+void CordzInfo::Lock(MethodIdentifier method)
+ ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_) {
+ mutex_.Lock();
+ update_tracker_.LossyAdd(method);
+ assert(rep_);
+}
+
+void CordzInfo::Unlock() ABSL_UNLOCK_FUNCTION(mutex_) {
+ bool tracked = rep_ != nullptr;
+ mutex_.Unlock();
+ if (!tracked) {
+ Untrack();
+ }
+}
+
+absl::Span<void* const> CordzInfo::GetStack() const {
+ return absl::MakeConstSpan(stack_, stack_depth_);
+}
+
+absl::Span<void* const> CordzInfo::GetParentStack() const {
+ return absl::MakeConstSpan(parent_stack_, parent_stack_depth_);
+}
+
+CordzStatistics CordzInfo::GetCordzStatistics() const {
+ CordzStatistics stats;
+ stats.method = method_;
+ stats.parent_method = parent_method_;
+ stats.update_tracker = update_tracker_;
+ if (CordRep* rep = RefCordRep()) {
+ stats.size = rep->length;
+ CordRepAnalyzer analyzer(stats);
+ analyzer.AnalyzeCordRep(rep);
+ CordRep::Unref(rep);
+ }
+ return stats;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
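
The fair-share arithmetic from the CordRepAnalyzer comment above, spelled out for its A/B/C example. The byte sizes below are invented purely for illustration; the division by cumulative refcounts is the point.

#include <cstdio>

int main() {
  // Substrings A and B each reference flat C; refcounts as in the comment.
  const double memory_A = 100, memory_B = 100, memory_C = 200;
  const double refcount_A = 5, refcount_B = 9, refcount_C = 2;

  // Each rep's own memory is divided by its refcount; C's memory is divided by
  // the cumulative refcount along each incoming edge (refcount_C * refcount_X).
  const double fair_share_A =
      memory_A / refcount_A + memory_C / (refcount_C * refcount_A);
  const double fair_share_B =
      memory_B / refcount_B + memory_C / (refcount_C * refcount_B);

  std::printf("fair share A: %.1f\n", fair_share_A);  // 20.0 + 20.0 = 40.0
  std::printf("fair share B: %.1f\n", fair_share_B);  // ~11.1 + ~11.1 = 22.2
  return 0;
}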
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_info.h b/third_party/abseil-cpp/absl/strings/internal/cordz_info.h
new file mode 100644
index 0000000000..026d5b9981
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_info.h
@@ -0,0 +1,298 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_INFO_H_
+#define ABSL_STRINGS_CORDZ_INFO_H_
+
+#include <atomic>
+#include <cstdint>
+#include <functional>
+
+#include "absl/base/config.h"
+#include "absl/base/internal/raw_logging.h"
+#include "absl/base/internal/spinlock.h"
+#include "absl/base/thread_annotations.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cordz_functions.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/synchronization/mutex.h"
+#include "absl/types/span.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzInfo tracks a profiled Cord. Each of these objects can be in two places.
+// If a Cord is alive, the CordzInfo will be in the global_cordz_infos map, and
+// can also be retrieved via the linked list starting with
+// global_cordz_infos_head and continued via the cordz_info_next() method. When
+// a Cord has reached the end of its lifespan, the CordzInfo object will be
+// migrated out of the global_cordz_infos list and the global_cordz_infos_map,
+// and will either be deleted or appended to the global_delete_queue. If it is
+// placed on the global_delete_queue, the CordzInfo object will be cleaned in
+// the destructor of a CordzSampleToken object.
+class ABSL_LOCKABLE CordzInfo : public CordzHandle {
+ public:
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+ // TrackCord creates a CordzInfo instance which tracks important metrics of
+  // a sampled cord, and stores the created CordzInfo instance into `cord`. All
+ // CordzInfo instances are placed in a global list which is used to discover
+ // and snapshot all actively tracked cords. Callers are responsible for
+ // calling UntrackCord() before the tracked Cord instance is deleted, or to
+ // stop tracking the sampled Cord. Callers are also responsible for guarding
+ // changes to the 'tree' value of a Cord (InlineData.tree) through the Lock()
+ // and Unlock() calls. Any change resulting in a new tree value for the cord
+ // requires a call to SetCordRep() before the old tree has been unreffed
+ // and/or deleted. `method` identifies the Cord public API method initiating
+ // the cord to be sampled.
+ // Requires `cord` to hold a tree, and `cord.cordz_info()` to be null.
+ static void TrackCord(InlineData& cord, MethodIdentifier method);
+
+  // Identical to TrackCord(), except that this function fills the
+  // `parent_stack` and `parent_method` properties of the newly created
+  // CordzInfo instance from the provided `src` instance if `src` is sampled.
+  // This function should be used for sampling 'copy constructed' and 'copy
+  // assigned' cords. This function allows `cord` to be already sampled, in
+  // which case the CordzInfo will be newly created from `src`.
+ static void TrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ // Maybe sample the cord identified by 'cord' for method 'method'.
+ // Uses `cordz_should_profile` to randomly pick cords to be sampled, and if
+ // so, invokes `TrackCord` to start sampling `cord`.
+ static void MaybeTrackCord(InlineData& cord, MethodIdentifier method);
+
+ // Maybe sample the cord identified by 'cord' for method 'method'.
+ // `src` identifies a 'parent' cord which is assigned to `cord`, typically the
+ // input cord for a copy constructor, or an assign method such as `operator=`
+ // `cord` will be sampled if (and only if) `src` is sampled.
+ // If `cord` is currently being sampled and `src` is not being sampled, then
+ // this function will stop sampling the cord and reset the cord's cordz_info.
+ //
+  // Previously this function specified that `cord` would be sampled if either
+  // `src` is sampled or `cord` is randomly picked for sampling. However, this
+  // can cause issues, as there may be paths where some cord is assigned an
+  // indirect copy of its own value. Such a 'string of copies' would then
+  // remain sampled (`src.is_profiled`), and assigning such a cord back to
+  // 'itself' creates a cycle where the cord converges to 'always sampled'.
+ //
+ // For example:
+ //
+ // Cord x;
+ // for (...) {
+ // // Copy ctor --> y.is_profiled := x.is_profiled | random(...)
+ // Cord y = x;
+ // ...
+ // // Assign x = y --> x.is_profiled = y.is_profiled | random(...)
+ // // ==> x.is_profiled |= random(...)
+ // // ==> x converges to 'always profiled'
+ // x = y;
+ // }
+ static void MaybeTrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ // Stops tracking changes for a sampled cord, and deletes the provided info.
+ // This function must be called before the sampled cord instance is deleted,
+ // and before the root cordrep of the sampled cord is unreffed.
+ // This function may extend the lifetime of the cordrep in cases where the
+ // CordInfo instance is being held by a concurrent collection thread.
+ void Untrack();
+
+ // Invokes UntrackCord() on `info` if `info` is not null.
+ static void MaybeUntrackCord(CordzInfo* info);
+
+ CordzInfo() = delete;
+ CordzInfo(const CordzInfo&) = delete;
+ CordzInfo& operator=(const CordzInfo&) = delete;
+
+ // Retrieves the oldest existing CordzInfo.
+ static CordzInfo* Head(const CordzSnapshot& snapshot)
+ ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ // Retrieves the next oldest existing CordzInfo older than 'this' instance.
+ CordzInfo* Next(const CordzSnapshot& snapshot) const
+ ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ // Locks this instance for the update identified by `method`.
+ // Increases the count for `method` in `update_tracker`.
+ void Lock(MethodIdentifier method) ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_);
+
+ // Unlocks this instance. If the contained `rep` has been set to null
+ // indicating the Cord has been cleared or is otherwise no longer sampled,
+ // then this method will delete this CordzInfo instance.
+ void Unlock() ABSL_UNLOCK_FUNCTION(mutex_);
+
+ // Asserts that this CordzInfo instance is locked.
+ void AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_);
+
+  // Updates the `rep` property of this instance. This method is invoked by
+  // Cord logic each time the root node of a sampled Cord changes, and before
+  // the old root is unreffed and/or deleted. This guarantees that collection
+ // code can always safely take a reference on the tracked cord.
+ // Requires a lock to be held through the `Lock()` method.
+ // TODO(b/117940323): annotate with ABSL_EXCLUSIVE_LOCKS_REQUIRED once all
+ // Cord code is in a state where this can be proven true by the compiler.
+ void SetCordRep(CordRep* rep);
+
+ // Returns the current `rep` property of this instance with a reference
+ // added, or null if this instance represents a cord that has since been
+ // deleted or untracked.
+ CordRep* RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_);
+
+ // Returns the current value of `rep_` for testing purposes only.
+ CordRep* GetCordRepForTesting() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ return rep_;
+ }
+
+ // Sets the current value of `rep_` for testing purposes only.
+ void SetCordRepForTesting(CordRep* rep) ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ rep_ = rep;
+ }
+
+ // Returns the stack trace for where the cord was first sampled. Cords are
+ // potentially sampled when they promote from an inlined cord to a tree or
+ // ring representation, which is not necessarily the location where the cord
+ // was first created. Some cords are created as inlined cords, and only as
+ // data is added do they become a non-inlined cord. However, typically the
+ // location represents reasonably well where the cord is 'created'.
+ absl::Span<void* const> GetStack() const;
+
+ // Returns the stack trace for a sampled cord's 'parent stack trace'. This
+ // value may be set if the cord is sampled (promoted) after being created
+ // from, or being assigned the value of an existing (sampled) cord.
+ absl::Span<void* const> GetParentStack() const;
+
+ // Retrieves the CordzStatistics associated with this Cord. The statistics
+ // are only updated when a Cord goes through a mutation, such as an Append
+ // or RemovePrefix.
+ CordzStatistics GetCordzStatistics() const;
+
+ private:
+ using SpinLock = absl::base_internal::SpinLock;
+ using SpinLockHolder = ::absl::base_internal::SpinLockHolder;
+
+ // Global cordz info list. CordzInfo stores a pointer to the global list
+ // instance to harden against ODR violations.
+ struct List {
+ constexpr explicit List(absl::ConstInitType)
+ : mutex(absl::kConstInit,
+ absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
+
+ SpinLock mutex;
+ std::atomic<CordzInfo*> head ABSL_GUARDED_BY(mutex){nullptr};
+ };
+
+ static constexpr int kMaxStackDepth = 64;
+
+ explicit CordzInfo(CordRep* rep, const CordzInfo* src,
+ MethodIdentifier method);
+ ~CordzInfo() override;
+
+ // Sets `rep_` without holding a lock.
+ void UnsafeSetCordRep(CordRep* rep) ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ void Track();
+
+ // Returns the parent method from `src`, which is either `parent_method_` or
+  // `method_` depending on whether `parent_method_` is kUnknown.
+ // Returns kUnknown if `src` is null.
+ static MethodIdentifier GetParentMethod(const CordzInfo* src);
+
+ // Fills the provided stack from `src`, copying either `parent_stack_` or
+  // `stack_` depending on whether `parent_stack_` is empty, and returns the
+  // size of the parent stack.
+ // Returns 0 if `src` is null.
+ static int FillParentStack(const CordzInfo* src, void** stack);
+
+ void ODRCheck() const {
+#ifndef NDEBUG
+ ABSL_RAW_CHECK(list_ == &global_list_, "ODR violation in Cord");
+#endif
+ }
+
+ // Non-inlined implementation of `MaybeTrackCord`, which is executed if
+ // either `src` is sampled or `cord` is sampled, and either untracks or
+ // tracks `cord` as documented per `MaybeTrackCord`.
+ static void MaybeTrackCordImpl(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ ABSL_CONST_INIT static List global_list_;
+ List* const list_ = &global_list_;
+
+ // ci_prev_ and ci_next_ require the global list mutex to be held.
+ // Unfortunately we can't use thread annotations such that the thread safety
+ // analysis understands that list_ and global_list_ are one and the same.
+ std::atomic<CordzInfo*> ci_prev_{nullptr};
+ std::atomic<CordzInfo*> ci_next_{nullptr};
+
+ mutable absl::Mutex mutex_;
+ CordRep* rep_ ABSL_GUARDED_BY(mutex_);
+
+ void* stack_[kMaxStackDepth];
+ void* parent_stack_[kMaxStackDepth];
+ const int stack_depth_;
+ const int parent_stack_depth_;
+ const MethodIdentifier method_;
+ const MethodIdentifier parent_method_;
+ CordzUpdateTracker update_tracker_;
+ const absl::Time create_time_;
+};
+
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
+ InlineData& cord, MethodIdentifier method) {
+ if (ABSL_PREDICT_FALSE(cordz_should_profile())) {
+ TrackCord(cord, method);
+ }
+}
+
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
+ InlineData& cord, const InlineData& src, MethodIdentifier method) {
+ if (ABSL_PREDICT_FALSE(InlineData::is_either_profiled(cord, src))) {
+ MaybeTrackCordImpl(cord, src, method);
+ }
+}
+
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeUntrackCord(
+ CordzInfo* info) {
+ if (ABSL_PREDICT_FALSE(info)) {
+ info->Untrack();
+ }
+}
+
+inline void CordzInfo::AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_) {
+#ifndef NDEBUG
+ mutex_.AssertHeld();
+#endif
+}
+
+inline void CordzInfo::SetCordRep(CordRep* rep) {
+ AssertHeld();
+ rep_ = rep;
+}
+
+inline void CordzInfo::UnsafeSetCordRep(CordRep* rep) { rep_ = rep; }
+
+inline CordRep* CordzInfo::RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
+ return rep_ ? CordRep::Ref(rep_) : nullptr;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_INFO_H_
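
A sketch of the tracking lifecycle that the TrackCord()/Lock()/SetCordRep()/Unlock() comments above describe, written as a hypothetical caller inside Cord mutation code. The function name and the surrounding InlineData bookkeeping (omitted here) are assumptions; actual Cord call sites are wired up elsewhere in this patch.

#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cordz_info.h"
#include "absl/strings/internal/cordz_update_tracker.h"

namespace ci = absl::cord_internal;

// Hypothetically invoked when a mutation replaces the root of `cord`, which is
// assumed to already hold a tree, with `new_root`.
void OnRootReplaced(ci::InlineData& cord, ci::CordRep* new_root) {
  using CordzInfo = ci::CordzInfo;
  using Tracker = ci::CordzUpdateTracker;

  if (CordzInfo* info = cord.cordz_info()) {
    // Already sampled: per the SetCordRep() contract, update the tracked root
    // under the lock and before the old root is unreffed, so collection code
    // can always safely take a reference on the tracked cord.
    info->Lock(Tracker::kUnknown);
    info->SetCordRep(new_root);
    info->Unlock();
  } else {
    // Not sampled yet: randomly decide whether to start sampling this cord.
    CordzInfo::MaybeTrackCord(cord, Tracker::kUnknown);
  }
}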
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_info_statistics_test.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_info_statistics_test.cc
new file mode 100644
index 0000000000..9f2842d97d
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_info_statistics_test.cc
@@ -0,0 +1,508 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <iostream>
+#include <random>
+#include <vector>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/strings/cord.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cord_rep_ring.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_sample_token.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_scope.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "absl/synchronization/notification.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// Do not print statistics contents; the matcher prints them as needed.
+inline void PrintTo(const CordzStatistics& stats, std::ostream* s) {
+ if (s) *s << "CordzStatistics{...}";
+}
+
+namespace {
+
+// Creates a flat of the specified allocated size
+CordRepFlat* Flat(size_t size) {
+ // Round up to a tag size, as we are going to poke an exact tag size back into
+ // the allocated flat. 'size returning allocators' could grant us more than we
+ // wanted, but we are ok to poke the 'requested' size in the tag, even in the
+ // presence of sized deletes, so we need to make sure the size rounds
+ // perfectly to a tag value.
+ assert(size >= kMinFlatSize);
+ size = RoundUpForTag(size);
+ CordRepFlat* flat = CordRepFlat::New(size - kFlatOverhead);
+ flat->tag = AllocatedSizeToTag(size);
+ flat->length = size - kFlatOverhead;
+ return flat;
+}
+
+// Creates an external of the specified length
+CordRepExternal* External(int length = 512) {
+ return static_cast<CordRepExternal*>(
+ NewExternalRep(absl::string_view("", length), [](absl::string_view) {}));
+}
+
+// Creates a substring of the provided rep, of length `rep->length - 1`.
+CordRepSubstring* Substring(CordRep* rep) {
+ auto* substring = new CordRepSubstring;
+ substring->length = rep->length - 1;
+ substring->tag = SUBSTRING;
+ substring->child = rep;
+ return substring;
+}
+
+// Creates a concat on the provided reps
+CordRepConcat* Concat(CordRep* left, CordRep* right) {
+ auto* concat = new CordRepConcat;
+ concat->length = left->length + right->length;
+ concat->tag = CONCAT;
+ concat->left = left;
+ concat->right = right;
+ return concat;
+}
+
+// Reference count helper
+struct RefHelper {
+ std::vector<CordRep*> refs;
+
+ ~RefHelper() {
+ for (CordRep* rep : refs) {
+ CordRep::Unref(rep);
+ }
+ }
+
+ // Invokes CordRep::Unref() on `rep` when this instance is destroyed.
+ template <typename T>
+ T* NeedsUnref(T* rep) {
+ refs.push_back(rep);
+ return rep;
+ }
+
+ // Adds `n` reference counts to `rep` which will be unreffed when this
+ // instance is destroyed.
+ template <typename T>
+ T* Ref(T* rep, size_t n = 1) {
+ while (n--) {
+ NeedsUnref(CordRep::Ref(rep));
+ }
+ return rep;
+ }
+};
+
+// Sizeof helper. Returns the allocated size of `rep`, excluding any child
+// elements for substring, concat and ring cord reps.
+template <typename T>
+size_t SizeOf(const T* rep) {
+ return sizeof(T);
+}
+
+template <>
+size_t SizeOf(const CordRepFlat* rep) {
+ return rep->AllocatedSize();
+}
+
+template <>
+size_t SizeOf(const CordRepExternal* rep) {
+ // See cord.cc
+ return sizeof(CordRepExternalImpl<intptr_t>) + rep->length;
+}
+
+template <>
+size_t SizeOf(const CordRepRing* rep) {
+ return CordRepRing::AllocSize(rep->capacity());
+}
+
+// Computes fair share memory used in a naive 'we dare to recurse' way.
+size_t FairShare(CordRep* rep, size_t ref = 1) {
+ size_t self = 0, children = 0;
+ ref *= rep->refcount.Get();
+ if (rep->tag >= FLAT) {
+ self = SizeOf(rep->flat());
+ } else if (rep->tag == EXTERNAL) {
+ self = SizeOf(rep->external());
+ } else if (rep->tag == SUBSTRING) {
+ self = SizeOf(rep->substring());
+ children = FairShare(rep->substring()->child, ref);
+ } else if (rep->tag == RING) {
+ self = SizeOf(rep->ring());
+ rep->ring()->ForEach([&](CordRepRing::index_type i) {
+ self += FairShare(rep->ring()->entry_child(i));
+ });
+ } else if (rep->tag == CONCAT) {
+ self = SizeOf(rep->concat());
+ children = FairShare(rep->concat()->left, ref) +
+ FairShare(rep->concat()->right, ref);
+ } else {
+ assert(false);
+ }
+ return self / ref + children;
+}
+
+// Samples the cord and returns CordzInfo::GetStatistics()
+CordzStatistics SampleCord(CordRep* rep) {
+ InlineData cord(rep);
+ CordzInfo::TrackCord(cord, CordzUpdateTracker::kUnknown);
+ CordzStatistics stats = cord.cordz_info()->GetCordzStatistics();
+ cord.cordz_info()->Untrack();
+ return stats;
+}
+
+MATCHER_P(EqStatistics, stats, "Statistics equal expected values") {
+ bool ok = true;
+
+#define STATS_MATCHER_EXPECT_EQ(member) \
+ if (stats.member != arg.member) { \
+ *result_listener << "\n stats." << #member \
+ << ": actual = " << arg.member << ", expected " \
+ << stats.member; \
+ ok = false; \
+ }
+
+ STATS_MATCHER_EXPECT_EQ(size);
+ STATS_MATCHER_EXPECT_EQ(node_count);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_64);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_128);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_256);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_512);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_1k);
+ STATS_MATCHER_EXPECT_EQ(node_counts.external);
+ STATS_MATCHER_EXPECT_EQ(node_counts.concat);
+ STATS_MATCHER_EXPECT_EQ(node_counts.substring);
+ STATS_MATCHER_EXPECT_EQ(node_counts.ring);
+ STATS_MATCHER_EXPECT_EQ(estimated_memory_usage);
+ STATS_MATCHER_EXPECT_EQ(estimated_fair_share_memory_usage);
+
+#undef STATS_MATCHER_EXPECT_EQ
+
+ return ok;
+}
+
+TEST(CordzInfoStatisticsTest, Flat) {
+ RefHelper ref;
+ auto* flat = ref.NeedsUnref(Flat(512));
+
+ CordzStatistics expected;
+ expected.size = flat->length;
+ expected.estimated_memory_usage = SizeOf(flat);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 1;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_512 = 1;
+
+ EXPECT_THAT(SampleCord(flat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedFlat) {
+ RefHelper ref;
+ auto* flat = ref.Ref(ref.NeedsUnref(Flat(64)));
+
+ CordzStatistics expected;
+ expected.size = flat->length;
+ expected.estimated_memory_usage = SizeOf(flat);
+ expected.estimated_fair_share_memory_usage = SizeOf(flat) / 2;
+ expected.node_count = 1;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_64 = 1;
+
+ EXPECT_THAT(SampleCord(flat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, External) {
+ RefHelper ref;
+ auto* external = ref.NeedsUnref(External());
+
+ CordzStatistics expected;
+ expected.size = external->length;
+ expected.estimated_memory_usage = SizeOf(external);
+ expected.estimated_fair_share_memory_usage = SizeOf(external);
+ expected.node_count = 1;
+ expected.node_counts.external = 1;
+
+ EXPECT_THAT(SampleCord(external), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedExternal) {
+ RefHelper ref;
+ auto* external = ref.Ref(ref.NeedsUnref(External()));
+
+ CordzStatistics expected;
+ expected.size = external->length;
+ expected.estimated_memory_usage = SizeOf(external);
+ expected.estimated_fair_share_memory_usage = SizeOf(external) / 2;
+ expected.node_count = 1;
+ expected.node_counts.external = 1;
+
+ EXPECT_THAT(SampleCord(external), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, Substring) {
+ RefHelper ref;
+ auto* flat = Flat(1024);
+ auto* substring = ref.NeedsUnref(Substring(flat));
+
+ CordzStatistics expected;
+ expected.size = substring->length;
+ expected.estimated_memory_usage = SizeOf(substring) + SizeOf(flat);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 2;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_1k = 1;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedSubstring) {
+ RefHelper ref;
+ auto* flat = ref.Ref(Flat(511), 2);
+ auto* substring = ref.Ref(ref.NeedsUnref(Substring(flat)));
+
+ CordzStatistics expected;
+ expected.size = substring->length;
+ expected.estimated_memory_usage = SizeOf(flat) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage =
+ SizeOf(substring) / 2 + SizeOf(flat) / 6;
+ expected.node_count = 2;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_512 = 1;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, Concat) {
+ RefHelper ref;
+ auto* flat1 = Flat(300);
+ auto* flat2 = Flat(2000);
+ auto* concat = ref.NeedsUnref(Concat(flat1, flat2));
+
+ CordzStatistics expected;
+ expected.size = concat->length;
+ expected.estimated_memory_usage =
+ SizeOf(concat) + SizeOf(flat1) + SizeOf(flat2);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 3;
+ expected.node_counts.flat = 2;
+ expected.node_counts.flat_512 = 1;
+ expected.node_counts.concat = 1;
+
+ EXPECT_THAT(SampleCord(concat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, DeepConcat) {
+ RefHelper ref;
+ auto* flat1 = Flat(300);
+ auto* flat2 = Flat(2000);
+ auto* flat3 = Flat(400);
+ auto* external = External(3000);
+ auto* substring = Substring(external);
+ auto* concat1 = Concat(flat1, flat2);
+ auto* concat2 = Concat(flat3, substring);
+ auto* concat = ref.NeedsUnref(Concat(concat1, concat2));
+
+ CordzStatistics expected;
+ expected.size = concat->length;
+ expected.estimated_memory_usage = SizeOf(concat) * 3 + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+
+ expected.node_count = 8;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_512 = 2;
+ expected.node_counts.external = 1;
+ expected.node_counts.concat = 3;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(concat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, DeepSharedConcat) {
+ RefHelper ref;
+ auto* flat1 = Flat(40);
+ auto* flat2 = ref.Ref(Flat(2000), 4);
+ auto* flat3 = Flat(70);
+ auto* external = ref.Ref(External(3000));
+ auto* substring = ref.Ref(Substring(external), 3);
+ auto* concat1 = Concat(flat1, flat2);
+ auto* concat2 = Concat(flat3, substring);
+ auto* concat = ref.Ref(ref.NeedsUnref(Concat(concat1, concat2)));
+
+ CordzStatistics expected;
+ expected.size = concat->length;
+ expected.estimated_memory_usage = SizeOf(concat) * 3 + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage = FairShare(concat);
+ expected.node_count = 8;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_64 = 1;
+ expected.node_counts.flat_128 = 1;
+ expected.node_counts.external = 1;
+ expected.node_counts.concat = 3;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(concat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, Ring) {
+ RefHelper ref;
+ auto* flat1 = Flat(240);
+ auto* flat2 = Flat(2000);
+ auto* flat3 = Flat(70);
+ auto* external = External(3000);
+ CordRepRing* ring = CordRepRing::Create(flat1);
+ ring = CordRepRing::Append(ring, flat2);
+ ring = CordRepRing::Append(ring, flat3);
+ ring = ref.NeedsUnref(CordRepRing::Append(ring, external));
+
+ CordzStatistics expected;
+ expected.size = ring->length;
+ expected.estimated_memory_usage = SizeOf(ring) + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 5;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_128 = 1;
+ expected.node_counts.flat_256 = 1;
+ expected.node_counts.external = 1;
+ expected.node_counts.ring = 1;
+
+ EXPECT_THAT(SampleCord(ring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedSubstringRing) {
+ RefHelper ref;
+ auto* flat1 = ref.Ref(Flat(240));
+ auto* flat2 = Flat(200);
+ auto* flat3 = Flat(70);
+ auto* external = ref.Ref(External(3000), 5);
+ CordRepRing* ring = CordRepRing::Create(flat1);
+ ring = CordRepRing::Append(ring, flat2);
+ ring = CordRepRing::Append(ring, flat3);
+ ring = ref.Ref(CordRepRing::Append(ring, external), 4);
+ auto* substring = ref.Ref(ref.NeedsUnref(Substring(ring)));
+
+ CordzStatistics expected;
+ expected.size = substring->length;
+ expected.estimated_memory_usage = SizeOf(ring) + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage = FairShare(substring);
+ expected.node_count = 6;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_128 = 1;
+ expected.node_counts.flat_256 = 2;
+ expected.node_counts.external = 1;
+ expected.node_counts.ring = 1;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, ThreadSafety) {
+ Notification stop;
+ static constexpr int kNumThreads = 8;
+ int64_t sampled_node_count = 0;
+
+ {
+ absl::synchronization_internal::ThreadPool pool(kNumThreads);
+
+ // Run analyzer thread emulating a CordzHandler collection.
+ pool.Schedule([&]() {
+ while (!stop.HasBeenNotified()) {
+ // Run every 10us (about 100K total collections).
+ absl::SleepFor(absl::Microseconds(10));
+ CordzSampleToken token;
+ for (const CordzInfo& cord_info : token) {
+ CordzStatistics stats = cord_info.GetCordzStatistics();
+ sampled_node_count += stats.node_count;
+ }
+ }
+ });
+
+ // Run 'application threads'
+ for (int i = 0; i < kNumThreads; ++i) {
+ pool.Schedule([&]() {
+ // Track 0 - 2 cordz infos at a time, providing permutations of 0, 1
+ // and 2 CordzHandle and CordzInfo queues being active, with plenty of
+ // 'empty to non-empty' transitions.
+ InlineData cords[2];
+ std::minstd_rand gen;
+ std::uniform_int_distribution<int> coin_toss(0, 1);
+
+ while (!stop.HasBeenNotified()) {
+ for (InlineData& cord : cords) {
+ // 50/50 flip the state of the cord
+ if (coin_toss(gen) != 0) {
+ if (cord.is_tree()) {
+ // 50/50 simulate delete (untrack) or 'edit to empty'
+ if (coin_toss(gen) != 0) {
+ CordzInfo::MaybeUntrackCord(cord.cordz_info());
+ } else {
+ CordzUpdateScope scope(cord.cordz_info(),
+ CordzUpdateTracker::kUnknown);
+ scope.SetCordRep(nullptr);
+ }
+ CordRep::Unref(cord.as_tree());
+ cord.set_inline_size(0);
+ } else {
+ // 50/50 Ring or Flat coin toss
+ CordRep* rep = Flat(256);
+ rep = (coin_toss(gen) != 0) ? CordRepRing::Create(rep) : rep;
+ cord.make_tree(rep);
+
+ // 50/50 sample
+ if (coin_toss(gen) != 0) {
+ CordzInfo::TrackCord(cord, CordzUpdateTracker::kUnknown);
+ }
+ }
+ }
+ }
+ }
+ for (InlineData& cord : cords) {
+ if (cord.is_tree()) {
+ CordzInfo::MaybeUntrackCord(cord.cordz_info());
+ CordRep::Unref(cord.as_tree());
+ }
+ }
+ });
+ }
+
+ // Run for 1 second to give memory and thread safety analyzers plenty of
+ // time to detect any mishaps or undefined behaviors.
+ absl::SleepFor(absl::Seconds(1));
+ stop.Notify();
+ }
+
+ std::cout << "Sampled " << sampled_node_count << " nodes\n";
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_info_test.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_info_test.cc
new file mode 100644
index 0000000000..b98343ae79
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_info_test.cc
@@ -0,0 +1,341 @@
+// Copyright 2019 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_info.h"
+
+#include <vector>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/debugging/stacktrace.h"
+#include "absl/debugging/symbolize.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/strings/str_cat.h"
+#include "absl/types/span.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::Eq;
+using ::testing::HasSubstr;
+using ::testing::Ne;
+using ::testing::SizeIs;
+
+// Used test values
+auto constexpr kUnknownMethod = CordzUpdateTracker::kUnknown;
+auto constexpr kTrackCordMethod = CordzUpdateTracker::kConstructorString;
+auto constexpr kChildMethod = CordzUpdateTracker::kConstructorCord;
+auto constexpr kUpdateMethod = CordzUpdateTracker::kAppendString;
+
+// Local less verbose helper
+std::vector<const CordzHandle*> DeleteQueue() {
+ return CordzHandle::DiagnosticsGetDeleteQueue();
+}
+
+std::string FormatStack(absl::Span<void* const> raw_stack) {
+ static constexpr size_t buf_size = 1 << 14;
+ std::unique_ptr<char[]> buf(new char[buf_size]);
+ std::string output;
+ for (void* stackp : raw_stack) {
+ if (absl::Symbolize(stackp, buf.get(), buf_size)) {
+ absl::StrAppend(&output, " ", buf.get(), "\n");
+ }
+ }
+ return output;
+}
+
+TEST(CordzInfoTest, TrackCord) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+ ASSERT_THAT(info, Ne(nullptr));
+ EXPECT_FALSE(info->is_snapshot());
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(info));
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(data.rep.rep));
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, MaybeTrackChildCordWithoutSampling) {
+ CordzSamplingIntervalHelper sample_none(99999);
+ TestCordData parent, child;
+ CordzInfo::MaybeTrackCord(child.data, parent.data, kTrackCordMethod);
+ EXPECT_THAT(child.data.cordz_info(), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, MaybeTrackChildCordWithSampling) {
+ CordzSamplingIntervalHelper sample_all(1);
+ TestCordData parent, child;
+ CordzInfo::MaybeTrackCord(child.data, parent.data, kTrackCordMethod);
+ EXPECT_THAT(child.data.cordz_info(), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, MaybeTrackChildCordWithoutSamplingParentSampled) {
+ CordzSamplingIntervalHelper sample_none(99999);
+ TestCordData parent, child;
+ CordzInfo::TrackCord(parent.data, kTrackCordMethod);
+ CordzInfo::MaybeTrackCord(child.data, parent.data, kTrackCordMethod);
+ CordzInfo* parent_info = parent.data.cordz_info();
+ CordzInfo* child_info = child.data.cordz_info();
+ ASSERT_THAT(child_info, Ne(nullptr));
+ EXPECT_THAT(child_info->GetCordRepForTesting(), Eq(child.rep.rep));
+ EXPECT_THAT(child_info->GetParentStack(), parent_info->GetStack());
+ parent_info->Untrack();
+ child_info->Untrack();
+}
+
+TEST(CordzInfoTest, MaybeTrackChildCordWithoutSamplingChildSampled) {
+ CordzSamplingIntervalHelper sample_none(99999);
+ TestCordData parent, child;
+ CordzInfo::TrackCord(child.data, kTrackCordMethod);
+ CordzInfo::MaybeTrackCord(child.data, parent.data, kTrackCordMethod);
+ EXPECT_THAT(child.data.cordz_info(), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, MaybeTrackChildCordWithSamplingChildSampled) {
+ CordzSamplingIntervalHelper sample_all(1);
+ TestCordData parent, child;
+ CordzInfo::TrackCord(child.data, kTrackCordMethod);
+ CordzInfo::MaybeTrackCord(child.data, parent.data, kTrackCordMethod);
+ EXPECT_THAT(child.data.cordz_info(), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, UntrackCord) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ info->Untrack();
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+}
+
+TEST(CordzInfoTest, UntrackCordWithSnapshot) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ CordzSnapshot snapshot;
+ info->Untrack();
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(nullptr));
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(data.rep.rep));
+ EXPECT_THAT(DeleteQueue(), ElementsAre(info, &snapshot));
+}
+
+TEST(CordzInfoTest, SetCordRep) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ TestCordRep rep;
+ info->Lock(CordzUpdateTracker::kAppendCord);
+ info->SetCordRep(rep.rep);
+ info->Unlock();
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(rep.rep));
+
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, SetCordRepNullUntracksCordOnUnlock) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ info->Lock(CordzUpdateTracker::kAppendString);
+ info->SetCordRep(nullptr);
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(nullptr));
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(info));
+
+ info->Unlock();
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, RefCordRep) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ size_t refcount = data.rep.rep->refcount.Get();
+ EXPECT_THAT(info->RefCordRep(), Eq(data.rep.rep));
+ EXPECT_THAT(data.rep.rep->refcount.Get(), Eq(refcount + 1));
+ CordRep::Unref(data.rep.rep);
+ info->Untrack();
+}
+
+#if GTEST_HAS_DEATH_TEST
+
+TEST(CordzInfoTest, SetCordRepRequiresMutex) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+ TestCordRep rep;
+ EXPECT_DEBUG_DEATH(info->SetCordRep(rep.rep), ".*");
+ info->Untrack();
+}
+
+#endif // GTEST_HAS_DEATH_TEST
+
+TEST(CordzInfoTest, TrackUntrackHeadFirstV2) {
+ CordzSnapshot snapshot;
+ EXPECT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info1 = data.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ TestCordData data2;
+ CordzInfo::TrackCord(data2.data, kTrackCordMethod);
+ CordzInfo* info2 = data2.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info2));
+ EXPECT_THAT(info2->Next(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ info2->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ info1->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, TrackUntrackTailFirstV2) {
+ CordzSnapshot snapshot;
+ EXPECT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info1 = data.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ TestCordData data2;
+ CordzInfo::TrackCord(data2.data, kTrackCordMethod);
+ CordzInfo* info2 = data2.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info2));
+ EXPECT_THAT(info2->Next(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ info1->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info2));
+ EXPECT_THAT(info2->Next(snapshot), Eq(nullptr));
+
+ info2->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, StackV2) {
+ TestCordData data;
+ // kMaxStackDepth is intentionally less than 64 (which is the max depth that
+ // Cordz will record) because if the actual stack depth is over 64
+ // (which it is on Apple platforms) then the expected_stack will end up
+ // catching a few frames at the end that the actual_stack didn't get and
+ // it will no longer be a subset. At the time of this writing, 58 is the max
+ // that will allow this test to pass (with a minimum OS version of iOS 9), so
+ // rounded down to 50 to hopefully not run into this in the future if Apple
+ // makes small modifications to its testing stack. 50 is sufficient to prove
+ // that we got a decent stack.
+ static constexpr int kMaxStackDepth = 50;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+ std::vector<void*> local_stack;
+ local_stack.resize(kMaxStackDepth);
+ // In some environments we don't get stack traces. For example, on Android
+ // absl::GetStackTrace will return 0, indicating it didn't find any stack. The
+ // resultant formatted stack will be "", but that still equals the stack
+ // recorded in CordzInfo, which is also empty. The skip_count is 1 so that the
+ // line number of the current stack isn't included in the HasSubstr check.
+ local_stack.resize(absl::GetStackTrace(local_stack.data(), kMaxStackDepth,
+ /*skip_count=*/1));
+
+ std::string got_stack = FormatStack(info->GetStack());
+ std::string expected_stack = FormatStack(local_stack);
+ // If TrackCord is inlined, got_stack should match expected_stack. If it isn't
+ // inlined, got_stack should include an additional frame not present in
+ // expected_stack. Either way, expected_stack should be a substring of
+ // got_stack.
+ EXPECT_THAT(got_stack, HasSubstr(expected_stack));
+
+ info->Untrack();
+}
+
+// Local helper functions to get different stacks for child and parent.
+CordzInfo* TrackChildCord(InlineData& data, const InlineData& parent) {
+ CordzInfo::TrackCord(data, parent, kChildMethod);
+ return data.cordz_info();
+}
+CordzInfo* TrackParentCord(InlineData& data) {
+ CordzInfo::TrackCord(data, kTrackCordMethod);
+ return data.cordz_info();
+}
+
+TEST(CordzInfoTest, GetStatistics) {
+ TestCordData data;
+ CordzInfo* info = TrackParentCord(data.data);
+
+ CordzStatistics statistics = info->GetCordzStatistics();
+ EXPECT_THAT(statistics.size, Eq(data.rep.rep->length));
+ EXPECT_THAT(statistics.method, Eq(kTrackCordMethod));
+ EXPECT_THAT(statistics.parent_method, Eq(kUnknownMethod));
+ EXPECT_THAT(statistics.update_tracker.Value(kTrackCordMethod), Eq(1));
+
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, LockCountsMethod) {
+ TestCordData data;
+ CordzInfo* info = TrackParentCord(data.data);
+
+ info->Lock(kUpdateMethod);
+ info->Unlock();
+ info->Lock(kUpdateMethod);
+ info->Unlock();
+
+ CordzStatistics statistics = info->GetCordzStatistics();
+ EXPECT_THAT(statistics.update_tracker.Value(kUpdateMethod), Eq(2));
+
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, FromParent) {
+ TestCordData parent;
+ TestCordData child;
+ CordzInfo* info_parent = TrackParentCord(parent.data);
+ CordzInfo* info_child = TrackChildCord(child.data, parent.data);
+
+ std::string stack = FormatStack(info_parent->GetStack());
+ std::string parent_stack = FormatStack(info_child->GetParentStack());
+ EXPECT_THAT(stack, Eq(parent_stack));
+
+ CordzStatistics statistics = info_child->GetCordzStatistics();
+ EXPECT_THAT(statistics.size, Eq(child.rep.rep->length));
+ EXPECT_THAT(statistics.method, Eq(kChildMethod));
+ EXPECT_THAT(statistics.parent_method, Eq(kTrackCordMethod));
+ EXPECT_THAT(statistics.update_tracker.Value(kChildMethod), Eq(1));
+
+ info_parent->Untrack();
+ info_child->Untrack();
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.cc
new file mode 100644
index 0000000000..ba1270d8f0
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.cc
@@ -0,0 +1,64 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_sample_token.h"
+
+#include "absl/base/config.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_info.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+CordzSampleToken::Iterator& CordzSampleToken::Iterator::operator++() {
+ if (current_) {
+ current_ = current_->Next(*token_);
+ }
+ return *this;
+}
+
+CordzSampleToken::Iterator CordzSampleToken::Iterator::operator++(int) {
+ Iterator it(*this);
+ operator++();
+ return it;
+}
+
+bool operator==(const CordzSampleToken::Iterator& lhs,
+ const CordzSampleToken::Iterator& rhs) {
+ return lhs.current_ == rhs.current_ &&
+ (lhs.current_ == nullptr || lhs.token_ == rhs.token_);
+}
+
+bool operator!=(const CordzSampleToken::Iterator& lhs,
+ const CordzSampleToken::Iterator& rhs) {
+ return !(lhs == rhs);
+}
+
+CordzSampleToken::Iterator::reference CordzSampleToken::Iterator::operator*()
+ const {
+ return *current_;
+}
+
+CordzSampleToken::Iterator::pointer CordzSampleToken::Iterator::operator->()
+ const {
+ return current_;
+}
+
+CordzSampleToken::Iterator::Iterator(const CordzSampleToken* token)
+ : token_(token), current_(CordzInfo::Head(*token)) {}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.h b/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.h
new file mode 100644
index 0000000000..28a1d70ccc
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token.h
@@ -0,0 +1,97 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/base/config.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_info.h"
+
+#ifndef ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
+#define ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// The existence of a CordzSampleToken guarantees that a reader can traverse the
+// global_cordz_infos_head linked-list without needing to hold a mutex. When a
+// CordzSampleToken exists, all CordzInfo objects that would be destroyed are
+// instead appended to a deletion queue. When the CordzSampleToken is destroyed,
+// it will also clean up any of these CordzInfo objects.
+//
+// E.g., ST are CordzSampleToken objects and CH are CordzHandle objects.
+// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- global_delete_queue_tail
+//
+// This list tracks that CH1 and CH2 were created after ST1, so the thread
+// holding ST1 might have a reference to CH1, CH2, ST2, and CH3. However, ST2 was
+// created later, so the thread holding the ST2 token cannot have a reference to
+// ST1, CH1, or CH2. If ST1 is cleaned up first, that thread will delete ST1,
+// CH1, and CH2. If instead ST2 is cleaned up first, that thread will only
+// delete ST2.
+//
+// If ST1 is cleaned up first, the new list will be:
+// ST2 <- CH3 <- global_delete_queue_tail
+//
+// If ST2 is cleaned up first, the new list will be:
+// ST1 <- CH1 <- CH2 <- CH3 <- global_delete_queue_tail
+//
+// All new CordzHandle objects are appended to the list, so if a new thread
+// comes along before either ST1 or ST2 is cleaned up, the new list will be:
+// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- ST3 <- global_delete_queue_tail
+//
+// A thread must hold the global_delete_queue_mu mutex whenever it's altering
+// this list.
+//
+// It is safe for a thread that holds a CordzSampleToken to read
+// global_cordz_infos at any time since the objects it is able to retrieve will
+// not be deleted while the CordzSampleToken exists.
+class CordzSampleToken : public CordzSnapshot {
+ public:
+ class Iterator {
+ public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = const CordzInfo&;
+ using difference_type = ptrdiff_t;
+ using pointer = const CordzInfo*;
+ using reference = value_type;
+
+ Iterator() = default;
+
+ Iterator& operator++();
+ Iterator operator++(int);
+ friend bool operator==(const Iterator& lhs, const Iterator& rhs);
+ friend bool operator!=(const Iterator& lhs, const Iterator& rhs);
+ reference operator*() const;
+ pointer operator->() const;
+
+ private:
+ friend class CordzSampleToken;
+ explicit Iterator(const CordzSampleToken* token);
+
+ const CordzSampleToken* token_ = nullptr;
+ pointer current_ = nullptr;
+ };
+
+ CordzSampleToken() = default;
+ CordzSampleToken(const CordzSampleToken&) = delete;
+ CordzSampleToken& operator=(const CordzSampleToken&) = delete;
+
+ Iterator begin() { return Iterator(this); }
+ Iterator end() { return Iterator(); }
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
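For illustration only, a sketch of how a collector might walk every sampled cord while holding a token, relying solely on the declarations above (the function name is hypothetical):

#include <cstdint>

#include "absl/strings/internal/cordz_sample_token.h"

// The CordzSampleToken keeps every CordzInfo reachable from the list head
// alive for as long as the token exists, so this loop needs no extra locking.
int64_t CollectNodeCounts() {
  absl::cord_internal::CordzSampleToken token;
  int64_t total_nodes = 0;
  for (const absl::cord_internal::CordzInfo& info : token) {
    total_nodes += info.GetCordzStatistics().node_count;
  }
  return total_nodes;
}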
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token_test.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token_test.cc
new file mode 100644
index 0000000000..9f54301d68
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_sample_token_test.cc
@@ -0,0 +1,208 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_sample_token.h"
+
+#include <memory>
+#include <type_traits>
+#include <vector>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/memory/memory.h"
+#include "absl/random/random.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "absl/synchronization/notification.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::Eq;
+using ::testing::Ne;
+
+// Used test values
+auto constexpr kTrackCordMethod = CordzUpdateTracker::kConstructorString;
+
+TEST(CordzSampleTokenTest, IteratorTraits) {
+ static_assert(std::is_copy_constructible<CordzSampleToken::Iterator>::value,
+ "");
+ static_assert(std::is_copy_assignable<CordzSampleToken::Iterator>::value, "");
+ static_assert(std::is_move_constructible<CordzSampleToken::Iterator>::value,
+ "");
+ static_assert(std::is_move_assignable<CordzSampleToken::Iterator>::value, "");
+ static_assert(
+ std::is_same<
+ std::iterator_traits<CordzSampleToken::Iterator>::iterator_category,
+ std::input_iterator_tag>::value,
+ "");
+ static_assert(
+ std::is_same<std::iterator_traits<CordzSampleToken::Iterator>::value_type,
+ const CordzInfo&>::value,
+ "");
+ static_assert(
+ std::is_same<
+ std::iterator_traits<CordzSampleToken::Iterator>::difference_type,
+ ptrdiff_t>::value,
+ "");
+ static_assert(
+ std::is_same<std::iterator_traits<CordzSampleToken::Iterator>::pointer,
+ const CordzInfo*>::value,
+ "");
+ static_assert(
+ std::is_same<std::iterator_traits<CordzSampleToken::Iterator>::reference,
+ const CordzInfo&>::value,
+ "");
+}
+
+TEST(CordzSampleTokenTest, IteratorEmpty) {
+ CordzSampleToken token;
+ EXPECT_THAT(token.begin(), Eq(token.end()));
+}
+
+TEST(CordzSampleTokenTest, Iterator) {
+ TestCordData cord1, cord2, cord3;
+ CordzInfo::TrackCord(cord1.data, kTrackCordMethod);
+ CordzInfo* info1 = cord1.data.cordz_info();
+ CordzInfo::TrackCord(cord2.data, kTrackCordMethod);
+ CordzInfo* info2 = cord2.data.cordz_info();
+ CordzInfo::TrackCord(cord3.data, kTrackCordMethod);
+ CordzInfo* info3 = cord3.data.cordz_info();
+
+ CordzSampleToken token;
+ std::vector<const CordzInfo*> found;
+ for (const CordzInfo& cord_info : token) {
+ found.push_back(&cord_info);
+ }
+
+ EXPECT_THAT(found, ElementsAre(info3, info2, info1));
+
+ info1->Untrack();
+ info2->Untrack();
+ info3->Untrack();
+}
+
+TEST(CordzSampleTokenTest, IteratorEquality) {
+ TestCordData cord1;
+ TestCordData cord2;
+ TestCordData cord3;
+ CordzInfo::TrackCord(cord1.data, kTrackCordMethod);
+ CordzInfo* info1 = cord1.data.cordz_info();
+
+ CordzSampleToken token1;
+ // lhs starts with the CordzInfo corresponding to cord1 at the head.
+ CordzSampleToken::Iterator lhs = token1.begin();
+
+ CordzInfo::TrackCord(cord2.data, kTrackCordMethod);
+ CordzInfo* info2 = cord2.data.cordz_info();
+
+ CordzSampleToken token2;
+ // rhs starts with the CordzInfo corresponding to cord2 at the head.
+ CordzSampleToken::Iterator rhs = token2.begin();
+
+ CordzInfo::TrackCord(cord3.data, kTrackCordMethod);
+ CordzInfo* info3 = cord3.data.cordz_info();
+
+ // lhs is on cord1 while rhs is on cord2.
+ EXPECT_THAT(lhs, Ne(rhs));
+
+ rhs++;
+ // lhs and rhs are both on cord1, but they didn't come from the same
+ // CordzSampleToken.
+ EXPECT_THAT(lhs, Ne(rhs));
+
+ lhs++;
+ rhs++;
+ // Both lhs and rhs are done, so they are on nullptr.
+ EXPECT_THAT(lhs, Eq(rhs));
+
+ info1->Untrack();
+ info2->Untrack();
+ info3->Untrack();
+}
+
+TEST(CordzSampleTokenTest, MultiThreaded) {
+ Notification stop;
+ static constexpr int kNumThreads = 4;
+ static constexpr int kNumCords = 3;
+ static constexpr int kNumTokens = 3;
+ absl::synchronization_internal::ThreadPool pool(kNumThreads);
+
+ for (int i = 0; i < kNumThreads; ++i) {
+ pool.Schedule([&stop]() {
+ absl::BitGen gen;
+ TestCordData cords[kNumCords];
+ std::unique_ptr<CordzSampleToken> tokens[kNumTokens];
+
+ while (!stop.HasBeenNotified()) {
+ // Randomly perform one of five actions:
+ // 1) Untrack
+ // 2) Track
+ // 3) Iterate over Cords visible to a token.
+ // 4) Unsample
+ // 5) Sample
+ int index = absl::Uniform(gen, 0, kNumCords);
+ if (absl::Bernoulli(gen, 0.5)) {
+ TestCordData& cord = cords[index];
+ // Track/untrack.
+ if (cord.data.is_profiled()) {
+ // 1) Untrack
+ cord.data.cordz_info()->Untrack();
+ cord.data.clear_cordz_info();
+ } else {
+ // 2) Track
+ CordzInfo::TrackCord(cord.data, kTrackCordMethod);
+ }
+ } else {
+ std::unique_ptr<CordzSampleToken>& token = tokens[index];
+ if (token) {
+ if (absl::Bernoulli(gen, 0.5)) {
+ // 3) Iterate over Cords visible to a token.
+ for (const CordzInfo& info : *token) {
+ // This is trivial work to allow us to compile the loop.
+ EXPECT_THAT(info.Next(*token), Ne(&info));
+ }
+ } else {
+ // 4) Unsample
+ token = nullptr;
+ }
+ } else {
+ // 5) Sample
+ token = absl::make_unique<CordzSampleToken>();
+ }
+ }
+ }
+ for (TestCordData& cord : cords) {
+ CordzInfo::MaybeUntrackCord(cord.data.cordz_info());
+ }
+ });
+ }
+ // The threads will hammer away. Give it a little bit of time for tsan to
+ // spot errors.
+ absl::SleepFor(absl::Seconds(3));
+ stop.Notify();
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_statistics.h b/third_party/abseil-cpp/absl/strings/internal/cordz_statistics.h
new file mode 100644
index 0000000000..e03c651e9c
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_statistics.h
@@ -0,0 +1,84 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
+
+#include <cstdint>
+
+#include "absl/base/config.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzStatistics captures some meta information about a Cord's shape.
+struct CordzStatistics {
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+ // Node counts information
+ struct NodeCounts {
+ size_t flat = 0; // #flats
+ size_t flat_64 = 0; // #flats up to 64 bytes
+ size_t flat_128 = 0; // #flats up to 128 bytes
+ size_t flat_256 = 0; // #flats up to 256 bytes
+ size_t flat_512 = 0; // #flats up to 512 bytes
+ size_t flat_1k = 0; // #flats up to 1K bytes
+ size_t external = 0; // #external reps
+ size_t substring = 0; // #substring reps
+ size_t concat = 0; // #concat reps
+ size_t ring = 0; // #ring buffer reps
+ };
+
+ // The size of the cord in bytes. This matches the result of Cord::size().
+ int64_t size = 0;
+
+ // The estimated memory used by the sampled cord. This value matches the
+ // value as reported by Cord::EstimatedMemoryUsage().
+ // A value of 0 implies the property has not been recorded.
+ int64_t estimated_memory_usage = 0;
+
+ // The effective memory used by the sampled cord, inversely weighted by the
+ // effective indegree of each allocated node. This is a representation of the
+ // fair share of memory usage that should be attributed to the sampled cord.
+ // This value is more useful for cases where one or more nodes are referenced
+ // by multiple Cord instances, and for cases where a Cord includes the same
+ // node multiple times (either directly or indirectly).
+ // A value of 0 implies the property has not been recorded.
+ int64_t estimated_fair_share_memory_usage = 0;
+
+ // The total number of nodes referenced by this cord.
+ // For ring buffer Cords, this includes the 'ring buffer' node.
+ // A value of 0 implies the property has not been recorded.
+ int64_t node_count = 0;
+
+ // Detailed node counts per type
+ NodeCounts node_counts;
+
+ // The cord method responsible for sampling the cord.
+ MethodIdentifier method = MethodIdentifier::kUnknown;
+
+ // The cord method responsible for sampling the parent cord if applicable.
+ MethodIdentifier parent_method = MethodIdentifier::kUnknown;
+
+ // Update tracker tracking invocation count per cord method.
+ CordzUpdateTracker update_tracker;
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
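As a small illustration of how these fields might be consumed by reporting code (hypothetical snippet, not part of the header):

#include "absl/strings/internal/cordz_statistics.h"

// Returns the fraction of the cord's memory attributable to this cord alone:
// 1.0 means the tree is unshared, smaller values indicate shared nodes.
double SharingFraction(const absl::cord_internal::CordzStatistics& stats) {
  if (stats.estimated_memory_usage == 0) return 1.0;  // not recorded
  return static_cast<double>(stats.estimated_fair_share_memory_usage) /
         static_cast<double>(stats.estimated_memory_usage);
}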
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_update_scope.h b/third_party/abseil-cpp/absl/strings/internal/cordz_update_scope.h
new file mode 100644
index 0000000000..57ba75de93
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_update_scope.h
@@ -0,0 +1,71 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
+
+#include "absl/base/config.h"
+#include "absl/base/optimization.h"
+#include "absl/base/thread_annotations.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzUpdateScope scopes an update to the provided CordzInfo.
+// The class invokes `info->Lock(method)` and `info->Unlock()` to guard
+// cordrep updates. This class does nothing if `info` is null.
+// See also the `Lock`, `Unlock` and `SetCordRep` methods in `CordzInfo`.
+class ABSL_SCOPED_LOCKABLE CordzUpdateScope {
+ public:
+ CordzUpdateScope(CordzInfo* info, CordzUpdateTracker::MethodIdentifier method)
+ ABSL_EXCLUSIVE_LOCK_FUNCTION(info)
+ : info_(info) {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info->Lock(method);
+ }
+ }
+
+ // CordzUpdateScope cannot be copied or assigned to.
+ CordzUpdateScope(CordzUpdateScope&& rhs) = delete;
+ CordzUpdateScope(const CordzUpdateScope&) = delete;
+ CordzUpdateScope& operator=(CordzUpdateScope&& rhs) = delete;
+ CordzUpdateScope& operator=(const CordzUpdateScope&) = delete;
+
+ ~CordzUpdateScope() ABSL_UNLOCK_FUNCTION() {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info_->Unlock();
+ }
+ }
+
+ void SetCordRep(CordRep* rep) const {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info_->SetCordRep(rep);
+ }
+ }
+
+ CordzInfo* info() const { return info_; }
+
+ private:
+ CordzInfo* info_;
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
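A minimal sketch of the intended RAII usage pattern (hypothetical caller code; `ReplaceTree` is an illustrative name, and the scope is a no-op when `info` is null):

#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cordz_info.h"
#include "absl/strings/internal/cordz_update_scope.h"
#include "absl/strings/internal/cordz_update_tracker.h"

using absl::cord_internal::CordRep;
using absl::cord_internal::CordzInfo;
using absl::cord_internal::CordzUpdateScope;
using absl::cord_internal::CordzUpdateTracker;

// Swaps in a new root rep while keeping the (possibly null) CordzInfo in
// sync: the scope locks `info` on entry and unlocks it when it goes out of
// scope, matching the Lock()/Unlock() contract documented above.
void ReplaceTree(CordzInfo* info, CordRep* new_rep) {
  CordzUpdateScope scope(info, CordzUpdateTracker::kAppendCord);
  scope.SetCordRep(new_rep);
}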
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_update_scope_test.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_update_scope_test.cc
new file mode 100644
index 0000000000..3d08c622d0
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_update_scope_test.cc
@@ -0,0 +1,49 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_update_scope.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+namespace {
+
+// Used test values
+auto constexpr kTrackCordMethod = CordzUpdateTracker::kConstructorString;
+
+TEST(CordzUpdateScopeTest, ScopeNullptr) {
+ CordzUpdateScope scope(nullptr, kTrackCordMethod);
+}
+
+TEST(CordzUpdateScopeTest, ScopeSampledCord) {
+ TestCordData cord;
+ CordzInfo::TrackCord(cord.data, kTrackCordMethod);
+ CordzUpdateScope scope(cord.data.cordz_info(), kTrackCordMethod);
+ cord.data.cordz_info()->SetCordRep(nullptr);
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker.h b/third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker.h
new file mode 100644
index 0000000000..02efcc3a2d
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker.h
@@ -0,0 +1,119 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
+
+#include <atomic>
+#include <cstdint>
+
+#include "absl/base/config.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzUpdateTracker tracks counters for Cord update methods.
+//
+// The purpose of CordzUpdateTracker is to track the number of calls to methods
+// updating Cord data for sampled cords. The class internally uses 'lossy'
+// atomic operations: Cord is thread-compatible, so there is no need to
+// synchronize updates. However, Cordz collection threads may call 'Value()' at
+// any point, so the class needs to provide thread-safe access.
+//
+// This class is thread-safe. However, as noted above, all non-const methods
+// should be used from a single thread only: updates are thread-safe but lossy.
+class CordzUpdateTracker {
+ public:
+ // Tracked update methods.
+ enum MethodIdentifier {
+ kUnknown,
+ kAppendCord,
+ kAppendExternalMemory,
+ kAppendString,
+ kAssignCord,
+ kAssignString,
+ kClear,
+ kConstructorCord,
+ kConstructorString,
+ kCordReader,
+ kFlatten,
+ kGetAppendRegion,
+ kMakeCordFromExternal,
+ kMoveAppendCord,
+ kMoveAssignCord,
+ kMovePrependCord,
+ kPrependCord,
+ kPrependString,
+ kRemovePrefix,
+ kRemoveSuffix,
+ kSubCord,
+
+ // kNumMethods defines the number of entries: must be the last entry.
+ kNumMethods,
+ };
+
+ // Constructs a new instance. All counters are zero-initialized.
+ constexpr CordzUpdateTracker() noexcept : values_{} {}
+
+ // Copy constructs a new instance.
+ CordzUpdateTracker(const CordzUpdateTracker& rhs) noexcept { *this = rhs; }
+
+ // Assigns the provided value to this instance.
+ CordzUpdateTracker& operator=(const CordzUpdateTracker& rhs) noexcept {
+ for (int i = 0; i < kNumMethods; ++i) {
+ values_[i].store(rhs.values_[i].load(std::memory_order_relaxed),
+ std::memory_order_relaxed);
+ }
+ return *this;
+ }
+
+ // Returns the value for the specified method.
+ int64_t Value(MethodIdentifier method) const {
+ return values_[method].load(std::memory_order_relaxed);
+ }
+
+ // Increases the value for the specified method by `n`
+ void LossyAdd(MethodIdentifier method, int64_t n = 1) {
+ auto& value = values_[method];
+ value.store(value.load(std::memory_order_relaxed) + n,
+ std::memory_order_relaxed);
+ }
+
+ // Adds all the values from `src` to this instance
+ void LossyAdd(const CordzUpdateTracker& src) {
+ for (int i = 0; i < kNumMethods; ++i) {
+ MethodIdentifier method = static_cast<MethodIdentifier>(i);
+ if (int64_t value = src.Value(method)) {
+ LossyAdd(method, value);
+ }
+ }
+ }
+
+ private:
+ // Until C++20, std::atomic is not constexpr default-constructible, so we need
+ // a wrapper for this class to be constexpr constructible.
+ class Counter : public std::atomic<int64_t> {
+ public:
+ constexpr Counter() noexcept : std::atomic<int64_t>(0) {}
+ };
+
+ Counter values_[kNumMethods];
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
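For illustration, the single-writer / concurrent-reader pattern the comments above describe (hypothetical snippet mirroring the unit tests that follow):

#include <cstdint>

#include "absl/strings/internal/cordz_update_tracker.h"

using absl::cord_internal::CordzUpdateTracker;

// The owning (single) writer records mutations with LossyAdd(); a Cordz
// collection thread may call Value() concurrently without extra locking.
int64_t RecordTwoAppends(CordzUpdateTracker& tracker) {
  tracker.LossyAdd(CordzUpdateTracker::kAppendString);
  tracker.LossyAdd(CordzUpdateTracker::kAppendString);
  // Returns 2 if the counter started at zero and no other writer touched it.
  return tracker.Value(CordzUpdateTracker::kAppendString);
}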
diff --git a/third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker_test.cc b/third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker_test.cc
new file mode 100644
index 0000000000..fcd17df7a0
--- /dev/null
+++ b/third_party/abseil-cpp/absl/strings/internal/cordz_update_tracker_test.cc
@@ -0,0 +1,143 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+#include <array>
+#include <thread> // NOLINT
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/attributes.h"
+#include "absl/base/config.h"
+#include "absl/synchronization/notification.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::AnyOf;
+using ::testing::Eq;
+
+using Method = CordzUpdateTracker::MethodIdentifier;
+using Methods = std::array<Method, Method::kNumMethods>;
+
+// Returns an array of all methods defined in `MethodIdentifier`
+Methods AllMethods() {
+ return Methods{Method::kUnknown,
+ Method::kAppendCord,
+ Method::kAppendExternalMemory,
+ Method::kAppendString,
+ Method::kAssignCord,
+ Method::kAssignString,
+ Method::kClear,
+ Method::kConstructorCord,
+ Method::kConstructorString,
+ Method::kCordReader,
+ Method::kFlatten,
+ Method::kGetAppendRegion,
+ Method::kMakeCordFromExternal,
+ Method::kMoveAppendCord,
+ Method::kMoveAssignCord,
+ Method::kMovePrependCord,
+ Method::kPrependCord,
+ Method::kPrependString,
+ Method::kRemovePrefix,
+ Method::kRemoveSuffix,
+ Method::kSubCord};
+}
+
+TEST(CordzUpdateTracker, IsConstExprAndInitializesToZero) {
+ constexpr CordzUpdateTracker tracker;
+ for (Method method : AllMethods()) {
+ ASSERT_THAT(tracker.Value(method), Eq(0));
+ }
+}
+
+TEST(CordzUpdateTracker, LossyAdd) {
+ int64_t n = 1;
+ CordzUpdateTracker tracker;
+ for (Method method : AllMethods()) {
+ tracker.LossyAdd(method, n);
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+}
+
+TEST(CordzUpdateTracker, CopyConstructor) {
+ int64_t n = 1;
+ CordzUpdateTracker src;
+ for (Method method : AllMethods()) {
+ src.LossyAdd(method, n);
+ n += 2;
+ }
+
+ n = 1;
+ CordzUpdateTracker tracker(src);
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+}
+
+TEST(CordzUpdateTracker, OperatorAssign) {
+ int64_t n = 1;
+ CordzUpdateTracker src;
+ CordzUpdateTracker tracker;
+ for (Method method : AllMethods()) {
+ src.LossyAdd(method, n);
+ n += 2;
+ }
+
+ n = 1;
+ tracker = src;
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+}
+
+TEST(CordzUpdateTracker, ThreadSanitizedValueCheck) {
+ absl::Notification done;
+ CordzUpdateTracker tracker;
+
+ std::thread reader([&done, &tracker] {
+ while (!done.HasBeenNotified()) {
+ int n = 1;
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), AnyOf(Eq(n), Eq(0)));
+ n += 2;
+ }
+ }
+ int n = 1;
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+ });
+
+ int64_t n = 1;
+ for (Method method : AllMethods()) {
+ tracker.LossyAdd(method, n);
+ n += 2;
+ }
+ done.Notify();
+ reader.join();
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized.h b/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized.h
index e42628e394..749c66e78e 100644
--- a/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized.h
+++ b/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized.h
@@ -17,6 +17,7 @@
#ifndef ABSL_STRINGS_INTERNAL_RESIZE_UNINITIALIZED_H_
#define ABSL_STRINGS_INTERNAL_RESIZE_UNINITIALIZED_H_
+#include <algorithm>
#include <string>
#include <type_traits>
#include <utility>
@@ -66,6 +67,28 @@ inline void STLStringResizeUninitialized(string_type* s, size_t new_size) {
ResizeUninitializedTraits<string_type>::Resize(s, new_size);
}
+// Used to ensure exponential growth so that the amortized complexity of
+// increasing the string size by a small amount is O(1), in contrast to
+// O(str->size()) in the case of precise growth.
+template <typename string_type>
+void STLStringReserveAmortized(string_type* s, size_t new_size) {
+ const size_t cap = s->capacity();
+ if (new_size > cap) {
+ // Make sure to always grow by at least a factor of 2x.
+ s->reserve((std::max)(new_size, 2 * cap));
+ }
+}
+
+// Like STLStringResizeUninitialized(str, new_size), except guaranteed to use
+// exponential growth so that the amortized complexity of increasing the string
+// size by a small amount is O(1), in contrast to O(str->size()) in the case of
+// precise growth.
+template <typename string_type>
+void STLStringResizeUninitializedAmortized(string_type* s, size_t new_size) {
+ STLStringReserveAmortized(s, new_size);
+ STLStringResizeUninitialized(s, new_size);
+}
+
} // namespace strings_internal
ABSL_NAMESPACE_END
} // namespace absl
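To show the intended call pattern for the new helper (an illustrative sketch, not taken from this change; `AppendRaw` is a hypothetical name):

#include <cstddef>
#include <cstring>
#include <string>

#include "absl/strings/internal/resize_uninitialized.h"

// Appends `len` bytes without value-initializing the new tail first. The
// amortized variant grows capacity by at least 2x, so a long run of small
// appends stays O(1) amortized instead of O(dst->size()) per call.
void AppendRaw(std::string* dst, const char* src, size_t len) {
  const size_t old_size = dst->size();
  absl::strings_internal::STLStringResizeUninitializedAmortized(
      dst, old_size + len);
  std::memcpy(&(*dst)[old_size], src, len);
}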
diff --git a/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized_test.cc b/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized_test.cc
index 0f8b3c2a95..01ee476b6c 100644
--- a/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized_test.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/resize_uninitialized_test.cc
@@ -24,11 +24,13 @@ int resize_call_count = 0;
// resize() method has been called.
struct resizable_string {
size_t size() const { return 0; }
+ size_t capacity() const { return 0; }
char& operator[](size_t) {
static char c = '\0';
return c;
}
void resize(size_t) { resize_call_count += 1; }
+ void reserve(size_t) {}
};
int resize_default_init_call_count = 0;
@@ -37,12 +39,14 @@ int resize_default_init_call_count = 0;
// resize() and __resize_default_init() methods have been called.
struct resize_default_init_string {
size_t size() const { return 0; }
+ size_t capacity() const { return 0; }
char& operator[](size_t) {
static char c = '\0';
return c;
}
void resize(size_t) { resize_call_count += 1; }
void __resize_default_init(size_t) { resize_default_init_call_count += 1; }
+ void reserve(size_t) {}
};
TEST(ResizeUninit, WithAndWithout) {
@@ -60,6 +64,9 @@ TEST(ResizeUninit, WithAndWithout) {
absl::strings_internal::STLStringResizeUninitialized(&rs, 237);
EXPECT_EQ(resize_call_count, 1);
EXPECT_EQ(resize_default_init_call_count, 0);
+ absl::strings_internal::STLStringResizeUninitializedAmortized(&rs, 1000);
+ EXPECT_EQ(resize_call_count, 2);
+ EXPECT_EQ(resize_default_init_call_count, 0);
}
resize_call_count = 0;
@@ -76,7 +83,23 @@ TEST(ResizeUninit, WithAndWithout) {
absl::strings_internal::STLStringResizeUninitialized(&rus, 237);
EXPECT_EQ(resize_call_count, 0);
EXPECT_EQ(resize_default_init_call_count, 1);
+ absl::strings_internal::STLStringResizeUninitializedAmortized(&rus, 1000);
+ EXPECT_EQ(resize_call_count, 0);
+ EXPECT_EQ(resize_default_init_call_count, 2);
+ }
+}
+
+TEST(ResizeUninit, Amortized) {
+ std::string str;
+ size_t prev_cap = str.capacity();
+ int cap_increase_count = 0;
+ for (int i = 0; i < 1000; ++i) {
+ absl::strings_internal::STLStringResizeUninitializedAmortized(&str, i);
+ size_t new_cap = str.capacity();
+ if (new_cap > prev_cap) ++cap_increase_count;
+ prev_cap = new_cap;
}
+ EXPECT_LT(cap_increase_count, 50);
}
} // namespace
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/arg.h b/third_party/abseil-cpp/absl/strings/internal/str_format/arg.h
index 7040c86677..3c91be701f 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/arg.h
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/arg.h
@@ -122,6 +122,14 @@ StringConvertResult FormatConvertImpl(const std::string& v,
StringConvertResult FormatConvertImpl(string_view v,
FormatConversionSpecImpl conv,
FormatSinkImpl* sink);
+#if defined(ABSL_HAVE_STD_STRING_VIEW) && !defined(ABSL_USES_STD_STRING_VIEW)
+inline StringConvertResult FormatConvertImpl(std::string_view v,
+ FormatConversionSpecImpl conv,
+ FormatSinkImpl* sink) {
+ return FormatConvertImpl(absl::string_view(v.data(), v.size()), conv, sink);
+}
+#endif // ABSL_HAVE_STD_STRING_VIEW && !ABSL_USES_STD_STRING_VIEW
+
ArgConvertResult<FormatConversionCharSetUnion(
FormatConversionCharSetInternal::s, FormatConversionCharSetInternal::p)>
FormatConvertImpl(const char* v, const FormatConversionSpecImpl conv,
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc b/third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc
index 4e68b90b5c..c988ba8fd2 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc
@@ -58,7 +58,7 @@ inline bool ArgContext::Bind(const UnboundConversion* unbound,
if (static_cast<size_t>(arg_position - 1) >= pack_.size()) return false;
arg = &pack_[arg_position - 1]; // 1-based
- if (!unbound->flags.basic) {
+ if (unbound->flags != Flags::kBasic) {
int width = unbound->width.value();
bool force_left = false;
if (unbound->width.is_from_arg()) {
@@ -84,9 +84,8 @@ inline bool ArgContext::Bind(const UnboundConversion* unbound,
FormatConversionSpecImplFriend::SetPrecision(precision, bound);
if (force_left) {
- Flags flags = unbound->flags;
- flags.left = true;
- FormatConversionSpecImplFriend::SetFlags(flags, bound);
+ FormatConversionSpecImplFriend::SetFlags(unbound->flags | Flags::kLeft,
+ bound);
} else {
FormatConversionSpecImplFriend::SetFlags(unbound->flags, bound);
}
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/convert_test.cc b/third_party/abseil-cpp/absl/strings/internal/str_format/convert_test.cc
index 926283cfac..91e0360901 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/convert_test.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/convert_test.cc
@@ -229,6 +229,9 @@ TEST_F(FormatConvertTest, BasicString) {
TestStringConvert(static_cast<const char*>("hello"));
TestStringConvert(std::string("hello"));
TestStringConvert(string_view("hello"));
+#if defined(ABSL_HAVE_STD_STRING_VIEW)
+ TestStringConvert(std::string_view("hello"));
+#endif // ABSL_HAVE_STD_STRING_VIEW
}
TEST_F(FormatConvertTest, NullString) {
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc b/third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc
index bb0d96cf32..484f6ebfc1 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc
@@ -23,13 +23,13 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace str_format_internal {
-std::string Flags::ToString() const {
+std::string FlagsToString(Flags v) {
std::string s;
- s.append(left ? "-" : "");
- s.append(show_pos ? "+" : "");
- s.append(sign_col ? " " : "");
- s.append(alt ? "#" : "");
- s.append(zero ? "0" : "");
+ s.append(FlagsContains(v, Flags::kLeft) ? "-" : "");
+ s.append(FlagsContains(v, Flags::kShowPos) ? "+" : "");
+ s.append(FlagsContains(v, Flags::kSignCol) ? " " : "");
+ s.append(FlagsContains(v, Flags::kAlt) ? "#" : "");
+ s.append(FlagsContains(v, Flags::kZero) ? "0" : "");
return s;
}
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/extension.h b/third_party/abseil-cpp/absl/strings/internal/str_format/extension.h
index a9b9e137de..55cbb56d0a 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/extension.h
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/extension.h
@@ -128,19 +128,33 @@ class FormatSinkImpl {
char buf_[1024];
};
-struct Flags {
- bool basic : 1; // fastest conversion: no flags, width, or precision
- bool left : 1; // "-"
- bool show_pos : 1; // "+"
- bool sign_col : 1; // " "
- bool alt : 1; // "#"
- bool zero : 1; // "0"
- std::string ToString() const;
- friend std::ostream& operator<<(std::ostream& os, const Flags& v) {
- return os << v.ToString();
- }
+enum class Flags : uint8_t {
+ kBasic = 0,
+ kLeft = 1 << 0,
+ kShowPos = 1 << 1,
+ kSignCol = 1 << 2,
+ kAlt = 1 << 3,
+ kZero = 1 << 4,
+ // This is not a real flag. It just exists to turn off kBasic when no other
+ // flags are set. This is for when width/precision are specified.
+ kNonBasic = 1 << 5,
};
+constexpr Flags operator|(Flags a, Flags b) {
+ return static_cast<Flags>(static_cast<uint8_t>(a) | static_cast<uint8_t>(b));
+}
+
+constexpr bool FlagsContains(Flags haystack, Flags needle) {
+ return (static_cast<uint8_t>(haystack) & static_cast<uint8_t>(needle)) ==
+ static_cast<uint8_t>(needle);
+}
+
+std::string FlagsToString(Flags v);
+
+inline std::ostream& operator<<(std::ostream& os, Flags v) {
+ return os << FlagsToString(v);
+}
+
// clang-format off
#define ABSL_INTERNAL_CONVERSION_CHARS_EXPAND_(X_VAL, X_SEP) \
/* text */ \
@@ -257,12 +271,16 @@ struct FormatConversionSpecImplFriend;
class FormatConversionSpecImpl {
public:
// Width and precision are not specified, no flags are set.
- bool is_basic() const { return flags_.basic; }
- bool has_left_flag() const { return flags_.left; }
- bool has_show_pos_flag() const { return flags_.show_pos; }
- bool has_sign_col_flag() const { return flags_.sign_col; }
- bool has_alt_flag() const { return flags_.alt; }
- bool has_zero_flag() const { return flags_.zero; }
+ bool is_basic() const { return flags_ == Flags::kBasic; }
+ bool has_left_flag() const { return FlagsContains(flags_, Flags::kLeft); }
+ bool has_show_pos_flag() const {
+ return FlagsContains(flags_, Flags::kShowPos);
+ }
+ bool has_sign_col_flag() const {
+ return FlagsContains(flags_, Flags::kSignCol);
+ }
+ bool has_alt_flag() const { return FlagsContains(flags_, Flags::kAlt); }
+ bool has_zero_flag() const { return FlagsContains(flags_, Flags::kZero); }
FormatConversionChar conversion_char() const {
// Keep this field first in the struct. It generates better code when
@@ -306,7 +324,7 @@ struct FormatConversionSpecImplFriend final {
conv->precision_ = p;
}
static std::string FlagsToString(const FormatConversionSpecImpl& spec) {
- return spec.flags_.ToString();
+ return str_format_internal::FlagsToString(spec.flags_);
}
};
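
Editor's note: the struct of one-bit fields is replaced by a scoped enum used as a bit mask: operator| combines flags in a single byte, FlagsContains tests membership, and "no bits set" doubles as the kBasic fast-path marker. A self-contained sketch of the same pattern with a reduced set of enumerators (illustrative, not the Abseil types):

#include <cstdint>
#include <cstdio>

enum class Flags : uint8_t {
  kBasic = 0,        // no bits set: eligible for the fast path
  kLeft = 1 << 0,
  kShowPos = 1 << 1,
  kZero = 1 << 2,
};

constexpr Flags operator|(Flags a, Flags b) {
  return static_cast<Flags>(static_cast<uint8_t>(a) | static_cast<uint8_t>(b));
}

constexpr bool FlagsContains(Flags haystack, Flags needle) {
  return (static_cast<uint8_t>(haystack) & static_cast<uint8_t>(needle)) ==
         static_cast<uint8_t>(needle);
}

int main() {
  Flags f = Flags::kLeft | Flags::kZero;
  std::printf("left=%d show_pos=%d basic=%d\n",
              FlagsContains(f, Flags::kLeft) ? 1 : 0,     // 1
              FlagsContains(f, Flags::kShowPos) ? 1 : 0,  // 0
              f == Flags::kBasic ? 1 : 0);                // 0
  return 0;
}
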
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc b/third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc
index f308d02351..2c9c07dacc 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc
@@ -34,60 +34,67 @@ namespace str_format_internal {
using CC = FormatConversionCharInternal;
using LM = LengthMod;
+// Abbreviations to fit in the table below.
+constexpr auto f_sign = Flags::kSignCol;
+constexpr auto f_alt = Flags::kAlt;
+constexpr auto f_pos = Flags::kShowPos;
+constexpr auto f_left = Flags::kLeft;
+constexpr auto f_zero = Flags::kZero;
+
ABSL_CONST_INIT const ConvTag kTags[256] = {
- {}, {}, {}, {}, {}, {}, {}, {}, // 00-07
- {}, {}, {}, {}, {}, {}, {}, {}, // 08-0f
- {}, {}, {}, {}, {}, {}, {}, {}, // 10-17
- {}, {}, {}, {}, {}, {}, {}, {}, // 18-1f
- {}, {}, {}, {}, {}, {}, {}, {}, // 20-27
- {}, {}, {}, {}, {}, {}, {}, {}, // 28-2f
- {}, {}, {}, {}, {}, {}, {}, {}, // 30-37
- {}, {}, {}, {}, {}, {}, {}, {}, // 38-3f
- {}, CC::A, {}, {}, {}, CC::E, CC::F, CC::G, // @ABCDEFG
- {}, {}, {}, {}, LM::L, {}, {}, {}, // HIJKLMNO
- {}, {}, {}, {}, {}, {}, {}, {}, // PQRSTUVW
- CC::X, {}, {}, {}, {}, {}, {}, {}, // XYZ[\]^_
- {}, CC::a, {}, CC::c, CC::d, CC::e, CC::f, CC::g, // `abcdefg
- LM::h, CC::i, LM::j, {}, LM::l, {}, CC::n, CC::o, // hijklmno
- CC::p, LM::q, {}, CC::s, LM::t, CC::u, {}, {}, // pqrstuvw
- CC::x, {}, LM::z, {}, {}, {}, {}, {}, // xyz{|}!
- {}, {}, {}, {}, {}, {}, {}, {}, // 80-87
- {}, {}, {}, {}, {}, {}, {}, {}, // 88-8f
- {}, {}, {}, {}, {}, {}, {}, {}, // 90-97
- {}, {}, {}, {}, {}, {}, {}, {}, // 98-9f
- {}, {}, {}, {}, {}, {}, {}, {}, // a0-a7
- {}, {}, {}, {}, {}, {}, {}, {}, // a8-af
- {}, {}, {}, {}, {}, {}, {}, {}, // b0-b7
- {}, {}, {}, {}, {}, {}, {}, {}, // b8-bf
- {}, {}, {}, {}, {}, {}, {}, {}, // c0-c7
- {}, {}, {}, {}, {}, {}, {}, {}, // c8-cf
- {}, {}, {}, {}, {}, {}, {}, {}, // d0-d7
- {}, {}, {}, {}, {}, {}, {}, {}, // d8-df
- {}, {}, {}, {}, {}, {}, {}, {}, // e0-e7
- {}, {}, {}, {}, {}, {}, {}, {}, // e8-ef
- {}, {}, {}, {}, {}, {}, {}, {}, // f0-f7
- {}, {}, {}, {}, {}, {}, {}, {}, // f8-ff
+ {}, {}, {}, {}, {}, {}, {}, {}, // 00-07
+ {}, {}, {}, {}, {}, {}, {}, {}, // 08-0f
+ {}, {}, {}, {}, {}, {}, {}, {}, // 10-17
+ {}, {}, {}, {}, {}, {}, {}, {}, // 18-1f
+ f_sign, {}, {}, f_alt, {}, {}, {}, {}, // !"#$%&'
+ {}, {}, {}, f_pos, {}, f_left, {}, {}, // ()*+,-./
+ f_zero, {}, {}, {}, {}, {}, {}, {}, // 01234567
+ {}, {}, {}, {}, {}, {}, {}, {}, // 89:;<=>?
+ {}, CC::A, {}, {}, {}, CC::E, CC::F, CC::G, // @ABCDEFG
+ {}, {}, {}, {}, LM::L, {}, {}, {}, // HIJKLMNO
+ {}, {}, {}, {}, {}, {}, {}, {}, // PQRSTUVW
+ CC::X, {}, {}, {}, {}, {}, {}, {}, // XYZ[\]^_
+ {}, CC::a, {}, CC::c, CC::d, CC::e, CC::f, CC::g, // `abcdefg
+ LM::h, CC::i, LM::j, {}, LM::l, {}, CC::n, CC::o, // hijklmno
+ CC::p, LM::q, {}, CC::s, LM::t, CC::u, {}, {}, // pqrstuvw
+ CC::x, {}, LM::z, {}, {}, {}, {}, {}, // xyz{|}!
+ {}, {}, {}, {}, {}, {}, {}, {}, // 80-87
+ {}, {}, {}, {}, {}, {}, {}, {}, // 88-8f
+ {}, {}, {}, {}, {}, {}, {}, {}, // 90-97
+ {}, {}, {}, {}, {}, {}, {}, {}, // 98-9f
+ {}, {}, {}, {}, {}, {}, {}, {}, // a0-a7
+ {}, {}, {}, {}, {}, {}, {}, {}, // a8-af
+ {}, {}, {}, {}, {}, {}, {}, {}, // b0-b7
+ {}, {}, {}, {}, {}, {}, {}, {}, // b8-bf
+ {}, {}, {}, {}, {}, {}, {}, {}, // c0-c7
+ {}, {}, {}, {}, {}, {}, {}, {}, // c8-cf
+ {}, {}, {}, {}, {}, {}, {}, {}, // d0-d7
+ {}, {}, {}, {}, {}, {}, {}, {}, // d8-df
+ {}, {}, {}, {}, {}, {}, {}, {}, // e0-e7
+ {}, {}, {}, {}, {}, {}, {}, {}, // e8-ef
+ {}, {}, {}, {}, {}, {}, {}, {}, // f0-f7
+ {}, {}, {}, {}, {}, {}, {}, {}, // f8-ff
};
namespace {
bool CheckFastPathSetting(const UnboundConversion& conv) {
- bool should_be_basic = !conv.flags.left && //
- !conv.flags.show_pos && //
- !conv.flags.sign_col && //
- !conv.flags.alt && //
- !conv.flags.zero && //
- (conv.width.value() == -1) &&
- (conv.precision.value() == -1);
- if (should_be_basic != conv.flags.basic) {
+ bool width_precision_needed =
+ conv.width.value() >= 0 || conv.precision.value() >= 0;
+ if (width_precision_needed && conv.flags == Flags::kBasic) {
fprintf(stderr,
"basic=%d left=%d show_pos=%d sign_col=%d alt=%d zero=%d "
"width=%d precision=%d\n",
- conv.flags.basic, conv.flags.left, conv.flags.show_pos,
- conv.flags.sign_col, conv.flags.alt, conv.flags.zero,
- conv.width.value(), conv.precision.value());
+ conv.flags == Flags::kBasic ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kLeft) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kShowPos) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kSignCol) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kAlt) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kZero) ? 1 : 0, conv.width.value(),
+ conv.precision.value());
+ return false;
}
- return should_be_basic == conv.flags.basic;
+ return true;
}
template <bool is_positional>
@@ -131,40 +138,21 @@ const char *ConsumeConversion(const char *pos, const char *const end,
ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
// We should start with the basic flag on.
- assert(conv->flags.basic);
+ assert(conv->flags == Flags::kBasic);
// Any non alpha character makes this conversion not basic.
// This includes flags (-+ #0), width (1-9, *) or precision (.).
// All conversion characters and length modifiers are alpha characters.
if (c < 'A') {
- conv->flags.basic = false;
-
- for (; c <= '0';) {
- // FIXME: We might be able to speed this up reusing the lookup table from
- // above. It might require changing Flags to be a plain integer where we
- // can |= a value.
- switch (c) {
- case '-':
- conv->flags.left = true;
- break;
- case '+':
- conv->flags.show_pos = true;
- break;
- case ' ':
- conv->flags.sign_col = true;
- break;
- case '#':
- conv->flags.alt = true;
- break;
- case '0':
- conv->flags.zero = true;
- break;
- default:
- goto flags_done;
+ while (c <= '0') {
+ auto tag = GetTagForChar(c);
+ if (tag.is_flags()) {
+ conv->flags = conv->flags | tag.as_flags();
+ ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
+ } else {
+ break;
}
- ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
}
-flags_done:
if (c <= '9') {
if (c >= '0') {
@@ -173,12 +161,12 @@ flags_done:
if (ABSL_PREDICT_FALSE(*next_arg != 0)) return nullptr;
// Positional conversion.
*next_arg = -1;
- conv->flags = Flags();
- conv->flags.basic = true;
return ConsumeConversion<true>(original_pos, end, conv, next_arg);
}
+ conv->flags = conv->flags | Flags::kNonBasic;
conv->width.set_value(maybe_width);
} else if (c == '*') {
+ conv->flags = conv->flags | Flags::kNonBasic;
ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
if (is_positional) {
if (ABSL_PREDICT_FALSE(c < '1' || c > '9')) return nullptr;
@@ -192,6 +180,7 @@ flags_done:
}
if (c == '.') {
+ conv->flags = conv->flags | Flags::kNonBasic;
ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
if (std::isdigit(c)) {
conv->precision.set_value(parse_digits());
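
Editor's note: the flag-parsing switch is gone; flag characters now live in the same 256-entry kTags table as conversion characters and length modifiers, so the loop is one table lookup plus an OR per character. A standalone sketch of table-driven flag accumulation (simplified; the real parser also has to distinguish a leading '0' flag from width digits):

#include <cstdint>
#include <cstdio>

enum class Flags : uint8_t { kBasic = 0, kLeft = 1, kShowPos = 2,
                             kSignCol = 4, kAlt = 8, kZero = 16 };
constexpr Flags operator|(Flags a, Flags b) {
  return static_cast<Flags>(static_cast<uint8_t>(a) | static_cast<uint8_t>(b));
}

// Accumulate flags until a non-flag character is reached; *rest is set to it.
Flags ParseLeadingFlags(const char* p, const char** rest) {
  Flags table[256] = {};  // every entry defaults to kBasic (0), i.e. "not a flag"
  table[(unsigned char)'-'] = Flags::kLeft;
  table[(unsigned char)'+'] = Flags::kShowPos;
  table[(unsigned char)' '] = Flags::kSignCol;
  table[(unsigned char)'#'] = Flags::kAlt;
  table[(unsigned char)'0'] = Flags::kZero;
  Flags flags = Flags::kBasic;
  while (table[(unsigned char)*p] != Flags::kBasic) {
    flags = flags | table[(unsigned char)*p];
    ++p;
  }
  *rest = p;
  return flags;
}

int main() {
  const char* rest = nullptr;
  Flags f = ParseLeadingFlags("-+0d", &rest);
  std::printf("flags=0x%02x rest=%s\n", static_cast<unsigned>(f), rest);  // 0x13 rest=d
  return 0;
}
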
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/parser.h b/third_party/abseil-cpp/absl/strings/internal/str_format/parser.h
index 6504dd3ddc..ad8646edff 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/parser.h
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/parser.h
@@ -41,10 +41,7 @@ std::string LengthModToString(LengthMod v);
// The analyzed properties of a single specified conversion.
struct UnboundConversion {
- UnboundConversion()
- : flags() /* This is required to zero all the fields of flags. */ {
- flags.basic = true;
- }
+ UnboundConversion() {}
class InputValue {
public:
@@ -79,7 +76,7 @@ struct UnboundConversion {
InputValue width;
InputValue precision;
- Flags flags;
+ Flags flags = Flags::kBasic;
LengthMod length_mod = LengthMod::none;
FormatConversionChar conv = FormatConversionCharInternal::kNone;
};
@@ -93,32 +90,43 @@ const char* ConsumeUnboundConversion(const char* p, const char* end,
UnboundConversion* conv, int* next_arg);
// Helper tag class for the table below.
-// It allows fast `char -> ConversionChar/LengthMod` checking and
+// It allows fast `char -> ConversionChar/LengthMod/Flags` checking and
// conversions.
class ConvTag {
public:
constexpr ConvTag(FormatConversionChar conversion_char) // NOLINT
- : tag_(static_cast<int8_t>(conversion_char)) {}
- // We invert the length modifiers to make them negative so that we can easily
- // test for them.
+ : tag_(static_cast<uint8_t>(conversion_char)) {}
constexpr ConvTag(LengthMod length_mod) // NOLINT
- : tag_(~static_cast<std::int8_t>(length_mod)) {}
- // Everything else is -128, which is negative to make is_conv() simpler.
- constexpr ConvTag() : tag_(-128) {}
+ : tag_(0x80 | static_cast<uint8_t>(length_mod)) {}
+ constexpr ConvTag(Flags flags) // NOLINT
+ : tag_(0xc0 | static_cast<uint8_t>(flags)) {}
+ constexpr ConvTag() : tag_(0xFF) {}
+
+ bool is_conv() const { return (tag_ & 0x80) == 0; }
+ bool is_length() const { return (tag_ & 0xC0) == 0x80; }
+ bool is_flags() const { return (tag_ & 0xE0) == 0xC0; }
- bool is_conv() const { return tag_ >= 0; }
- bool is_length() const { return tag_ < 0 && tag_ != -128; }
FormatConversionChar as_conv() const {
assert(is_conv());
+ assert(!is_length());
+ assert(!is_flags());
return static_cast<FormatConversionChar>(tag_);
}
LengthMod as_length() const {
+ assert(!is_conv());
assert(is_length());
- return static_cast<LengthMod>(~tag_);
+ assert(!is_flags());
+ return static_cast<LengthMod>(tag_ & 0x3F);
+ }
+ Flags as_flags() const {
+ assert(!is_conv());
+ assert(!is_length());
+ assert(is_flags());
+ return static_cast<Flags>(tag_ & 0x1F);
}
private:
- std::int8_t tag_;
+ uint8_t tag_;
};
extern const ConvTag kTags[256];
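
Editor's note: ConvTag now packs three kinds of entries plus "none" into one byte by reserving the high bits as a discriminator: 0xxxxxxx is a conversion character, 10xxxxxx a length modifier, 110xxxxx a flag, and 0xFF means no match. A compilable sketch of that encoding with stand-in enums (not the real FormatConversionChar/LengthMod/Flags):

#include <cassert>
#include <cstdint>

enum class Conv : uint8_t { d = 0, s = 1 };
enum class Length : uint8_t { h = 0, l = 1 };
enum class Flag : uint8_t { kLeft = 1, kZero = 16 };  // must fit in 5 bits

class Tag {
 public:
  constexpr Tag(Conv c) : tag_(static_cast<uint8_t>(c)) {}           // 0xxxxxxx
  constexpr Tag(Length l) : tag_(0x80 | static_cast<uint8_t>(l)) {}  // 10xxxxxx
  constexpr Tag(Flag f) : tag_(0xc0 | static_cast<uint8_t>(f)) {}    // 110xxxxx
  constexpr Tag() : tag_(0xFF) {}                                    // none

  bool is_conv() const { return (tag_ & 0x80) == 0; }
  bool is_length() const { return (tag_ & 0xC0) == 0x80; }
  bool is_flag() const { return (tag_ & 0xE0) == 0xC0; }

 private:
  uint8_t tag_;
};

int main() {
  assert(Tag(Conv::d).is_conv());
  assert(Tag(Length::l).is_length());
  assert(Tag(Flag::kZero).is_flag());
  assert(!Tag().is_conv() && !Tag().is_length() && !Tag().is_flag());
  return 0;
}
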
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_format/parser_test.cc b/third_party/abseil-cpp/absl/strings/internal/str_format/parser_test.cc
index a5fa1c79aa..fe0d296360 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_format/parser_test.cc
+++ b/third_party/abseil-cpp/absl/strings/internal/str_format/parser_test.cc
@@ -270,15 +270,22 @@ TEST_F(ConsumeUnboundConversionTest, Flags) {
for (int k = 0; k < kNumFlags; ++k)
if ((i >> k) & 1) fmt += kAllFlags[k];
// flag order shouldn't matter
- if (rev == 1) { std::reverse(fmt.begin(), fmt.end()); }
+ if (rev == 1) {
+ std::reverse(fmt.begin(), fmt.end());
+ }
fmt += 'd';
SCOPED_TRACE(fmt);
EXPECT_TRUE(Run(fmt.c_str()));
- EXPECT_EQ(fmt.find('-') == std::string::npos, !o.flags.left);
- EXPECT_EQ(fmt.find('+') == std::string::npos, !o.flags.show_pos);
- EXPECT_EQ(fmt.find(' ') == std::string::npos, !o.flags.sign_col);
- EXPECT_EQ(fmt.find('#') == std::string::npos, !o.flags.alt);
- EXPECT_EQ(fmt.find('0') == std::string::npos, !o.flags.zero);
+ EXPECT_EQ(fmt.find('-') == std::string::npos,
+ !FlagsContains(o.flags, Flags::kLeft));
+ EXPECT_EQ(fmt.find('+') == std::string::npos,
+ !FlagsContains(o.flags, Flags::kShowPos));
+ EXPECT_EQ(fmt.find(' ') == std::string::npos,
+ !FlagsContains(o.flags, Flags::kSignCol));
+ EXPECT_EQ(fmt.find('#') == std::string::npos,
+ !FlagsContains(o.flags, Flags::kAlt));
+ EXPECT_EQ(fmt.find('0') == std::string::npos,
+ !FlagsContains(o.flags, Flags::kZero));
}
}
}
@@ -288,14 +295,14 @@ TEST_F(ConsumeUnboundConversionTest, BasicFlag) {
for (const char* fmt : {"d", "llx", "G", "1$X"}) {
SCOPED_TRACE(fmt);
EXPECT_TRUE(Run(fmt));
- EXPECT_TRUE(o.flags.basic);
+ EXPECT_EQ(o.flags, Flags::kBasic);
}
// Flag is off
for (const char* fmt : {"3d", ".llx", "-G", "1$#X"}) {
SCOPED_TRACE(fmt);
EXPECT_TRUE(Run(fmt));
- EXPECT_FALSE(o.flags.basic);
+ EXPECT_NE(o.flags, Flags::kBasic);
}
}
diff --git a/third_party/abseil-cpp/absl/strings/internal/str_split_internal.h b/third_party/abseil-cpp/absl/strings/internal/str_split_internal.h
index a2f41c1531..17c1bfe8d3 100644
--- a/third_party/abseil-cpp/absl/strings/internal/str_split_internal.h
+++ b/third_party/abseil-cpp/absl/strings/internal/str_split_internal.h
@@ -32,7 +32,7 @@
#include <array>
#include <initializer_list>
#include <iterator>
-#include <map>
+#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>
@@ -182,6 +182,13 @@ template <typename T>
struct HasConstIterator<T, absl::void_t<typename T::const_iterator>>
: std::true_type {};
+// HasEmplace<T>::value is true iff there exists a method T::emplace().
+template <typename T, typename = void>
+struct HasEmplace : std::false_type {};
+template <typename T>
+struct HasEmplace<T, absl::void_t<decltype(std::declval<T>().emplace())>>
+ : std::true_type {};
+
// IsInitializerList<T>::value is true iff T is an std::initializer_list. More
// details below in Splitter<> where this is used.
std::false_type IsInitializerListDispatch(...); // default: No
@@ -372,50 +379,43 @@ class Splitter {
// value.
template <typename Container, typename First, typename Second>
struct ConvertToContainer<Container, std::pair<const First, Second>, true> {
+ using iterator = typename Container::iterator;
+
Container operator()(const Splitter& splitter) const {
Container m;
- typename Container::iterator it;
+ iterator it;
bool insert = true;
- for (const auto& sp : splitter) {
+ for (const absl::string_view sv : splitter) {
if (insert) {
- it = Inserter<Container>::Insert(&m, First(sp), Second());
+ it = InsertOrEmplace(&m, sv);
} else {
- it->second = Second(sp);
+ it->second = Second(sv);
}
insert = !insert;
}
return m;
}
- // Inserts the key and value into the given map, returning an iterator to
- // the inserted item. Specialized for std::map and std::multimap to use
- // emplace() and adapt emplace()'s return value.
- template <typename Map>
- struct Inserter {
- using M = Map;
- template <typename... Args>
- static typename M::iterator Insert(M* m, Args&&... args) {
- return m->insert(std::make_pair(std::forward<Args>(args)...)).first;
- }
- };
-
- template <typename... Ts>
- struct Inserter<std::map<Ts...>> {
- using M = std::map<Ts...>;
- template <typename... Args>
- static typename M::iterator Insert(M* m, Args&&... args) {
- return m->emplace(std::make_pair(std::forward<Args>(args)...)).first;
- }
- };
-
- template <typename... Ts>
- struct Inserter<std::multimap<Ts...>> {
- using M = std::multimap<Ts...>;
- template <typename... Args>
- static typename M::iterator Insert(M* m, Args&&... args) {
- return m->emplace(std::make_pair(std::forward<Args>(args)...));
- }
- };
+ // Inserts the key and an empty value into the map, returning an iterator to
+ // the inserted item. We use emplace() if available, otherwise insert().
+ template <typename M>
+ static absl::enable_if_t<HasEmplace<M>::value, iterator> InsertOrEmplace(
+ M* m, absl::string_view key) {
+ // Use piecewise_construct to support old versions of gcc in which pair
+ // constructor can't otherwise construct string from string_view.
+ return ToIter(m->emplace(std::piecewise_construct, std::make_tuple(key),
+ std::tuple<>()));
+ }
+ template <typename M>
+ static absl::enable_if_t<!HasEmplace<M>::value, iterator> InsertOrEmplace(
+ M* m, absl::string_view key) {
+ return ToIter(m->insert(std::make_pair(First(key), Second(""))));
+ }
+
+ static iterator ToIter(std::pair<iterator, bool> pair) {
+ return pair.first;
+ }
+ static iterator ToIter(iterator iter) { return iter; }
};
StringType text_;
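
Editor's note: the per-container Inserter specializations are replaced by a single trait: HasEmplace detects whether the container has a zero-argument emplace(), and enable_if picks the matching InsertOrEmplace overload, so absl::btree_map and friends get emplace() without listing each type. A standalone sketch of the detection idiom (using std::void_t for brevity; the diff uses absl::void_t/absl::enable_if_t):

#include <cstdio>
#include <map>
#include <string>
#include <type_traits>
#include <utility>
#include <vector>

// The partial specialization is selected only when declval<T>().emplace()
// is a well-formed expression; otherwise SFINAE falls back to false_type.
template <typename T, typename = void>
struct HasEmplace : std::false_type {};
template <typename T>
struct HasEmplace<T, std::void_t<decltype(std::declval<T>().emplace())>>
    : std::true_type {};

int main() {
  static_assert(HasEmplace<std::map<std::string, std::string>>::value,
                "std::map has a zero-argument emplace()");
  static_assert(!HasEmplace<std::vector<int>>::value,
                "std::vector::emplace needs an iterator, so detection fails");
  std::puts("ok");
  return 0;
}
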
diff --git a/third_party/abseil-cpp/absl/strings/numbers.h b/third_party/abseil-cpp/absl/strings/numbers.h
index ffc738fa41..1780bb44bd 100644
--- a/third_party/abseil-cpp/absl/strings/numbers.h
+++ b/third_party/abseil-cpp/absl/strings/numbers.h
@@ -124,6 +124,7 @@ inline void PutTwoDigits(size_t i, char* buf) {
}
// safe_strto?() functions for implementing SimpleAtoi()
+
bool safe_strto32_base(absl::string_view text, int32_t* value, int base);
bool safe_strto64_base(absl::string_view text, int64_t* value, int base);
bool safe_strto128_base(absl::string_view text, absl::int128* value,
diff --git a/third_party/abseil-cpp/absl/strings/str_cat.cc b/third_party/abseil-cpp/absl/strings/str_cat.cc
index dd5d25b0d6..f4a77493a4 100644
--- a/third_party/abseil-cpp/absl/strings/str_cat.cc
+++ b/third_party/abseil-cpp/absl/strings/str_cat.cc
@@ -174,7 +174,7 @@ void AppendPieces(std::string* dest,
ASSERT_NO_OVERLAP(*dest, piece);
total_size += piece.size();
}
- strings_internal::STLStringResizeUninitialized(dest, total_size);
+ strings_internal::STLStringResizeUninitializedAmortized(dest, total_size);
char* const begin = &(*dest)[0];
char* out = begin + old_size;
@@ -199,7 +199,7 @@ void StrAppend(std::string* dest, const AlphaNum& a, const AlphaNum& b) {
ASSERT_NO_OVERLAP(*dest, a);
ASSERT_NO_OVERLAP(*dest, b);
std::string::size_type old_size = dest->size();
- strings_internal::STLStringResizeUninitialized(
+ strings_internal::STLStringResizeUninitializedAmortized(
dest, old_size + a.size() + b.size());
char* const begin = &(*dest)[0];
char* out = begin + old_size;
@@ -214,7 +214,7 @@ void StrAppend(std::string* dest, const AlphaNum& a, const AlphaNum& b,
ASSERT_NO_OVERLAP(*dest, b);
ASSERT_NO_OVERLAP(*dest, c);
std::string::size_type old_size = dest->size();
- strings_internal::STLStringResizeUninitialized(
+ strings_internal::STLStringResizeUninitializedAmortized(
dest, old_size + a.size() + b.size() + c.size());
char* const begin = &(*dest)[0];
char* out = begin + old_size;
@@ -231,7 +231,7 @@ void StrAppend(std::string* dest, const AlphaNum& a, const AlphaNum& b,
ASSERT_NO_OVERLAP(*dest, c);
ASSERT_NO_OVERLAP(*dest, d);
std::string::size_type old_size = dest->size();
- strings_internal::STLStringResizeUninitialized(
+ strings_internal::STLStringResizeUninitializedAmortized(
dest, old_size + a.size() + b.size() + c.size() + d.size());
char* const begin = &(*dest)[0];
char* out = begin + old_size;
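
Editor's note: because StrAppend and AppendPieces now resize with the amortized helper, appending to the same string in a loop reuses spare capacity instead of reallocating to the exact size each time. A hypothetical example of the pattern that benefits:

#include <string>
#include "absl/strings/str_cat.h"

int main() {
  std::string csv;
  for (int i = 0; i < 100000; ++i) {
    absl::StrAppend(&csv, i, ",");  // each call can often grow within capacity
  }
  return csv.empty() ? 1 : 0;
}
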
diff --git a/third_party/abseil-cpp/absl/strings/str_split_test.cc b/third_party/abseil-cpp/absl/strings/str_split_test.cc
index 7f7c097fae..f472f9eda1 100644
--- a/third_party/abseil-cpp/absl/strings/str_split_test.cc
+++ b/third_party/abseil-cpp/absl/strings/str_split_test.cc
@@ -29,6 +29,8 @@
#include "gtest/gtest.h"
#include "absl/base/dynamic_annotations.h"
#include "absl/base/macros.h"
+#include "absl/container/btree_map.h"
+#include "absl/container/btree_set.h"
#include "absl/container/flat_hash_map.h"
#include "absl/container/node_hash_map.h"
#include "absl/strings/numbers.h"
@@ -405,6 +407,10 @@ TEST(Splitter, ConversionOperator) {
TestConversionOperator<std::set<std::string>>(splitter);
TestConversionOperator<std::multiset<absl::string_view>>(splitter);
TestConversionOperator<std::multiset<std::string>>(splitter);
+ TestConversionOperator<absl::btree_set<absl::string_view>>(splitter);
+ TestConversionOperator<absl::btree_set<std::string>>(splitter);
+ TestConversionOperator<absl::btree_multiset<absl::string_view>>(splitter);
+ TestConversionOperator<absl::btree_multiset<std::string>>(splitter);
TestConversionOperator<std::unordered_set<std::string>>(splitter);
// Tests conversion to map-like objects.
@@ -421,6 +427,22 @@ TEST(Splitter, ConversionOperator) {
TestMapConversionOperator<std::multimap<std::string, absl::string_view>>(
splitter);
TestMapConversionOperator<std::multimap<std::string, std::string>>(splitter);
+ TestMapConversionOperator<
+ absl::btree_map<absl::string_view, absl::string_view>>(splitter);
+ TestMapConversionOperator<absl::btree_map<absl::string_view, std::string>>(
+ splitter);
+ TestMapConversionOperator<absl::btree_map<std::string, absl::string_view>>(
+ splitter);
+ TestMapConversionOperator<absl::btree_map<std::string, std::string>>(
+ splitter);
+ TestMapConversionOperator<
+ absl::btree_multimap<absl::string_view, absl::string_view>>(splitter);
+ TestMapConversionOperator<
+ absl::btree_multimap<absl::string_view, std::string>>(splitter);
+ TestMapConversionOperator<
+ absl::btree_multimap<std::string, absl::string_view>>(splitter);
+ TestMapConversionOperator<absl::btree_multimap<std::string, std::string>>(
+ splitter);
TestMapConversionOperator<std::unordered_map<std::string, std::string>>(
splitter);
TestMapConversionOperator<
diff --git a/third_party/abseil-cpp/absl/strings/string_view.cc b/third_party/abseil-cpp/absl/strings/string_view.cc
index c5f5de936d..d596e08cde 100644
--- a/third_party/abseil-cpp/absl/strings/string_view.cc
+++ b/third_party/abseil-cpp/absl/strings/string_view.cc
@@ -78,8 +78,8 @@ std::ostream& operator<<(std::ostream& o, string_view piece) {
return o;
}
-string_view::size_type string_view::find(string_view s, size_type pos) const
- noexcept {
+string_view::size_type string_view::find(string_view s,
+ size_type pos) const noexcept {
if (empty() || pos > length_) {
if (empty() && pos == 0 && s.empty()) return 0;
return npos;
@@ -98,8 +98,8 @@ string_view::size_type string_view::find(char c, size_type pos) const noexcept {
return result != nullptr ? result - ptr_ : npos;
}
-string_view::size_type string_view::rfind(string_view s, size_type pos) const
- noexcept {
+string_view::size_type string_view::rfind(string_view s,
+ size_type pos) const noexcept {
if (length_ < s.length_) return npos;
if (s.empty()) return std::min(length_, pos);
const char* last = ptr_ + std::min(length_ - s.length_, pos) + s.length_;
@@ -108,8 +108,8 @@ string_view::size_type string_view::rfind(string_view s, size_type pos) const
}
// Search range is [0..pos] inclusive. If pos == npos, search everything.
-string_view::size_type string_view::rfind(char c, size_type pos) const
- noexcept {
+string_view::size_type string_view::rfind(char c,
+ size_type pos) const noexcept {
// Note: memrchr() is not available on Windows.
if (empty()) return npos;
for (size_type i = std::min(pos, length_ - 1);; --i) {
@@ -121,9 +121,8 @@ string_view::size_type string_view::rfind(char c, size_type pos) const
return npos;
}
-string_view::size_type string_view::find_first_of(string_view s,
- size_type pos) const
- noexcept {
+string_view::size_type string_view::find_first_of(
+ string_view s, size_type pos) const noexcept {
if (empty() || s.empty()) {
return npos;
}
@@ -138,9 +137,8 @@ string_view::size_type string_view::find_first_of(string_view s,
return npos;
}
-string_view::size_type string_view::find_first_not_of(string_view s,
- size_type pos) const
- noexcept {
+string_view::size_type string_view::find_first_not_of(
+ string_view s, size_type pos) const noexcept {
if (empty()) return npos;
// Avoid the cost of LookupTable() for a single-character search.
if (s.length_ == 1) return find_first_not_of(s.ptr_[0], pos);
@@ -153,9 +151,8 @@ string_view::size_type string_view::find_first_not_of(string_view s,
return npos;
}
-string_view::size_type string_view::find_first_not_of(char c,
- size_type pos) const
- noexcept {
+string_view::size_type string_view::find_first_not_of(
+ char c, size_type pos) const noexcept {
if (empty()) return npos;
for (; pos < length_; ++pos) {
if (ptr_[pos] != c) {
@@ -180,9 +177,8 @@ string_view::size_type string_view::find_last_of(string_view s,
return npos;
}
-string_view::size_type string_view::find_last_not_of(string_view s,
- size_type pos) const
- noexcept {
+string_view::size_type string_view::find_last_not_of(
+ string_view s, size_type pos) const noexcept {
if (empty()) return npos;
size_type i = std::min(pos, length_ - 1);
if (s.empty()) return i;
@@ -198,9 +194,8 @@ string_view::size_type string_view::find_last_not_of(string_view s,
return npos;
}
-string_view::size_type string_view::find_last_not_of(char c,
- size_type pos) const
- noexcept {
+string_view::size_type string_view::find_last_not_of(
+ char c, size_type pos) const noexcept {
if (empty()) return npos;
size_type i = std::min(pos, length_ - 1);
for (;; --i) {
diff --git a/third_party/abseil-cpp/absl/strings/string_view.h b/third_party/abseil-cpp/absl/strings/string_view.h
index 5260b5b73f..968549be46 100644
--- a/third_party/abseil-cpp/absl/strings/string_view.h
+++ b/third_party/abseil-cpp/absl/strings/string_view.h
@@ -36,6 +36,7 @@
#include <limits>
#include <string>
+#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/throw_delegate.h"
#include "absl/base/macros.h"
@@ -61,6 +62,12 @@ ABSL_NAMESPACE_END
#define ABSL_INTERNAL_STRING_VIEW_MEMCMP memcmp
#endif // ABSL_HAVE_BUILTIN(__builtin_memcmp)
+#if defined(__cplusplus) && __cplusplus >= 201402L
+#define ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR constexpr
+#else
+#define ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR
+#endif
+
namespace absl {
ABSL_NAMESPACE_BEGIN
@@ -180,8 +187,8 @@ class string_view {
template <typename Allocator>
string_view( // NOLINT(runtime/explicit)
- const std::basic_string<char, std::char_traits<char>, Allocator>&
- str) noexcept
+ const std::basic_string<char, std::char_traits<char>, Allocator>& str
+ ABSL_ATTRIBUTE_LIFETIME_BOUND) noexcept
// This is implemented in terms of `string_view(p, n)` so `str.size()`
// doesn't need to be reevaluated after `ptr_` is set.
: string_view(str.data(), str.size()) {}
@@ -264,9 +271,7 @@ class string_view {
// string_view::size()
//
// Returns the number of characters in the `string_view`.
- constexpr size_type size() const noexcept {
- return length_;
- }
+ constexpr size_type size() const noexcept { return length_; }
// string_view::length()
//
@@ -333,7 +338,7 @@ class string_view {
//
// Removes the first `n` characters from the `string_view`. Note that the
// underlying string is not changed, only the view.
- void remove_prefix(size_type n) {
+ ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR void remove_prefix(size_type n) {
ABSL_HARDENING_ASSERT(n <= length_);
ptr_ += n;
length_ -= n;
@@ -343,7 +348,7 @@ class string_view {
//
// Removes the last `n` characters from the `string_view`. Note that the
// underlying string is not changed, only the view.
- void remove_suffix(size_type n) {
+ ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR void remove_suffix(size_type n) {
ABSL_HARDENING_ASSERT(n <= length_);
length_ -= n;
}
@@ -351,7 +356,7 @@ class string_view {
// string_view::swap()
//
// Swaps this `string_view` with another `string_view`.
- void swap(string_view& s) noexcept {
+ ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR void swap(string_view& s) noexcept {
auto t = *this;
*this = s;
s = t;
@@ -388,7 +393,7 @@ class string_view {
// `n`) as another string_view. This function throws `std::out_of_range` if
// `pos > size`.
// Use absl::ClippedSubstr if you need a truncating substr operation.
- constexpr string_view substr(size_type pos, size_type n = npos) const {
+ constexpr string_view substr(size_type pos = 0, size_type n = npos) const {
return ABSL_PREDICT_FALSE(pos > length_)
? (base_internal::ThrowStdOutOfRange(
"absl::string_view::substr"),
@@ -398,12 +403,10 @@ class string_view {
// string_view::compare()
//
- // Performs a lexicographical comparison between the `string_view` and
- // another `absl::string_view`, returning -1 if `this` is less than, 0 if
- // `this` is equal to, and 1 if `this` is greater than the passed string
- // view. Note that in the case of data equality, a further comparison is made
- // on the respective sizes of the two `string_view`s to determine which is
- // smaller, equal, or greater.
+ // Performs a lexicographical comparison between this `string_view` and
+ // another `string_view` `x`, returning a negative value if `*this` is less
+ // than `x`, 0 if `*this` is equal to `x`, and a positive value if `*this`
+ // is greater than `x`.
constexpr int compare(string_view x) const noexcept {
return CompareImpl(length_, x.length_,
Min(length_, x.length_) == 0
@@ -414,31 +417,31 @@ class string_view {
// Overload of `string_view::compare()` for comparing a substring of the
// 'string_view` and another `absl::string_view`.
- int compare(size_type pos1, size_type count1, string_view v) const {
+ constexpr int compare(size_type pos1, size_type count1, string_view v) const {
return substr(pos1, count1).compare(v);
}
// Overload of `string_view::compare()` for comparing a substring of the
// `string_view` and a substring of another `absl::string_view`.
- int compare(size_type pos1, size_type count1, string_view v, size_type pos2,
- size_type count2) const {
+ constexpr int compare(size_type pos1, size_type count1, string_view v,
+ size_type pos2, size_type count2) const {
return substr(pos1, count1).compare(v.substr(pos2, count2));
}
// Overload of `string_view::compare()` for comparing a `string_view` and
- // a different C-style string `s`.
- int compare(const char* s) const { return compare(string_view(s)); }
+ // a different C-style string `s`.
+ constexpr int compare(const char* s) const { return compare(string_view(s)); }
// Overload of `string_view::compare()` for comparing a substring of the
// `string_view` and a different C-style string `s`.
- int compare(size_type pos1, size_type count1, const char* s) const {
+ constexpr int compare(size_type pos1, size_type count1, const char* s) const {
return substr(pos1, count1).compare(string_view(s));
}
// Overload of `string_view::compare()` for comparing a substring of the
// `string_view` and a substring of a different C-style string `s`.
- int compare(size_type pos1, size_type count1, const char* s,
- size_type count2) const {
+ constexpr int compare(size_type pos1, size_type count1, const char* s,
+ size_type count2) const {
return substr(pos1, count1).compare(string_view(s, count2));
}
@@ -455,48 +458,92 @@ class string_view {
// within the `string_view`.
size_type find(char c, size_type pos = 0) const noexcept;
+ // Overload of `string_view::find()` for finding a substring of a different
+ // C-style string `s` within the `string_view`.
+ size_type find(const char* s, size_type pos, size_type count) const {
+ return find(string_view(s, count), pos);
+ }
+
+ // Overload of `string_view::find()` for finding a different C-style string
+ // `s` within the `string_view`.
+ size_type find(const char* s, size_type pos = 0) const {
+ return find(string_view(s), pos);
+ }
+
// string_view::rfind()
//
// Finds the last occurrence of a substring `s` within the `string_view`,
// returning the position of the first character's match, or `npos` if no
// match was found.
- size_type rfind(string_view s, size_type pos = npos) const
- noexcept;
+ size_type rfind(string_view s, size_type pos = npos) const noexcept;
// Overload of `string_view::rfind()` for finding the last given character `c`
// within the `string_view`.
size_type rfind(char c, size_type pos = npos) const noexcept;
+ // Overload of `string_view::rfind()` for finding a substring of a different
+ // C-style string `s` within the `string_view`.
+ size_type rfind(const char* s, size_type pos, size_type count) const {
+ return rfind(string_view(s, count), pos);
+ }
+
+ // Overload of `string_view::rfind()` for finding a different C-style string
+ // `s` within the `string_view`.
+ size_type rfind(const char* s, size_type pos = npos) const {
+ return rfind(string_view(s), pos);
+ }
+
// string_view::find_first_of()
//
// Finds the first occurrence of any of the characters in `s` within the
// `string_view`, returning the start position of the match, or `npos` if no
// match was found.
- size_type find_first_of(string_view s, size_type pos = 0) const
- noexcept;
+ size_type find_first_of(string_view s, size_type pos = 0) const noexcept;
// Overload of `string_view::find_first_of()` for finding a character `c`
// within the `string_view`.
- size_type find_first_of(char c, size_type pos = 0) const
- noexcept {
+ size_type find_first_of(char c, size_type pos = 0) const noexcept {
return find(c, pos);
}
+ // Overload of `string_view::find_first_of()` for finding a substring of a
+ // different C-style string `s` within the `string_view`.
+ size_type find_first_of(const char* s, size_type pos,
+ size_type count) const {
+ return find_first_of(string_view(s, count), pos);
+ }
+
+ // Overload of `string_view::find_first_of()` for finding a different C-style
+ // string `s` within the `string_view`.
+ size_type find_first_of(const char* s, size_type pos = 0) const {
+ return find_first_of(string_view(s), pos);
+ }
+
// string_view::find_last_of()
//
// Finds the last occurrence of any of the characters in `s` within the
// `string_view`, returning the start position of the match, or `npos` if no
// match was found.
- size_type find_last_of(string_view s, size_type pos = npos) const
- noexcept;
+ size_type find_last_of(string_view s, size_type pos = npos) const noexcept;
// Overload of `string_view::find_last_of()` for finding a character `c`
// within the `string_view`.
- size_type find_last_of(char c, size_type pos = npos) const
- noexcept {
+ size_type find_last_of(char c, size_type pos = npos) const noexcept {
return rfind(c, pos);
}
+ // Overload of `string_view::find_last_of()` for finding a substring of a
+ // different C-style string `s` within the `string_view`.
+ size_type find_last_of(const char* s, size_type pos, size_type count) const {
+ return find_last_of(string_view(s, count), pos);
+ }
+
+ // Overload of `string_view::find_last_of()` for finding a different C-style
+ // string `s` within the `string_view`.
+ size_type find_last_of(const char* s, size_type pos = npos) const {
+ return find_last_of(string_view(s), pos);
+ }
+
// string_view::find_first_not_of()
//
// Finds the first occurrence of any of the characters not in `s` within the
@@ -508,18 +555,43 @@ class string_view {
// that is not `c` within the `string_view`.
size_type find_first_not_of(char c, size_type pos = 0) const noexcept;
+ // Overload of `string_view::find_first_not_of()` for finding a substring of a
+ // different C-style string `s` within the `string_view`.
+ size_type find_first_not_of(const char* s, size_type pos,
+ size_type count) const {
+ return find_first_not_of(string_view(s, count), pos);
+ }
+
+ // Overload of `string_view::find_first_not_of()` for finding a different
+ // C-style string `s` within the `string_view`.
+ size_type find_first_not_of(const char* s, size_type pos = 0) const {
+ return find_first_not_of(string_view(s), pos);
+ }
+
// string_view::find_last_not_of()
//
// Finds the last occurrence of any of the characters not in `s` within the
// `string_view`, returning the start position of the last non-match, or
// `npos` if no non-match was found.
size_type find_last_not_of(string_view s,
- size_type pos = npos) const noexcept;
+ size_type pos = npos) const noexcept;
// Overload of `string_view::find_last_not_of()` for finding a character
// that is not `c` within the `string_view`.
- size_type find_last_not_of(char c, size_type pos = npos) const
- noexcept;
+ size_type find_last_not_of(char c, size_type pos = npos) const noexcept;
+
+ // Overload of `string_view::find_last_not_of()` for finding a substring of a
+ // different C-style string `s` within the `string_view`.
+ size_type find_last_not_of(const char* s, size_type pos,
+ size_type count) const {
+ return find_last_not_of(string_view(s, count), pos);
+ }
+
+ // Overload of `string_view::find_last_not_of()` for finding a different
+ // C-style string `s` within the `string_view`.
+ size_type find_last_not_of(const char* s, size_type pos = npos) const {
+ return find_last_not_of(string_view(s), pos);
+ }
private:
static constexpr size_type kMaxSize =
@@ -597,6 +669,7 @@ std::ostream& operator<<(std::ostream& o, string_view piece);
ABSL_NAMESPACE_END
} // namespace absl
+#undef ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR
#undef ABSL_INTERNAL_STRING_VIEW_MEMCMP
#endif // ABSL_USES_STD_STRING_VIEW
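
Editor's note: the new const char* overloads of find/rfind/find_*_of mirror the std::string_view interface, so C-string arguments no longer need an explicit string_view wrap. A short usage example, assuming the patched header:

#include <cassert>
#include "absl/strings/string_view.h"

int main() {
  absl::string_view sv("abcXYZabc");
  assert(sv.find("abc") == 0);
  assert(sv.rfind("abc") == 6);
  assert(sv.find("XY", 0, 2) == 3);  // (s, pos, count) overload
  assert(sv.find_first_of("ZYX") == 3);
  assert(sv.find_first_not_of("abc") == 3);
  assert(sv.find_last_not_of("abc") == 5);
  return 0;
}
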
diff --git a/third_party/abseil-cpp/absl/strings/string_view_test.cc b/third_party/abseil-cpp/absl/strings/string_view_test.cc
index 643af8f81b..2c13dd1c14 100644
--- a/third_party/abseil-cpp/absl/strings/string_view_test.cc
+++ b/third_party/abseil-cpp/absl/strings/string_view_test.cc
@@ -449,6 +449,24 @@ TEST(StringViewTest, STL2) {
EXPECT_EQ(d.find('x', 4), absl::string_view::npos);
EXPECT_EQ(e.find('x', 7), absl::string_view::npos);
+ EXPECT_EQ(a.find(b.data(), 1, 0), 1);
+ EXPECT_EQ(a.find(c.data(), 9, 0), 9);
+ EXPECT_EQ(a.find(c.data(), absl::string_view::npos, 0),
+ absl::string_view::npos);
+ EXPECT_EQ(b.find(c.data(), absl::string_view::npos, 0),
+ absl::string_view::npos);
+ // empty string nonsense
+ EXPECT_EQ(d.find(b.data(), 4, 0), absl::string_view::npos);
+ EXPECT_EQ(e.find(b.data(), 7, 0), absl::string_view::npos);
+
+ EXPECT_EQ(a.find(b.data(), 1), absl::string_view::npos);
+ EXPECT_EQ(a.find(c.data(), 9), 23);
+ EXPECT_EQ(a.find(c.data(), absl::string_view::npos), absl::string_view::npos);
+ EXPECT_EQ(b.find(c.data(), absl::string_view::npos), absl::string_view::npos);
+ // empty string nonsense
+ EXPECT_EQ(d.find(b.data(), 4), absl::string_view::npos);
+ EXPECT_EQ(e.find(b.data(), 7), absl::string_view::npos);
+
EXPECT_EQ(a.rfind(b), 0);
EXPECT_EQ(a.rfind(b, 1), 0);
EXPECT_EQ(a.rfind(c), 23);
@@ -490,6 +508,14 @@ TEST(StringViewTest, STL2) {
EXPECT_EQ(e.rfind('o'), absl::string_view::npos);
EXPECT_EQ(d.rfind('o', 4), absl::string_view::npos);
EXPECT_EQ(e.rfind('o', 7), absl::string_view::npos);
+
+ EXPECT_EQ(a.rfind(b.data(), 1, 0), 1);
+ EXPECT_EQ(a.rfind(c.data(), 22, 0), 22);
+ EXPECT_EQ(a.rfind(c.data(), 1, 0), 1);
+ EXPECT_EQ(a.rfind(c.data(), 0, 0), 0);
+ EXPECT_EQ(b.rfind(c.data(), 0, 0), 0);
+ EXPECT_EQ(d.rfind(b.data(), 4, 0), 0);
+ EXPECT_EQ(e.rfind(b.data(), 7, 0), 0);
}
// Continued from STL2
@@ -678,6 +704,7 @@ TEST(StringViewTest, STL2Substr) {
EXPECT_EQ(a.substr(23, 3), c);
EXPECT_EQ(a.substr(23, 99), c);
EXPECT_EQ(a.substr(0), a);
+ EXPECT_EQ(a.substr(), a);
EXPECT_EQ(a.substr(3, 2), "de");
// empty string nonsense
EXPECT_EQ(d.substr(0, 99), e);
@@ -1087,7 +1114,24 @@ TEST(StringViewTest, ConstexprCompiles) {
EXPECT_EQ(sp_npos, -1);
}
-TEST(StringViewTest, ConstexprSubstr) {
+constexpr char ConstexprMethodsHelper() {
+#if defined(__cplusplus) && __cplusplus >= 201402L
+ absl::string_view str("123", 3);
+ str.remove_prefix(1);
+ str.remove_suffix(1);
+ absl::string_view bar;
+ str.swap(bar);
+ return bar.front();
+#else
+ return '2';
+#endif
+}
+
+TEST(StringViewTest, ConstexprMethods) {
+ // remove_prefix, remove_suffix, swap
+ static_assert(ConstexprMethodsHelper() == '2', "");
+
+ // substr
constexpr absl::string_view foobar("foobar", 6);
constexpr absl::string_view foo = foobar.substr(0, 3);
constexpr absl::string_view bar = foobar.substr(3);
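
Editor's note: ConstexprMethodsHelper shows the pattern for validating C++14-constexpr mutating members: perform the mutations inside a constexpr function and force compile-time evaluation with static_assert. A reduced standalone sketch of the same trick with a stand-in type:

#include <cstddef>

struct Span {
  const char* p;
  std::size_t n;
  constexpr void remove_prefix(std::size_t k) { p += k; n -= k; }
  constexpr char front() const { return p[0]; }
};

constexpr char Helper() {
  Span s{"123", 3};
  s.remove_prefix(1);
  return s.front();
}
static_assert(Helper() == '2', "mutating members usable in constant expressions");

int main() { return 0; }
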
diff --git a/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt b/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt
index e633d0bf53..605efe2d02 100644
--- a/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/synchronization/CMakeLists.txt
@@ -95,7 +95,7 @@ absl_cc_test(
DEPS
absl::synchronization
absl::time
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -108,7 +108,7 @@ absl_cc_test(
DEPS
absl::synchronization
absl::time
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -122,7 +122,7 @@ absl_cc_test(
absl::graphcycles_internal
absl::core_headers
absl::raw_logging_internal
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -154,7 +154,7 @@ absl_cc_test(
absl::memory
absl::raw_logging_internal
absl::time
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -167,7 +167,7 @@ absl_cc_test(
DEPS
absl::synchronization
absl::time
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -183,7 +183,7 @@ absl_cc_library(
absl::config
absl::strings
absl::time
- gmock
+ GTest::gmock
TESTONLY
)
@@ -199,7 +199,7 @@ absl_cc_test(
absl::synchronization
absl::strings
absl::time
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
diff --git a/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc b/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc
index 3cea7aed24..d2f82da3bb 100644
--- a/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc
+++ b/third_party/abseil-cpp/absl/synchronization/blocking_counter.cc
@@ -14,41 +14,51 @@
#include "absl/synchronization/blocking_counter.h"
+#include <atomic>
+
#include "absl/base/internal/raw_logging.h"
namespace absl {
ABSL_NAMESPACE_BEGIN
-// Return whether int *arg is zero.
-static bool IsZero(void *arg) {
- return 0 == *reinterpret_cast<int *>(arg);
+namespace {
+
+// Return whether the bool pointed to by arg is true.
+bool IsDone(void *arg) { return *reinterpret_cast<bool *>(arg); }
+
+} // namespace
+
+BlockingCounter::BlockingCounter(int initial_count)
+ : count_(initial_count),
+ num_waiting_(0),
+ done_{initial_count == 0 ? true : false} {
+ ABSL_RAW_CHECK(initial_count >= 0, "BlockingCounter initial_count negative");
}
bool BlockingCounter::DecrementCount() {
- MutexLock l(&lock_);
- count_--;
- if (count_ < 0) {
- ABSL_RAW_LOG(
- FATAL,
- "BlockingCounter::DecrementCount() called too many times. count=%d",
- count_);
+ int count = count_.fetch_sub(1, std::memory_order_acq_rel) - 1;
+ ABSL_RAW_CHECK(count >= 0,
+ "BlockingCounter::DecrementCount() called too many times");
+ if (count == 0) {
+ MutexLock l(&lock_);
+ done_ = true;
+ return true;
}
- return count_ == 0;
+ return false;
}
void BlockingCounter::Wait() {
MutexLock l(&this->lock_);
- ABSL_RAW_CHECK(count_ >= 0, "BlockingCounter underflow");
// only one thread may call Wait(). To support more than one thread,
// implement a counter num_to_exit, like in the Barrier class.
ABSL_RAW_CHECK(num_waiting_ == 0, "multiple threads called Wait()");
num_waiting_++;
- this->lock_.Await(Condition(IsZero, &this->count_));
+ this->lock_.Await(Condition(IsDone, &this->done_));
- // At this point, We know that all threads executing DecrementCount have
- // released the lock, and so will not touch this object again.
+ // At this point, we know that all threads executing DecrementCount
+ // will not touch this object again.
// Therefore, the thread calling this method is free to delete the object
// after we return from this method.
}
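
Editor's note: the rewritten BlockingCounter makes DecrementCount() a lock-free atomic decrement; only the thread that brings the count to zero takes the mutex to publish done_, which Wait() blocks on. A simplified sketch of the same shape built on standard primitives (the real class waits with absl::Mutex::Await rather than a condition variable):

#include <atomic>
#include <cassert>
#include <condition_variable>
#include <mutex>
#include <thread>
#include <vector>

class CounterSketch {
 public:
  explicit CounterSketch(int n) : count_(n), done_(n == 0) { assert(n >= 0); }

  // Returns true for the caller that performed the final decrement.
  bool DecrementCount() {
    int count = count_.fetch_sub(1, std::memory_order_acq_rel) - 1;
    assert(count >= 0 && "DecrementCount() called too many times");
    if (count == 0) {
      std::lock_guard<std::mutex> l(mu_);
      done_ = true;
      cv_.notify_all();
      return true;
    }
    return false;
  }

  void Wait() {
    std::unique_lock<std::mutex> l(mu_);
    cv_.wait(l, [this] { return done_; });
  }

 private:
  std::atomic<int> count_;
  std::mutex mu_;
  std::condition_variable cv_;
  bool done_;
};

int main() {
  CounterSketch counter(4);
  std::vector<std::thread> workers;
  for (int i = 0; i < 4; ++i) {
    workers.emplace_back([&counter] { counter.DecrementCount(); });
  }
  counter.Wait();  // unblocks once all four workers have decremented
  for (auto& t : workers) t.join();
  return 0;
}
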
diff --git a/third_party/abseil-cpp/absl/synchronization/blocking_counter.h b/third_party/abseil-cpp/absl/synchronization/blocking_counter.h
index 1f53f9f240..1908fdb1d9 100644
--- a/third_party/abseil-cpp/absl/synchronization/blocking_counter.h
+++ b/third_party/abseil-cpp/absl/synchronization/blocking_counter.h
@@ -20,6 +20,8 @@
#ifndef ABSL_SYNCHRONIZATION_BLOCKING_COUNTER_H_
#define ABSL_SYNCHRONIZATION_BLOCKING_COUNTER_H_
+#include <atomic>
+
#include "absl/base/thread_annotations.h"
#include "absl/synchronization/mutex.h"
@@ -60,8 +62,7 @@ ABSL_NAMESPACE_BEGIN
//
class BlockingCounter {
public:
- explicit BlockingCounter(int initial_count)
- : count_(initial_count), num_waiting_(0) {}
+ explicit BlockingCounter(int initial_count);
BlockingCounter(const BlockingCounter&) = delete;
BlockingCounter& operator=(const BlockingCounter&) = delete;
@@ -89,8 +90,9 @@ class BlockingCounter {
private:
Mutex lock_;
- int count_ ABSL_GUARDED_BY(lock_);
+ std::atomic<int> count_;
int num_waiting_ ABSL_GUARDED_BY(lock_);
+ bool done_ ABSL_GUARDED_BY(lock_);
};
ABSL_NAMESPACE_END
diff --git a/third_party/abseil-cpp/absl/synchronization/blocking_counter_benchmark.cc b/third_party/abseil-cpp/absl/synchronization/blocking_counter_benchmark.cc
new file mode 100644
index 0000000000..b504d1a57c
--- /dev/null
+++ b/third_party/abseil-cpp/absl/synchronization/blocking_counter_benchmark.cc
@@ -0,0 +1,83 @@
+// Copyright 2021 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <limits>
+
+#include "absl/synchronization/blocking_counter.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "benchmark/benchmark.h"
+
+namespace {
+
+void BM_BlockingCounter_SingleThread(benchmark::State& state) {
+ for (auto _ : state) {
+ int iterations = state.range(0);
+ absl::BlockingCounter counter{iterations};
+ for (int i = 0; i < iterations; ++i) {
+ counter.DecrementCount();
+ }
+ counter.Wait();
+ }
+}
+BENCHMARK(BM_BlockingCounter_SingleThread)
+ ->ArgName("iterations")
+ ->Arg(2)
+ ->Arg(4)
+ ->Arg(16)
+ ->Arg(64)
+ ->Arg(256);
+
+void BM_BlockingCounter_DecrementCount(benchmark::State& state) {
+ static absl::BlockingCounter* counter =
+ new absl::BlockingCounter{std::numeric_limits<int>::max()};
+ for (auto _ : state) {
+ counter->DecrementCount();
+ }
+}
+BENCHMARK(BM_BlockingCounter_DecrementCount)
+ ->Threads(2)
+ ->Threads(4)
+ ->Threads(6)
+ ->Threads(8)
+ ->Threads(10)
+ ->Threads(12)
+ ->Threads(16)
+ ->Threads(32)
+ ->Threads(64)
+ ->Threads(128);
+
+void BM_BlockingCounter_Wait(benchmark::State& state) {
+ int num_threads = state.range(0);
+ absl::synchronization_internal::ThreadPool pool(num_threads);
+ for (auto _ : state) {
+ absl::BlockingCounter counter{num_threads};
+ pool.Schedule([num_threads, &counter, &pool]() {
+ for (int i = 0; i < num_threads; ++i) {
+ pool.Schedule([&counter]() { counter.DecrementCount(); });
+ }
+ });
+ counter.Wait();
+ }
+}
+BENCHMARK(BM_BlockingCounter_Wait)
+ ->ArgName("threads")
+ ->Arg(2)
+ ->Arg(4)
+ ->Arg(8)
+ ->Arg(16)
+ ->Arg(32)
+ ->Arg(64)
+ ->Arg(128);
+
+} // namespace
diff --git a/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc b/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc
index 2926224af7..06885f5759 100644
--- a/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc
+++ b/third_party/abseil-cpp/absl/synchronization/blocking_counter_test.cc
@@ -63,6 +63,18 @@ TEST(BlockingCounterTest, BasicFunctionality) {
}
}
+TEST(BlockingCounterTest, WaitZeroInitialCount) {
+ BlockingCounter counter(0);
+ counter.Wait();
+}
+
+#if GTEST_HAS_DEATH_TEST
+TEST(BlockingCounterTest, WaitNegativeInitialCount) {
+ EXPECT_DEATH(BlockingCounter counter(-1),
+ "BlockingCounter initial_count negative");
+}
+#endif
+
} // namespace
ABSL_NAMESPACE_END
} // namespace absl
diff --git a/third_party/abseil-cpp/absl/synchronization/internal/per_thread_sem_test.cc b/third_party/abseil-cpp/absl/synchronization/internal/per_thread_sem_test.cc
index 8cf59e64e9..db1184e679 100644
--- a/third_party/abseil-cpp/absl/synchronization/internal/per_thread_sem_test.cc
+++ b/third_party/abseil-cpp/absl/synchronization/internal/per_thread_sem_test.cc
@@ -159,7 +159,7 @@ TEST_F(PerThreadSemTest, Timeouts) {
const absl::Duration elapsed = absl::Now() - start;
// Allow for a slight early return, to account for quality of implementation
// issues on various platforms.
- const absl::Duration slop = absl::Microseconds(200);
+ const absl::Duration slop = absl::Milliseconds(1);
EXPECT_LE(delay - slop, elapsed)
<< "Wait returned " << delay - elapsed
<< " early (with " << slop << " slop), start time was " << start;
diff --git a/third_party/abseil-cpp/absl/synchronization/internal/waiter.cc b/third_party/abseil-cpp/absl/synchronization/internal/waiter.cc
index 2123be60f5..28ef311e4a 100644
--- a/third_party/abseil-cpp/absl/synchronization/internal/waiter.cc
+++ b/third_party/abseil-cpp/absl/synchronization/internal/waiter.cc
@@ -79,6 +79,7 @@ bool Waiter::Wait(KernelTimeout t) {
// Note that, since the thread ticker is just reset, we don't need to check
// whether the thread is idle on the very first pass of the loop.
bool first_pass = true;
+
while (true) {
int32_t x = futex_.load(std::memory_order_relaxed);
while (x != 0) {
@@ -90,7 +91,6 @@ bool Waiter::Wait(KernelTimeout t) {
return true; // Consumed a wakeup, we are done.
}
-
if (!first_pass) MaybeBecomeIdle();
const int err = Futex::WaitUntil(&futex_, 0, t);
if (err != 0) {
diff --git a/third_party/abseil-cpp/absl/synchronization/mutex_test.cc b/third_party/abseil-cpp/absl/synchronization/mutex_test.cc
index 058f757b48..f8fbf9488c 100644
--- a/third_party/abseil-cpp/absl/synchronization/mutex_test.cc
+++ b/third_party/abseil-cpp/absl/synchronization/mutex_test.cc
@@ -852,7 +852,7 @@ TEST(Mutex, MutexReaderDecrementBug) ABSL_NO_THREAD_SAFETY_ANALYSIS {
// held and then destroyed (w/o unlocking).
#ifdef ABSL_HAVE_THREAD_SANITIZER
// TSAN reports errors when locked Mutexes are destroyed.
-TEST(Mutex, DISABLED_LockedMutexDestructionBug) NO_THREAD_SAFETY_ANALYSIS {
+TEST(Mutex, DISABLED_LockedMutexDestructionBug) ABSL_NO_THREAD_SAFETY_ANALYSIS {
#else
TEST(Mutex, LockedMutexDestructionBug) ABSL_NO_THREAD_SAFETY_ANALYSIS {
#endif
@@ -1153,7 +1153,7 @@ TEST(Mutex, DeadlockDetectorStressTest) ABSL_NO_THREAD_SAFETY_ANALYSIS {
#ifdef ABSL_HAVE_THREAD_SANITIZER
// TSAN reports errors when locked Mutexes are destroyed.
-TEST(Mutex, DISABLED_DeadlockIdBug) NO_THREAD_SAFETY_ANALYSIS {
+TEST(Mutex, DISABLED_DeadlockIdBug) ABSL_NO_THREAD_SAFETY_ANALYSIS {
#else
TEST(Mutex, DeadlockIdBug) ABSL_NO_THREAD_SAFETY_ANALYSIS {
#endif
diff --git a/third_party/abseil-cpp/absl/time/CMakeLists.txt b/third_party/abseil-cpp/absl/time/CMakeLists.txt
index 00bdd499c1..f6ff8bd127 100644
--- a/third_party/abseil-cpp/absl/time/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/time/CMakeLists.txt
@@ -102,7 +102,7 @@ absl_cc_library(
absl::config
absl::raw_logging_internal
absl::time_zone
- gmock
+ GTest::gmock
TESTONLY
)
@@ -124,5 +124,5 @@ absl_cc_test(
absl::config
absl::core_headers
absl::time_zone
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/time/civil_time.cc b/third_party/abseil-cpp/absl/time/civil_time.cc
index bdfe9ce0ef..6a231edb2d 100644
--- a/third_party/abseil-cpp/absl/time/civil_time.cc
+++ b/third_party/abseil-cpp/absl/time/civil_time.cc
@@ -38,9 +38,7 @@ std::string FormatYearAnd(string_view fmt, CivilSecond cs) {
const CivilSecond ncs(NormalizeYear(cs.year()), cs.month(), cs.day(),
cs.hour(), cs.minute(), cs.second());
const TimeZone utc = UTCTimeZone();
- // TODO(absl-team): Avoid conversion of fmt string.
- return StrCat(cs.year(),
- FormatTime(std::string(fmt), FromCivil(ncs, utc), utc));
+ return StrCat(cs.year(), FormatTime(fmt, FromCivil(ncs, utc), utc));
}
template <typename CivilT>
diff --git a/third_party/abseil-cpp/absl/time/duration_test.cc b/third_party/abseil-cpp/absl/time/duration_test.cc
index fb28fa987f..b7209e1c0a 100644
--- a/third_party/abseil-cpp/absl/time/duration_test.cc
+++ b/third_party/abseil-cpp/absl/time/duration_test.cc
@@ -17,6 +17,7 @@
#endif
#include <chrono> // NOLINT(build/c++11)
+#include <cfloat>
#include <cmath>
#include <cstdint>
#include <ctime>
@@ -1320,7 +1321,7 @@ TEST(Duration, SmallConversions) {
EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(0));
// TODO(bww): Is the next one OK?
- EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(0.124999999e-9));
+ EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(std::nextafter(0.125e-9, 0)));
EXPECT_EQ(absl::Nanoseconds(1) / 4, absl::Seconds(0.125e-9));
EXPECT_EQ(absl::Nanoseconds(1) / 4, absl::Seconds(0.250e-9));
EXPECT_EQ(absl::Nanoseconds(1) / 2, absl::Seconds(0.375e-9));
@@ -1330,7 +1331,7 @@ TEST(Duration, SmallConversions) {
EXPECT_EQ(absl::Nanoseconds(1), absl::Seconds(0.875e-9));
EXPECT_EQ(absl::Nanoseconds(1), absl::Seconds(1.000e-9));
- EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(-0.124999999e-9));
+ EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(std::nextafter(-0.125e-9, 0)));
EXPECT_EQ(-absl::Nanoseconds(1) / 4, absl::Seconds(-0.125e-9));
EXPECT_EQ(-absl::Nanoseconds(1) / 4, absl::Seconds(-0.250e-9));
EXPECT_EQ(-absl::Nanoseconds(1) / 2, absl::Seconds(-0.375e-9));
@@ -1390,6 +1391,14 @@ void VerifyApproxSameAsMul(double time_as_seconds, int* const misses) {
// Seconds(point) returns a duration near point * Seconds(1.0). (They may
// not be exactly equal due to fused multiply/add contraction.)
TEST(Duration, ToDoubleSecondsCheckEdgeCases) {
+#if (defined(__i386__) || defined(_M_IX86)) && FLT_EVAL_METHOD != 0
+ // We're using an x87-compatible FPU, and intermediate operations can be
+ // performed with 80-bit floats. This means the edge cases are different than
+ // what we expect here, so just skip this test.
+ GTEST_SKIP()
+ << "Skipping the test because we detected x87 floating-point semantics";
+#endif
+
constexpr uint32_t kTicksPerSecond = absl::time_internal::kTicksPerSecond;
constexpr auto duration_tick = absl::time_internal::MakeDuration(0, 1u);
int misses = 0;
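
Editor's note: the new guard relies on FLT_EVAL_METHOD; with the x87 FPU, intermediates may be kept at 80-bit precision (FLT_EVAL_METHOD == 2), which shifts the double rounding edge cases this test enumerates. A one-liner to inspect the mode a given build uses:

#include <cfloat>
#include <cstdio>

int main() {
  // 0: evaluate in the declared type; 1: in double; 2: in long double (x87).
  std::printf("FLT_EVAL_METHOD = %d\n", static_cast<int>(FLT_EVAL_METHOD));
  return 0;
}
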
diff --git a/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc b/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc
index 303c0244a8..f2b3294ef7 100644
--- a/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc
+++ b/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_fixed.cc
@@ -53,7 +53,7 @@ int Parse02d(const char* p) {
} // namespace
bool FixedOffsetFromName(const std::string& name, seconds* offset) {
- if (name.compare(0, std::string::npos, "UTC", 3) == 0) {
+ if (name == "UTC" || name == "UTC0") {
*offset = seconds::zero();
return true;
}
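
For context, the old predicate compared the whole of name against the three characters "UTC", so it only matched the exact string "UTC"; the rewrite makes that explicit and additionally accepts the POSIX-style spelling "UTC0". A minimal standalone sketch of the difference (hypothetical example code, not part of the patch):

#include <cassert>
#include <string>

int main() {
  const std::string utc = "UTC", utc0 = "UTC0", other = "UTC+5";
  // Old check: compare the entire string against the 3-character "UTC".
  assert(utc.compare(0, std::string::npos, "UTC", 3) == 0);
  assert(utc0.compare(0, std::string::npos, "UTC", 3) != 0);  // "UTC0" was rejected.
  // New check: plain equality, now also accepting "UTC0".
  assert(utc == "UTC" || utc == "UTC0");
  assert(utc0 == "UTC" || utc0 == "UTC0");
  assert(!(other == "UTC" || other == "UTC0"));
  return 0;
}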
diff --git a/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_format_test.cc b/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_format_test.cc
index a11f93e2a5..294f2e2284 100644
--- a/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_format_test.cc
+++ b/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_format_test.cc
@@ -1135,7 +1135,7 @@ TEST(Parse, ExtendedSeconds) {
// All %E<prec>S cases are treated the same as %E*S on input.
auto precisions = {"*", "0", "1", "2", "3", "4", "5", "6", "7",
"8", "9", "10", "11", "12", "13", "14", "15"};
- for (const std::string& prec : precisions) {
+ for (const std::string prec : precisions) {
const std::string fmt = "%E" + prec + "S";
SCOPED_TRACE(fmt);
time_point<chrono::nanoseconds> tp = unix_epoch;
@@ -1217,7 +1217,7 @@ TEST(Parse, ExtendedSubeconds) {
// All %E<prec>f cases are treated the same as %E*f on input.
auto precisions = {"*", "0", "1", "2", "3", "4", "5", "6", "7",
"8", "9", "10", "11", "12", "13", "14", "15"};
- for (const std::string& prec : precisions) {
+ for (const std::string prec : precisions) {
const std::string fmt = "%E" + prec + "f";
SCOPED_TRACE(fmt);
time_point<chrono::nanoseconds> tp = unix_epoch - chrono::seconds(1);
diff --git a/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup_test.cc b/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup_test.cc
index 9a1a8d6e40..6948c3ea2c 100644
--- a/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup_test.cc
+++ b/third_party/abseil-cpp/absl/time/internal/cctz/src/time_zone_lookup_test.cc
@@ -717,6 +717,18 @@ TEST(TimeZones, LoadZonesConcurrently) {
}
#endif
+TEST(TimeZone, UTC) {
+ const time_zone utc = utc_time_zone();
+
+ time_zone loaded_utc;
+ EXPECT_TRUE(load_time_zone("UTC", &loaded_utc));
+ EXPECT_EQ(loaded_utc, utc);
+
+ time_zone loaded_utc0;
+ EXPECT_TRUE(load_time_zone("UTC0", &loaded_utc0));
+ EXPECT_EQ(loaded_utc0, utc);
+}
+
TEST(TimeZone, NamedTimeZones) {
const time_zone utc = utc_time_zone();
EXPECT_EQ("UTC", utc.name());
diff --git a/third_party/abseil-cpp/absl/time/time.h b/third_party/abseil-cpp/absl/time/time.h
index d9ad1aedd8..48982df45a 100644
--- a/third_party/abseil-cpp/absl/time/time.h
+++ b/third_party/abseil-cpp/absl/time/time.h
@@ -1180,11 +1180,15 @@ inline Time FromDateTime(int64_t year, int mon, int day, int hour,
//
// Converts the `tm_year`, `tm_mon`, `tm_mday`, `tm_hour`, `tm_min`, and
// `tm_sec` fields to an `absl::Time` using the given time zone. See ctime(3)
-// for a description of the expected values of the tm fields. If the indicated
-// time instant is not unique (see `absl::TimeZone::At(absl::CivilSecond)`
-// above), the `tm_isdst` field is consulted to select the desired instant
-// (`tm_isdst` > 0 means DST, `tm_isdst` == 0 means no DST, `tm_isdst` < 0
-// means use the post-transition offset).
+// for a description of the expected values of the tm fields. If the civil time
+// is unique (see `absl::TimeZone::At(absl::CivilSecond)` above), the matching
+// time instant is returned. Otherwise, the `tm_isdst` field is consulted to
+// choose between the possible results. For a repeated civil time, `tm_isdst !=
+// 0` returns the matching DST instant, while `tm_isdst == 0` returns the
+// matching non-DST instant. For a skipped civil time there is no matching
+// instant, so `tm_isdst != 0` returns the DST instant, and `tm_isdst == 0`
+// returns the non-DST instant, that would have matched if the transition never
+// happened.
Time FromTM(const struct tm& tm, TimeZone tz);
// ToTM()
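
To make the tm_isdst disambiguation above concrete, a small hedged example (it assumes the America/New_York tzdata is available; during the 2017 fall-back transition, 01:30 local time occurs twice, and tm_isdst selects which instant FromTM() returns):

#include <ctime>
#include "absl/time/time.h"

int main() {
  absl::TimeZone nyc;
  if (!absl::LoadTimeZone("America/New_York", &nyc)) return 1;

  struct tm tm = {};
  tm.tm_year = 2017 - 1900;
  tm.tm_mon = 11 - 1;  // November
  tm.tm_mday = 5;
  tm.tm_hour = 1;
  tm.tm_min = 30;

  tm.tm_isdst = 1;  // the earlier, DST instant (EDT, UTC-4)
  const absl::Time dst = absl::FromTM(tm, nyc);
  tm.tm_isdst = 0;  // the later, non-DST instant (EST, UTC-5)
  const absl::Time std_time = absl::FromTM(tm, nyc);

  // The two instants are exactly one hour apart.
  return dst + absl::Hours(1) == std_time ? 0 : 1;
}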
@@ -1348,7 +1352,7 @@ constexpr Duration MakeDuration(int64_t hi, int64_t lo) {
inline Duration MakePosDoubleDuration(double n) {
const int64_t int_secs = static_cast<int64_t>(n);
const uint32_t ticks = static_cast<uint32_t>(
- (n - static_cast<double>(int_secs)) * kTicksPerSecond + 0.5);
+ std::round((n - static_cast<double>(int_secs)) * kTicksPerSecond));
return ticks < kTicksPerSecond
? MakeDuration(int_secs, ticks)
: MakeDuration(int_secs + 1, ticks - kTicksPerSecond);
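
The old expression truncated after adding 0.5; std::round rounds to nearest directly and avoids the case where the addition itself rounds the sum up across an integer boundary. A minimal illustration of one input where the two idioms disagree (standalone sketch, not part of the patch):

#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
  // Largest double strictly below 0.5; adding 0.5 rounds the sum up to exactly 1.0.
  const double x = std::nextafter(0.5, 0.0);
  const uint32_t truncated = static_cast<uint32_t>(x + 0.5);      // old idiom: 1
  const uint32_t rounded = static_cast<uint32_t>(std::round(x));  // new idiom: 0
  std::printf("%u vs %u\n", truncated, rounded);
  return 0;
}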
diff --git a/third_party/abseil-cpp/absl/types/CMakeLists.txt b/third_party/abseil-cpp/absl/types/CMakeLists.txt
index c356b2117d..d7e8614e0d 100644
--- a/third_party/abseil-cpp/absl/types/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/types/CMakeLists.txt
@@ -69,7 +69,7 @@ absl_cc_test(
absl::exception_testing
absl::raw_logging_internal
absl::test_instance_tracker
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -85,7 +85,7 @@ absl_cc_test(
absl::exception_testing
absl::raw_logging_internal
absl::test_instance_tracker
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -99,7 +99,7 @@ absl_cc_test(
absl::any
absl::config
absl::exception_safety_testing
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -136,7 +136,7 @@ absl_cc_test(
absl::inlined_vector
absl::hash_testing
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -156,7 +156,7 @@ absl_cc_test(
absl::inlined_vector
absl::hash_testing
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -222,7 +222,7 @@ absl_cc_test(
absl::raw_logging_internal
absl::strings
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -236,7 +236,7 @@ absl_cc_test(
absl::optional
absl::config
absl::exception_safety_testing
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -258,7 +258,7 @@ absl_cc_library(
absl::type_traits
absl::strings
absl::utility
- gmock_main
+ GTest::gmock_main
TESTONLY
)
@@ -275,7 +275,7 @@ absl_cc_test(
DEPS
absl::conformance_testing
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -288,7 +288,7 @@ absl_cc_test(
DEPS
absl::conformance_testing
absl::type_traits
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -324,7 +324,7 @@ absl_cc_test(
absl::memory
absl::type_traits
absl::strings
- gmock_main
+ GTest::gmock_main
)
absl_cc_library(
@@ -350,7 +350,7 @@ absl_cc_test(
DEPS
absl::base
absl::compare
- gmock_main
+ GTest::gmock_main
)
absl_cc_test(
@@ -365,5 +365,5 @@ absl_cc_test(
absl::config
absl::exception_safety_testing
absl::memory
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/abseil-cpp/absl/types/span.h b/third_party/abseil-cpp/absl/types/span.h
index 95fe79262d..41db3420db 100644
--- a/third_party/abseil-cpp/absl/types/span.h
+++ b/third_party/abseil-cpp/absl/types/span.h
@@ -243,8 +243,8 @@ class Span {
//
template <typename LazyT = T,
typename = EnableIfConstView<LazyT>>
- Span(
- std::initializer_list<value_type> v) noexcept // NOLINT(runtime/explicit)
+ Span(std::initializer_list<value_type> v
+ ABSL_ATTRIBUTE_LIFETIME_BOUND) noexcept // NOLINT(runtime/explicit)
: Span(v.begin(), v.size()) {}
// Accessors
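
With the initializer_list parameter annotated ABSL_ATTRIBUTE_LIFETIME_BOUND, Clang's lifetime analysis can flag Spans that outlive the temporary backing array of a braced list. A hypothetical sketch of the pitfall this targets (not code from the patch):

#include "absl/types/span.h"

absl::Span<const int> Dangling() {
  // The backing array of {1, 2, 3} is a temporary that dies at the end of
  // this statement, so s is already dangling; the annotation lets the
  // compiler diagnose this at the point of construction.
  absl::Span<const int> s = {1, 2, 3};
  return s;
}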
diff --git a/third_party/abseil-cpp/absl/utility/CMakeLists.txt b/third_party/abseil-cpp/absl/utility/CMakeLists.txt
index e1edd19aa0..865b758f23 100644
--- a/third_party/abseil-cpp/absl/utility/CMakeLists.txt
+++ b/third_party/abseil-cpp/absl/utility/CMakeLists.txt
@@ -40,5 +40,5 @@ absl_cc_test(
absl::core_headers
absl::memory
absl::strings
- gmock_main
+ GTest::gmock_main
)
diff --git a/third_party/crc32c/CMakeLists.txt b/third_party/crc32c/CMakeLists.txt
new file mode 100644
index 0000000000..bc720892d7
--- /dev/null
+++ b/third_party/crc32c/CMakeLists.txt
@@ -0,0 +1,66 @@
+android_add_library(
+ TARGET
+ crc32c
+ LICENSE
+ "BSD-3-Clause"
+ SRC
+ src/src/crc32c.cc
+ src/src/crc32c_portable.cc)
+target_include_directories(crc32c PUBLIC config src/include)
+target_compile_definitions(crc32c PRIVATE BYTE_ORDER_BIG_ENDIAN=0
+ CRC32C_TESTS_BUILT_WITH_GLOG=0)
+if(LINUX_AARCH64 OR DARWIN_AARCH64)
+ target_compile_definitions(crc32c PRIVATE HAVE_MM_PREFETCH=0 HAVE_SSE42=0)
+ target_sources(crc32c PRIVATE src/src/crc32c_arm64.cc)
+ if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
+ target_compile_definitions(crc32c PRIVATE HAVE_ARM64_CRC32C=0)
+ target_compile_options(
+ crc32c
+ PRIVATE "-march=armv8-a"
+ # Some builds set -march to a different value from the above. The
+ # specific feature flags below enable the instructions we need in
+ # these cases. See https://crbug.com/934016 for example.
+ "-Xclang -target-feature"
+ "-Xclang +crc"
+ "-Xclang -target-feature"
+ "-Xclang +crypto")
+ else()
+ target_compile_options(crc32c PRIVATE "-march=armv8-a+crc+crypto")
+ target_compile_definitions(crc32c PRIVATE HAVE_ARM64_CRC32C=1)
+ endif()
+else()
+ target_compile_definitions(crc32c PRIVATE HAVE_MM_PREFETCH=1 HAVE_SSE42=1)
+ target_compile_definitions(crc32c PRIVATE HAVE_ARM64_CRC32C=0)
+ target_sources(crc32c PRIVATE src/src/crc32c_sse42.cc)
+ if(WINDOWS_MSVC_X86_64)
+ target_compile_options(crc32c PRIVATE -mavx)
+ else()
+ target_compile_options(crc32c PRIVATE -msse4.2)
+ endif()
+endif()
+
+target_compile_definitions(crc32c PRIVATE HAVE_BUILTIN_PREFETCH=1)
+
+if(LINUX_AARCH64 OR LINUX_X86_64)
+ target_compile_definitions(crc32c PRIVATE HAVE_STRONG_GETAUXVAL=1
+ HAVE_WEAK_GETAUXVAL=1)
+
+else()
+ target_compile_definitions(crc32c PRIVATE HAVE_STRONG_GETAUXVAL=0
+ HAVE_WEAK_GETAUXVAL=0)
+endif()
+
+android_add_test(
+ TARGET
+ crc32c_tests
+ SRC
+ "src/src/crc32c_arm64_unittest.cc"
+ "src/src/crc32c_extend_unittests.h"
+ "src/src/crc32c_portable_unittest.cc"
+ "src/src/crc32c_prefetch_unittest.cc"
+ "src/src/crc32c_read_le_unittest.cc"
+ "src/src/crc32c_round_up_unittest.cc"
+ "src/src/crc32c_sse42_unittest.cc"
+ "src/src/crc32c_unittest.cc")
+
+target_link_libraries(crc32c_tests PRIVATE crc32c gmock_main)
diff --git a/third_party/crc32c/config/crc32c/crc32c_config.h b/third_party/crc32c/config/crc32c/crc32c_config.h
new file mode 100644
index 0000000000..3589fa678c
--- /dev/null
+++ b/third_party/crc32c/config/crc32c/crc32c_config.h
@@ -0,0 +1,6 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This is a stub. The preprocessor macros that are usually defined here are
+// supplied by BUILD.gn instead.
diff --git a/third_party/crc32c/src/include/crc32c/crc32c.h b/third_party/crc32c/src/include/crc32c/crc32c.h
new file mode 100644
index 0000000000..e8a78170a9
--- /dev/null
+++ b/third_party/crc32c/src/include/crc32c/crc32c.h
@@ -0,0 +1,89 @@
+/* Copyright 2017 The CRC32C Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. See the AUTHORS file for names of contributors. */
+
+#ifndef CRC32C_CRC32C_H_
+#define CRC32C_CRC32C_H_
+
+/* The API exported by the CRC32C project. */
+
+#if defined(__cplusplus)
+
+#include <cstddef>
+#include <cstdint>
+#include <string>
+
+#else /* !defined(__cplusplus) */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#endif /* !defined(__cplusplus) */
+
+
+/* The C API. */
+
+#if defined(__cplusplus)
+extern "C" {
+#endif /* defined(__cplusplus) */
+
+/* Extends "crc" with the CRC32C of "count" bytes in the buffer pointed by
+ "data" */
+uint32_t crc32c_extend(uint32_t crc, const uint8_t* data, size_t count);
+
+/* Computes the CRC32C of "count" bytes in the buffer pointed by "data". */
+uint32_t crc32c_value(const uint8_t* data, size_t count);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* defined(__cplusplus) */
+
+
+/* The C++ API. */
+
+#if defined(__cplusplus)
+
+namespace crc32c {
+
+// Extends "crc" with the CRC32C of "count" bytes in the buffer pointed by
+// "data".
+uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count);
+
+// Computes the CRC32C of "count" bytes in the buffer pointed by "data".
+inline uint32_t Crc32c(const uint8_t* data, size_t count) {
+ return Extend(0, data, count);
+}
+
+// Computes the CRC32C of "count" bytes in the buffer pointed by "data".
+inline uint32_t Crc32c(const char* data, size_t count) {
+ return Extend(0, reinterpret_cast<const uint8_t*>(data), count);
+}
+
+// Computes the CRC32C of the string's content.
+inline uint32_t Crc32c(const std::string& string) {
+ return Crc32c(reinterpret_cast<const uint8_t*>(string.data()),
+ string.size());
+}
+
+} // namespace crc32c
+
+#if __cplusplus > 201402L
+#if __has_include(<string_view>)
+#include <string_view>
+
+namespace crc32c {
+
+// Computes the CRC32C of the bytes in the string_view.
+inline uint32_t Crc32c(const std::string_view& string_view) {
+ return Crc32c(reinterpret_cast<const uint8_t*>(string_view.data()),
+ string_view.size());
+}
+
+} // namespace crc32c
+
+#endif // __has_include(<string_view>)
+#endif // __cplusplus > 201402L
+
+#endif /* defined(__cplusplus) */
+
+#endif // CRC32C_CRC32C_H_
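
A minimal usage sketch for the API declared above (assumes a target that links the crc32c library; the expected values are the rfc3720 vectors used by the unit tests further down):

#include <cstdint>
#include <cstdio>
#include <string>

#include "crc32c/crc32c.h"

int main() {
  // 32 zero bytes hash to 0x8a9136aa.
  const std::string zeros(32, '\0');
  const uint32_t crc = crc32c::Crc32c(zeros);
  std::printf("crc32c(32 zero bytes) = 0x%08x\n", static_cast<unsigned>(crc));

  // Extend() allows incremental computation over chunks.
  const uint8_t* hello = reinterpret_cast<const uint8_t*>("hello ");
  const uint8_t* world = reinterpret_cast<const uint8_t*>("world");
  const uint8_t* whole = reinterpret_cast<const uint8_t*>("hello world");
  const uint32_t chunked = crc32c::Extend(crc32c::Crc32c(hello, 6), world, 5);
  std::printf("chunked == whole: %d\n", chunked == crc32c::Crc32c(whole, 11));
  return 0;
}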
diff --git a/third_party/crc32c/src/src/crc32c.cc b/third_party/crc32c/src/src/crc32c.cc
new file mode 100644
index 0000000000..4d3018af47
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c.cc
@@ -0,0 +1,39 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "crc32c/crc32c.h"
+
+#include <cstddef>
+#include <cstdint>
+
+#include "./crc32c_arm64.h"
+#include "./crc32c_arm64_linux_check.h"
+#include "./crc32c_internal.h"
+#include "./crc32c_sse42.h"
+#include "./crc32c_sse42_check.h"
+
+namespace crc32c {
+
+uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) {
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+ static bool can_use_sse42 = CanUseSse42();
+ if (can_use_sse42) return ExtendSse42(crc, data, count);
+#elif HAVE_ARM64_CRC32C
+ static bool can_use_arm_linux = CanUseArm64Linux();
+ if (can_use_arm_linux) return ExtendArm64(crc, data, count);
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+ return ExtendPortable(crc, data, count);
+}
+
+extern "C" uint32_t crc32c_extend(uint32_t crc, const uint8_t* data,
+ size_t count) {
+ return crc32c::Extend(crc, data, count);
+}
+
+extern "C" uint32_t crc32c_value(const uint8_t* data, size_t count) {
+ return crc32c::Crc32c(data, count);
+}
+
+} // namespace crc32c
diff --git a/third_party/crc32c/src/src/crc32c_arm64.cc b/third_party/crc32c/src/src/crc32c_arm64.cc
new file mode 100644
index 0000000000..9a988c1eed
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_arm64.cc
@@ -0,0 +1,124 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "./crc32c_arm64.h"
+
+// In a separate source file to allow this accelerated CRC32C function to be
+// compiled with the appropriate compiler flags to enable ARM NEON CRC32C
+// instructions.
+
+// This implementation is based on https://github.com/google/leveldb/pull/490.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "./crc32c_internal.h"
+#include "crc32c/crc32c_config.h"
+
+#if HAVE_ARM64_CRC32C
+
+#include <arm_acle.h>
+#include <arm_neon.h>
+
+#define KBYTES 1032
+#define SEGMENTBYTES 256
+
+// Compute 8 bytes for each segment in parallel.
+#define CRC32C32BYTES(P, IND) \
+ do { \
+ crc1 = __crc32cd( \
+ crc1, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 1 + (IND))); \
+ crc2 = __crc32cd( \
+ crc2, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 2 + (IND))); \
+ crc3 = __crc32cd( \
+ crc3, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 3 + (IND))); \
+ crc0 = __crc32cd( \
+ crc0, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 0 + (IND))); \
+ } while (0);
+
+// Compute 8*8 bytes for each segment in parallel.
+#define CRC32C256BYTES(P, IND) \
+ do { \
+ CRC32C32BYTES((P), (IND)*8 + 0) \
+ CRC32C32BYTES((P), (IND)*8 + 1) \
+ CRC32C32BYTES((P), (IND)*8 + 2) \
+ CRC32C32BYTES((P), (IND)*8 + 3) \
+ CRC32C32BYTES((P), (IND)*8 + 4) \
+ CRC32C32BYTES((P), (IND)*8 + 5) \
+ CRC32C32BYTES((P), (IND)*8 + 6) \
+ CRC32C32BYTES((P), (IND)*8 + 7) \
+ } while (0);
+
+// Compute 4*8*8 bytes for each segment in parallel.
+#define CRC32C1024BYTES(P) \
+ do { \
+ CRC32C256BYTES((P), 0) \
+ CRC32C256BYTES((P), 1) \
+ CRC32C256BYTES((P), 2) \
+ CRC32C256BYTES((P), 3) \
+ (P) += 4 * SEGMENTBYTES; \
+ } while (0)
+
+namespace crc32c {
+
+uint32_t ExtendArm64(uint32_t crc, const uint8_t *buf, size_t size) {
+ int64_t length = size;
+ uint32_t crc0, crc1, crc2, crc3;
+ uint64_t t0, t1, t2;
+
+ // k0=CRC(x^(3*SEGMENTBYTES*8)), k1=CRC(x^(2*SEGMENTBYTES*8)),
+ // k2=CRC(x^(SEGMENTBYTES*8))
+ const poly64_t k0 = 0x8d96551c, k1 = 0xbd6f81f8, k2 = 0xdcb17aa4;
+
+ crc = crc ^ kCRC32Xor;
+ const uint8_t *p = reinterpret_cast<const uint8_t *>(buf);
+
+ while (length >= KBYTES) {
+ crc0 = crc;
+ crc1 = 0;
+ crc2 = 0;
+ crc3 = 0;
+
+ // Process 1024 bytes in parallel.
+ CRC32C1024BYTES(p);
+
+ // Merge the 4 partial CRC32C values.
+ t2 = (uint64_t)vmull_p64(crc2, k2);
+ t1 = (uint64_t)vmull_p64(crc1, k1);
+ t0 = (uint64_t)vmull_p64(crc0, k0);
+ crc = __crc32cd(crc3, *(uint64_t *)p);
+ p += sizeof(uint64_t);
+ crc ^= __crc32cd(0, t2);
+ crc ^= __crc32cd(0, t1);
+ crc ^= __crc32cd(0, t0);
+
+ length -= KBYTES;
+ }
+
+ while (length >= 8) {
+ crc = __crc32cd(crc, *(uint64_t *)p);
+ p += 8;
+ length -= 8;
+ }
+
+ if (length & 4) {
+ crc = __crc32cw(crc, *(uint32_t *)p);
+ p += 4;
+ }
+
+ if (length & 2) {
+ crc = __crc32ch(crc, *(uint16_t *)p);
+ p += 2;
+ }
+
+ if (length & 1) {
+ crc = __crc32cb(crc, *p);
+ }
+
+ return crc ^ kCRC32Xor;
+}
+
+} // namespace crc32c
+
+#endif // HAVE_ARM64_CRC32C
diff --git a/third_party/crc32c/src/src/crc32c_arm64.h b/third_party/crc32c/src/src/crc32c_arm64.h
new file mode 100644
index 0000000000..bbdece46c7
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_arm64.h
@@ -0,0 +1,25 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+// ARM64-specific hardware-accelerated implementation of CRC32C extension.
+
+#ifndef CRC32C_CRC32C_ARM_LINUX_H_
+#define CRC32C_CRC32C_ARM_LINUX_H_
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+#if HAVE_ARM64_CRC32C
+
+namespace crc32c {
+
+uint32_t ExtendArm64(uint32_t crc, const uint8_t* data, size_t count);
+
+} // namespace crc32c
+
+#endif // HAVE_ARM64_CRC32C
+
+#endif // CRC32C_CRC32C_ARM_LINUX_H_
diff --git a/third_party/crc32c/src/src/crc32c_arm64_linux_check.h b/third_party/crc32c/src/src/crc32c_arm64_linux_check.h
new file mode 100644
index 0000000000..6817979aac
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_arm64_linux_check.h
@@ -0,0 +1,48 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+// ARM Linux-specific code checking for the availability of CRC32C instructions.
+
+#ifndef CRC32C_CRC32C_ARM_LINUX_CHECK_H_
+#define CRC32C_CRC32C_ARM_LINUX_CHECK_H_
+
+// Uses getauxval() to read the kernel's HWCAP bits for CRC32C and PMULL support.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+#if HAVE_ARM64_CRC32C
+
+#if HAVE_STRONG_GETAUXVAL
+#include <sys/auxv.h>
+#elif HAVE_WEAK_GETAUXVAL
+// getauxval() is not available on Android until API level 20. Link it as a weak
+// symbol.
+extern "C" unsigned long getauxval(unsigned long type) __attribute__((weak));
+
+#define AT_HWCAP 16
+#endif // HAVE_STRONG_GETAUXVAL || HAVE_WEAK_GETAUXVAL
+
+namespace crc32c {
+
+inline bool CanUseArm64Linux() {
+#if HAVE_STRONG_GETAUXVAL || HAVE_WEAK_GETAUXVAL
+ // From 'arch/arm64/include/uapi/asm/hwcap.h' in Linux kernel source code.
+ constexpr unsigned long kHWCAP_PMULL = 1 << 4;
+ constexpr unsigned long kHWCAP_CRC32 = 1 << 7;
+ unsigned long hwcap = (&getauxval != nullptr) ? getauxval(AT_HWCAP) : 0;
+ return (hwcap & (kHWCAP_PMULL | kHWCAP_CRC32)) ==
+ (kHWCAP_PMULL | kHWCAP_CRC32);
+#else
+ return false;
+#endif // HAVE_STRONG_GETAUXVAL || HAVE_WEAK_GETAUXVAL
+}
+
+} // namespace crc32c
+
+#endif // HAVE_ARM64_CRC32C
+
+#endif // CRC32C_CRC32C_ARM_LINUX_CHECK_H_
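
A standalone version of the HWCAP probe above, for experimentation on an AArch64 Linux machine (assumes <sys/auxv.h> is available; the bit values mirror kHWCAP_PMULL and kHWCAP_CRC32 above):

#include <cstdio>
#include <sys/auxv.h>

int main() {
  const unsigned long hwcap = getauxval(AT_HWCAP);
  const unsigned long kPMULL = 1UL << 4;
  const unsigned long kCRC32 = 1UL << 7;
  std::printf("pmull=%d crc32=%d\n",
              (hwcap & kPMULL) != 0 ? 1 : 0, (hwcap & kCRC32) != 0 ? 1 : 0);
  return 0;
}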
diff --git a/third_party/crc32c/src/src/crc32c_arm64_unittest.cc b/third_party/crc32c/src/src/crc32c_arm64_unittest.cc
new file mode 100644
index 0000000000..6f917d9c0c
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_arm64_unittest.cc
@@ -0,0 +1,24 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "gtest/gtest.h"
+
+#include "./crc32c_arm64.h"
+#include "./crc32c_extend_unittests.h"
+
+namespace crc32c {
+
+#if HAVE_ARM64_CRC32C
+
+struct Arm64TestTraits {
+ static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) {
+ return ExtendArm64(crc, data, count);
+ }
+};
+
+INSTANTIATE_TYPED_TEST_SUITE_P(Arm64, ExtendTest, Arm64TestTraits);
+
+#endif // HAVE_ARM64_CRC32C
+
+} // namespace crc32c
diff --git a/third_party/crc32c/src/src/crc32c_benchmark.cc b/third_party/crc32c/src/src/crc32c_benchmark.cc
new file mode 100644
index 0000000000..d31af92256
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_benchmark.cc
@@ -0,0 +1,104 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+#include "benchmark/benchmark.h"
+
+#if CRC32C_TESTS_BUILT_WITH_GLOG
+#include "glog/logging.h"
+#endif // CRC32C_TESTS_BUILT_WITH_GLOG
+
+#include "./crc32c_arm64.h"
+#include "./crc32c_arm64_linux_check.h"
+#include "./crc32c_internal.h"
+#include "./crc32c_sse42.h"
+#include "./crc32c_sse42_check.h"
+#include "crc32c/crc32c.h"
+
+class CRC32CBenchmark : public benchmark::Fixture {
+ public:
+ void SetUp(const benchmark::State& state) override {
+ block_size_ = static_cast<size_t>(state.range(0));
+ block_data_ = std::string(block_size_, 'x');
+ block_buffer_ = reinterpret_cast<const uint8_t*>(block_data_.data());
+ }
+
+ protected:
+ std::string block_data_;
+ const uint8_t* block_buffer_;
+ size_t block_size_;
+};
+
+BENCHMARK_DEFINE_F(CRC32CBenchmark, Public)(benchmark::State& state) {
+ uint32_t crc = 0;
+ for (auto _ : state)
+ crc = crc32c::Extend(crc, block_buffer_, block_size_);
+ state.SetBytesProcessed(state.iterations() * block_size_);
+}
+BENCHMARK_REGISTER_F(CRC32CBenchmark, Public)
+ ->RangeMultiplier(16)
+ ->Range(256, 16777216); // Block size.
+
+BENCHMARK_DEFINE_F(CRC32CBenchmark, Portable)(benchmark::State& state) {
+ uint32_t crc = 0;
+ for (auto _ : state)
+ crc = crc32c::ExtendPortable(crc, block_buffer_, block_size_);
+ state.SetBytesProcessed(state.iterations() * block_size_);
+}
+BENCHMARK_REGISTER_F(CRC32CBenchmark, Portable)
+ ->RangeMultiplier(16)
+ ->Range(256, 16777216); // Block size.
+
+#if HAVE_ARM64_CRC32C
+
+BENCHMARK_DEFINE_F(CRC32CBenchmark, ArmLinux)(benchmark::State& state) {
+ if (!crc32c::CanUseArm64Linux()) {
+ state.SkipWithError("ARM CRC32C instructions not available or not enabled");
+ return;
+ }
+
+ uint32_t crc = 0;
+ for (auto _ : state)
+ crc = crc32c::ExtendArm64(crc, block_buffer_, block_size_);
+ state.SetBytesProcessed(state.iterations() * block_size_);
+}
+BENCHMARK_REGISTER_F(CRC32CBenchmark, ArmLinux)
+ ->RangeMultiplier(16)
+ ->Range(256, 16777216); // Block size.
+
+#endif // HAVE_ARM64_CRC32C
+
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+BENCHMARK_DEFINE_F(CRC32CBenchmark, Sse42)(benchmark::State& state) {
+ if (!crc32c::CanUseSse42()) {
+ state.SkipWithError("SSE4.2 instructions not available or not enabled");
+ return;
+ }
+
+ uint32_t crc = 0;
+ for (auto _ : state)
+ crc = crc32c::ExtendSse42(crc, block_buffer_, block_size_);
+ state.SetBytesProcessed(state.iterations() * block_size_);
+}
+BENCHMARK_REGISTER_F(CRC32CBenchmark, Sse42)
+ ->RangeMultiplier(16)
+ ->Range(256, 16777216); // Block size.
+
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+int main(int argc, char** argv) {
+#if CRC32C_TESTS_BUILT_WITH_GLOG
+ google::InitGoogleLogging(argv[0]);
+ google::InstallFailureSignalHandler();
+#endif // CRC32C_TESTS_BUILT_WITH_GLOG
+
+ benchmark::Initialize(&argc, argv);
+ benchmark::RunSpecifiedBenchmarks();
+ return 0;
+}
diff --git a/third_party/crc32c/src/src/crc32c_capi_unittest.c b/third_party/crc32c/src/src/crc32c_capi_unittest.c
new file mode 100644
index 0000000000..c8993a0959
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_capi_unittest.c
@@ -0,0 +1,66 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "crc32c/crc32c.h"
+
+#include <stddef.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+int main() {
+ /* From rfc3720 section B.4. */
+ uint8_t buf[32];
+
+ memset(buf, 0, sizeof(buf));
+ if ((uint32_t)0x8a9136aa != crc32c_value(buf, sizeof(buf))) {
+ printf("crc32c_value(zeros) test failed\n");
+ return 1;
+ }
+
+ memset(buf, 0xff, sizeof(buf));
+ if ((uint32_t)0x62a8ab43 != crc32c_value(buf, sizeof(buf))) {
+ printf("crc32c_value(0xff) test failed\n");
+ return 1;
+ }
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = (uint8_t)i;
+ if ((uint32_t)0x46dd794e != crc32c_value(buf, sizeof(buf))) {
+ printf("crc32c_value(0..31) test failed\n");
+ return 1;
+ }
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = (uint8_t)(31 - i);
+ if ((uint32_t)0x113fdb5c != crc32c_value(buf, sizeof(buf))) {
+ printf("crc32c_value(31..0) test failed\n");
+ return 1;
+ }
+
+ uint8_t data[48] = {
+ 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
+ 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+ if ((uint32_t)0xd9963a56 != crc32c_value(data, sizeof(data))) {
+ printf("crc32c_value(31..0) test failed\n");
+ return 1;
+ }
+
+ const uint8_t* hello_space_world = (const uint8_t*)"hello world";
+ const uint8_t* hello_space = (const uint8_t*)"hello ";
+ const uint8_t* world = (const uint8_t*)"world";
+
+ if (crc32c_value(hello_space_world, 11) !=
+ crc32c_extend(crc32c_value(hello_space, 6), world, 5)) {
+ printf("crc32c_extend test failed\n");
+ return 1;
+ }
+
+ printf("All tests passed\n");
+ return 0;
+}
diff --git a/third_party/crc32c/src/src/crc32c_extend_unittests.h b/third_party/crc32c/src/src/crc32c_extend_unittests.h
new file mode 100644
index 0000000000..0732973737
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_extend_unittests.h
@@ -0,0 +1,112 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_EXTEND_UNITTESTS_H_
+#define CRC32C_CRC32C_EXTEND_UNITTESTS_H_
+
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+
+#include "gtest/gtest.h"
+
+// Common test cases for all implementations of CRC32C_Extend().
+
+namespace crc32c {
+
+template<typename TestTraits>
+class ExtendTest : public testing::Test {};
+
+TYPED_TEST_SUITE_P(ExtendTest);
+
+TYPED_TEST_P(ExtendTest, StandardResults) {
+ // From rfc3720 section B.4.
+ uint8_t buf[32];
+
+ std::memset(buf, 0, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa),
+ TypeParam::Extend(0, buf, sizeof(buf)));
+
+ std::memset(buf, 0xff, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43),
+ TypeParam::Extend(0, buf, sizeof(buf)));
+
+ for (int i = 0; i < 32; ++i)
+ buf[i] = static_cast<uint8_t>(i);
+ EXPECT_EQ(static_cast<uint32_t>(0x46dd794e),
+ TypeParam::Extend(0, buf, sizeof(buf)));
+
+ for (int i = 0; i < 32; ++i)
+ buf[i] = static_cast<uint8_t>(31 - i);
+ EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c),
+ TypeParam::Extend(0, buf, sizeof(buf)));
+
+ uint8_t data[48] = {
+ 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
+ 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+ EXPECT_EQ(static_cast<uint32_t>(0xd9963a56),
+ TypeParam::Extend(0, data, sizeof(data)));
+}
+
+TYPED_TEST_P(ExtendTest, HelloWorld) {
+ const uint8_t* hello_space_world =
+ reinterpret_cast<const uint8_t*>("hello world");
+ const uint8_t* hello_space = reinterpret_cast<const uint8_t*>("hello ");
+ const uint8_t* world = reinterpret_cast<const uint8_t*>("world");
+
+ EXPECT_EQ(TypeParam::Extend(0, hello_space_world, 11),
+ TypeParam::Extend(TypeParam::Extend(0, hello_space, 6), world, 5));
+}
+
+TYPED_TEST_P(ExtendTest, BufferSlicing) {
+ uint8_t buffer[48] = {
+ 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
+ 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+
+ for (size_t i = 0; i < 48; ++i) {
+ for (size_t j = i + 1; j <= 48; ++j) {
+ uint32_t crc = 0;
+
+ if (i > 0) crc = TypeParam::Extend(crc, buffer, i);
+ crc = TypeParam::Extend(crc, buffer + i, j - i);
+ if (j < 48) crc = TypeParam::Extend(crc, buffer + j, 48 - j);
+
+ EXPECT_EQ(static_cast<uint32_t>(0xd9963a56), crc);
+ }
+ }
+}
+
+TYPED_TEST_P(ExtendTest, LargeBufferSlicing) {
+ uint8_t buffer[2048];
+ for (size_t i = 0; i < 2048; i++)
+ buffer[i] = static_cast<uint8_t>(3 * i * i + 7 * i + 11);
+
+ for (size_t i = 0; i < 2048; ++i) {
+ for (size_t j = i + 1; j <= 2048; ++j) {
+ uint32_t crc = 0;
+
+ if (i > 0) crc = TypeParam::Extend(crc, buffer, i);
+ crc = TypeParam::Extend(crc, buffer + i, j - i);
+ if (j < 2048) crc = TypeParam::Extend(crc, buffer + j, 2048 - j);
+
+ EXPECT_EQ(static_cast<uint32_t>(0x36dcc753), crc);
+ }
+ }
+}
+
+REGISTER_TYPED_TEST_SUITE_P(ExtendTest,
+ StandardResults,
+ HelloWorld,
+ BufferSlicing,
+ LargeBufferSlicing);
+
+} // namespace crc32c
+
+#endif // CRC32C_CRC32C_EXTEND_UNITTESTS_H_
diff --git a/third_party/crc32c/src/src/crc32c_internal.h b/third_party/crc32c/src/src/crc32c_internal.h
new file mode 100644
index 0000000000..2bd23dea43
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_internal.h
@@ -0,0 +1,23 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_INTERNAL_H_
+#define CRC32C_CRC32C_INTERNAL_H_
+
+// Internal functions that may change between releases.
+
+#include <cstddef>
+#include <cstdint>
+
+namespace crc32c {
+
+// Un-accelerated implementation that works on all CPUs.
+uint32_t ExtendPortable(uint32_t crc, const uint8_t* data, size_t count);
+
+// CRCs are pre- and post- conditioned by xoring with all ones.
+static constexpr const uint32_t kCRC32Xor = static_cast<uint32_t>(0xffffffffU);
+
+} // namespace crc32c
+
+#endif // CRC32C_CRC32C_INTERNAL_H_
diff --git a/third_party/crc32c/src/src/crc32c_portable.cc b/third_party/crc32c/src/src/crc32c_portable.cc
new file mode 100644
index 0000000000..31ec6eac53
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_portable.cc
@@ -0,0 +1,351 @@
+// Copyright 2008 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "./crc32c_internal.h"
+
+#include <cstddef>
+#include <cstdint>
+
+#include "./crc32c_prefetch.h"
+#include "./crc32c_read_le.h"
+#include "./crc32c_round_up.h"
+
+namespace {
+
+const uint32_t kByteExtensionTable[256] = {
+ 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c,
+ 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b,
+ 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,
+ 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384,
+ 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc,
+ 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,
+ 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512,
+ 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa,
+ 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,
+ 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a,
+ 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf,
+ 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,
+ 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f,
+ 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,
+ 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,
+ 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7,
+ 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e,
+ 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,
+ 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e,
+ 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6,
+ 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,
+ 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c,
+ 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4,
+ 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,
+ 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b,
+ 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c,
+ 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,
+ 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,
+ 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975,
+ 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,
+ 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905,
+ 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed,
+ 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,
+ 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff,
+ 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8,
+ 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,
+ 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78,
+ 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee,
+ 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,
+ 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e,
+ 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69,
+ 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
+ 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351};
+
+const uint32_t kStrideExtensionTable0[256] = {
+ 0x00000000, 0x30d23865, 0x61a470ca, 0x517648af, 0xc348e194, 0xf39ad9f1,
+ 0xa2ec915e, 0x923ea93b, 0x837db5d9, 0xb3af8dbc, 0xe2d9c513, 0xd20bfd76,
+ 0x4035544d, 0x70e76c28, 0x21912487, 0x11431ce2, 0x03171d43, 0x33c52526,
+ 0x62b36d89, 0x526155ec, 0xc05ffcd7, 0xf08dc4b2, 0xa1fb8c1d, 0x9129b478,
+ 0x806aa89a, 0xb0b890ff, 0xe1ced850, 0xd11ce035, 0x4322490e, 0x73f0716b,
+ 0x228639c4, 0x125401a1, 0x062e3a86, 0x36fc02e3, 0x678a4a4c, 0x57587229,
+ 0xc566db12, 0xf5b4e377, 0xa4c2abd8, 0x941093bd, 0x85538f5f, 0xb581b73a,
+ 0xe4f7ff95, 0xd425c7f0, 0x461b6ecb, 0x76c956ae, 0x27bf1e01, 0x176d2664,
+ 0x053927c5, 0x35eb1fa0, 0x649d570f, 0x544f6f6a, 0xc671c651, 0xf6a3fe34,
+ 0xa7d5b69b, 0x97078efe, 0x8644921c, 0xb696aa79, 0xe7e0e2d6, 0xd732dab3,
+ 0x450c7388, 0x75de4bed, 0x24a80342, 0x147a3b27, 0x0c5c750c, 0x3c8e4d69,
+ 0x6df805c6, 0x5d2a3da3, 0xcf149498, 0xffc6acfd, 0xaeb0e452, 0x9e62dc37,
+ 0x8f21c0d5, 0xbff3f8b0, 0xee85b01f, 0xde57887a, 0x4c692141, 0x7cbb1924,
+ 0x2dcd518b, 0x1d1f69ee, 0x0f4b684f, 0x3f99502a, 0x6eef1885, 0x5e3d20e0,
+ 0xcc0389db, 0xfcd1b1be, 0xada7f911, 0x9d75c174, 0x8c36dd96, 0xbce4e5f3,
+ 0xed92ad5c, 0xdd409539, 0x4f7e3c02, 0x7fac0467, 0x2eda4cc8, 0x1e0874ad,
+ 0x0a724f8a, 0x3aa077ef, 0x6bd63f40, 0x5b040725, 0xc93aae1e, 0xf9e8967b,
+ 0xa89eded4, 0x984ce6b1, 0x890ffa53, 0xb9ddc236, 0xe8ab8a99, 0xd879b2fc,
+ 0x4a471bc7, 0x7a9523a2, 0x2be36b0d, 0x1b315368, 0x096552c9, 0x39b76aac,
+ 0x68c12203, 0x58131a66, 0xca2db35d, 0xfaff8b38, 0xab89c397, 0x9b5bfbf2,
+ 0x8a18e710, 0xbacadf75, 0xebbc97da, 0xdb6eafbf, 0x49500684, 0x79823ee1,
+ 0x28f4764e, 0x18264e2b, 0x18b8ea18, 0x286ad27d, 0x791c9ad2, 0x49cea2b7,
+ 0xdbf00b8c, 0xeb2233e9, 0xba547b46, 0x8a864323, 0x9bc55fc1, 0xab1767a4,
+ 0xfa612f0b, 0xcab3176e, 0x588dbe55, 0x685f8630, 0x3929ce9f, 0x09fbf6fa,
+ 0x1baff75b, 0x2b7dcf3e, 0x7a0b8791, 0x4ad9bff4, 0xd8e716cf, 0xe8352eaa,
+ 0xb9436605, 0x89915e60, 0x98d24282, 0xa8007ae7, 0xf9763248, 0xc9a40a2d,
+ 0x5b9aa316, 0x6b489b73, 0x3a3ed3dc, 0x0aecebb9, 0x1e96d09e, 0x2e44e8fb,
+ 0x7f32a054, 0x4fe09831, 0xddde310a, 0xed0c096f, 0xbc7a41c0, 0x8ca879a5,
+ 0x9deb6547, 0xad395d22, 0xfc4f158d, 0xcc9d2de8, 0x5ea384d3, 0x6e71bcb6,
+ 0x3f07f419, 0x0fd5cc7c, 0x1d81cddd, 0x2d53f5b8, 0x7c25bd17, 0x4cf78572,
+ 0xdec92c49, 0xee1b142c, 0xbf6d5c83, 0x8fbf64e6, 0x9efc7804, 0xae2e4061,
+ 0xff5808ce, 0xcf8a30ab, 0x5db49990, 0x6d66a1f5, 0x3c10e95a, 0x0cc2d13f,
+ 0x14e49f14, 0x2436a771, 0x7540efde, 0x4592d7bb, 0xd7ac7e80, 0xe77e46e5,
+ 0xb6080e4a, 0x86da362f, 0x97992acd, 0xa74b12a8, 0xf63d5a07, 0xc6ef6262,
+ 0x54d1cb59, 0x6403f33c, 0x3575bb93, 0x05a783f6, 0x17f38257, 0x2721ba32,
+ 0x7657f29d, 0x4685caf8, 0xd4bb63c3, 0xe4695ba6, 0xb51f1309, 0x85cd2b6c,
+ 0x948e378e, 0xa45c0feb, 0xf52a4744, 0xc5f87f21, 0x57c6d61a, 0x6714ee7f,
+ 0x3662a6d0, 0x06b09eb5, 0x12caa592, 0x22189df7, 0x736ed558, 0x43bced3d,
+ 0xd1824406, 0xe1507c63, 0xb02634cc, 0x80f40ca9, 0x91b7104b, 0xa165282e,
+ 0xf0136081, 0xc0c158e4, 0x52fff1df, 0x622dc9ba, 0x335b8115, 0x0389b970,
+ 0x11ddb8d1, 0x210f80b4, 0x7079c81b, 0x40abf07e, 0xd2955945, 0xe2476120,
+ 0xb331298f, 0x83e311ea, 0x92a00d08, 0xa272356d, 0xf3047dc2, 0xc3d645a7,
+ 0x51e8ec9c, 0x613ad4f9, 0x304c9c56, 0x009ea433};
+
+const uint32_t kStrideExtensionTable1[256] = {
+ 0x00000000, 0x54075546, 0xa80eaa8c, 0xfc09ffca, 0x55f123e9, 0x01f676af,
+ 0xfdff8965, 0xa9f8dc23, 0xabe247d2, 0xffe51294, 0x03eced5e, 0x57ebb818,
+ 0xfe13643b, 0xaa14317d, 0x561dceb7, 0x021a9bf1, 0x5228f955, 0x062fac13,
+ 0xfa2653d9, 0xae21069f, 0x07d9dabc, 0x53de8ffa, 0xafd77030, 0xfbd02576,
+ 0xf9cabe87, 0xadcdebc1, 0x51c4140b, 0x05c3414d, 0xac3b9d6e, 0xf83cc828,
+ 0x043537e2, 0x503262a4, 0xa451f2aa, 0xf056a7ec, 0x0c5f5826, 0x58580d60,
+ 0xf1a0d143, 0xa5a78405, 0x59ae7bcf, 0x0da92e89, 0x0fb3b578, 0x5bb4e03e,
+ 0xa7bd1ff4, 0xf3ba4ab2, 0x5a429691, 0x0e45c3d7, 0xf24c3c1d, 0xa64b695b,
+ 0xf6790bff, 0xa27e5eb9, 0x5e77a173, 0x0a70f435, 0xa3882816, 0xf78f7d50,
+ 0x0b86829a, 0x5f81d7dc, 0x5d9b4c2d, 0x099c196b, 0xf595e6a1, 0xa192b3e7,
+ 0x086a6fc4, 0x5c6d3a82, 0xa064c548, 0xf463900e, 0x4d4f93a5, 0x1948c6e3,
+ 0xe5413929, 0xb1466c6f, 0x18beb04c, 0x4cb9e50a, 0xb0b01ac0, 0xe4b74f86,
+ 0xe6add477, 0xb2aa8131, 0x4ea37efb, 0x1aa42bbd, 0xb35cf79e, 0xe75ba2d8,
+ 0x1b525d12, 0x4f550854, 0x1f676af0, 0x4b603fb6, 0xb769c07c, 0xe36e953a,
+ 0x4a964919, 0x1e911c5f, 0xe298e395, 0xb69fb6d3, 0xb4852d22, 0xe0827864,
+ 0x1c8b87ae, 0x488cd2e8, 0xe1740ecb, 0xb5735b8d, 0x497aa447, 0x1d7df101,
+ 0xe91e610f, 0xbd193449, 0x4110cb83, 0x15179ec5, 0xbcef42e6, 0xe8e817a0,
+ 0x14e1e86a, 0x40e6bd2c, 0x42fc26dd, 0x16fb739b, 0xeaf28c51, 0xbef5d917,
+ 0x170d0534, 0x430a5072, 0xbf03afb8, 0xeb04fafe, 0xbb36985a, 0xef31cd1c,
+ 0x133832d6, 0x473f6790, 0xeec7bbb3, 0xbac0eef5, 0x46c9113f, 0x12ce4479,
+ 0x10d4df88, 0x44d38ace, 0xb8da7504, 0xecdd2042, 0x4525fc61, 0x1122a927,
+ 0xed2b56ed, 0xb92c03ab, 0x9a9f274a, 0xce98720c, 0x32918dc6, 0x6696d880,
+ 0xcf6e04a3, 0x9b6951e5, 0x6760ae2f, 0x3367fb69, 0x317d6098, 0x657a35de,
+ 0x9973ca14, 0xcd749f52, 0x648c4371, 0x308b1637, 0xcc82e9fd, 0x9885bcbb,
+ 0xc8b7de1f, 0x9cb08b59, 0x60b97493, 0x34be21d5, 0x9d46fdf6, 0xc941a8b0,
+ 0x3548577a, 0x614f023c, 0x635599cd, 0x3752cc8b, 0xcb5b3341, 0x9f5c6607,
+ 0x36a4ba24, 0x62a3ef62, 0x9eaa10a8, 0xcaad45ee, 0x3eced5e0, 0x6ac980a6,
+ 0x96c07f6c, 0xc2c72a2a, 0x6b3ff609, 0x3f38a34f, 0xc3315c85, 0x973609c3,
+ 0x952c9232, 0xc12bc774, 0x3d2238be, 0x69256df8, 0xc0ddb1db, 0x94dae49d,
+ 0x68d31b57, 0x3cd44e11, 0x6ce62cb5, 0x38e179f3, 0xc4e88639, 0x90efd37f,
+ 0x39170f5c, 0x6d105a1a, 0x9119a5d0, 0xc51ef096, 0xc7046b67, 0x93033e21,
+ 0x6f0ac1eb, 0x3b0d94ad, 0x92f5488e, 0xc6f21dc8, 0x3afbe202, 0x6efcb744,
+ 0xd7d0b4ef, 0x83d7e1a9, 0x7fde1e63, 0x2bd94b25, 0x82219706, 0xd626c240,
+ 0x2a2f3d8a, 0x7e2868cc, 0x7c32f33d, 0x2835a67b, 0xd43c59b1, 0x803b0cf7,
+ 0x29c3d0d4, 0x7dc48592, 0x81cd7a58, 0xd5ca2f1e, 0x85f84dba, 0xd1ff18fc,
+ 0x2df6e736, 0x79f1b270, 0xd0096e53, 0x840e3b15, 0x7807c4df, 0x2c009199,
+ 0x2e1a0a68, 0x7a1d5f2e, 0x8614a0e4, 0xd213f5a2, 0x7beb2981, 0x2fec7cc7,
+ 0xd3e5830d, 0x87e2d64b, 0x73814645, 0x27861303, 0xdb8fecc9, 0x8f88b98f,
+ 0x267065ac, 0x727730ea, 0x8e7ecf20, 0xda799a66, 0xd8630197, 0x8c6454d1,
+ 0x706dab1b, 0x246afe5d, 0x8d92227e, 0xd9957738, 0x259c88f2, 0x719bddb4,
+ 0x21a9bf10, 0x75aeea56, 0x89a7159c, 0xdda040da, 0x74589cf9, 0x205fc9bf,
+ 0xdc563675, 0x88516333, 0x8a4bf8c2, 0xde4cad84, 0x2245524e, 0x76420708,
+ 0xdfbadb2b, 0x8bbd8e6d, 0x77b471a7, 0x23b324e1};
+
+const uint32_t kStrideExtensionTable2[256] = {
+ 0x00000000, 0x678efd01, 0xcf1dfa02, 0xa8930703, 0x9bd782f5, 0xfc597ff4,
+ 0x54ca78f7, 0x334485f6, 0x3243731b, 0x55cd8e1a, 0xfd5e8919, 0x9ad07418,
+ 0xa994f1ee, 0xce1a0cef, 0x66890bec, 0x0107f6ed, 0x6486e636, 0x03081b37,
+ 0xab9b1c34, 0xcc15e135, 0xff5164c3, 0x98df99c2, 0x304c9ec1, 0x57c263c0,
+ 0x56c5952d, 0x314b682c, 0x99d86f2f, 0xfe56922e, 0xcd1217d8, 0xaa9cead9,
+ 0x020fedda, 0x658110db, 0xc90dcc6c, 0xae83316d, 0x0610366e, 0x619ecb6f,
+ 0x52da4e99, 0x3554b398, 0x9dc7b49b, 0xfa49499a, 0xfb4ebf77, 0x9cc04276,
+ 0x34534575, 0x53ddb874, 0x60993d82, 0x0717c083, 0xaf84c780, 0xc80a3a81,
+ 0xad8b2a5a, 0xca05d75b, 0x6296d058, 0x05182d59, 0x365ca8af, 0x51d255ae,
+ 0xf94152ad, 0x9ecfafac, 0x9fc85941, 0xf846a440, 0x50d5a343, 0x375b5e42,
+ 0x041fdbb4, 0x639126b5, 0xcb0221b6, 0xac8cdcb7, 0x97f7ee29, 0xf0791328,
+ 0x58ea142b, 0x3f64e92a, 0x0c206cdc, 0x6bae91dd, 0xc33d96de, 0xa4b36bdf,
+ 0xa5b49d32, 0xc23a6033, 0x6aa96730, 0x0d279a31, 0x3e631fc7, 0x59ede2c6,
+ 0xf17ee5c5, 0x96f018c4, 0xf371081f, 0x94fff51e, 0x3c6cf21d, 0x5be20f1c,
+ 0x68a68aea, 0x0f2877eb, 0xa7bb70e8, 0xc0358de9, 0xc1327b04, 0xa6bc8605,
+ 0x0e2f8106, 0x69a17c07, 0x5ae5f9f1, 0x3d6b04f0, 0x95f803f3, 0xf276fef2,
+ 0x5efa2245, 0x3974df44, 0x91e7d847, 0xf6692546, 0xc52da0b0, 0xa2a35db1,
+ 0x0a305ab2, 0x6dbea7b3, 0x6cb9515e, 0x0b37ac5f, 0xa3a4ab5c, 0xc42a565d,
+ 0xf76ed3ab, 0x90e02eaa, 0x387329a9, 0x5ffdd4a8, 0x3a7cc473, 0x5df23972,
+ 0xf5613e71, 0x92efc370, 0xa1ab4686, 0xc625bb87, 0x6eb6bc84, 0x09384185,
+ 0x083fb768, 0x6fb14a69, 0xc7224d6a, 0xa0acb06b, 0x93e8359d, 0xf466c89c,
+ 0x5cf5cf9f, 0x3b7b329e, 0x2a03aaa3, 0x4d8d57a2, 0xe51e50a1, 0x8290ada0,
+ 0xb1d42856, 0xd65ad557, 0x7ec9d254, 0x19472f55, 0x1840d9b8, 0x7fce24b9,
+ 0xd75d23ba, 0xb0d3debb, 0x83975b4d, 0xe419a64c, 0x4c8aa14f, 0x2b045c4e,
+ 0x4e854c95, 0x290bb194, 0x8198b697, 0xe6164b96, 0xd552ce60, 0xb2dc3361,
+ 0x1a4f3462, 0x7dc1c963, 0x7cc63f8e, 0x1b48c28f, 0xb3dbc58c, 0xd455388d,
+ 0xe711bd7b, 0x809f407a, 0x280c4779, 0x4f82ba78, 0xe30e66cf, 0x84809bce,
+ 0x2c139ccd, 0x4b9d61cc, 0x78d9e43a, 0x1f57193b, 0xb7c41e38, 0xd04ae339,
+ 0xd14d15d4, 0xb6c3e8d5, 0x1e50efd6, 0x79de12d7, 0x4a9a9721, 0x2d146a20,
+ 0x85876d23, 0xe2099022, 0x878880f9, 0xe0067df8, 0x48957afb, 0x2f1b87fa,
+ 0x1c5f020c, 0x7bd1ff0d, 0xd342f80e, 0xb4cc050f, 0xb5cbf3e2, 0xd2450ee3,
+ 0x7ad609e0, 0x1d58f4e1, 0x2e1c7117, 0x49928c16, 0xe1018b15, 0x868f7614,
+ 0xbdf4448a, 0xda7ab98b, 0x72e9be88, 0x15674389, 0x2623c67f, 0x41ad3b7e,
+ 0xe93e3c7d, 0x8eb0c17c, 0x8fb73791, 0xe839ca90, 0x40aacd93, 0x27243092,
+ 0x1460b564, 0x73ee4865, 0xdb7d4f66, 0xbcf3b267, 0xd972a2bc, 0xbefc5fbd,
+ 0x166f58be, 0x71e1a5bf, 0x42a52049, 0x252bdd48, 0x8db8da4b, 0xea36274a,
+ 0xeb31d1a7, 0x8cbf2ca6, 0x242c2ba5, 0x43a2d6a4, 0x70e65352, 0x1768ae53,
+ 0xbffba950, 0xd8755451, 0x74f988e6, 0x137775e7, 0xbbe472e4, 0xdc6a8fe5,
+ 0xef2e0a13, 0x88a0f712, 0x2033f011, 0x47bd0d10, 0x46bafbfd, 0x213406fc,
+ 0x89a701ff, 0xee29fcfe, 0xdd6d7908, 0xbae38409, 0x1270830a, 0x75fe7e0b,
+ 0x107f6ed0, 0x77f193d1, 0xdf6294d2, 0xb8ec69d3, 0x8ba8ec25, 0xec261124,
+ 0x44b51627, 0x233beb26, 0x223c1dcb, 0x45b2e0ca, 0xed21e7c9, 0x8aaf1ac8,
+ 0xb9eb9f3e, 0xde65623f, 0x76f6653c, 0x1178983d};
+
+const uint32_t kStrideExtensionTable3[256] = {
+ 0x00000000, 0xf20c0dfe, 0xe1f46d0d, 0x13f860f3, 0xc604aceb, 0x3408a115,
+ 0x27f0c1e6, 0xd5fccc18, 0x89e52f27, 0x7be922d9, 0x6811422a, 0x9a1d4fd4,
+ 0x4fe183cc, 0xbded8e32, 0xae15eec1, 0x5c19e33f, 0x162628bf, 0xe42a2541,
+ 0xf7d245b2, 0x05de484c, 0xd0228454, 0x222e89aa, 0x31d6e959, 0xc3dae4a7,
+ 0x9fc30798, 0x6dcf0a66, 0x7e376a95, 0x8c3b676b, 0x59c7ab73, 0xabcba68d,
+ 0xb833c67e, 0x4a3fcb80, 0x2c4c517e, 0xde405c80, 0xcdb83c73, 0x3fb4318d,
+ 0xea48fd95, 0x1844f06b, 0x0bbc9098, 0xf9b09d66, 0xa5a97e59, 0x57a573a7,
+ 0x445d1354, 0xb6511eaa, 0x63add2b2, 0x91a1df4c, 0x8259bfbf, 0x7055b241,
+ 0x3a6a79c1, 0xc866743f, 0xdb9e14cc, 0x29921932, 0xfc6ed52a, 0x0e62d8d4,
+ 0x1d9ab827, 0xef96b5d9, 0xb38f56e6, 0x41835b18, 0x527b3beb, 0xa0773615,
+ 0x758bfa0d, 0x8787f7f3, 0x947f9700, 0x66739afe, 0x5898a2fc, 0xaa94af02,
+ 0xb96ccff1, 0x4b60c20f, 0x9e9c0e17, 0x6c9003e9, 0x7f68631a, 0x8d646ee4,
+ 0xd17d8ddb, 0x23718025, 0x3089e0d6, 0xc285ed28, 0x17792130, 0xe5752cce,
+ 0xf68d4c3d, 0x048141c3, 0x4ebe8a43, 0xbcb287bd, 0xaf4ae74e, 0x5d46eab0,
+ 0x88ba26a8, 0x7ab62b56, 0x694e4ba5, 0x9b42465b, 0xc75ba564, 0x3557a89a,
+ 0x26afc869, 0xd4a3c597, 0x015f098f, 0xf3530471, 0xe0ab6482, 0x12a7697c,
+ 0x74d4f382, 0x86d8fe7c, 0x95209e8f, 0x672c9371, 0xb2d05f69, 0x40dc5297,
+ 0x53243264, 0xa1283f9a, 0xfd31dca5, 0x0f3dd15b, 0x1cc5b1a8, 0xeec9bc56,
+ 0x3b35704e, 0xc9397db0, 0xdac11d43, 0x28cd10bd, 0x62f2db3d, 0x90fed6c3,
+ 0x8306b630, 0x710abbce, 0xa4f677d6, 0x56fa7a28, 0x45021adb, 0xb70e1725,
+ 0xeb17f41a, 0x191bf9e4, 0x0ae39917, 0xf8ef94e9, 0x2d1358f1, 0xdf1f550f,
+ 0xcce735fc, 0x3eeb3802, 0xb13145f8, 0x433d4806, 0x50c528f5, 0xa2c9250b,
+ 0x7735e913, 0x8539e4ed, 0x96c1841e, 0x64cd89e0, 0x38d46adf, 0xcad86721,
+ 0xd92007d2, 0x2b2c0a2c, 0xfed0c634, 0x0cdccbca, 0x1f24ab39, 0xed28a6c7,
+ 0xa7176d47, 0x551b60b9, 0x46e3004a, 0xb4ef0db4, 0x6113c1ac, 0x931fcc52,
+ 0x80e7aca1, 0x72eba15f, 0x2ef24260, 0xdcfe4f9e, 0xcf062f6d, 0x3d0a2293,
+ 0xe8f6ee8b, 0x1afae375, 0x09028386, 0xfb0e8e78, 0x9d7d1486, 0x6f711978,
+ 0x7c89798b, 0x8e857475, 0x5b79b86d, 0xa975b593, 0xba8dd560, 0x4881d89e,
+ 0x14983ba1, 0xe694365f, 0xf56c56ac, 0x07605b52, 0xd29c974a, 0x20909ab4,
+ 0x3368fa47, 0xc164f7b9, 0x8b5b3c39, 0x795731c7, 0x6aaf5134, 0x98a35cca,
+ 0x4d5f90d2, 0xbf539d2c, 0xacabfddf, 0x5ea7f021, 0x02be131e, 0xf0b21ee0,
+ 0xe34a7e13, 0x114673ed, 0xc4babff5, 0x36b6b20b, 0x254ed2f8, 0xd742df06,
+ 0xe9a9e704, 0x1ba5eafa, 0x085d8a09, 0xfa5187f7, 0x2fad4bef, 0xdda14611,
+ 0xce5926e2, 0x3c552b1c, 0x604cc823, 0x9240c5dd, 0x81b8a52e, 0x73b4a8d0,
+ 0xa64864c8, 0x54446936, 0x47bc09c5, 0xb5b0043b, 0xff8fcfbb, 0x0d83c245,
+ 0x1e7ba2b6, 0xec77af48, 0x398b6350, 0xcb876eae, 0xd87f0e5d, 0x2a7303a3,
+ 0x766ae09c, 0x8466ed62, 0x979e8d91, 0x6592806f, 0xb06e4c77, 0x42624189,
+ 0x519a217a, 0xa3962c84, 0xc5e5b67a, 0x37e9bb84, 0x2411db77, 0xd61dd689,
+ 0x03e11a91, 0xf1ed176f, 0xe215779c, 0x10197a62, 0x4c00995d, 0xbe0c94a3,
+ 0xadf4f450, 0x5ff8f9ae, 0x8a0435b6, 0x78083848, 0x6bf058bb, 0x99fc5545,
+ 0xd3c39ec5, 0x21cf933b, 0x3237f3c8, 0xc03bfe36, 0x15c7322e, 0xe7cb3fd0,
+ 0xf4335f23, 0x063f52dd, 0x5a26b1e2, 0xa82abc1c, 0xbbd2dcef, 0x49ded111,
+ 0x9c221d09, 0x6e2e10f7, 0x7dd67004, 0x8fda7dfa};
+
+constexpr const ptrdiff_t kPrefetchHorizon = 256;
+
+} // namespace
+
+namespace crc32c {
+
+uint32_t ExtendPortable(uint32_t crc, const uint8_t* data, size_t size) {
+ const uint8_t* p = data;
+ const uint8_t* e = p + size;
+ uint32_t l = crc ^ kCRC32Xor;
+
+// Process one byte at a time.
+#define STEP1 \
+ do { \
+ int c = (l & 0xff) ^ *p++; \
+ l = kByteExtensionTable[c] ^ (l >> 8); \
+ } while (0)
+
+// Process one of the 4 strides of 4-byte data.
+#define STEP4(s) \
+ do { \
+ crc##s = ReadUint32LE(p + s * 4) ^ kStrideExtensionTable3[crc##s & 0xff] ^ \
+ kStrideExtensionTable2[(crc##s >> 8) & 0xff] ^ \
+ kStrideExtensionTable1[(crc##s >> 16) & 0xff] ^ \
+ kStrideExtensionTable0[crc##s >> 24]; \
+ } while (0)
+
+// Process a 16-byte swath of 4 strides, each of which has 4 bytes of data.
+#define STEP16 \
+ do { \
+ STEP4(0); \
+ STEP4(1); \
+ STEP4(2); \
+ STEP4(3); \
+ p += 16; \
+ } while (0)
+
+// Process 4 bytes that were already loaded into a word.
+#define STEP4W(w) \
+ do { \
+ w ^= l; \
+ for (size_t i = 0; i < 4; ++i) { \
+ w = (w >> 8) ^ kByteExtensionTable[w & 0xff]; \
+ } \
+ l = w; \
+ } while (0)
+
+ // Point x at first 4-byte aligned byte in the buffer. This might be past the
+ // end of the buffer.
+ const uint8_t* x = RoundUp<4>(p);
+ if (x <= e) {
+ // Process bytes until p is 4-byte aligned.
+ while (p != x) {
+ STEP1;
+ }
+ }
+
+ if ((e - p) >= 16) {
+ // Load a 16-byte swath into the stride partial results.
+ uint32_t crc0 = ReadUint32LE(p + 0 * 4) ^ l;
+ uint32_t crc1 = ReadUint32LE(p + 1 * 4);
+ uint32_t crc2 = ReadUint32LE(p + 2 * 4);
+ uint32_t crc3 = ReadUint32LE(p + 3 * 4);
+ p += 16;
+
+ while ((e - p) > kPrefetchHorizon) {
+ RequestPrefetch(p + kPrefetchHorizon);
+
+ // Process 64 bytes at a time.
+ STEP16;
+ STEP16;
+ STEP16;
+ STEP16;
+ }
+
+ // Process one 16-byte swath at a time.
+ while ((e - p) >= 16) {
+ STEP16;
+ }
+
+ // Advance one word at a time as far as possible.
+ while ((e - p) >= 4) {
+ STEP4(0);
+ uint32_t tmp = crc0;
+ crc0 = crc1;
+ crc1 = crc2;
+ crc2 = crc3;
+ crc3 = tmp;
+ p += 4;
+ }
+
+ // Combine the 4 partial stride results.
+ l = 0;
+ STEP4W(crc0);
+ STEP4W(crc1);
+ STEP4W(crc2);
+ STEP4W(crc3);
+ }
+
+ // Process the last few bytes.
+ while (p != e) {
+ STEP1;
+ }
+#undef STEP4W
+#undef STEP16
+#undef STEP4
+#undef STEP1
+ return l ^ kCRC32Xor;
+}
+
+} // namespace crc32c
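
For cross-checking the tables above, a bit-at-a-time reference implementation (reflected Castagnoli polynomial 0x82f63b78, with the same pre/post conditioning as kCRC32Xor) should agree with ExtendPortable() on the rfc3720 vectors used in the unit tests. A verification sketch, not part of the patch:

#include <cstddef>
#include <cstdint>

uint32_t ExtendBitwise(uint32_t crc, const uint8_t* data, size_t count) {
  crc ^= 0xffffffffu;
  for (size_t i = 0; i < count; ++i) {
    crc ^= data[i];
    for (int bit = 0; bit < 8; ++bit) {
      // Reflected polynomial step: shift right, conditionally XOR 0x82f63b78.
      crc = (crc >> 1) ^ (0x82f63b78u & (0u - (crc & 1u)));
    }
  }
  return crc ^ 0xffffffffu;
}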
diff --git a/third_party/crc32c/src/src/crc32c_portable_unittest.cc b/third_party/crc32c/src/src/crc32c_portable_unittest.cc
new file mode 100644
index 0000000000..5098e2c373
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_portable_unittest.cc
@@ -0,0 +1,20 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "gtest/gtest.h"
+
+#include "./crc32c_extend_unittests.h"
+#include "./crc32c_internal.h"
+
+namespace crc32c {
+
+struct PortableTestTraits {
+ static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) {
+ return ExtendPortable(crc, data, count);
+ }
+};
+
+INSTANTIATE_TYPED_TEST_SUITE_P(Portable, ExtendTest, PortableTestTraits);
+
+} // namespace crc32c
diff --git a/third_party/crc32c/src/src/crc32c_prefetch.h b/third_party/crc32c/src/src/crc32c_prefetch.h
new file mode 100644
index 0000000000..e8df540494
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_prefetch.h
@@ -0,0 +1,44 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_PREFETCH_H_
+#define CRC32C_CRC32C_PREFETCH_H_
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+#if HAVE_MM_PREFETCH
+
+#if defined(_MSC_VER)
+#include <intrin.h>
+#else // !defined(_MSC_VER)
+#include <xmmintrin.h>
+#endif // defined(_MSC_VER)
+
+#endif // HAVE_MM_PREFETCH
+
+namespace crc32c {
+
+// Ask the hardware to prefetch the data at the given address into the L1 cache.
+inline void RequestPrefetch(const uint8_t* address) {
+#if HAVE_BUILTIN_PREFETCH
+ // Clang and GCC implement the __builtin_prefetch non-standard extension,
+ // which maps to the best instruction on the target architecture.
+ __builtin_prefetch(reinterpret_cast<const char*>(address), 0 /* Read only. */,
+ 0 /* No temporal locality. */);
+#elif HAVE_MM_PREFETCH
+ // Visual Studio doesn't implement __builtin_prefetch, but exposes the
+ // PREFETCHNTA instruction via the _mm_prefetch intrinsic.
+ _mm_prefetch(reinterpret_cast<const char*>(address), _MM_HINT_NTA);
+#else
+ // No prefetch support. Silence compiler warnings.
+ (void)address;
+#endif // HAVE_BUILTIN_PREFETCH
+}
+
+} // namespace crc32c
+
+#endif // CRC32C_CRC32C_PREFETCH_H_
diff --git a/third_party/crc32c/src/src/crc32c_prefetch_unittest.cc b/third_party/crc32c/src/src/crc32c_prefetch_unittest.cc
new file mode 100644
index 0000000000..b34ed2d5fe
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_prefetch_unittest.cc
@@ -0,0 +1,9 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "./crc32c_prefetch.h"
+
+// There is no easy way to test cache prefetching. We can only test that the
+// crc32c_prefetch.h header compiles on its own, so it doesn't have any unstated
+// dependencies.
diff --git a/third_party/crc32c/src/src/crc32c_read_le.h b/third_party/crc32c/src/src/crc32c_read_le.h
new file mode 100644
index 0000000000..fe455623c2
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_read_le.h
@@ -0,0 +1,51 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_READ_LE_H_
+#define CRC32C_CRC32C_READ_LE_H_
+
+#include <cstdint>
+#include <cstring>
+
+#include "crc32c/crc32c_config.h"
+
+namespace crc32c {
+
+// Reads a little-endian 32-bit integer from a 32-bit-aligned buffer.
+inline uint32_t ReadUint32LE(const uint8_t* buffer) {
+#if BYTE_ORDER_BIG_ENDIAN
+ return ((static_cast<uint32_t>(static_cast<uint8_t>(buffer[0]))) |
+ (static_cast<uint32_t>(static_cast<uint8_t>(buffer[1])) << 8) |
+ (static_cast<uint32_t>(static_cast<uint8_t>(buffer[2])) << 16) |
+ (static_cast<uint32_t>(static_cast<uint8_t>(buffer[3])) << 24));
+#else // !BYTE_ORDER_BIG_ENDIAN
+ uint32_t result;
+ // This should be optimized to a single instruction.
+ std::memcpy(&result, buffer, sizeof(result));
+ return result;
+#endif // BYTE_ORDER_BIG_ENDIAN
+}
+
+// Reads a little-endian 64-bit integer from a 64-bit-aligned buffer.
+inline uint64_t ReadUint64LE(const uint8_t* buffer) {
+#if BYTE_ORDER_BIG_ENDIAN
+ return ((static_cast<uint64_t>(static_cast<uint8_t>(buffer[0]))) |
+ (static_cast<uint64_t>(static_cast<uint8_t>(buffer[1])) << 8) |
+ (static_cast<uint64_t>(static_cast<uint8_t>(buffer[2])) << 16) |
+ (static_cast<uint64_t>(static_cast<uint8_t>(buffer[3])) << 24) |
+ (static_cast<uint64_t>(static_cast<uint8_t>(buffer[4])) << 32) |
+ (static_cast<uint64_t>(static_cast<uint8_t>(buffer[5])) << 40) |
+ (static_cast<uint64_t>(static_cast<uint8_t>(buffer[6])) << 48) |
+ (static_cast<uint64_t>(static_cast<uint8_t>(buffer[7])) << 56));
+#else // !BYTE_ORDER_BIG_ENDIAN
+ uint64_t result;
+ // This should be optimized to a single instruction.
+ std::memcpy(&result, buffer, sizeof(result));
+ return result;
+#endif // BYTE_ORDER_BIG_ENDIAN
+}
+
+} // namespace crc32c
+
+#endif // CRC32C_CRC32C_READ_LE_H_
diff --git a/third_party/crc32c/src/src/crc32c_read_le_unittest.cc b/third_party/crc32c/src/src/crc32c_read_le_unittest.cc
new file mode 100644
index 0000000000..2a30302adf
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_read_le_unittest.cc
@@ -0,0 +1,32 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "./crc32c_read_le.h"
+
+#include <cstddef>
+#include <cstdint>
+
+#include "gtest/gtest.h"
+
+#include "./crc32c_round_up.h"
+
+namespace crc32c {
+
+TEST(Crc32CReadLETest, ReadUint32LE) {
+ // little-endian 0x12345678
+ alignas(4) uint8_t bytes[] = {0x78, 0x56, 0x34, 0x12};
+
+ ASSERT_EQ(RoundUp<4>(bytes), bytes) << "Stack array is not aligned";
+ EXPECT_EQ(static_cast<uint32_t>(0x12345678), ReadUint32LE(bytes));
+}
+
+TEST(Crc32CReadLETest, ReadUint64LE) {
+ // little-endian 0x123456789ABCDEF0
+ alignas(8) uint8_t bytes[] = {0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12};
+
+ ASSERT_EQ(RoundUp<8>(bytes), bytes) << "Stack array is not aligned";
+ EXPECT_EQ(static_cast<uint64_t>(0x123456789ABCDEF0), ReadUint64LE(bytes));
+}
+
+} // namespace crc32c
diff --git a/third_party/crc32c/src/src/crc32c_round_up.h b/third_party/crc32c/src/src/crc32c_round_up.h
new file mode 100644
index 0000000000..d3b922beb9
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_round_up.h
@@ -0,0 +1,34 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_ROUND_UP_H_
+#define CRC32C_CRC32C_ROUND_UP_H_
+
+#include <cstddef>
+#include <cstdint>
+
+namespace crc32c {
+
+// Returns the smallest number >= the given number that is evenly divisible by N.
+//
+// N must be a power of two.
+template <int N>
+constexpr inline uintptr_t RoundUp(uintptr_t pointer) {
+ static_assert((N & (N - 1)) == 0, "N must be a power of two");
+ return (pointer + (N - 1)) & ~(N - 1);
+}
+
+// Returns the smallest address >= the given address that is aligned to N bytes.
+//
+// N must be a power of two.
+template <int N>
+constexpr inline const uint8_t* RoundUp(const uint8_t* pointer) {
+ static_assert((N & (N - 1)) == 0, "N must be a power of two");
+ return reinterpret_cast<uint8_t*>(
+ RoundUp<N>(reinterpret_cast<uintptr_t>(pointer)));
+}
+
+} // namespace crc32c
+
+#endif // CRC32C_CRC32C_ROUND_UP_H_
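The integer overload above uses the standard mask trick: adding N-1 and then clearing the low bits rounds up to the next multiple of N. Because that overload is constexpr, its behaviour can be sketched with compile-time checks (illustrative only, not part of the patch):

#include <cstdint>

#include "./crc32c_round_up.h"

static_assert(crc32c::RoundUp<8>(uintptr_t{0}) == 0, "zero is already aligned");
static_assert(crc32c::RoundUp<8>(uintptr_t{1}) == 8, "rounds up to 8");
static_assert(crc32c::RoundUp<8>(uintptr_t{8}) == 8, "multiples are unchanged");
static_assert(crc32c::RoundUp<8>(uintptr_t{9}) == 16, "next multiple of 8");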
diff --git a/third_party/crc32c/src/src/crc32c_round_up_unittest.cc b/third_party/crc32c/src/src/crc32c_round_up_unittest.cc
new file mode 100644
index 0000000000..5ff657bb5c
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_round_up_unittest.cc
@@ -0,0 +1,84 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "./crc32c_round_up.h"
+
+#include <cstddef>
+#include <cstdint>
+
+#include "gtest/gtest.h"
+
+namespace crc32c {
+
+TEST(CRC32CRoundUpTest, RoundUpUintptr) {
+ uintptr_t zero = 0;
+
+ ASSERT_EQ(zero, RoundUp<1>(zero));
+ ASSERT_EQ(1U, RoundUp<1>(1U));
+ ASSERT_EQ(2U, RoundUp<1>(2U));
+ ASSERT_EQ(3U, RoundUp<1>(3U));
+ ASSERT_EQ(~static_cast<uintptr_t>(0), RoundUp<1>(~static_cast<uintptr_t>(0)));
+ ASSERT_EQ(~static_cast<uintptr_t>(1), RoundUp<1>(~static_cast<uintptr_t>(1)));
+ ASSERT_EQ(~static_cast<uintptr_t>(2), RoundUp<1>(~static_cast<uintptr_t>(2)));
+ ASSERT_EQ(~static_cast<uintptr_t>(3), RoundUp<1>(~static_cast<uintptr_t>(3)));
+
+ ASSERT_EQ(zero, RoundUp<2>(zero));
+ ASSERT_EQ(2U, RoundUp<2>(1U));
+ ASSERT_EQ(2U, RoundUp<2>(2U));
+ ASSERT_EQ(4U, RoundUp<2>(3U));
+ ASSERT_EQ(4U, RoundUp<2>(4U));
+ ASSERT_EQ(6U, RoundUp<2>(5U));
+ ASSERT_EQ(6U, RoundUp<2>(6U));
+ ASSERT_EQ(8U, RoundUp<2>(7U));
+ ASSERT_EQ(8U, RoundUp<2>(8U));
+ ASSERT_EQ(~static_cast<uintptr_t>(1), RoundUp<2>(~static_cast<uintptr_t>(1)));
+ ASSERT_EQ(~static_cast<uintptr_t>(1), RoundUp<2>(~static_cast<uintptr_t>(2)));
+ ASSERT_EQ(~static_cast<uintptr_t>(3), RoundUp<2>(~static_cast<uintptr_t>(3)));
+ ASSERT_EQ(~static_cast<uintptr_t>(3), RoundUp<2>(~static_cast<uintptr_t>(4)));
+
+ ASSERT_EQ(zero, RoundUp<4>(zero));
+ ASSERT_EQ(4U, RoundUp<4>(1U));
+ ASSERT_EQ(4U, RoundUp<4>(2U));
+ ASSERT_EQ(4U, RoundUp<4>(3U));
+ ASSERT_EQ(4U, RoundUp<4>(4U));
+ ASSERT_EQ(8U, RoundUp<4>(5U));
+ ASSERT_EQ(8U, RoundUp<4>(6U));
+ ASSERT_EQ(8U, RoundUp<4>(7U));
+ ASSERT_EQ(8U, RoundUp<4>(8U));
+ ASSERT_EQ(~static_cast<uintptr_t>(3), RoundUp<4>(~static_cast<uintptr_t>(3)));
+ ASSERT_EQ(~static_cast<uintptr_t>(3), RoundUp<4>(~static_cast<uintptr_t>(4)));
+ ASSERT_EQ(~static_cast<uintptr_t>(3), RoundUp<4>(~static_cast<uintptr_t>(5)));
+ ASSERT_EQ(~static_cast<uintptr_t>(3), RoundUp<4>(~static_cast<uintptr_t>(6)));
+ ASSERT_EQ(~static_cast<uintptr_t>(7), RoundUp<4>(~static_cast<uintptr_t>(7)));
+ ASSERT_EQ(~static_cast<uintptr_t>(7), RoundUp<4>(~static_cast<uintptr_t>(8)));
+ ASSERT_EQ(~static_cast<uintptr_t>(7), RoundUp<4>(~static_cast<uintptr_t>(9)));
+}
+
+TEST(CRC32CRoundUpTest, RoundUpPointer) {
+ uintptr_t zero = 0, three = 3, four = 4, seven = 7, eight = 8;
+
+ const uint8_t* zero_ptr = reinterpret_cast<const uint8_t*>(zero);
+ const uint8_t* three_ptr = reinterpret_cast<const uint8_t*>(three);
+ const uint8_t* four_ptr = reinterpret_cast<const uint8_t*>(four);
+ const uint8_t* seven_ptr = reinterpret_cast<const uint8_t*>(seven);
+ const uint8_t* eight_ptr = reinterpret_cast<const uint8_t*>(eight);
+
+ ASSERT_EQ(zero_ptr, RoundUp<1>(zero_ptr));
+ ASSERT_EQ(zero_ptr, RoundUp<4>(zero_ptr));
+ ASSERT_EQ(zero_ptr, RoundUp<8>(zero_ptr));
+
+ ASSERT_EQ(three_ptr, RoundUp<1>(three_ptr));
+ ASSERT_EQ(four_ptr, RoundUp<4>(three_ptr));
+ ASSERT_EQ(eight_ptr, RoundUp<8>(three_ptr));
+
+ ASSERT_EQ(four_ptr, RoundUp<1>(four_ptr));
+ ASSERT_EQ(four_ptr, RoundUp<4>(four_ptr));
+ ASSERT_EQ(eight_ptr, RoundUp<8>(four_ptr));
+
+ ASSERT_EQ(seven_ptr, RoundUp<1>(seven_ptr));
+ ASSERT_EQ(eight_ptr, RoundUp<4>(seven_ptr));
+ ASSERT_EQ(eight_ptr, RoundUp<8>(seven_ptr));
+}
+
+} // namespace crc32c
diff --git a/third_party/crc32c/src/src/crc32c_sse42.cc b/third_party/crc32c/src/src/crc32c_sse42.cc
new file mode 100644
index 0000000000..fc0cb0725f
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_sse42.cc
@@ -0,0 +1,256 @@
+// Copyright 2008 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "./crc32c_sse42.h"
+
+// In a separate source file to allow this accelerated CRC32C function to be
+// compiled with the appropriate compiler flags to enable SSE4.2 instructions.
+
+// This implementation is loosely based on Intel Pub 323405 from April 2011,
+// "Fast CRC Computation for iSCSI Polynomial Using CRC32 Instruction".
+
+#include <cstddef>
+#include <cstdint>
+
+#include "./crc32c_internal.h"
+#include "./crc32c_prefetch.h"
+#include "./crc32c_read_le.h"
+#include "./crc32c_round_up.h"
+#include "crc32c/crc32c_config.h"
+
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+#if defined(_MSC_VER)
+#include <intrin.h>
+#else // !defined(_MSC_VER)
+#include <nmmintrin.h>
+#endif // defined(_MSC_VER)
+
+namespace crc32c {
+
+namespace {
+
+constexpr const ptrdiff_t kGroups = 3;
+constexpr const ptrdiff_t kBlock0Size = 16 * 1024 / kGroups / 64 * 64;
+constexpr const ptrdiff_t kBlock1Size = 4 * 1024 / kGroups / 8 * 8;
+constexpr const ptrdiff_t kBlock2Size = 1024 / kGroups / 8 * 8;
+
+const uint32_t kBlock0SkipTable[8][16] = {
+ {0x00000000, 0xff770459, 0xfb027e43, 0x04757a1a, 0xf3e88a77, 0x0c9f8e2e,
+ 0x08eaf434, 0xf79df06d, 0xe23d621f, 0x1d4a6646, 0x193f1c5c, 0xe6481805,
+ 0x11d5e868, 0xeea2ec31, 0xead7962b, 0x15a09272},
+ {0x00000000, 0xc196b2cf, 0x86c1136f, 0x4757a1a0, 0x086e502f, 0xc9f8e2e0,
+ 0x8eaf4340, 0x4f39f18f, 0x10dca05e, 0xd14a1291, 0x961db331, 0x578b01fe,
+ 0x18b2f071, 0xd92442be, 0x9e73e31e, 0x5fe551d1},
+ {0x00000000, 0x21b940bc, 0x43728178, 0x62cbc1c4, 0x86e502f0, 0xa75c424c,
+ 0xc5978388, 0xe42ec334, 0x08267311, 0x299f33ad, 0x4b54f269, 0x6aedb2d5,
+ 0x8ec371e1, 0xaf7a315d, 0xcdb1f099, 0xec08b025},
+ {0x00000000, 0x104ce622, 0x2099cc44, 0x30d52a66, 0x41339888, 0x517f7eaa,
+ 0x61aa54cc, 0x71e6b2ee, 0x82673110, 0x922bd732, 0xa2fefd54, 0xb2b21b76,
+ 0xc354a998, 0xd3184fba, 0xe3cd65dc, 0xf38183fe},
+ {0x00000000, 0x012214d1, 0x024429a2, 0x03663d73, 0x04885344, 0x05aa4795,
+ 0x06cc7ae6, 0x07ee6e37, 0x0910a688, 0x0832b259, 0x0b548f2a, 0x0a769bfb,
+ 0x0d98f5cc, 0x0cbae11d, 0x0fdcdc6e, 0x0efec8bf},
+ {0x00000000, 0x12214d10, 0x24429a20, 0x3663d730, 0x48853440, 0x5aa47950,
+ 0x6cc7ae60, 0x7ee6e370, 0x910a6880, 0x832b2590, 0xb548f2a0, 0xa769bfb0,
+ 0xd98f5cc0, 0xcbae11d0, 0xfdcdc6e0, 0xefec8bf0},
+ {0x00000000, 0x27f8a7f1, 0x4ff14fe2, 0x6809e813, 0x9fe29fc4, 0xb81a3835,
+ 0xd013d026, 0xf7eb77d7, 0x3a294979, 0x1dd1ee88, 0x75d8069b, 0x5220a16a,
+ 0xa5cbd6bd, 0x8233714c, 0xea3a995f, 0xcdc23eae},
+ {0x00000000, 0x745292f2, 0xe8a525e4, 0x9cf7b716, 0xd4a63d39, 0xa0f4afcb,
+ 0x3c0318dd, 0x48518a2f, 0xaca00c83, 0xd8f29e71, 0x44052967, 0x3057bb95,
+ 0x780631ba, 0x0c54a348, 0x90a3145e, 0xe4f186ac},
+};
+const uint32_t kBlock1SkipTable[8][16] = {
+ {0x00000000, 0x79113270, 0xf22264e0, 0x8b335690, 0xe1a8bf31, 0x98b98d41,
+ 0x138adbd1, 0x6a9be9a1, 0xc6bd0893, 0xbfac3ae3, 0x349f6c73, 0x4d8e5e03,
+ 0x2715b7a2, 0x5e0485d2, 0xd537d342, 0xac26e132},
+ {0x00000000, 0x889667d7, 0x14c0b95f, 0x9c56de88, 0x298172be, 0xa1171569,
+ 0x3d41cbe1, 0xb5d7ac36, 0x5302e57c, 0xdb9482ab, 0x47c25c23, 0xcf543bf4,
+ 0x7a8397c2, 0xf215f015, 0x6e432e9d, 0xe6d5494a},
+ {0x00000000, 0xa605caf8, 0x49e7e301, 0xefe229f9, 0x93cfc602, 0x35ca0cfa,
+ 0xda282503, 0x7c2deffb, 0x2273faf5, 0x8476300d, 0x6b9419f4, 0xcd91d30c,
+ 0xb1bc3cf7, 0x17b9f60f, 0xf85bdff6, 0x5e5e150e},
+ {0x00000000, 0x44e7f5ea, 0x89cfebd4, 0xcd281e3e, 0x1673a159, 0x529454b3,
+ 0x9fbc4a8d, 0xdb5bbf67, 0x2ce742b2, 0x6800b758, 0xa528a966, 0xe1cf5c8c,
+ 0x3a94e3eb, 0x7e731601, 0xb35b083f, 0xf7bcfdd5},
+ {0x00000000, 0x59ce8564, 0xb39d0ac8, 0xea538fac, 0x62d66361, 0x3b18e605,
+ 0xd14b69a9, 0x8885eccd, 0xc5acc6c2, 0x9c6243a6, 0x7631cc0a, 0x2fff496e,
+ 0xa77aa5a3, 0xfeb420c7, 0x14e7af6b, 0x4d292a0f},
+ {0x00000000, 0x8eb5fb75, 0x1887801b, 0x96327b6e, 0x310f0036, 0xbfbafb43,
+ 0x2988802d, 0xa73d7b58, 0x621e006c, 0xecabfb19, 0x7a998077, 0xf42c7b02,
+ 0x5311005a, 0xdda4fb2f, 0x4b968041, 0xc5237b34},
+ {0x00000000, 0xc43c00d8, 0x8d947741, 0x49a87799, 0x1ec49873, 0xdaf898ab,
+ 0x9350ef32, 0x576cefea, 0x3d8930e6, 0xf9b5303e, 0xb01d47a7, 0x7421477f,
+ 0x234da895, 0xe771a84d, 0xaed9dfd4, 0x6ae5df0c},
+ {0x00000000, 0x7b1261cc, 0xf624c398, 0x8d36a254, 0xe9a5f1c1, 0x92b7900d,
+ 0x1f813259, 0x64935395, 0xd6a79573, 0xadb5f4bf, 0x208356eb, 0x5b913727,
+ 0x3f0264b2, 0x4410057e, 0xc926a72a, 0xb234c6e6},
+};
+const uint32_t kBlock2SkipTable[8][16] = {
+ {0x00000000, 0x8f158014, 0x1bc776d9, 0x94d2f6cd, 0x378eedb2, 0xb89b6da6,
+ 0x2c499b6b, 0xa35c1b7f, 0x6f1ddb64, 0xe0085b70, 0x74daadbd, 0xfbcf2da9,
+ 0x589336d6, 0xd786b6c2, 0x4354400f, 0xcc41c01b},
+ {0x00000000, 0xde3bb6c8, 0xb99b1b61, 0x67a0ada9, 0x76da4033, 0xa8e1f6fb,
+ 0xcf415b52, 0x117aed9a, 0xedb48066, 0x338f36ae, 0x542f9b07, 0x8a142dcf,
+ 0x9b6ec055, 0x4555769d, 0x22f5db34, 0xfcce6dfc},
+ {0x00000000, 0xde85763d, 0xb8e69a8b, 0x6663ecb6, 0x742143e7, 0xaaa435da,
+ 0xccc7d96c, 0x1242af51, 0xe84287ce, 0x36c7f1f3, 0x50a41d45, 0x8e216b78,
+ 0x9c63c429, 0x42e6b214, 0x24855ea2, 0xfa00289f},
+ {0x00000000, 0xd569796d, 0xaf3e842b, 0x7a57fd46, 0x5b917ea7, 0x8ef807ca,
+ 0xf4affa8c, 0x21c683e1, 0xb722fd4e, 0x624b8423, 0x181c7965, 0xcd750008,
+ 0xecb383e9, 0x39dafa84, 0x438d07c2, 0x96e47eaf},
+ {0x00000000, 0x6ba98c6d, 0xd75318da, 0xbcfa94b7, 0xab4a4745, 0xc0e3cb28,
+ 0x7c195f9f, 0x17b0d3f2, 0x5378f87b, 0x38d17416, 0x842be0a1, 0xef826ccc,
+ 0xf832bf3e, 0x939b3353, 0x2f61a7e4, 0x44c82b89},
+ {0x00000000, 0xa6f1f0f6, 0x480f971d, 0xeefe67eb, 0x901f2e3a, 0x36eedecc,
+ 0xd810b927, 0x7ee149d1, 0x25d22a85, 0x8323da73, 0x6dddbd98, 0xcb2c4d6e,
+ 0xb5cd04bf, 0x133cf449, 0xfdc293a2, 0x5b336354},
+ {0x00000000, 0x4ba4550a, 0x9748aa14, 0xdcecff1e, 0x2b7d22d9, 0x60d977d3,
+ 0xbc3588cd, 0xf791ddc7, 0x56fa45b2, 0x1d5e10b8, 0xc1b2efa6, 0x8a16baac,
+ 0x7d87676b, 0x36233261, 0xeacfcd7f, 0xa16b9875},
+ {0x00000000, 0xadf48b64, 0x5e056039, 0xf3f1eb5d, 0xbc0ac072, 0x11fe4b16,
+ 0xe20fa04b, 0x4ffb2b2f, 0x7df9f615, 0xd00d7d71, 0x23fc962c, 0x8e081d48,
+ 0xc1f33667, 0x6c07bd03, 0x9ff6565e, 0x3202dd3a},
+};
+
+constexpr const ptrdiff_t kPrefetchHorizon = 256;
+
+} // namespace
+
+uint32_t ExtendSse42(uint32_t crc, const uint8_t* data, size_t size) {
+ const uint8_t* p = data;
+ const uint8_t* e = data + size;
+ uint32_t l = crc ^ kCRC32Xor;
+
+#define STEP1 \
+ do { \
+ l = _mm_crc32_u8(l, *p++); \
+ } while (0)
+
+#define STEP4(crc) \
+ do { \
+ crc = _mm_crc32_u32(crc, ReadUint32LE(p)); \
+ p += 4; \
+ } while (0)
+
+#define STEP8(crc, data) \
+ do { \
+ crc = _mm_crc32_u64(crc, ReadUint64LE(data)); \
+ data += 8; \
+ } while (0)
+
+#define STEP8BY3(crc0, crc1, crc2, p0, p1, p2) \
+ do { \
+ STEP8(crc0, p0); \
+ STEP8(crc1, p1); \
+ STEP8(crc2, p2); \
+ } while (0)
+
+#define STEP8X3(crc0, crc1, crc2, bs) \
+ do { \
+ crc0 = _mm_crc32_u64(crc0, ReadUint64LE(p)); \
+ crc1 = _mm_crc32_u64(crc1, ReadUint64LE(p + bs)); \
+ crc2 = _mm_crc32_u64(crc2, ReadUint64LE(p + 2 * bs)); \
+ p += 8; \
+ } while (0)
+
+#define SKIP_BLOCK(crc, tab) \
+ do { \
+ crc = tab[0][crc & 0xf] ^ tab[1][(crc >> 4) & 0xf] ^ \
+ tab[2][(crc >> 8) & 0xf] ^ tab[3][(crc >> 12) & 0xf] ^ \
+ tab[4][(crc >> 16) & 0xf] ^ tab[5][(crc >> 20) & 0xf] ^ \
+ tab[6][(crc >> 24) & 0xf] ^ tab[7][(crc >> 28) & 0xf]; \
+ } while (0)
+
+ // Point x at first 8-byte aligned byte in the buffer. This might be past the
+ // end of the buffer.
+ const uint8_t* x = RoundUp<8>(p);
+ if (x <= e) {
+ // Process bytes until p is 8-byte aligned.
+ while (p != x) {
+ STEP1;
+ }
+ }
+
+ // Process the data in predetermined block sizes with tables for quickly
+ // combining the checksum. Experimentally it's better to use larger block
+ // sizes where possible, so use a hierarchy of decreasing block sizes.
+ uint64_t l64 = l;
+ while ((e - p) >= kGroups * kBlock0Size) {
+ uint64_t l641 = 0;
+ uint64_t l642 = 0;
+ for (int i = 0; i < kBlock0Size; i += 8 * 8) {
+ // Prefetch ahead to hide latency.
+ RequestPrefetch(p + kPrefetchHorizon);
+ RequestPrefetch(p + kBlock0Size + kPrefetchHorizon);
+ RequestPrefetch(p + 2 * kBlock0Size + kPrefetchHorizon);
+
+ // Process 64 bytes at a time.
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ STEP8X3(l64, l641, l642, kBlock0Size);
+ }
+
+ // Combine results.
+ SKIP_BLOCK(l64, kBlock0SkipTable);
+ l64 ^= l641;
+ SKIP_BLOCK(l64, kBlock0SkipTable);
+ l64 ^= l642;
+ p += (kGroups - 1) * kBlock0Size;
+ }
+ while ((e - p) >= kGroups * kBlock1Size) {
+ uint64_t l641 = 0;
+ uint64_t l642 = 0;
+ for (int i = 0; i < kBlock1Size; i += 8) {
+ STEP8X3(l64, l641, l642, kBlock1Size);
+ }
+ SKIP_BLOCK(l64, kBlock1SkipTable);
+ l64 ^= l641;
+ SKIP_BLOCK(l64, kBlock1SkipTable);
+ l64 ^= l642;
+ p += (kGroups - 1) * kBlock1Size;
+ }
+ while ((e - p) >= kGroups * kBlock2Size) {
+ uint64_t l641 = 0;
+ uint64_t l642 = 0;
+ for (int i = 0; i < kBlock2Size; i += 8) {
+ STEP8X3(l64, l641, l642, kBlock2Size);
+ }
+ SKIP_BLOCK(l64, kBlock2SkipTable);
+ l64 ^= l641;
+ SKIP_BLOCK(l64, kBlock2SkipTable);
+ l64 ^= l642;
+ p += (kGroups - 1) * kBlock2Size;
+ }
+
+ // Process bytes 16 at a time
+ while ((e - p) >= 16) {
+ STEP8(l64, p);
+ STEP8(l64, p);
+ }
+
+ l = static_cast<uint32_t>(l64);
+ // Process the last few bytes.
+ while (p != e) {
+ STEP1;
+ }
+#undef SKIP_BLOCK
+#undef STEP8X3
+#undef STEP8BY3
+#undef STEP8
+#undef STEP4
+#undef STEP1
+
+ return l ^ kCRC32Xor;
+}
+
+} // namespace crc32c
+
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
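The STEP8X3 loop above keeps three CRC lanes in flight because the CRC32 instruction has multi-cycle latency but high throughput, so independent dependency chains can overlap; the skip tables then fold the three lane results back into a single CRC. A stand-alone sketch of just the interleaving idea, computing three unrelated CRC32C streams in one loop (illustrative only; the function name Crc32cThreeLanes and its signature are invented for this example, and the lane-merging step is deliberately omitted):

#include <cstddef>
#include <cstdint>
#include <cstring>

#include <nmmintrin.h>  // requires SSE4.2 (-msse4.2 or equivalent)

// Advances three independent CRC32C states over three separate buffers of
// equal length. The three _mm_crc32_u64 chains do not depend on each other,
// so the CPU can pipeline them.
inline void Crc32cThreeLanes(const uint8_t* a, const uint8_t* b,
                             const uint8_t* c, size_t size, uint64_t* crc_a,
                             uint64_t* crc_b, uint64_t* crc_c) {
  uint64_t la = *crc_a, lb = *crc_b, lc = *crc_c;
  for (size_t i = 0; i + 8 <= size; i += 8) {
    uint64_t va, vb, vc;
    std::memcpy(&va, a + i, sizeof(va));
    std::memcpy(&vb, b + i, sizeof(vb));
    std::memcpy(&vc, c + i, sizeof(vc));
    la = _mm_crc32_u64(la, va);
    lb = _mm_crc32_u64(lb, vb);
    lc = _mm_crc32_u64(lc, vc);
  }
  *crc_a = la;
  *crc_b = lb;
  *crc_c = lc;
}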
diff --git a/third_party/crc32c/src/src/crc32c_sse42.h b/third_party/crc32c/src/src/crc32c_sse42.h
new file mode 100644
index 0000000000..b9ed179e54
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_sse42.h
@@ -0,0 +1,31 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_SSE42_H_
+#define CRC32C_CRC32C_SSE42_H_
+
+// X86-specific code.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+// The hardware-accelerated implementation is only enabled for 64-bit builds,
+// because a straightforward 32-bit implementation actually runs slower than the
+// portable version. Most X86 machines are 64-bit nowadays, so it doesn't make
+// much sense to spend time building an optimized hardware-accelerated
+// implementation for 32-bit builds.
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+namespace crc32c {
+
+// SSE4.2-accelerated implementation in crc32c_sse42.cc
+uint32_t ExtendSse42(uint32_t crc, const uint8_t* data, size_t count);
+
+} // namespace crc32c
+
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+#endif // CRC32C_CRC32C_SSE42_H_
diff --git a/third_party/crc32c/src/src/crc32c_sse42_check.h b/third_party/crc32c/src/src/crc32c_sse42_check.h
new file mode 100644
index 0000000000..ad380dd20e
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_sse42_check.h
@@ -0,0 +1,48 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_SSE42_CHECK_H_
+#define CRC32C_CRC32C_SSE42_CHECK_H_
+
+// X86-specific code checking the availability of SSE4.2 instructions.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+// If the compiler supports SSE4.2, it definitely supports X86.
+
+#if defined(_MSC_VER)
+#include <intrin.h>
+
+namespace crc32c {
+
+inline bool CanUseSse42() {
+ int cpu_info[4];
+ __cpuid(cpu_info, 1);
+ return (cpu_info[2] & (1 << 20)) != 0;
+}
+
+} // namespace crc32c
+
+#else // !defined(_MSC_VER)
+#include <cpuid.h>
+
+namespace crc32c {
+
+inline bool CanUseSse42() {
+ unsigned int eax, ebx, ecx, edx;
+ return __get_cpuid(1, &eax, &ebx, &ecx, &edx) && ((ecx & (1 << 20)) != 0);
+}
+
+} // namespace crc32c
+
+#endif // defined(_MSC_VER)
+
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+#endif // CRC32C_CRC32C_SSE42_CHECK_H_
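A minimal dispatch sketch tying CanUseSse42() to the accelerated path. This assumes a portable fallback named ExtendPortable() declared in crc32c_internal.h; the wrapper name ExtendBestEffort is invented for this example, and the library's actual dispatch logic lives elsewhere and may differ.

#include <cstddef>
#include <cstdint>

#include "./crc32c_internal.h"
#include "./crc32c_sse42.h"
#include "./crc32c_sse42_check.h"

namespace crc32c {

inline uint32_t ExtendBestEffort(uint32_t crc, const uint8_t* data,
                                 size_t count) {
#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
  // Probe the CPU once per process; CPUID is comparatively expensive.
  static const bool can_use_sse42 = CanUseSse42();
  if (can_use_sse42) return ExtendSse42(crc, data, count);
#endif  // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
  // ExtendPortable() is assumed here to be the table-driven fallback.
  return ExtendPortable(crc, data, count);
}

}  // namespace crc32c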
diff --git a/third_party/crc32c/src/src/crc32c_sse42_unittest.cc b/third_party/crc32c/src/src/crc32c_sse42_unittest.cc
new file mode 100644
index 0000000000..c73ad8ddd1
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_sse42_unittest.cc
@@ -0,0 +1,24 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "gtest/gtest.h"
+
+#include "./crc32c_extend_unittests.h"
+#include "./crc32c_sse42.h"
+
+namespace crc32c {
+
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+struct Sse42TestTraits {
+ static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) {
+ return ExtendSse42(crc, data, count);
+ }
+};
+
+INSTANTIATE_TYPED_TEST_SUITE_P(Sse42, ExtendTest, Sse42TestTraits);
+
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+} // namespace crc32c
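Beyond the shared typed suite, a quick way to gain extra confidence in the accelerated path is to cross-check it against the portable implementation on random inputs. A hedged sketch (assumes ExtendPortable() from crc32c_internal.h; the test name and sizes are invented for this example):

#include <cstdint>
#include <random>
#include <vector>

#include "gtest/gtest.h"

#include "./crc32c_internal.h"
#include "./crc32c_sse42.h"
#include "./crc32c_sse42_check.h"

namespace crc32c {

#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))

TEST(Sse42CrossCheck, MatchesPortableOnRandomInput) {
  if (!CanUseSse42()) return;  // Nothing to check on CPUs without SSE4.2.

  std::mt19937 rng(42);  // Fixed seed keeps the test deterministic.
  std::uniform_int_distribution<int> byte(0, 255);
  for (size_t size : {0u, 1u, 7u, 64u, 1000u, 16 * 1024u}) {
    std::vector<uint8_t> data(size);
    for (auto& b : data) b = static_cast<uint8_t>(byte(rng));
    EXPECT_EQ(ExtendPortable(0, data.data(), data.size()),
              ExtendSse42(0, data.data(), data.size()));
  }
}

#endif  // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))

}  // namespace crc32c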
diff --git a/third_party/crc32c/src/src/crc32c_test_main.cc b/third_party/crc32c/src/src/crc32c_test_main.cc
new file mode 100644
index 0000000000..c07e1c8bc4
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_test_main.cc
@@ -0,0 +1,20 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "crc32c/crc32c_config.h"
+
+#include "gtest/gtest.h"
+
+#if CRC32C_TESTS_BUILT_WITH_GLOG
+#include "glog/logging.h"
+#endif // CRC32C_TESTS_BUILT_WITH_GLOG
+
+int main(int argc, char** argv) {
+#if CRC32C_TESTS_BUILT_WITH_GLOG
+ google::InitGoogleLogging(argv[0]);
+ google::InstallFailureSignalHandler();
+#endif // CRC32C_TESTS_BUILT_WITH_GLOG
+ testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/third_party/crc32c/src/src/crc32c_unittest.cc b/third_party/crc32c/src/src/crc32c_unittest.cc
new file mode 100644
index 0000000000..d6c6af680c
--- /dev/null
+++ b/third_party/crc32c/src/src/crc32c_unittest.cc
@@ -0,0 +1,129 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#include "crc32c/crc32c.h"
+
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+
+#include "gtest/gtest.h"
+
+#include "./crc32c_extend_unittests.h"
+
+TEST(Crc32CTest, Crc32c) {
+ // From rfc3720 section B.4.
+ uint8_t buf[32];
+
+ std::memset(buf, 0, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa),
+ crc32c::Crc32c(buf, sizeof(buf)));
+
+ std::memset(buf, 0xff, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43),
+ crc32c::Crc32c(buf, sizeof(buf)));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<uint8_t>(i);
+ EXPECT_EQ(static_cast<uint32_t>(0x46dd794e),
+ crc32c::Crc32c(buf, sizeof(buf)));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<uint8_t>(31 - i);
+ EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c),
+ crc32c::Crc32c(buf, sizeof(buf)));
+
+ uint8_t data[48] = {
+ 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
+ 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+ EXPECT_EQ(static_cast<uint32_t>(0xd9963a56),
+ crc32c::Crc32c(data, sizeof(data)));
+}
+
+namespace crc32c {
+
+struct ApiTestTraits {
+ static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) {
+ return ::crc32c::Extend(crc, data, count);
+ }
+};
+
+INSTANTIATE_TYPED_TEST_SUITE_P(Api, ExtendTest, ApiTestTraits);
+
+} // namespace crc32c
+
+TEST(CRC32CTest, Crc32cCharPointer) {
+ char buf[32];
+
+ std::memset(buf, 0, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa),
+ crc32c::Crc32c(buf, sizeof(buf)));
+
+ std::memset(buf, 0xff, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43),
+ crc32c::Crc32c(buf, sizeof(buf)));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<char>(i);
+ EXPECT_EQ(static_cast<uint32_t>(0x46dd794e),
+ crc32c::Crc32c(buf, sizeof(buf)));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<char>(31 - i);
+ EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c),
+ crc32c::Crc32c(buf, sizeof(buf)));
+}
+
+TEST(CRC32CTest, Crc32cStdString) {
+ std::string buf;
+ buf.resize(32);
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<char>(0x00);
+ EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa), crc32c::Crc32c(buf));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = '\xff';
+ EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43), crc32c::Crc32c(buf));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<char>(i);
+ EXPECT_EQ(static_cast<uint32_t>(0x46dd794e), crc32c::Crc32c(buf));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<char>(31 - i);
+ EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c), crc32c::Crc32c(buf));
+}
+
+#if __cplusplus > 201402L
+#if __has_include(<string_view>)
+
+TEST(CRC32CTest, Crc32cStdStringView) {
+ uint8_t buf[32];
+ std::string_view view(reinterpret_cast<const char*>(buf), sizeof(buf));
+
+ std::memset(buf, 0, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa), crc32c::Crc32c(view));
+
+ std::memset(buf, 0xff, sizeof(buf));
+ EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43), crc32c::Crc32c(view));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<uint8_t>(i);
+ EXPECT_EQ(static_cast<uint32_t>(0x46dd794e), crc32c::Crc32c(view));
+
+ for (size_t i = 0; i < 32; ++i)
+ buf[i] = static_cast<uint8_t>(31 - i);
+ EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c), crc32c::Crc32c(view));
+}
+
+#endif // __has_include(<string_view>)
+#endif // __cplusplus > 201402L
+
+#define TESTED_EXTEND Extend
+#include "./crc32c_extend_unittests.h"
+#undef TESTED_EXTEND
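For reference, the public API exercised by these tests composes incrementally: Crc32c(data, n) behaves like Extend(0, data, n), so hashing a buffer in chunks should match hashing it in one call. A small usage sketch (illustrative only, not part of the patch):

#include <cstdint>
#include <cstring>

#include "crc32c/crc32c.h"

int main() {
  uint8_t buf[32];
  std::memset(buf, 0, sizeof(buf));

  // One-shot CRC over the whole buffer (0x8a9136aa per RFC 3720 B.4).
  uint32_t whole = crc32c::Crc32c(buf, sizeof(buf));

  // Same CRC computed over two chunks via Extend().
  uint32_t chunked = crc32c::Crc32c(buf, 16);
  chunked = crc32c::Extend(chunked, buf + 16, 16);

  return whole == chunked ? 0 : 1;
}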
diff --git a/third_party/libaom/CMakeLists.txt b/third_party/libaom/CMakeLists.txt
index 2e84ff8350..55ac2e0bc7 100644
--- a/third_party/libaom/CMakeLists.txt
+++ b/third_party/libaom/CMakeLists.txt
@@ -91,22 +91,27 @@ elseif(LINUX_AARCH64)
LICENSE
"BSD-3-Clause"
SRC
+ ${AOM_ROOT}/libaom/aom_ports/arm_cpudetect.c
${aom_av1_common_intrin_neon}
${aom_av1_common_sources}
${aom_av1_decoder_sources}
+ ${aom_av1_encoder_intrin_neon}
+ ${aom_av1_encoder_sources}
${aom_dsp_common_intrin_neon}
${aom_dsp_common_sources}
${aom_dsp_decoder_sources}
+ ${aom_dsp_encoder_intrin_neon}
+ ${aom_dsp_encoder_sources}
${aom_mem_sources}
- ${AOM_ROOT}/libaom/aom_ports/arm_cpudetect.c
${aom_rtcd_sources}
${aom_scale_sources}
${aom_sources}
${aom_util_sources})
target_include_directories(
webrtc_libaom
- PRIVATE ${AOM_ROOT}/config ${AOM_ROOT}/config/arm64
+ PRIVATE ${AOM_ROOT}/config ${AOM_ROOT}/config/linux/arm64
PUBLIC ${AOM_ROOT}/libaom)
+# target_compile_options(webrtc_libaom PRIVATE "-mfpu=neon")
else()
message(FATAL_ERROR "This can only be used in linux builds")
endif()
diff --git a/third_party/libaom/libaom_src.cmake b/third_party/libaom/libaom_src.cmake
index 0f60cfc0ea..b582a517af 100644
--- a/third_party/libaom/libaom_src.cmake
+++ b/third_party/libaom/libaom_src.cmake
@@ -288,6 +288,8 @@ set(aom_av1_encoder_sources
"${AOM_ROOT}/libaom/av1/encoder/extend.c"
+ "${AOM_ROOT}/libaom/av1/encoder/external_partition.c"
+
"${AOM_ROOT}/libaom/av1/encoder/hash.c"
@@ -609,6 +611,8 @@ set(aom_dsp_encoder_sources
"${AOM_ROOT}/libaom/aom_dsp/sad.c"
"${AOM_ROOT}/libaom/aom_dsp/sad_av1.c"
"${AOM_ROOT}/libaom/aom_dsp/sse.c"
+ "${AOM_ROOT}/libaom/aom_dsp/ssim.c"
+
"${AOM_ROOT}/libaom/aom_dsp/sum_squares.c"
"${AOM_ROOT}/libaom/aom_dsp/variance.c"
@@ -660,6 +664,7 @@ set(aom_sources
+
"${AOM_ROOT}/libaom/aom/src/aom_codec.c"
"${AOM_ROOT}/libaom/aom/src/aom_decoder.c"
"${AOM_ROOT}/libaom/aom/src/aom_encoder.c"
diff --git a/third_party/libaom/source/config/config/aom_version.h b/third_party/libaom/source/config/config/aom_version.h
index 9c9e03e384..d62ceb34f7 100644
--- a/third_party/libaom/source/config/config/aom_version.h
+++ b/third_party/libaom/source/config/config/aom_version.h
@@ -9,11 +9,11 @@
* PATENTS file, you can obtain it at www.aomedia.org/license/patent.
*/
-#define VERSION_MAJOR 2
-#define VERSION_MINOR 0
-#define VERSION_PATCH 2
-#define VERSION_EXTRA "1395-g79b775799"
+#define VERSION_MAJOR 3
+#define VERSION_MINOR 1
+#define VERSION_PATCH 0
+#define VERSION_EXTRA "309-g12287adee"
#define VERSION_PACKED \
((VERSION_MAJOR << 16) | (VERSION_MINOR << 8) | (VERSION_PATCH))
-#define VERSION_STRING_NOSP "2.0.2-1395-g79b775799"
-#define VERSION_STRING " 2.0.2-1395-g79b775799"
+#define VERSION_STRING_NOSP "3.1.0-309-g12287adee"
+#define VERSION_STRING " 3.1.0-309-g12287adee"
diff --git a/third_party/libaom/source/config/ios/arm-neon/config/aom_config.asm b/third_party/libaom/source/config/ios/arm-neon/config/aom_config.asm
index dcceb2497b..15c20d956a 100644
--- a/third_party/libaom/source/config/ios/arm-neon/config/aom_config.asm
+++ b/third_party/libaom/source/config/ios/arm-neon/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/ios/arm-neon/config/aom_config.h b/third_party/libaom/source/config/ios/arm-neon/config/aom_config.h
index 655ca4c8dc..f79ffc6929 100644
--- a/third_party/libaom/source/config/ios/arm-neon/config/aom_config.h
+++ b/third_party/libaom/source/config/ios/arm-neon/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/ios/arm-neon/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/ios/arm-neon/config/aom_dsp_rtcd.h
index 027c19a09f..e71ec66a00 100644
--- a/third_party/libaom/source/config/ios/arm-neon/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/ios/arm-neon/config/aom_dsp_rtcd.h
@@ -3433,6 +3433,17 @@ int64_t aom_sse_neon(const uint8_t* a,
int height);
#define aom_sse aom_sse_neon
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/ios/arm64/config/aom_config.asm b/third_party/libaom/source/config/ios/arm64/config/aom_config.asm
index dcceb2497b..15c20d956a 100644
--- a/third_party/libaom/source/config/ios/arm64/config/aom_config.asm
+++ b/third_party/libaom/source/config/ios/arm64/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/ios/arm64/config/aom_config.h b/third_party/libaom/source/config/ios/arm64/config/aom_config.h
index 655ca4c8dc..f79ffc6929 100644
--- a/third_party/libaom/source/config/ios/arm64/config/aom_config.h
+++ b/third_party/libaom/source/config/ios/arm64/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/ios/arm64/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/ios/arm64/config/aom_dsp_rtcd.h
index 027c19a09f..e71ec66a00 100644
--- a/third_party/libaom/source/config/ios/arm64/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/ios/arm64/config/aom_dsp_rtcd.h
@@ -3433,6 +3433,17 @@ int64_t aom_sse_neon(const uint8_t* a,
int height);
#define aom_sse aom_sse_neon
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.asm b/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.asm
index fbaae3b28d..ac5f20f3b1 100644
--- a/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.asm
+++ b/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.h b/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.h
index adb548aa40..c8e44f4edd 100644
--- a/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.h
+++ b/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_dsp_rtcd.h
index 61141406d2..a4df74d40d 100644
--- a/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/linux/arm-neon-cpu-detect/config/aom_dsp_rtcd.h
@@ -3813,6 +3813,17 @@ RTCD_EXTERN int64_t (*aom_sse)(const uint8_t* a,
int width,
int height);
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/linux/arm-neon/config/aom_config.asm b/third_party/libaom/source/config/linux/arm-neon/config/aom_config.asm
index dcceb2497b..15c20d956a 100644
--- a/third_party/libaom/source/config/linux/arm-neon/config/aom_config.asm
+++ b/third_party/libaom/source/config/linux/arm-neon/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/linux/arm-neon/config/aom_config.h b/third_party/libaom/source/config/linux/arm-neon/config/aom_config.h
index 655ca4c8dc..f79ffc6929 100644
--- a/third_party/libaom/source/config/linux/arm-neon/config/aom_config.h
+++ b/third_party/libaom/source/config/linux/arm-neon/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/arm-neon/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/linux/arm-neon/config/aom_dsp_rtcd.h
index 027c19a09f..e71ec66a00 100644
--- a/third_party/libaom/source/config/linux/arm-neon/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/linux/arm-neon/config/aom_dsp_rtcd.h
@@ -3433,6 +3433,17 @@ int64_t aom_sse_neon(const uint8_t* a,
int height);
#define aom_sse aom_sse_neon
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/linux/arm/config/aom_config.asm b/third_party/libaom/source/config/linux/arm/config/aom_config.asm
index e9000243ad..bc1a95f003 100644
--- a/third_party/libaom/source/config/linux/arm/config/aom_config.asm
+++ b/third_party/libaom/source/config/linux/arm/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/linux/arm/config/aom_config.h b/third_party/libaom/source/config/linux/arm/config/aom_config.h
index 0404a4c827..f3ac36f68c 100644
--- a/third_party/libaom/source/config/linux/arm/config/aom_config.h
+++ b/third_party/libaom/source/config/linux/arm/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/arm/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/linux/arm/config/aom_dsp_rtcd.h
index d7b1b04f00..8710d625ed 100644
--- a/third_party/libaom/source/config/linux/arm/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/linux/arm/config/aom_dsp_rtcd.h
@@ -2953,6 +2953,17 @@ int64_t aom_sse_c(const uint8_t* a,
int height);
#define aom_sse aom_sse_c
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/linux/arm64/config/aom_config.asm b/third_party/libaom/source/config/linux/arm64/config/aom_config.asm
index dcceb2497b..15c20d956a 100644
--- a/third_party/libaom/source/config/linux/arm64/config/aom_config.asm
+++ b/third_party/libaom/source/config/linux/arm64/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/linux/arm64/config/aom_config.h b/third_party/libaom/source/config/linux/arm64/config/aom_config.h
index 655ca4c8dc..f79ffc6929 100644
--- a/third_party/libaom/source/config/linux/arm64/config/aom_config.h
+++ b/third_party/libaom/source/config/linux/arm64/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/arm64/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/linux/arm64/config/aom_dsp_rtcd.h
index 027c19a09f..e71ec66a00 100644
--- a/third_party/libaom/source/config/linux/arm64/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/linux/arm64/config/aom_dsp_rtcd.h
@@ -3433,6 +3433,17 @@ int64_t aom_sse_neon(const uint8_t* a,
int height);
#define aom_sse aom_sse_neon
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/linux/generic/config/aom_config.asm b/third_party/libaom/source/config/linux/generic/config/aom_config.asm
index 0e681a032e..24b965dfb3 100644
--- a/third_party/libaom/source/config/linux/generic/config/aom_config.asm
+++ b/third_party/libaom/source/config/linux/generic/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/linux/generic/config/aom_config.h b/third_party/libaom/source/config/linux/generic/config/aom_config.h
index 0e1665a47e..cdb4794210 100644
--- a/third_party/libaom/source/config/linux/generic/config/aom_config.h
+++ b/third_party/libaom/source/config/linux/generic/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/generic/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/linux/generic/config/aom_dsp_rtcd.h
index 05bfa838bb..702c1b809e 100644
--- a/third_party/libaom/source/config/linux/generic/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/linux/generic/config/aom_dsp_rtcd.h
@@ -2953,6 +2953,17 @@ int64_t aom_sse_c(const uint8_t* a,
int height);
#define aom_sse aom_sse_c
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/linux/ia32/config/aom_config.asm b/third_party/libaom/source/config/linux/ia32/config/aom_config.asm
index d8ec860317..f4e2dfb836 100644
--- a/third_party/libaom/source/config/linux/ia32/config/aom_config.asm
+++ b/third_party/libaom/source/config/linux/ia32/config/aom_config.asm
@@ -10,6 +10,7 @@
%define CONFIG_AV1_HIGHBITDEPTH 0
%define CONFIG_AV1_TEMPORAL_DENOISING 1
%define CONFIG_BIG_ENDIAN 0
+%define CONFIG_BITRATE_ACCURACY 0
%define CONFIG_BITSTREAM_DEBUG 0
%define CONFIG_COEFFICIENT_RANGE_CHECKING 0
%define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -21,6 +22,7 @@
%define CONFIG_DIST_8X8 0
%define CONFIG_ENTROPY_STATS 0
%define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+%define CONFIG_FRAME_PARALLEL_ENCODE 0
%define CONFIG_GCC 1
%define CONFIG_GCOV 0
%define CONFIG_GPROF 0
@@ -36,6 +38,7 @@
%define CONFIG_NORMAL_TILE_MODE 1
%define CONFIG_OPTICAL_FLOW_API 0
%define CONFIG_OS_SUPPORT 1
+%define CONFIG_PARTITION_SEARCH_ORDER 0
%define CONFIG_PIC 1
%define CONFIG_RD_DEBUG 0
%define CONFIG_REALTIME_ONLY 1
@@ -48,7 +51,6 @@
%define CONFIG_SPEED_STATS 0
%define CONFIG_TUNE_BUTTERAUGLI 0
%define CONFIG_TUNE_VMAF 0
-%define CONFIG_USE_VMAF_RC 0
%define CONFIG_WEBM_IO 1
%define DECODE_HEIGHT_LIMIT 16384
%define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/ia32/config/aom_config.h b/third_party/libaom/source/config/linux/ia32/config/aom_config.h
index 53666caafa..1b3bba6797 100644
--- a/third_party/libaom/source/config/linux/ia32/config/aom_config.h
+++ b/third_party/libaom/source/config/linux/ia32/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 1
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/ia32/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/linux/ia32/config/aom_dsp_rtcd.h
index 64bc1f4056..323c55e888 100644
--- a/third_party/libaom/source/config/linux/ia32/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/linux/ia32/config/aom_dsp_rtcd.h
@@ -6787,6 +6787,17 @@ RTCD_EXTERN int64_t (*aom_sse)(const uint8_t* a,
int width,
int height);
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/linux/x64/config/aom_config.asm b/third_party/libaom/source/config/linux/x64/config/aom_config.asm
index 0fdb4ea1e8..b15994bbd7 100644
--- a/third_party/libaom/source/config/linux/x64/config/aom_config.asm
+++ b/third_party/libaom/source/config/linux/x64/config/aom_config.asm
@@ -10,6 +10,7 @@
%define CONFIG_AV1_HIGHBITDEPTH 0
%define CONFIG_AV1_TEMPORAL_DENOISING 1
%define CONFIG_BIG_ENDIAN 0
+%define CONFIG_BITRATE_ACCURACY 0
%define CONFIG_BITSTREAM_DEBUG 0
%define CONFIG_COEFFICIENT_RANGE_CHECKING 0
%define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -21,6 +22,7 @@
%define CONFIG_DIST_8X8 0
%define CONFIG_ENTROPY_STATS 0
%define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+%define CONFIG_FRAME_PARALLEL_ENCODE 0
%define CONFIG_GCC 1
%define CONFIG_GCOV 0
%define CONFIG_GPROF 0
@@ -36,6 +38,7 @@
%define CONFIG_NORMAL_TILE_MODE 1
%define CONFIG_OPTICAL_FLOW_API 0
%define CONFIG_OS_SUPPORT 1
+%define CONFIG_PARTITION_SEARCH_ORDER 0
%define CONFIG_PIC 0
%define CONFIG_RD_DEBUG 0
%define CONFIG_REALTIME_ONLY 1
@@ -48,7 +51,6 @@
%define CONFIG_SPEED_STATS 0
%define CONFIG_TUNE_BUTTERAUGLI 0
%define CONFIG_TUNE_VMAF 0
-%define CONFIG_USE_VMAF_RC 0
%define CONFIG_WEBM_IO 1
%define DECODE_HEIGHT_LIMIT 16384
%define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/x64/config/aom_config.h b/third_party/libaom/source/config/linux/x64/config/aom_config.h
index d026bc215f..d090f8398a 100644
--- a/third_party/libaom/source/config/linux/x64/config/aom_config.h
+++ b/third_party/libaom/source/config/linux/x64/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 1
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/linux/x64/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/linux/x64/config/aom_dsp_rtcd.h
index 58de231219..dd561e4498 100644
--- a/third_party/libaom/source/config/linux/x64/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/linux/x64/config/aom_dsp_rtcd.h
@@ -6814,6 +6814,26 @@ RTCD_EXTERN int64_t (*aom_sse)(const uint8_t* a,
int width,
int height);
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+void aom_ssim_parms_8x8_sse2(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_sse2
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/win/arm64/config/aom_config.asm b/third_party/libaom/source/config/win/arm64/config/aom_config.asm
index dcceb2497b..15c20d956a 100644
--- a/third_party/libaom/source/config/win/arm64/config/aom_config.asm
+++ b/third_party/libaom/source/config/win/arm64/config/aom_config.asm
@@ -20,6 +20,7 @@ CONFIG_AV1_ENCODER equ 1
CONFIG_AV1_HIGHBITDEPTH equ 0
CONFIG_AV1_TEMPORAL_DENOISING equ 1
CONFIG_BIG_ENDIAN equ 0
+CONFIG_BITRATE_ACCURACY equ 0
CONFIG_BITSTREAM_DEBUG equ 0
CONFIG_COEFFICIENT_RANGE_CHECKING equ 0
CONFIG_COLLECT_COMPONENT_TIMING equ 0
@@ -31,6 +32,7 @@ CONFIG_DISABLE_FULL_PIXEL_SPLIT_8X8 equ 1
CONFIG_DIST_8X8 equ 0
CONFIG_ENTROPY_STATS equ 0
CONFIG_EXCLUDE_SIMD_MISMATCH equ 0
+CONFIG_FRAME_PARALLEL_ENCODE equ 0
CONFIG_GCC equ 1
CONFIG_GCOV equ 0
CONFIG_GPROF equ 0
@@ -46,6 +48,7 @@ CONFIG_NN_V2 equ 0
CONFIG_NORMAL_TILE_MODE equ 1
CONFIG_OPTICAL_FLOW_API equ 0
CONFIG_OS_SUPPORT equ 1
+CONFIG_PARTITION_SEARCH_ORDER equ 0
CONFIG_PIC equ 0
CONFIG_RD_DEBUG equ 0
CONFIG_REALTIME_ONLY equ 1
@@ -58,7 +61,6 @@ CONFIG_SPATIAL_RESAMPLING equ 1
CONFIG_SPEED_STATS equ 0
CONFIG_TUNE_BUTTERAUGLI equ 0
CONFIG_TUNE_VMAF equ 0
-CONFIG_USE_VMAF_RC equ 0
CONFIG_WEBM_IO equ 1
DECODE_HEIGHT_LIMIT equ 16384
DECODE_WIDTH_LIMIT equ 16384
diff --git a/third_party/libaom/source/config/win/arm64/config/aom_config.h b/third_party/libaom/source/config/win/arm64/config/aom_config.h
index 5ca170928b..c744a45ff4 100644
--- a/third_party/libaom/source/config/win/arm64/config/aom_config.h
+++ b/third_party/libaom/source/config/win/arm64/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 0
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/win/arm64/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/win/arm64/config/aom_dsp_rtcd.h
index 027c19a09f..e71ec66a00 100644
--- a/third_party/libaom/source/config/win/arm64/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/win/arm64/config/aom_dsp_rtcd.h
@@ -3433,6 +3433,17 @@ int64_t aom_sse_neon(const uint8_t* a,
int height);
#define aom_sse aom_sse_neon
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/win/ia32/config/aom_config.asm b/third_party/libaom/source/config/win/ia32/config/aom_config.asm
index 789f7c98f7..ad1912f54d 100644
--- a/third_party/libaom/source/config/win/ia32/config/aom_config.asm
+++ b/third_party/libaom/source/config/win/ia32/config/aom_config.asm
@@ -10,6 +10,7 @@
%define CONFIG_AV1_HIGHBITDEPTH 0
%define CONFIG_AV1_TEMPORAL_DENOISING 1
%define CONFIG_BIG_ENDIAN 0
+%define CONFIG_BITRATE_ACCURACY 0
%define CONFIG_BITSTREAM_DEBUG 0
%define CONFIG_COEFFICIENT_RANGE_CHECKING 0
%define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -21,6 +22,7 @@
%define CONFIG_DIST_8X8 0
%define CONFIG_ENTROPY_STATS 0
%define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+%define CONFIG_FRAME_PARALLEL_ENCODE 0
%define CONFIG_GCC 0
%define CONFIG_GCOV 0
%define CONFIG_GPROF 0
@@ -36,6 +38,7 @@
%define CONFIG_NORMAL_TILE_MODE 1
%define CONFIG_OPTICAL_FLOW_API 0
%define CONFIG_OS_SUPPORT 1
+%define CONFIG_PARTITION_SEARCH_ORDER 0
%define CONFIG_PIC 1
%define CONFIG_RD_DEBUG 0
%define CONFIG_REALTIME_ONLY 1
@@ -48,7 +51,6 @@
%define CONFIG_SPEED_STATS 0
%define CONFIG_TUNE_BUTTERAUGLI 0
%define CONFIG_TUNE_VMAF 0
-%define CONFIG_USE_VMAF_RC 0
%define CONFIG_WEBM_IO 1
%define DECODE_HEIGHT_LIMIT 16384
%define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/win/ia32/config/aom_config.h b/third_party/libaom/source/config/win/ia32/config/aom_config.h
index e9cafd4296..044ba296e6 100644
--- a/third_party/libaom/source/config/win/ia32/config/aom_config.h
+++ b/third_party/libaom/source/config/win/ia32/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 0
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 1
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/win/ia32/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/win/ia32/config/aom_dsp_rtcd.h
index 64bc1f4056..323c55e888 100644
--- a/third_party/libaom/source/config/win/ia32/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/win/ia32/config/aom_dsp_rtcd.h
@@ -6787,6 +6787,17 @@ RTCD_EXTERN int64_t (*aom_sse)(const uint8_t* a,
int width,
int height);
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_c
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/config/win/x64/config/aom_config.asm b/third_party/libaom/source/config/win/x64/config/aom_config.asm
index bdebbbe6b3..f3e1660a08 100644
--- a/third_party/libaom/source/config/win/x64/config/aom_config.asm
+++ b/third_party/libaom/source/config/win/x64/config/aom_config.asm
@@ -10,6 +10,7 @@
%define CONFIG_AV1_HIGHBITDEPTH 0
%define CONFIG_AV1_TEMPORAL_DENOISING 1
%define CONFIG_BIG_ENDIAN 0
+%define CONFIG_BITRATE_ACCURACY 0
%define CONFIG_BITSTREAM_DEBUG 0
%define CONFIG_COEFFICIENT_RANGE_CHECKING 0
%define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -21,6 +22,7 @@
%define CONFIG_DIST_8X8 0
%define CONFIG_ENTROPY_STATS 0
%define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+%define CONFIG_FRAME_PARALLEL_ENCODE 0
%define CONFIG_GCC 0
%define CONFIG_GCOV 0
%define CONFIG_GPROF 0
@@ -36,6 +38,7 @@
%define CONFIG_NORMAL_TILE_MODE 1
%define CONFIG_OPTICAL_FLOW_API 0
%define CONFIG_OS_SUPPORT 1
+%define CONFIG_PARTITION_SEARCH_ORDER 0
%define CONFIG_PIC 0
%define CONFIG_RD_DEBUG 0
%define CONFIG_REALTIME_ONLY 1
@@ -48,7 +51,6 @@
%define CONFIG_SPEED_STATS 0
%define CONFIG_TUNE_BUTTERAUGLI 0
%define CONFIG_TUNE_VMAF 0
-%define CONFIG_USE_VMAF_RC 0
%define CONFIG_WEBM_IO 1
%define DECODE_HEIGHT_LIMIT 16384
%define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/win/x64/config/aom_config.h b/third_party/libaom/source/config/win/x64/config/aom_config.h
index 6187935081..1adc7b7407 100644
--- a/third_party/libaom/source/config/win/x64/config/aom_config.h
+++ b/third_party/libaom/source/config/win/x64/config/aom_config.h
@@ -22,6 +22,7 @@
#define CONFIG_AV1_HIGHBITDEPTH 0
#define CONFIG_AV1_TEMPORAL_DENOISING 1
#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_BITRATE_ACCURACY 0
#define CONFIG_BITSTREAM_DEBUG 0
#define CONFIG_COEFFICIENT_RANGE_CHECKING 0
#define CONFIG_COLLECT_COMPONENT_TIMING 0
@@ -33,6 +34,7 @@
#define CONFIG_DIST_8X8 0
#define CONFIG_ENTROPY_STATS 0
#define CONFIG_EXCLUDE_SIMD_MISMATCH 0
+#define CONFIG_FRAME_PARALLEL_ENCODE 0
#define CONFIG_GCC 0
#define CONFIG_GCOV 0
#define CONFIG_GPROF 0
@@ -48,6 +50,7 @@
#define CONFIG_NORMAL_TILE_MODE 1
#define CONFIG_OPTICAL_FLOW_API 0
#define CONFIG_OS_SUPPORT 1
+#define CONFIG_PARTITION_SEARCH_ORDER 0
#define CONFIG_PIC 0
#define CONFIG_RD_DEBUG 0
#define CONFIG_REALTIME_ONLY 1
@@ -60,7 +63,6 @@
#define CONFIG_SPEED_STATS 0
#define CONFIG_TUNE_BUTTERAUGLI 0
#define CONFIG_TUNE_VMAF 0
-#define CONFIG_USE_VMAF_RC 0
#define CONFIG_WEBM_IO 1
#define DECODE_HEIGHT_LIMIT 16384
#define DECODE_WIDTH_LIMIT 16384
diff --git a/third_party/libaom/source/config/win/x64/config/aom_dsp_rtcd.h b/third_party/libaom/source/config/win/x64/config/aom_dsp_rtcd.h
index 58de231219..dd561e4498 100644
--- a/third_party/libaom/source/config/win/x64/config/aom_dsp_rtcd.h
+++ b/third_party/libaom/source/config/win/x64/config/aom_dsp_rtcd.h
@@ -6814,6 +6814,26 @@ RTCD_EXTERN int64_t (*aom_sse)(const uint8_t* a,
int width,
int height);
+void aom_ssim_parms_8x8_c(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+void aom_ssim_parms_8x8_sse2(const uint8_t* s,
+ int sp,
+ const uint8_t* r,
+ int rp,
+ uint32_t* sum_s,
+ uint32_t* sum_r,
+ uint32_t* sum_sq_s,
+ uint32_t* sum_sq_r,
+ uint32_t* sum_sxr);
+#define aom_ssim_parms_8x8 aom_ssim_parms_8x8_sse2
+
uint32_t aom_sub_pixel_avg_variance128x128_c(const uint8_t* src_ptr,
int source_stride,
int xoffset,
diff --git a/third_party/libaom/source/libaom/aom/aom.h b/third_party/libaom/source/libaom/aom/aom.h
index c591dc9a43..0650a11f6b 100644
--- a/third_party/libaom/source/libaom/aom/aom.h
+++ b/third_party/libaom/source/libaom/aom/aom.h
@@ -41,27 +41,45 @@ extern "C" {
/*!\brief Control functions
*
* The set of macros define the control functions of AOM interface
+ * The range for common control IDs is 230-255(max).
*/
enum aom_com_control_id {
- /* TODO(https://crbug.com/aomedia/2671): The encoder overlaps the range of
- * these values for its control ids, see the NOTEs in aom/aomcx.h. These
- * should be migrated to something like the AOM_DECODER_CTRL_ID_START range
- * next time we're ready to break the ABI.
+ /*!\brief Codec control function to get a pointer to a reference frame
+ *
+ * av1_ref_frame_t* parameter
*/
- AV1_GET_REFERENCE = 128, /**< get a pointer to a reference frame,
- av1_ref_frame_t* parameter */
- AV1_SET_REFERENCE = 129, /**< write a frame into a reference buffer,
- av1_ref_frame_t* parameter */
- AV1_COPY_REFERENCE = 130, /**< get a copy of reference frame from the decoderm
- av1_ref_frame_t* parameter */
- AOM_COMMON_CTRL_ID_MAX,
-
- AV1_GET_NEW_FRAME_IMAGE =
- 192, /**< get a pointer to the new frame, aom_image_t* parameter */
- AV1_COPY_NEW_FRAME_IMAGE = 193, /**< copy the new frame to an external buffer,
- aom_image_t* parameter */
+ AV1_GET_REFERENCE = 230,
+ /*!\brief Codec control function to write a frame into a reference buffer
+ *
+ * av1_ref_frame_t* parameter
+ */
+ AV1_SET_REFERENCE = 231,
+
+ /*!\brief Codec control function to get a copy of reference frame from the
+ * decoder
+ *
+ * av1_ref_frame_t* parameter
+ */
+ AV1_COPY_REFERENCE = 232,
+
+ /*!\brief Codec control function to get a pointer to the new frame
+ *
+ * aom_image_t* parameter
+ */
+ AV1_GET_NEW_FRAME_IMAGE = 233,
+
+ /*!\brief Codec control function to copy the new frame to an external buffer
+ *
+ * aom_image_t* parameter
+ */
+ AV1_COPY_NEW_FRAME_IMAGE = 234,
+
+ /*!\brief Start point of control IDs for aom_dec_control_id.
+ * Any new common control IDs should be added above.
+ */
AOM_DECODER_CTRL_ID_START = 256
+ // No common control IDs should be added after AOM_DECODER_CTRL_ID_START.
};
/*!\brief AV1 specific reference frame data struct
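For context on the renumbering above: code that uses the symbolic control IDs is unaffected, since only the numeric values behind the names change. Below is a minimal sketch (not part of this patch) of fetching a reference frame through AV1_GET_REFERENCE; the decoder context and the slot index are illustrative assumptions.

#include "aom/aom.h" /* av1_ref_frame_t, AV1_GET_REFERENCE, aom_codec_control */

/* Returns the image of reference slot 0, or NULL on failure. */
static const aom_image_t *get_reference_frame(aom_codec_ctx_t *ctx,
                                              av1_ref_frame_t *ref) {
  ref->idx = 0;              /* reference slot to query (example value) */
  ref->use_external_ref = 0; /* do not substitute an external buffer */
  if (aom_codec_control(ctx, AV1_GET_REFERENCE, ref) != AOM_CODEC_OK)
    return NULL;
  return &ref->img;          /* populated by the decoder */
}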
diff --git a/third_party/libaom/source/libaom/aom/aom_codec.h b/third_party/libaom/source/libaom/aom/aom_codec.h
index f58272ee2c..a2a9efaef3 100644
--- a/third_party/libaom/source/libaom/aom/aom_codec.h
+++ b/third_party/libaom/source/libaom/aom/aom_codec.h
@@ -149,7 +149,7 @@ extern "C" {
* types, removing or reassigning enums, adding/removing/rearranging
* fields to structures
*/
-#define AOM_CODEC_ABI_VERSION (6 + AOM_IMAGE_ABI_VERSION) /**<\hideinitializer*/
+#define AOM_CODEC_ABI_VERSION (7 + AOM_IMAGE_ABI_VERSION) /**<\hideinitializer*/
/*!\brief Algorithm return codes */
typedef enum {
diff --git a/third_party/libaom/source/libaom/aom/aom_encoder.h b/third_party/libaom/source/libaom/aom/aom_encoder.h
index 48e705646d..a98c8d8270 100644
--- a/third_party/libaom/source/libaom/aom/aom_encoder.h
+++ b/third_party/libaom/source/libaom/aom/aom_encoder.h
@@ -31,6 +31,7 @@ extern "C" {
#endif
#include "aom/aom_codec.h"
+#include "aom/aom_external_partition.h"
/*!\brief Current ABI version number
*
@@ -41,7 +42,7 @@ extern "C" {
* fields to structures
*/
#define AOM_ENCODER_ABI_VERSION \
- (9 + AOM_CODEC_ABI_VERSION) /**<\hideinitializer*/
+ (9 + AOM_CODEC_ABI_VERSION + AOM_EXT_PART_ABI_VERSION) /**<\hideinitializer*/
/*! \brief Encoder capabilities bitfield
*
@@ -142,15 +143,8 @@ typedef struct aom_codec_cx_pkt {
double psnr_hbd[4];
} psnr; /**< data for PSNR packet */
aom_fixed_buf_t raw; /**< data for arbitrary packets */
-
- /* This packet size is fixed to allow codecs to extend this
- * interface without having to manage storage for raw packets,
- * i.e., if it's smaller than 128 bytes, you can store in the
- * packet list directly.
- */
- char pad[128 - sizeof(enum aom_codec_cx_pkt_kind)]; /**< fixed sz */
- } data; /**< packet data */
-} aom_codec_cx_pkt_t; /**< alias for struct aom_codec_cx_pkt */
+ } data; /**< packet data */
+} aom_codec_cx_pkt_t; /**< alias for struct aom_codec_cx_pkt */
/*!\brief Rational Number
*
@@ -300,10 +294,6 @@ typedef struct cfg_options {
*
*/
unsigned int disable_smooth_intra;
- /*!\brief disable D45 to D203 intra modes
- *
- */
- unsigned int disable_diagonal_intra;
/*!\brief disable filter intra
*
*/
@@ -880,11 +870,11 @@ typedef struct aom_codec_enc_cfg {
*/
unsigned int use_fixed_qp_offsets;
-/*!\brief Number of fixed QP offsets
+/*!\brief Max number of fixed QP offsets
*
* This defines the number of elements in the fixed_qp_offsets array.
*/
-#define FIXED_QP_OFFSET_COUNT 5
+#define FIXED_QP_OFFSET_COUNT 6
/*!\brief Array of fixed QP offsets
*
diff --git a/third_party/libaom/source/libaom/aom/aom_external_partition.h b/third_party/libaom/source/libaom/aom/aom_external_partition.h
new file mode 100644
index 0000000000..3710466316
--- /dev/null
+++ b/third_party/libaom/source/libaom/aom/aom_external_partition.h
@@ -0,0 +1,331 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+#ifndef AOM_AOM_AOM_EXTERNAL_PARTITION_H_
+#define AOM_AOM_AOM_EXTERNAL_PARTITION_H_
+
+/*!\defgroup aom_encoder AOMedia AOM/AV1 Encoder
+ * \ingroup aom
+ *
+ * @{
+ */
+#include "./aom_integer.h"
+
+/*!\file
+ * \brief Provides function pointer definitions for the external partition.
+ */
+
+/*!\brief Current ABI version number
+ *
+ * \internal
+ * If this file is altered in any way that changes the ABI, this value
+ * must be bumped. Examples include, but are not limited to, changing
+ * types, removing or reassigning enums, adding/removing/rearranging
+ * fields to structures.
+ */
+#define AOM_EXT_PART_ABI_VERSION (1)
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*!\brief Abstract external partition model handler
+ */
+typedef void *aom_ext_part_model_t;
+
+/*!\brief Number of features to determine whether to skip partition none and
+ * do partition split directly. The same as "FEATURE_SIZE_SMS_SPLIT".
+ */
+#define SIZE_DIRECT_SPLIT 17
+
+/*!\brief Number of features to use simple motion search to prune out
+ * rectangular partition in some direction. The same as
+ * "FEATURE_SIZE_SMS_PRUNE_PART".
+ */
+#define SIZE_PRUNE_PART 25
+
+/*!\brief Number of features to prune split and rectangular partition
+ * after PARTITION_NONE.
+ */
+#define SIZE_PRUNE_NONE 4
+
+/*!\brief Number of features to terminate partition after partition none using
+ * simple_motion_search features and the rate, distortion, and rdcost of
+ * PARTITION_NONE. The same as "FEATURE_SIZE_SMS_TERM_NONE".
+ */
+#define SIZE_TERM_NONE 28
+
+/*!\brief Number of features to terminate partition after partition split.
+ */
+#define SIZE_TERM_SPLIT 31
+
+/*!\brief Number of features to prune rectangular partition using stats
+ * collected after partition split.
+ */
+#define SIZE_PRUNE_RECT 9
+
+/*!\brief Number of features to prune AB partition using stats
+ * collected after rectangular partition.
+ */
+#define SIZE_PRUNE_AB 10
+
+/*!\brief Number of features to prune 4-way partition using stats
+ * collected after AB partition.
+ */
+#define SIZE_PRUNE_4_WAY 18
+
+/*!\brief Config information sent to the external partition model.
+ *
+ * For example, the maximum superblock size determined by the sequence header.
+ */
+typedef struct aom_ext_part_config {
+ int superblock_size; /**< super block size (either 64x64 or 128x128) */
+} aom_ext_part_config_t;
+
+/*!\brief Features passed to the external model to make partition decisions.
+ * Specifically, features collected before NONE partition.
+ * Features "f" are used to determine:
+ * partition_none_allowed, partition_horz_allowed, partition_vert_allowed,
+ * do_rectangular_split, do_square_split
+ * Features "f_part2" are used to determine:
+ * prune_horz, prune_vert.
+ */
+typedef struct aom_partition_features_before_none {
+ float f[SIZE_DIRECT_SPLIT]; /**< features to determine whether to skip partition
+ none and do split directly */
+ float f_part2[SIZE_PRUNE_PART]; /**< features to determine whether to prune
+ rectangular partition */
+} aom_partition_features_before_none_t;
+
+/*!\brief Features passed to the external model to make partition decisions.
+ * Specifically, features collected after NONE partition.
+ */
+typedef struct aom_partition_features_none {
+ float f[SIZE_PRUNE_NONE]; /**< features to prune split and rectangular
+ partition*/
+ float f_terminate[SIZE_TERM_NONE]; /**< features to determine termination of
+ partition */
+} aom_partition_features_none_t;
+
+/*!\brief Features passed to the external model to make partition decisions.
+ * Specifically, features collected after SPLIT partition.
+ */
+typedef struct aom_partition_features_split {
+ float f_terminate[SIZE_TERM_SPLIT]; /**< features to determine termination of
+ partition */
+ float f_prune_rect[SIZE_PRUNE_RECT]; /**< features to determine pruning rect
+ partition */
+} aom_partition_features_split_t;
+
+/*!\brief Features passed to the external model to make partition decisions.
+ * Specifically, features collected after RECTANGULAR partition.
+ */
+typedef struct aom_partition_features_rect {
+ float f[SIZE_PRUNE_AB]; /**< features to determine pruning AB partition */
+} aom_partition_features_rect_t;
+
+/*!\brief Features passed to the external model to make partition decisions.
+ * Specifically, features collected after AB partition: HORZ_A, HORZ_B, VERT_A,
+ * VERT_B.
+ */
+typedef struct aom_partition_features_ab {
+ float
+ f[SIZE_PRUNE_4_WAY]; /**< features to determine pruning 4-way partition */
+} aom_partition_features_ab_t;
+
+/*!\brief Feature id to tell the external model the current stage in partition
+ * pruning and what features to use to make decisions accordingly.
+ */
+typedef enum {
+ FEATURE_BEFORE_PART_NONE,
+ FEATURE_BEFORE_PART_NONE_PART2,
+ FEATURE_AFTER_PART_NONE,
+ FEATURE_AFTER_PART_NONE_PART2,
+ FEATURE_AFTER_PART_SPLIT,
+ FEATURE_AFTER_PART_SPLIT_PART2,
+ FEATURE_AFTER_PART_RECT,
+ FEATURE_AFTER_PART_AB
+} PART_FEATURE_ID;
+
+/*!\brief Features passed to the external model to make partition decisions.
+ *
+ * The encoder sends these features to the external model through
+ * "func()" defined in .....
+ *
+ * NOTE: new member variables may be added to this structure in the future.
+ * Once new features are finalized, bump the major version of libaom.
+ */
+typedef struct aom_partition_features {
+ PART_FEATURE_ID id; /**< Feature ID to indicate active features */
+ aom_partition_features_before_none_t
+ before_part_none; /**< Features collected before NONE partition */
+ aom_partition_features_none_t
+ after_part_none; /**< Features collected after NONE partition */
+ aom_partition_features_split_t
+ after_part_split; /**< Features collected after SPLIT partition */
+ aom_partition_features_rect_t
+ after_part_rect; /**< Features collected after RECTANGULAR partition */
+ aom_partition_features_ab_t
+ after_part_ab; /**< Features collected after AB partition */
+} aom_partition_features_t;
+
+/*!\brief Partition decisions received from the external model.
+ *
+ * The encoder receives partition decisions and encodes the superblock
+ * with the given partition type.
+ * The encoder receives it from "func()" defined in ....
+ *
+ * NOTE: new member variables may be added to this structure in the future.
+ * Once new features are finalized, bump the major version of libaom.
+ */
+typedef struct aom_partition_decision {
+ // Decisions for directly set partition types
+ int is_final_decision; /**< Flag indicating whether this is the final decision */
+ int partition_decision[256]; /**< Partition decisions */
+
+ // Decisions for partition type pruning
+ int terminate_partition_search; /**< Terminate further partition search */
+ int partition_none_allowed; /**< Allow partition none type */
+ int partition_rect_allowed[2]; /**< Allow rectangular partitions */
+ int do_rectangular_split; /**< Try rectangular split partition */
+ int do_square_split; /**< Try square split partition */
+ int prune_rect_part[2]; /**< Prune rectangular partition */
+ int horza_partition_allowed; /**< Allow HORZ_A partition */
+ int horzb_partition_allowed; /**< Allow HORZ_B partition */
+ int verta_partition_allowed; /**< Allow VERT_A partition */
+ int vertb_partition_allowed; /**< Allow VERT_B partition */
+ int partition_horz4_allowed; /**< Allow HORZ4 partition */
+ int partition_vert4_allowed; /**< Allow VERT4 partition */
+} aom_partition_decision_t;
+
+/*!\brief Encoding stats for the given partition decision.
+ *
+ * The encoding stats collected by encoding the superblock with the
+ * given partition types.
+ * The encoder sends the stats to the external model for training
+ * or inference through "func()" defined in ....
+ */
+typedef struct aom_partition_stats {
+ int rate; /**< Rate cost of the block */
+ int64_t dist; /**< Distortion of the block */
+ int64_t rdcost; /**< Rate-distortion cost of the block */
+} aom_partition_stats_t;
+
+/*!\brief Enum for return status.
+ */
+typedef enum aom_ext_part_status {
+ AOM_EXT_PART_OK = 0, /**< Status of success */
+ AOM_EXT_PART_ERROR = 1, /**< Status of failure */
+ AOM_EXT_PART_TEST = 2, /**< Status used for tests */
+} aom_ext_part_status_t;
+
+/*!\brief Callback of creating an external partition model.
+ *
+ * The callback is invoked by the encoder to create an external partition
+ * model.
+ *
+ * \param[in] priv Callback's private data
+ * \param[in] part_config Config information pointer for model creation
+ * \param[out] ext_part_model Pointer to the model
+ */
+typedef aom_ext_part_status_t (*aom_ext_part_create_model_fn_t)(
+ void *priv, const aom_ext_part_config_t *part_config,
+ aom_ext_part_model_t *ext_part_model);
+
+/*!\brief Callback of sending features to the external partition model.
+ *
+ * The callback is invoked by the encoder to send features to the external
+ * partition model.
+ *
+ * \param[in] ext_part_model The external model
+ * \param[in] part_features Pointer to the features
+ */
+typedef aom_ext_part_status_t (*aom_ext_part_send_features_fn_t)(
+ aom_ext_part_model_t ext_part_model,
+ const aom_partition_features_t *part_features);
+
+/*!\brief Callback of receiving partition decisions from the external
+ * partition model.
+ *
+ * The callback is invoked by the encoder to receive partition decisions from
+ * the external partition model.
+ *
+ * \param[in] ext_part_model The external model
+ * \param[in] ext_part_decision Pointer to the partition decisions
+ */
+typedef aom_ext_part_status_t (*aom_ext_part_get_decision_fn_t)(
+ aom_ext_part_model_t ext_part_model,
+ aom_partition_decision_t *ext_part_decision);
+
+/*!\brief Callback of sending stats to the external partition model.
+ *
+ * The callback is invoked by the encoder to send encoding stats to
+ * the external partition model.
+ *
+ * \param[in] ext_part_model The external model
+ * \param[in] ext_part_stats Pointer to the encoding stats
+ */
+typedef aom_ext_part_status_t (*aom_ext_part_send_partition_stats_fn_t)(
+ aom_ext_part_model_t ext_part_model,
+ const aom_partition_stats_t *ext_part_stats);
+
+/*!\brief Callback of deleting the external partition model.
+ *
+ * The callback is invoked by the encoder to delete the external partition
+ * model.
+ *
+ * \param[in] ext_part_model The external model
+ */
+typedef aom_ext_part_status_t (*aom_ext_part_delete_model_fn_t)(
+ aom_ext_part_model_t ext_part_model);
+
+/*!\brief Callback function set for external partition model.
+ *
+ * Users can enable the external partition model by registering a set of
+ * callback functions with the encoder control: AV1E_SET_EXTERNAL_PARTITION
+ */
+typedef struct aom_ext_part_funcs {
+ /*!
+ * Create an external partition model.
+ */
+ aom_ext_part_create_model_fn_t create_model;
+
+ /*!
+ * Send features to the external partition model to make partition decisions.
+ */
+ aom_ext_part_send_features_fn_t send_features;
+
+ /*!
+ * Get partition decisions from the external partition model.
+ */
+ aom_ext_part_get_decision_fn_t get_partition_decision;
+
+ /*!
+ * Send stats of the current partition to the external model.
+ */
+ aom_ext_part_send_partition_stats_fn_t send_partition_stats;
+
+ /*!
+ * Delete the external partition model.
+ */
+ aom_ext_part_delete_model_fn_t delete_model;
+
+ /*!
+ * Private data for the external partition model.
+ */
+ void *priv;
+} aom_ext_part_funcs_t;
+
+/*!@} - end defgroup aom_encoder*/
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif // AOM_AOM_AOM_EXTERNAL_PARTITION_H_
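To make the callback contract above concrete, the following is an illustrative, no-op implementation (not part of this patch) that fills an aom_ext_part_funcs_t; all function names are hypothetical. Registration happens through the AV1E_SET_EXTERNAL_PARTITION encoder control added to aom/aomcx.h later in this patch.

#include <stddef.h>
#include "aom/aom_external_partition.h"

static aom_ext_part_status_t my_create_model(
    void *priv, const aom_ext_part_config_t *cfg,
    aom_ext_part_model_t *model) {
  (void)priv;
  (void)cfg;     /* cfg->superblock_size reports 64x64 vs 128x128 */
  *model = NULL; /* a real model would allocate its state here */
  return AOM_EXT_PART_OK;
}

static aom_ext_part_status_t my_send_features(
    aom_ext_part_model_t model, const aom_partition_features_t *features) {
  (void)model;
  (void)features; /* features->id selects which member is populated */
  return AOM_EXT_PART_OK;
}

static aom_ext_part_status_t my_get_decision(
    aom_ext_part_model_t model, aom_partition_decision_t *decision) {
  (void)model;
  /* A real model would also fill the pruning fields; here we simply
   * let the encoder keep its own partition search. */
  decision->is_final_decision = 0;
  return AOM_EXT_PART_OK;
}

static aom_ext_part_status_t my_send_stats(
    aom_ext_part_model_t model, const aom_partition_stats_t *stats) {
  (void)model;
  (void)stats; /* rate/dist/rdcost could be logged for training */
  return AOM_EXT_PART_OK;
}

static aom_ext_part_status_t my_delete_model(aom_ext_part_model_t model) {
  (void)model;
  return AOM_EXT_PART_OK;
}

static aom_ext_part_funcs_t my_partition_funcs = {
  my_create_model,  /* create_model */
  my_send_features, /* send_features */
  my_get_decision,  /* get_partition_decision */
  my_send_stats,    /* send_partition_stats */
  my_delete_model,  /* delete_model */
  NULL              /* priv */
};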
diff --git a/third_party/libaom/source/libaom/aom/aomcx.h b/third_party/libaom/source/libaom/aom/aomcx.h
index 87f0b5db9b..8345911abd 100644
--- a/third_party/libaom/source/libaom/aom/aomcx.h
+++ b/third_party/libaom/source/libaom/aom/aomcx.h
@@ -18,6 +18,7 @@
*/
#include "aom/aom.h"
#include "aom/aom_encoder.h"
+#include "aom/aom_external_partition.h"
/*!\file
* \brief Provides definitions for using AOM or AV1 encoder algorithm within the
@@ -167,6 +168,7 @@ extern aom_codec_iface_t *aom_codec_av1_cx(void);
*
* This set of macros define the control functions available for AVx
* encoder interface.
+ * The range of encoder control IDs is 7-229 (max).
*
* \sa #aom_codec_control(aom_codec_ctx_t *ctx, int ctrl_id, ...)
*/
@@ -221,10 +223,14 @@ enum aome_enc_control_id {
/* NOTE: enum 15 unused */
- /*!\brief Codec control function to set loop filter sharpness,
+ /*!\brief Codec control function to set the sharpness parameter,
* unsigned int parameter.
*
- * Valid range: 0..7. The default is 0.
+ * This parameter controls the level at which rate-distortion optimization of
+ * transform coefficients favours sharpness in the block.
+ *
+ * Valid range: 0..7. The default is 0. Values 1-7 will avoid eob and skip
+ * block optimization and will change rdmult in favour of block sharpness.
*/
AOME_SET_SHARPNESS = AOME_SET_ENABLEAUTOALTREF + 2, // 16
@@ -1204,9 +1210,6 @@ enum aome_enc_control_id {
parameter */
AV1E_SET_REDUCED_REFERENCE_SET = 125,
- /* NOTE: enums 126-139 unused */
- /* NOTE: Need a gap in enum values to avoud conflict with 128, 129, 130 */
-
/*!\brief Control to set frequency of the cost updates for coefficients,
* unsigned int parameter
*
@@ -1215,7 +1218,7 @@ enum aome_enc_control_id {
* - 2 = update at tile level
* - 3 = turn off
*/
- AV1E_SET_COEFF_COST_UPD_FREQ = 140,
+ AV1E_SET_COEFF_COST_UPD_FREQ = 126,
/*!\brief Control to set frequency of the cost updates for mode, unsigned int
* parameter
@@ -1225,7 +1228,7 @@ enum aome_enc_control_id {
* - 2 = update at tile level
* - 3 = turn off
*/
- AV1E_SET_MODE_COST_UPD_FREQ = 141,
+ AV1E_SET_MODE_COST_UPD_FREQ = 127,
/*!\brief Control to set frequency of the cost updates for motion vectors,
* unsigned int parameter
@@ -1235,7 +1238,7 @@ enum aome_enc_control_id {
* - 2 = update at tile level
* - 3 = turn off
*/
- AV1E_SET_MV_COST_UPD_FREQ = 142,
+ AV1E_SET_MV_COST_UPD_FREQ = 128,
/*!\brief Control to set bit mask that specifies which tier each of the 32
* possible operating points conforms to, unsigned int parameter
@@ -1243,37 +1246,37 @@ enum aome_enc_control_id {
* - 0 = main tier (default)
* - 1 = high tier
*/
- AV1E_SET_TIER_MASK = 143,
+ AV1E_SET_TIER_MASK = 129,
/*!\brief Control to set minimum compression ratio, unsigned int parameter
* Take integer values. If non-zero, encoder will try to keep the compression
* ratio of each frame to be higher than the given value divided by 100.
* E.g. 850 means minimum compression ratio of 8.5.
*/
- AV1E_SET_MIN_CR = 144,
+ AV1E_SET_MIN_CR = 130,
/* NOTE: enums 145-149 unused */
/*!\brief Codec control function to set the layer id, aom_svc_layer_id_t*
* parameter
*/
- AV1E_SET_SVC_LAYER_ID = 150,
+ AV1E_SET_SVC_LAYER_ID = 131,
/*!\brief Codec control function to set SVC parameters, aom_svc_params_t*
* parameter
*/
- AV1E_SET_SVC_PARAMS = 151,
+ AV1E_SET_SVC_PARAMS = 132,
/*!\brief Codec control function to set reference frame config:
* the ref_idx and the refresh flags for each buffer slot.
* aom_svc_ref_frame_config_t* parameter
*/
- AV1E_SET_SVC_REF_FRAME_CONFIG = 152,
+ AV1E_SET_SVC_REF_FRAME_CONFIG = 133,
/*!\brief Codec control function to set the path to the VMAF model used when
* tuning the encoder for VMAF, const char* parameter
*/
- AV1E_SET_VMAF_MODEL_PATH = 153,
+ AV1E_SET_VMAF_MODEL_PATH = 134,
/*!\brief Codec control function to enable EXT_TILE_DEBUG in AV1 encoder,
* unsigned int parameter
@@ -1283,7 +1286,7 @@ enum aome_enc_control_id {
*
* \note This is only used in lightfield example test.
*/
- AV1E_ENABLE_EXT_TILE_DEBUG = 154,
+ AV1E_ENABLE_EXT_TILE_DEBUG = 135,
/*!\brief Codec control function to enable the superblock multipass unit test
* in AV1 to ensure that the encoder does not leak state between different
@@ -1294,30 +1297,30 @@ enum aome_enc_control_id {
*
* \note This is only used in sb_multipass unit test.
*/
- AV1E_ENABLE_SB_MULTIPASS_UNIT_TEST = 155,
+ AV1E_ENABLE_SB_MULTIPASS_UNIT_TEST = 136,
/*!\brief Control to select minimum height for the GF group pyramid structure,
* unsigned int parameter
*
* Valid values: 0..5
*/
- AV1E_SET_GF_MIN_PYRAMID_HEIGHT = 156,
+ AV1E_SET_GF_MIN_PYRAMID_HEIGHT = 137,
/*!\brief Control to set average complexity of the corpus in the case of
* single pass vbr based on LAP, unsigned int parameter
*/
- AV1E_SET_VBR_CORPUS_COMPLEXITY_LAP = 157,
+ AV1E_SET_VBR_CORPUS_COMPLEXITY_LAP = 138,
/*!\brief Control to get baseline gf interval
*/
- AV1E_GET_BASELINE_GF_INTERVAL = 158,
+ AV1E_GET_BASELINE_GF_INTERVAL = 139,
/*\brief Control to set encoding the denoised frame from denoise-noise-level
*
* - 0 = disabled/encode the original frame
* - 1 = enabled/encode the denoised frame (default)
*/
- AV1E_SET_ENABLE_DNL_DENOISING = 159,
+ AV1E_SET_ENABLE_DNL_DENOISING = 140,
/*!\brief Codec control function to turn on / off D45 to D203 intra mode
* usage, int parameter
@@ -1327,7 +1330,32 @@ enum aome_enc_control_id {
* - 0 = disable
* - 1 = enable (default)
*/
- AV1E_SET_ENABLE_DIAGONAL_INTRA = 160,
+ AV1E_SET_ENABLE_DIAGONAL_INTRA = 141,
+
+ /*!\brief Control to set frequency of the cost updates for intrabc motion
+ * vectors, unsigned int parameter
+ *
+ * - 0 = update at SB level (default)
+ * - 1 = update at SB row level in tile
+ * - 2 = update at tile level
+ * - 3 = turn off
+ */
+ AV1E_SET_DV_COST_UPD_FREQ = 142,
+
+ /*!\brief Codec control to set the path for partition stats read and write.
+ * const char * parameter.
+ */
+ AV1E_SET_PARTITION_INFO_PATH = 143,
+
+ /*!\brief Codec control to use an external partition model
+ * A set of callback functions is passed through this control
+ * to let the encoder encode with given partitions.
+ */
+ AV1E_SET_EXTERNAL_PARTITION = 144,
+
+ // Any new encoder control IDs should be added above.
+ // Maximum allowed encoder control ID is 229.
+ // No encoder control ID should be added below.
};
/*!\brief aom 1-D scaling mode
@@ -1858,6 +1886,15 @@ AOM_CTRL_USE_TYPE(AV1E_SET_VBR_CORPUS_COMPLEXITY_LAP, unsigned int)
AOM_CTRL_USE_TYPE(AV1E_SET_ENABLE_DNL_DENOISING, int)
#define AOM_CTRL_AV1E_SET_ENABLE_DNL_DENOISING
+AOM_CTRL_USE_TYPE(AV1E_SET_DV_COST_UPD_FREQ, unsigned int)
+#define AOM_CTRL_AV1E_SET_DV_COST_UPD_FREQ
+
+AOM_CTRL_USE_TYPE(AV1E_SET_PARTITION_INFO_PATH, const char *)
+#define AOM_CTRL_AV1E_SET_PARTITION_INFO_PATH
+
+AOM_CTRL_USE_TYPE(AV1E_SET_EXTERNAL_PARTITION, aom_ext_part_funcs_t *)
+#define AOM_CTRL_AV1E_SET_EXTERNAL_PARTITION
+
/*!\endcond */
/*! @} - end defgroup aom_encoder */
#ifdef __cplusplus
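A hedged usage sketch for the three encoder controls added above (IDs 142-144); the encoder context, callback table, and path value are assumptions for illustration, not part of the patch.

#include "aom/aomcx.h"

static void configure_new_controls(aom_codec_ctx_t *enc,
                                   aom_ext_part_funcs_t *part_funcs,
                                   const char *partition_info_path) {
  /* 3 = turn off intrabc (DV) cost updates. */
  aom_codec_control(enc, AV1E_SET_DV_COST_UPD_FREQ, 3u);
  /* Where partition stats are read from / written to. */
  aom_codec_control(enc, AV1E_SET_PARTITION_INFO_PATH, partition_info_path);
  /* Register the external partition model callbacks. */
  aom_codec_control(enc, AV1E_SET_EXTERNAL_PARTITION, part_funcs);
}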
diff --git a/third_party/libaom/source/libaom/aom/aomdx.h b/third_party/libaom/source/libaom/aom/aomdx.h
index aa4f435ec4..b3fd90e460 100644
--- a/third_party/libaom/source/libaom/aom/aomdx.h
+++ b/third_party/libaom/source/libaom/aom/aomdx.h
@@ -188,6 +188,7 @@ typedef struct av1_ext_ref_frame {
*
* This set of macros define the control functions available for the AOM
* decoder interface.
+ * The range for decoder control ID is >= 256.
*
* \sa #aom_codec_control(aom_codec_ctx_t *ctx, int ctrl_id, ...)
*/
@@ -381,8 +382,6 @@ enum aom_dec_control_id {
*/
AV1D_SET_SKIP_FILM_GRAIN,
- AOM_DECODER_CTRL_ID_MAX,
-
/*!\brief Codec control function to check the presence of forward key frames
*/
AOMD_GET_FWD_KF_PRESENT,
diff --git a/third_party/libaom/source/libaom/aom/internal/aom_codec_internal.h b/third_party/libaom/source/libaom/aom/internal/aom_codec_internal.h
index 0ad33bdf2e..457da9244a 100644
--- a/third_party/libaom/source/libaom/aom/internal/aom_codec_internal.h
+++ b/third_party/libaom/source/libaom/aom/internal/aom_codec_internal.h
@@ -278,7 +278,7 @@ typedef aom_fixed_buf_t *(*aom_codec_get_global_headers_fn_t)(
typedef aom_image_t *(*aom_codec_get_preview_frame_fn_t)(
aom_codec_alg_priv_t *ctx);
-/*!\brief Decoder algorithm interface interface
+/*!\brief Decoder algorithm interface
*
* All decoders \ref MUST expose a variable of this type.
*/
diff --git a/third_party/libaom/source/libaom/aom/src/aom_image.c b/third_party/libaom/source/libaom/aom/src/aom_image.c
index dfdee87d26..13f71b2bf5 100644
--- a/third_party/libaom/source/libaom/aom/src/aom_image.c
+++ b/third_party/libaom/source/libaom/aom/src/aom_image.c
@@ -38,6 +38,8 @@ static aom_image_t *img_alloc_helper(
unsigned int h, w, s, xcs, ycs, bps, bit_depth;
unsigned int stride_in_bytes;
+ if (img != NULL) memset(img, 0, sizeof(aom_image_t));
+
/* Treat align==0 like align==1 */
if (!buf_align) buf_align = 1;
@@ -111,8 +113,6 @@ static aom_image_t *img_alloc_helper(
if (!img) goto fail;
img->self_allocd = 1;
- } else {
- memset(img, 0, sizeof(aom_image_t));
}
img->img_data = img_data;
diff --git a/third_party/libaom/source/libaom/aom_dsp/arm/intrapred_neon.c b/third_party/libaom/source/libaom/aom_dsp/arm/intrapred_neon.c
index 6d41708ee0..945e7e48ee 100644
--- a/third_party/libaom/source/libaom/aom_dsp/arm/intrapred_neon.c
+++ b/third_party/libaom/source/libaom/aom_dsp/arm/intrapred_neon.c
@@ -11,8 +11,6 @@
#include <arm_neon.h>
-#include "common/tools_common.h"
-
#include "config/aom_config.h"
#include "config/aom_dsp_rtcd.h"
diff --git a/third_party/libaom/source/libaom/aom_dsp/butteraugli.c b/third_party/libaom/source/libaom/aom_dsp/butteraugli.c
index 7ce2324c06..038efcd313 100644
--- a/third_party/libaom/source/libaom/aom_dsp/butteraugli.c
+++ b/third_party/libaom/source/libaom/aom_dsp/butteraugli.c
@@ -18,37 +18,71 @@
int aom_calc_butteraugli(const YV12_BUFFER_CONFIG *source,
const YV12_BUFFER_CONFIG *distorted, int bit_depth,
- float *dist_map) {
+ aom_matrix_coefficients_t matrix_coefficients,
+ aom_color_range_t color_range, float *dist_map) {
(void)bit_depth;
assert(bit_depth == 8);
- assert(source->y_width == source->uv_width * 2);
const int width = source->y_crop_width;
const int height = source->y_crop_height;
+ const int ss_x = source->subsampling_x;
+ const int ss_y = source->subsampling_y;
- size_t buffer_size = width * height * 3;
- uint8_t *src_rgb = (uint8_t *)aom_malloc(buffer_size);
- uint8_t *distorted_rgb = (uint8_t *)aom_malloc(buffer_size);
- if (!src_rgb || !distorted_rgb) {
- aom_free(src_rgb);
- aom_free(distorted_rgb);
+ const struct YuvConstants *yuv_constants;
+ if (matrix_coefficients == AOM_CICP_MC_BT_709) {
+ if (color_range == AOM_CR_FULL_RANGE) return 0;
+ yuv_constants = &kYuvH709Constants;
+ } else {
+ yuv_constants = color_range == AOM_CR_FULL_RANGE ? &kYuvJPEGConstants
+ : &kYuvI601Constants;
+ }
+
+ const size_t stride_argb = width * 4;
+ const size_t buffer_size = height * stride_argb;
+ uint8_t *src_argb = (uint8_t *)aom_malloc(buffer_size);
+ uint8_t *distorted_argb = (uint8_t *)aom_malloc(buffer_size);
+ if (!src_argb || !distorted_argb) {
+ aom_free(src_argb);
+ aom_free(distorted_argb);
return 0;
}
- I420ToRGB24Matrix(source->y_buffer, source->y_stride, source->u_buffer,
- source->uv_stride, source->v_buffer, source->uv_stride,
- src_rgb, width * 3, &kYuvH709Constants, width, height);
- I420ToRGB24Matrix(distorted->y_buffer, distorted->y_stride,
- distorted->u_buffer, distorted->uv_stride,
- distorted->v_buffer, distorted->uv_stride, distorted_rgb,
- width * 3, &kYuvH709Constants, width, height);
+ if (ss_x == 1 && ss_y == 1) {
+ I420ToARGBMatrix(source->y_buffer, source->y_stride, source->u_buffer,
+ source->uv_stride, source->v_buffer, source->uv_stride,
+ src_argb, stride_argb, yuv_constants, width, height);
+ I420ToARGBMatrix(distorted->y_buffer, distorted->y_stride,
+ distorted->u_buffer, distorted->uv_stride,
+ distorted->v_buffer, distorted->uv_stride, distorted_argb,
+ stride_argb, yuv_constants, width, height);
+ } else if (ss_x == 1 && ss_y == 0) {
+ I422ToARGBMatrix(source->y_buffer, source->y_stride, source->u_buffer,
+ source->uv_stride, source->v_buffer, source->uv_stride,
+ src_argb, stride_argb, yuv_constants, width, height);
+ I422ToARGBMatrix(distorted->y_buffer, distorted->y_stride,
+ distorted->u_buffer, distorted->uv_stride,
+ distorted->v_buffer, distorted->uv_stride, distorted_argb,
+ stride_argb, yuv_constants, width, height);
+ } else if (ss_x == 0 && ss_y == 0) {
+ I444ToARGBMatrix(source->y_buffer, source->y_stride, source->u_buffer,
+ source->uv_stride, source->v_buffer, source->uv_stride,
+ src_argb, stride_argb, yuv_constants, width, height);
+ I444ToARGBMatrix(distorted->y_buffer, distorted->y_stride,
+ distorted->u_buffer, distorted->uv_stride,
+ distorted->v_buffer, distorted->uv_stride, distorted_argb,
+ stride_argb, yuv_constants, width, height);
+ } else {
+ aom_free(src_argb);
+ aom_free(distorted_argb);
+ return 0;
+ }
- JxlPixelFormat pixel_format = { 3, JXL_TYPE_UINT8, JXL_NATIVE_ENDIAN, 0 };
+ JxlPixelFormat pixel_format = { 4, JXL_TYPE_UINT8, JXL_NATIVE_ENDIAN, 0 };
JxlButteraugliApi *api = JxlButteraugliApiCreate(NULL);
JxlButteraugliApiSetHFAsymmetry(api, 0.8f);
JxlButteraugliResult *result = JxlButteraugliCompute(
- api, width, height, &pixel_format, src_rgb, buffer_size, &pixel_format,
- distorted_rgb, buffer_size);
+ api, width, height, &pixel_format, src_argb, buffer_size, &pixel_format,
+ distorted_argb, buffer_size);
const float *distmap = NULL;
uint32_t row_stride;
@@ -56,8 +90,8 @@ int aom_calc_butteraugli(const YV12_BUFFER_CONFIG *source,
if (distmap == NULL) {
JxlButteraugliApiDestroy(api);
JxlButteraugliResultDestroy(result);
- aom_free(src_rgb);
- aom_free(distorted_rgb);
+ aom_free(src_argb);
+ aom_free(distorted_argb);
return 0;
}
@@ -69,7 +103,7 @@ int aom_calc_butteraugli(const YV12_BUFFER_CONFIG *source,
JxlButteraugliApiDestroy(api);
JxlButteraugliResultDestroy(result);
- aom_free(src_rgb);
- aom_free(distorted_rgb);
+ aom_free(src_argb);
+ aom_free(distorted_argb);
return 1;
}
diff --git a/third_party/libaom/source/libaom/aom_dsp/butteraugli.h b/third_party/libaom/source/libaom/aom_dsp/butteraugli.h
index 06402aa3e4..5304092ccb 100644
--- a/third_party/libaom/source/libaom/aom_dsp/butteraugli.h
+++ b/third_party/libaom/source/libaom/aom_dsp/butteraugli.h
@@ -14,8 +14,10 @@
#include "aom_scale/yv12config.h"
+// Returns a boolean that indicates success/failure.
int aom_calc_butteraugli(const YV12_BUFFER_CONFIG *source,
const YV12_BUFFER_CONFIG *distorted, int bit_depth,
- float *dist_map);
+ aom_matrix_coefficients_t matrix_coefficients,
+ aom_color_range_t color_range, float *dist_map);
#endif // AOM_AOM_DSP_BUTTERAUGLI_H_
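A minimal calling sketch (not part of the patch) for the widened aom_calc_butteraugli signature; the BT.601/studio-range arguments and the caller-provided buffers are illustrative assumptions.

#include "aom_dsp/butteraugli.h"

/* src and recon are assumed to be 8-bit YV12 buffers of equal size;
 * dist_map must have room for y_crop_width * y_crop_height floats.
 * Returns 1 on success, 0 on failure, matching the function's contract. */
static int butteraugli_distance_map(const YV12_BUFFER_CONFIG *src,
                                    const YV12_BUFFER_CONFIG *recon,
                                    float *dist_map) {
  return aom_calc_butteraugli(src, recon, /*bit_depth=*/8,
                              AOM_CICP_MC_BT_601, AOM_CR_STUDIO_RANGE,
                              dist_map);
}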
diff --git a/third_party/libaom/source/libaom/aom_dsp/fastssim.c b/third_party/libaom/source/libaom/aom_dsp/fastssim.c
index 3804519b31..89712c5f40 100644
--- a/third_party/libaom/source/libaom/aom_dsp/fastssim.c
+++ b/third_party/libaom/source/libaom/aom_dsp/fastssim.c
@@ -31,6 +31,7 @@ typedef struct fs_ctx fs_ctx;
#define SSIM_C1_12 (4095 * 4095 * 0.01 * 0.01)
#define SSIM_C2_10 (1023 * 1023 * 0.03 * 0.03)
#define SSIM_C2_12 (4095 * 4095 * 0.03 * 0.03)
+#define MAX_SSIM_DB 100.0
#define FS_MINI(_a, _b) ((_a) < (_b) ? (_a) : (_b))
#define FS_MAXI(_a, _b) ((_a) > (_b) ? (_a) : (_b))
diff --git a/third_party/libaom/source/libaom/aom_dsp/grain_table.c b/third_party/libaom/source/libaom/aom_dsp/grain_table.c
index e03f04d5da..b22752abd9 100644
--- a/third_party/libaom/source/libaom/aom_dsp/grain_table.c
+++ b/third_party/libaom/source/libaom/aom_dsp/grain_table.c
@@ -202,7 +202,7 @@ int aom_film_grain_table_lookup(aom_film_grain_table_t *t, int64_t time_stamp,
int64_t end_time, int erase,
aom_film_grain_t *grain) {
aom_film_grain_table_entry_t *entry = t->head;
- aom_film_grain_table_entry_t *prev_entry = 0;
+ aom_film_grain_table_entry_t *prev_entry = NULL;
uint16_t random_seed = grain ? grain->random_seed : 0;
if (grain) memset(grain, 0, sizeof(*grain));
@@ -241,10 +241,10 @@ int aom_film_grain_table_lookup(aom_film_grain_table_t *t, int64_t time_stamp,
entry->end_time = time_stamp;
if (t->tail == entry) t->tail = new_entry;
}
- // If segments aren't aligned, delete from the beggining of subsequent
+ // If segments aren't aligned, delete from the beginning of subsequent
// segments
if (end_time > entry_end_time) {
- aom_film_grain_table_lookup(t, entry->end_time, end_time, 1, 0);
+ aom_film_grain_table_lookup(t, entry_end_time, end_time, 1, 0);
}
return 1;
}
@@ -275,12 +275,12 @@ aom_codec_err_t aom_film_grain_table_read(
return error_info->error_code;
}
- aom_film_grain_table_entry_t *prev_entry = 0;
+ aom_film_grain_table_entry_t *prev_entry = NULL;
while (!feof(file)) {
aom_film_grain_table_entry_t *entry = aom_malloc(sizeof(*entry));
memset(entry, 0, sizeof(*entry));
grain_table_entry_read(file, error_info, entry);
- entry->next = 0;
+ entry->next = NULL;
if (prev_entry) prev_entry->next = entry;
if (!t->head) t->head = entry;
diff --git a/third_party/libaom/source/libaom/aom_dsp/noise_model.c b/third_party/libaom/source/libaom/aom_dsp/noise_model.c
index f56fdd5860..19c660e911 100644
--- a/third_party/libaom/source/libaom/aom_dsp/noise_model.c
+++ b/third_party/libaom/source/libaom/aom_dsp/noise_model.c
@@ -214,6 +214,7 @@ static void set_chroma_coefficient_fallback_soln(aom_equation_system_t *eqns) {
int aom_noise_strength_lut_init(aom_noise_strength_lut_t *lut, int num_points) {
if (!lut) return 0;
+ if (num_points <= 0) return 0;
lut->num_points = 0;
lut->points = (double(*)[2])aom_malloc(num_points * sizeof(*lut->points));
if (!lut->points) return 0;
@@ -1152,12 +1153,24 @@ int aom_noise_model_get_grain_parameters(aom_noise_model_t *const noise_model,
// Convert the scaling functions to 8 bit values
aom_noise_strength_lut_t scaling_points[3];
- aom_noise_strength_solver_fit_piecewise(
- &noise_model->combined_state[0].strength_solver, 14, scaling_points + 0);
- aom_noise_strength_solver_fit_piecewise(
- &noise_model->combined_state[1].strength_solver, 10, scaling_points + 1);
- aom_noise_strength_solver_fit_piecewise(
- &noise_model->combined_state[2].strength_solver, 10, scaling_points + 2);
+ if (!aom_noise_strength_solver_fit_piecewise(
+ &noise_model->combined_state[0].strength_solver, 14,
+ scaling_points + 0)) {
+ return 0;
+ }
+ if (!aom_noise_strength_solver_fit_piecewise(
+ &noise_model->combined_state[1].strength_solver, 10,
+ scaling_points + 1)) {
+ aom_noise_strength_lut_free(scaling_points + 0);
+ return 0;
+ }
+ if (!aom_noise_strength_solver_fit_piecewise(
+ &noise_model->combined_state[2].strength_solver, 10,
+ scaling_points + 2)) {
+ aom_noise_strength_lut_free(scaling_points + 0);
+ aom_noise_strength_lut_free(scaling_points + 1);
+ return 0;
+ }
// Both the domain and the range of the scaling functions in the film_grain
// are normalized to 8-bit (e.g., they are implicitly scaled during grain
diff --git a/third_party/libaom/source/libaom/aom_dsp/psnrhvs.c b/third_party/libaom/source/libaom/aom_dsp/psnrhvs.c
index 69a1d99bf2..25f075aa2f 100644
--- a/third_party/libaom/source/libaom/aom_dsp/psnrhvs.c
+++ b/third_party/libaom/source/libaom/aom_dsp/psnrhvs.c
@@ -34,6 +34,7 @@ static void od_bin_fdct8x8(tran_low_t *y, int ystride, const int16_t *x,
*(y + ystride * i + j) = (*(y + ystride * i + j) + 4) >> 3;
}
+#if CONFIG_AV1_HIGHBITDEPTH
static void hbd_od_bin_fdct8x8(tran_low_t *y, int ystride, const int16_t *x,
int xstride) {
int i, j;
@@ -43,6 +44,7 @@ static void hbd_od_bin_fdct8x8(tran_low_t *y, int ystride, const int16_t *x,
for (j = 0; j < 8; j++)
*(y + ystride * i + j) = (*(y + ystride * i + j) + 4) >> 3;
}
+#endif // CONFIG_AV1_HIGHBITDEPTH
/* Normalized inverse quantization matrix for 8x8 DCT at the point of
* transparency. This is not the JPEG based matrix from the paper,
@@ -210,6 +212,7 @@ static double calc_psnrhvs(const unsigned char *src, int _systride,
}
}
s_gvar = 1.f / (36 - n + 1) * s_gmean / 36.f;
+#if CONFIG_AV1_HIGHBITDEPTH
if (!buf_is_hbd) {
od_bin_fdct8x8(dct_s_coef, 8, dct_s, 8);
od_bin_fdct8x8(dct_d_coef, 8, dct_d, 8);
@@ -217,6 +220,10 @@ static double calc_psnrhvs(const unsigned char *src, int _systride,
hbd_od_bin_fdct8x8(dct_s_coef, 8, dct_s, 8);
hbd_od_bin_fdct8x8(dct_d_coef, 8, dct_d, 8);
}
+#else
+ od_bin_fdct8x8(dct_s_coef, 8, dct_s, 8);
+ od_bin_fdct8x8(dct_d_coef, 8, dct_d, 8);
+#endif // CONFIG_AV1_HIGHBITDEPTH
for (i = 0; i < 8; i++)
for (j = (i == 0); j < 8; j++)
s_mask += dct_s_coef[i * 8 + j] * dct_s_coef[i * 8 + j] * mask[i][j];
diff --git a/third_party/libaom/source/libaom/aom_dsp/ssim.c b/third_party/libaom/source/libaom/aom_dsp/ssim.c
index 357da99ae4..c5334fd2c5 100644
--- a/third_party/libaom/source/libaom/aom_dsp/ssim.c
+++ b/third_party/libaom/source/libaom/aom_dsp/ssim.c
@@ -18,6 +18,7 @@
#include "aom_ports/mem.h"
#include "aom_ports/system_state.h"
+#if CONFIG_INTERNAL_STATS
void aom_ssim_parms_16x16_c(const uint8_t *s, int sp, const uint8_t *r, int rp,
uint32_t *sum_s, uint32_t *sum_r,
uint32_t *sum_sq_s, uint32_t *sum_sq_r,
@@ -33,6 +34,7 @@ void aom_ssim_parms_16x16_c(const uint8_t *s, int sp, const uint8_t *r, int rp,
}
}
}
+#endif // CONFIG_INTERNAL_STATS
void aom_ssim_parms_8x8_c(const uint8_t *s, int sp, const uint8_t *r, int rp,
uint32_t *sum_s, uint32_t *sum_r, uint32_t *sum_sq_s,
@@ -49,24 +51,6 @@ void aom_ssim_parms_8x8_c(const uint8_t *s, int sp, const uint8_t *r, int rp,
}
}
-#if CONFIG_AV1_HIGHBITDEPTH
-void aom_highbd_ssim_parms_8x8_c(const uint16_t *s, int sp, const uint16_t *r,
- int rp, uint32_t *sum_s, uint32_t *sum_r,
- uint32_t *sum_sq_s, uint32_t *sum_sq_r,
- uint32_t *sum_sxr) {
- int i, j;
- for (i = 0; i < 8; i++, s += sp, r += rp) {
- for (j = 0; j < 8; j++) {
- *sum_s += s[j];
- *sum_r += r[j];
- *sum_sq_s += s[j] * s[j];
- *sum_sq_r += r[j] * r[j];
- *sum_sxr += s[j] * r[j];
- }
- }
-}
-#endif
-
static const int64_t cc1 = 26634; // (64^2*(.01*255)^2
static const int64_t cc2 = 239708; // (64^2*(.03*255)^2
static const int64_t cc1_10 = 428658; // (64^2*(.01*1023)^2
@@ -78,7 +62,7 @@ static double similarity(uint32_t sum_s, uint32_t sum_r, uint32_t sum_sq_s,
uint32_t sum_sq_r, uint32_t sum_sxr, int count,
uint32_t bd) {
double ssim_n, ssim_d;
- int64_t c1, c2;
+ int64_t c1 = 0, c2 = 0;
if (bd == 8) {
// scale the constants by number of pixels
c1 = (cc1 * count * count) >> 12;
@@ -90,8 +74,9 @@ static double similarity(uint32_t sum_s, uint32_t sum_r, uint32_t sum_sq_s,
c1 = (cc1_12 * count * count) >> 12;
c2 = (cc2_12 * count * count) >> 12;
} else {
- c1 = c2 = 0;
assert(0);
+ // Return similarity as zero for unsupported bit-depth values.
+ return 0;
}
ssim_n = (2.0 * sum_s * sum_r + c1) *
@@ -111,21 +96,11 @@ static double ssim_8x8(const uint8_t *s, int sp, const uint8_t *r, int rp) {
return similarity(sum_s, sum_r, sum_sq_s, sum_sq_r, sum_sxr, 64, 8);
}
-static double highbd_ssim_8x8(const uint16_t *s, int sp, const uint16_t *r,
- int rp, uint32_t bd, uint32_t shift) {
- uint32_t sum_s = 0, sum_r = 0, sum_sq_s = 0, sum_sq_r = 0, sum_sxr = 0;
- aom_highbd_ssim_parms_8x8(s, sp, r, rp, &sum_s, &sum_r, &sum_sq_s, &sum_sq_r,
- &sum_sxr);
- return similarity(sum_s >> shift, sum_r >> shift, sum_sq_s >> (2 * shift),
- sum_sq_r >> (2 * shift), sum_sxr >> (2 * shift), 64, bd);
-}
-
// We are using a 8x8 moving window with starting location of each 8x8 window
// on the 4x4 pixel grid. Such arrangement allows the windows to overlap
// block boundaries to penalize blocking artifacts.
-static double aom_ssim2(const uint8_t *img1, const uint8_t *img2,
- int stride_img1, int stride_img2, int width,
- int height) {
+double aom_ssim2(const uint8_t *img1, const uint8_t *img2, int stride_img1,
+ int stride_img2, int width, int height) {
int i, j;
int samples = 0;
double ssim_total = 0;
@@ -143,31 +118,10 @@ static double aom_ssim2(const uint8_t *img1, const uint8_t *img2,
return ssim_total;
}
-static double aom_highbd_ssim2(const uint8_t *img1, const uint8_t *img2,
- int stride_img1, int stride_img2, int width,
- int height, uint32_t bd, uint32_t shift) {
- int i, j;
- int samples = 0;
- double ssim_total = 0;
-
- // sample point start with each 4x4 location
- for (i = 0; i <= height - 8;
- i += 4, img1 += stride_img1 * 4, img2 += stride_img2 * 4) {
- for (j = 0; j <= width - 8; j += 4) {
- double v = highbd_ssim_8x8(CONVERT_TO_SHORTPTR(img1 + j), stride_img1,
- CONVERT_TO_SHORTPTR(img2 + j), stride_img2, bd,
- shift);
- ssim_total += v;
- samples++;
- }
- }
- ssim_total /= samples;
- return ssim_total;
-}
-
-void aom_calc_ssim(const YV12_BUFFER_CONFIG *source,
- const YV12_BUFFER_CONFIG *dest, double *weight,
- double *fast_ssim) {
+#if CONFIG_INTERNAL_STATS
+void aom_lowbd_calc_ssim(const YV12_BUFFER_CONFIG *source,
+ const YV12_BUFFER_CONFIG *dest, double *weight,
+ double *fast_ssim) {
double abc[3];
for (int i = 0; i < 3; ++i) {
const int is_uv = i > 0;
@@ -421,7 +375,57 @@ double aom_get_ssim_metrics(uint8_t *img1, int img1_pitch, uint8_t *img2,
m->dssim = dssim_total;
return inconsistency_total;
}
+#endif // CONFIG_INTERNAL_STATS
+#if CONFIG_AV1_HIGHBITDEPTH
+void aom_highbd_ssim_parms_8x8_c(const uint16_t *s, int sp, const uint16_t *r,
+ int rp, uint32_t *sum_s, uint32_t *sum_r,
+ uint32_t *sum_sq_s, uint32_t *sum_sq_r,
+ uint32_t *sum_sxr) {
+ int i, j;
+ for (i = 0; i < 8; i++, s += sp, r += rp) {
+ for (j = 0; j < 8; j++) {
+ *sum_s += s[j];
+ *sum_r += r[j];
+ *sum_sq_s += s[j] * s[j];
+ *sum_sq_r += r[j] * r[j];
+ *sum_sxr += s[j] * r[j];
+ }
+ }
+}
+
+static double highbd_ssim_8x8(const uint16_t *s, int sp, const uint16_t *r,
+ int rp, uint32_t bd, uint32_t shift) {
+ uint32_t sum_s = 0, sum_r = 0, sum_sq_s = 0, sum_sq_r = 0, sum_sxr = 0;
+ aom_highbd_ssim_parms_8x8(s, sp, r, rp, &sum_s, &sum_r, &sum_sq_s, &sum_sq_r,
+ &sum_sxr);
+ return similarity(sum_s >> shift, sum_r >> shift, sum_sq_s >> (2 * shift),
+ sum_sq_r >> (2 * shift), sum_sxr >> (2 * shift), 64, bd);
+}
+
+double aom_highbd_ssim2(const uint8_t *img1, const uint8_t *img2,
+ int stride_img1, int stride_img2, int width, int height,
+ uint32_t bd, uint32_t shift) {
+ int i, j;
+ int samples = 0;
+ double ssim_total = 0;
+
+ // sample points start at each 4x4 location
+ for (i = 0; i <= height - 8;
+ i += 4, img1 += stride_img1 * 4, img2 += stride_img2 * 4) {
+ for (j = 0; j <= width - 8; j += 4) {
+ double v = highbd_ssim_8x8(CONVERT_TO_SHORTPTR(img1 + j), stride_img1,
+ CONVERT_TO_SHORTPTR(img2 + j), stride_img2, bd,
+ shift);
+ ssim_total += v;
+ samples++;
+ }
+ }
+ ssim_total /= samples;
+ return ssim_total;
+}
+
+#if CONFIG_INTERNAL_STATS
void aom_highbd_calc_ssim(const YV12_BUFFER_CONFIG *source,
const YV12_BUFFER_CONFIG *dest, double *weight,
uint32_t bd, uint32_t in_bd, double *fast_ssim) {
@@ -455,3 +459,25 @@ void aom_highbd_calc_ssim(const YV12_BUFFER_CONFIG *source,
fast_ssim[1] = abc[0] * .8 + .1 * (abc[1] + abc[2]);
}
}
+#endif // CONFIG_INTERNAL_STATS
+#endif // CONFIG_AV1_HIGHBITDEPTH
+
+#if CONFIG_INTERNAL_STATS
+void aom_calc_ssim(const YV12_BUFFER_CONFIG *orig,
+ const YV12_BUFFER_CONFIG *recon, const uint32_t bit_depth,
+ const uint32_t in_bit_depth, int is_hbd, double *weight,
+ double *frame_ssim2) {
+#if CONFIG_AV1_HIGHBITDEPTH
+ if (is_hbd) {
+ aom_highbd_calc_ssim(orig, recon, weight, bit_depth, in_bit_depth,
+ frame_ssim2);
+ return;
+ }
+#else
+ (void)bit_depth;
+ (void)in_bit_depth;
+ (void)is_hbd;
+#endif // CONFIG_AV1_HIGHBITDEPTH
+ aom_lowbd_calc_ssim(orig, recon, weight, frame_ssim2);
+}
+#endif // CONFIG_INTERNAL_STATS
diff --git a/third_party/libaom/source/libaom/aom_dsp/ssim.h b/third_party/libaom/source/libaom/aom_dsp/ssim.h
index d635ef5bbe..fb92556a8c 100644
--- a/third_party/libaom/source/libaom/aom_dsp/ssim.h
+++ b/third_party/libaom/source/libaom/aom_dsp/ssim.h
@@ -12,14 +12,13 @@
#ifndef AOM_AOM_DSP_SSIM_H_
#define AOM_AOM_DSP_SSIM_H_
-#define MAX_SSIM_DB 100.0;
-
#ifdef __cplusplus
extern "C" {
#endif
#include "config/aom_config.h"
+#if CONFIG_INTERNAL_STATS
#include "aom_scale/yv12config.h"
// metrics used for calculating ssim, ssim2, dssim, and ssimc
@@ -68,18 +67,35 @@ double aom_get_ssim_metrics(uint8_t *img1, int img1_pitch, uint8_t *img2,
int img2_pitch, int width, int height, Ssimv *sv2,
Metrics *m, int do_inconsistency);
-void aom_calc_ssim(const YV12_BUFFER_CONFIG *source,
- const YV12_BUFFER_CONFIG *dest, double *weight,
- double *fast_ssim);
+void aom_lowbd_calc_ssim(const YV12_BUFFER_CONFIG *source,
+ const YV12_BUFFER_CONFIG *dest, double *weight,
+ double *fast_ssim);
double aom_calc_fastssim(const YV12_BUFFER_CONFIG *source,
const YV12_BUFFER_CONFIG *dest, double *ssim_y,
double *ssim_u, double *ssim_v, uint32_t bd,
uint32_t in_bd);
+#if CONFIG_AV1_HIGHBITDEPTH
void aom_highbd_calc_ssim(const YV12_BUFFER_CONFIG *source,
const YV12_BUFFER_CONFIG *dest, double *weight,
uint32_t bd, uint32_t in_bd, double *fast_ssim);
+#endif // CONFIG_AV1_HIGHBITDEPTH
+
+void aom_calc_ssim(const YV12_BUFFER_CONFIG *orig,
+ const YV12_BUFFER_CONFIG *recon, const uint32_t bit_depth,
+ const uint32_t in_bit_depth, int is_hbd, double *weight,
+ double *frame_ssim2);
+#endif // CONFIG_INTERNAL_STATS
+
+double aom_ssim2(const uint8_t *img1, const uint8_t *img2, int stride_img1,
+ int stride_img2, int width, int height);
+
+#if CONFIG_AV1_HIGHBITDEPTH
+double aom_highbd_ssim2(const uint8_t *img1, const uint8_t *img2,
+ int stride_img1, int stride_img2, int width, int height,
+ uint32_t bd, uint32_t shift);
+#endif // CONFIG_AV1_HIGHBITDEPTH
#ifdef __cplusplus
} // extern "C"
diff --git a/third_party/libaom/source/libaom/aom_dsp/vmaf.c b/third_party/libaom/source/libaom/aom_dsp/vmaf.c
index 41653430c1..219e278303 100644
--- a/third_party/libaom/source/libaom/aom_dsp/vmaf.c
+++ b/third_party/libaom/source/libaom/aom_dsp/vmaf.c
@@ -12,9 +12,6 @@
#include "aom_dsp/vmaf.h"
#include <assert.h>
-#if !CONFIG_USE_VMAF_RC
-#include <libvmaf.h>
-#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
@@ -24,10 +21,7 @@
#include <unistd.h>
#endif
-#if CONFIG_USE_VMAF_RC
-#include <libvmaf/libvmaf.rc.h>
-#endif
-
+#include <libvmaf/libvmaf.h>
#include "aom_dsp/blend.h"
#include "aom_ports/system_state.h"
@@ -36,162 +30,18 @@ static void vmaf_fatal_error(const char *message) {
exit(EXIT_FAILURE);
}
-#if !CONFIG_USE_VMAF_RC
-typedef struct FrameData {
- const YV12_BUFFER_CONFIG *source;
- const YV12_BUFFER_CONFIG *distorted;
- int frame_set;
- int bit_depth;
-} FrameData;
-
-// A callback function used to pass data to VMAF.
-// Returns 0 after reading a frame.
-// Returns 2 when there is no more frame to read.
-static int read_frame(float *ref_data, float *main_data, float *temp_data,
- int stride, void *user_data) {
- FrameData *frames = (FrameData *)user_data;
-
- if (!frames->frame_set) {
- const int width = frames->source->y_width;
- const int height = frames->source->y_height;
- assert(width == frames->distorted->y_width);
- assert(height == frames->distorted->y_height);
-
- if (frames->source->flags & YV12_FLAG_HIGHBITDEPTH) {
- const float scale_factor = 1.0f / (float)(1 << (frames->bit_depth - 8));
- uint16_t *ref_ptr = CONVERT_TO_SHORTPTR(frames->source->y_buffer);
- uint16_t *main_ptr = CONVERT_TO_SHORTPTR(frames->distorted->y_buffer);
-
- for (int row = 0; row < height; ++row) {
- for (int col = 0; col < width; ++col) {
- ref_data[col] = scale_factor * (float)ref_ptr[col];
- }
- ref_ptr += frames->source->y_stride;
- ref_data += stride / sizeof(*ref_data);
- }
-
- for (int row = 0; row < height; ++row) {
- for (int col = 0; col < width; ++col) {
- main_data[col] = scale_factor * (float)main_ptr[col];
- }
- main_ptr += frames->distorted->y_stride;
- main_data += stride / sizeof(*main_data);
- }
- } else {
- uint8_t *ref_ptr = frames->source->y_buffer;
- uint8_t *main_ptr = frames->distorted->y_buffer;
-
- for (int row = 0; row < height; ++row) {
- for (int col = 0; col < width; ++col) {
- ref_data[col] = (float)ref_ptr[col];
- }
- ref_ptr += frames->source->y_stride;
- ref_data += stride / sizeof(*ref_data);
- }
-
- for (int row = 0; row < height; ++row) {
- for (int col = 0; col < width; ++col) {
- main_data[col] = (float)main_ptr[col];
- }
- main_ptr += frames->distorted->y_stride;
- main_data += stride / sizeof(*main_data);
- }
- }
- frames->frame_set = 1;
- return 0;
- }
-
- (void)temp_data;
- return 2;
-}
-
-void aom_calc_vmaf(const char *model_path, const YV12_BUFFER_CONFIG *source,
- const YV12_BUFFER_CONFIG *distorted, const int bit_depth,
- double *const vmaf) {
- aom_clear_system_state();
- const int width = source->y_width;
- const int height = source->y_height;
- FrameData frames = { source, distorted, 0, bit_depth };
- char *fmt = bit_depth == 10 ? "yuv420p10le" : "yuv420p";
- double vmaf_score;
- const int ret =
- compute_vmaf(&vmaf_score, fmt, width, height, read_frame,
- /*user_data=*/&frames, (char *)model_path,
- /*log_path=*/NULL, /*log_fmt=*/NULL, /*disable_clip=*/1,
- /*disable_avx=*/0, /*enable_transform=*/0,
- /*phone_model=*/0, /*do_psnr=*/0, /*do_ssim=*/0,
- /*do_ms_ssim=*/0, /*pool_method=*/NULL, /*n_thread=*/0,
- /*n_subsample=*/1, /*enable_conf_interval=*/0);
- if (ret) vmaf_fatal_error("Failed to compute VMAF scores.");
-
- aom_clear_system_state();
- *vmaf = vmaf_score;
-}
-
-void aom_calc_vmaf_multi_frame(void *user_data, const char *model_path,
- int (*rd_frm)(float *ref_data, float *main_data,
- float *temp_data, int stride_byte,
- void *user_data),
- int frame_width, int frame_height, int bit_depth,
- double *vmaf) {
- aom_clear_system_state();
-
- char *fmt = bit_depth == 10 ? "yuv420p10le" : "yuv420p";
- int log_path_length = snprintf(NULL, 0, "vmaf_scores_%d.xml", getpid()) + 1;
- char *log_path = malloc(log_path_length);
- snprintf(log_path, log_path_length, "vmaf_scores_%d.xml", getpid());
- double vmaf_score;
- const int ret =
- compute_vmaf(&vmaf_score, fmt, frame_width, frame_height, rd_frm,
- /*user_data=*/user_data, (char *)model_path,
- /*log_path=*/log_path, /*log_fmt=*/NULL, /*disable_clip=*/0,
- /*disable_avx=*/0, /*enable_transform=*/0,
- /*phone_model=*/0, /*do_psnr=*/0, /*do_ssim=*/0,
- /*do_ms_ssim=*/0, /*pool_method=*/NULL, /*n_thread=*/0,
- /*n_subsample=*/1, /*enable_conf_interval=*/0);
- FILE *vmaf_log = fopen(log_path, "r");
- free(log_path);
- log_path = NULL;
- if (vmaf_log == NULL || ret) {
- vmaf_fatal_error("Failed to compute VMAF scores.");
- }
-
- int frame_index = 0;
- char buf[512];
- while (fgets(buf, 511, vmaf_log) != NULL) {
- if (memcmp(buf, "\t\t<frame ", 9) == 0) {
- char *p = strstr(buf, "vmaf=");
- if (p != NULL && p[5] == '"') {
- char *p2 = strstr(&p[6], "\"");
- *p2 = '\0';
- const double score = atof(&p[6]);
- if (score < 0.0 || score > 100.0) {
- vmaf_fatal_error("Failed to compute VMAF scores.");
- }
- vmaf[frame_index++] = score;
- }
- }
- }
- fclose(vmaf_log);
-
- aom_clear_system_state();
-}
-#endif
-
-#if CONFIG_USE_VMAF_RC
-void aom_init_vmaf_model_rc(VmafModel **vmaf_model, const char *model_path) {
+void aom_init_vmaf_model(VmafModel **vmaf_model, const char *model_path) {
if (*vmaf_model != NULL) return;
VmafModelConfig model_cfg;
model_cfg.flags = VMAF_MODEL_FLAG_DISABLE_CLIP;
model_cfg.name = "vmaf";
- model_cfg.path = (char *)model_path;
- if (vmaf_model_load_from_path(vmaf_model, &model_cfg)) {
+ if (vmaf_model_load_from_path(vmaf_model, &model_cfg, model_path)) {
vmaf_fatal_error("Failed to load VMAF model.");
}
}
-void aom_close_vmaf_model_rc(VmafModel *vmaf_model) {
+void aom_close_vmaf_model(VmafModel *vmaf_model) {
vmaf_model_destroy(vmaf_model);
}
@@ -221,8 +71,9 @@ static void copy_picture(const int bit_depth, const YV12_BUFFER_CONFIG *src,
}
}
-void aom_init_vmaf_context_rc(VmafContext **vmaf_context, VmafModel *vmaf_model,
- bool cal_vmaf_neg) {
+void aom_init_vmaf_context(VmafContext **vmaf_context, VmafModel *vmaf_model,
+ bool cal_vmaf_neg) {
+ // TODO(sdeng): make them CLI arguments.
VmafConfiguration cfg;
cfg.log_level = VMAF_LOG_LEVEL_NONE;
cfg.n_threads = 0;
@@ -233,41 +84,53 @@ void aom_init_vmaf_context_rc(VmafContext **vmaf_context, VmafModel *vmaf_model,
vmaf_fatal_error("Failed to init VMAF context.");
}
- if (vmaf_use_features_from_model(*vmaf_context, vmaf_model)) {
- vmaf_fatal_error("Failed to load feature extractors from VMAF model.");
- }
-
if (cal_vmaf_neg) {
VmafFeatureDictionary *vif_feature = NULL;
- vmaf_feature_dictionary_set(&vif_feature, "vif_enhn_gain_limit", "1.0");
- if (vmaf_use_feature(*vmaf_context, "float_vif", vif_feature)) {
+ if (vmaf_feature_dictionary_set(&vif_feature, "vif_enhn_gain_limit",
+ "1.0")) {
+ vmaf_fatal_error("Failed to set vif_enhn_gain_limit.");
+ }
+ if (vmaf_model_feature_overload(vmaf_model, "float_vif", vif_feature)) {
vmaf_fatal_error("Failed to use feature float_vif.");
}
VmafFeatureDictionary *adm_feature = NULL;
- vmaf_feature_dictionary_set(&adm_feature, "adm_enhn_gain_limit", "1.0");
- if (vmaf_use_feature(*vmaf_context, "float_adm", adm_feature)) {
+ if (vmaf_feature_dictionary_set(&adm_feature, "adm_enhn_gain_limit",
+ "1.0")) {
+ vmaf_fatal_error("Failed to set adm_enhn_gain_limit.");
+ }
+ if (vmaf_model_feature_overload(vmaf_model, "adm", adm_feature)) {
vmaf_fatal_error("Failed to use feature float_adm.");
}
}
VmafFeatureDictionary *motion_force_zero = NULL;
- vmaf_feature_dictionary_set(&motion_force_zero, "motion_force_zero", "true");
- if (vmaf_use_feature(*vmaf_context, "float_motion", motion_force_zero)) {
+ if (vmaf_feature_dictionary_set(&motion_force_zero, "motion_force_zero",
+ "1")) {
+ vmaf_fatal_error("Failed to set motion_force_zero.");
+ }
+ if (vmaf_model_feature_overload(vmaf_model, "float_motion",
+ motion_force_zero)) {
vmaf_fatal_error("Failed to use feature float_motion.");
}
+
+ if (vmaf_use_features_from_model(*vmaf_context, vmaf_model)) {
+ vmaf_fatal_error("Failed to load feature extractors from VMAF model.");
+ }
}
-void aom_close_vmaf_context_rc(VmafContext *vmaf_context) {
+void aom_close_vmaf_context(VmafContext *vmaf_context) {
if (vmaf_close(vmaf_context)) {
vmaf_fatal_error("Failed to close VMAF context.");
}
}
-void aom_calc_vmaf_at_index_rc(VmafContext *vmaf_context, VmafModel *vmaf_model,
- const YV12_BUFFER_CONFIG *source,
- const YV12_BUFFER_CONFIG *distorted,
- int bit_depth, int frame_index, double *vmaf) {
+void aom_calc_vmaf(VmafModel *vmaf_model, const YV12_BUFFER_CONFIG *source,
+ const YV12_BUFFER_CONFIG *distorted, int bit_depth,
+ bool cal_vmaf_neg, double *vmaf) {
+ VmafContext *vmaf_context;
+ aom_init_vmaf_context(&vmaf_context, vmaf_model, cal_vmaf_neg);
+ const int frame_index = 0;
VmafPicture ref, dist;
if (vmaf_picture_alloc(&ref, VMAF_PIX_FMT_YUV420P, bit_depth, source->y_width,
source->y_height) ||
@@ -282,10 +145,50 @@ void aom_calc_vmaf_at_index_rc(VmafContext *vmaf_context, VmafModel *vmaf_model,
vmaf_fatal_error("Failed to read VMAF pictures.");
}
+ if (vmaf_read_pictures(vmaf_context, NULL, NULL, 0)) {
+ vmaf_fatal_error("Failed to flush context.");
+ }
+
vmaf_picture_unref(&ref);
vmaf_picture_unref(&dist);
vmaf_score_at_index(vmaf_context, vmaf_model, vmaf, frame_index);
+ aom_close_vmaf_context(vmaf_context);
}
-#endif // CONFIG_USE_VMAF_RC
+void aom_read_vmaf_image(VmafContext *vmaf_context,
+ const YV12_BUFFER_CONFIG *source,
+ const YV12_BUFFER_CONFIG *distorted, int bit_depth,
+ int frame_index) {
+ VmafPicture ref, dist;
+ if (vmaf_picture_alloc(&ref, VMAF_PIX_FMT_YUV420P, bit_depth, source->y_width,
+ source->y_height) ||
+ vmaf_picture_alloc(&dist, VMAF_PIX_FMT_YUV420P, bit_depth,
+ source->y_width, source->y_height)) {
+ vmaf_fatal_error("Failed to alloc VMAF pictures.");
+ }
+ copy_picture(bit_depth, source, &ref);
+ copy_picture(bit_depth, distorted, &dist);
+ if (vmaf_read_pictures(vmaf_context, &ref, &dist,
+ /*picture index=*/frame_index)) {
+ vmaf_fatal_error("Failed to read VMAF pictures.");
+ }
+
+ vmaf_picture_unref(&ref);
+ vmaf_picture_unref(&dist);
+}
+
+double aom_calc_vmaf_at_index(VmafContext *vmaf_context, VmafModel *vmaf_model,
+ int frame_index) {
+ double vmaf;
+ if (vmaf_score_at_index(vmaf_context, vmaf_model, &vmaf, frame_index)) {
+ vmaf_fatal_error("Failed to calc VMAF scores.");
+ }
+ return vmaf;
+}
+
+void aom_flush_vmaf_context(VmafContext *vmaf_context) {
+ if (vmaf_read_pictures(vmaf_context, NULL, NULL, 0)) {
+ vmaf_fatal_error("Failed to flush context.");
+ }
+}
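
In the rewritten aom_init_vmaf_context() above, per-feature options such as vif_enhn_gain_limit are now attached to the model with vmaf_model_feature_overload(), and vmaf_use_features_from_model() is deliberately called last so the overloads are in place before the feature extractors are registered on the context. A minimal sketch of that ordering, using only the libvmaf calls that appear in the hunk (error handling reduced to a return code; ctx and model are assumed to be already initialized by the caller):

#include <libvmaf/libvmaf.h>

// Returns 0 on success, nonzero on failure.
static int setup_neg_mode_features(VmafContext *ctx, VmafModel *model) {
  VmafFeatureDictionary *vif = NULL;
  if (vmaf_feature_dictionary_set(&vif, "vif_enhn_gain_limit", "1.0") ||
      vmaf_model_feature_overload(model, "float_vif", vif)) {
    return -1;
  }
  // Register the model's feature extractors only after the overloads above,
  // mirroring the order established in aom_init_vmaf_context().
  return vmaf_use_features_from_model(ctx, model) ? -1 : 0;
}
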
diff --git a/third_party/libaom/source/libaom/aom_dsp/vmaf.h b/third_party/libaom/source/libaom/aom_dsp/vmaf.h
index d9da223e29..3ba8c8d565 100644
--- a/third_party/libaom/source/libaom/aom_dsp/vmaf.h
+++ b/third_party/libaom/source/libaom/aom_dsp/vmaf.h
@@ -15,33 +15,28 @@
#include <stdbool.h>
#include "aom_scale/yv12config.h"
-#if CONFIG_USE_VMAF_RC
typedef struct VmafContext VmafContext;
typedef struct VmafModel VmafModel;
-#endif
-
-#if CONFIG_USE_VMAF_RC
-void aom_init_vmaf_context_rc(VmafContext **vmaf_context, VmafModel *vmaf_model,
- bool cal_vmaf_neg);
-void aom_close_vmaf_context_rc(VmafContext *vmaf_context);
-
-void aom_init_vmaf_model_rc(VmafModel **vmaf_model, const char *model_path);
-void aom_close_vmaf_model_rc(VmafModel *vmaf_model);
-
-void aom_calc_vmaf_at_index_rc(VmafContext *vmaf_context, VmafModel *vmaf_model,
- const YV12_BUFFER_CONFIG *source,
- const YV12_BUFFER_CONFIG *distorted,
- int bit_depth, int frame_index, double *vmaf);
-#else
-void aom_calc_vmaf(const char *model_path, const YV12_BUFFER_CONFIG *source,
+
+void aom_init_vmaf_context(VmafContext **vmaf_context, VmafModel *vmaf_model,
+ bool cal_vmaf_neg);
+void aom_close_vmaf_context(VmafContext *vmaf_context);
+
+void aom_init_vmaf_model(VmafModel **vmaf_model, const char *model_path);
+void aom_close_vmaf_model(VmafModel *vmaf_model);
+
+void aom_calc_vmaf(VmafModel *vmaf_model, const YV12_BUFFER_CONFIG *source,
const YV12_BUFFER_CONFIG *distorted, int bit_depth,
- double *vmaf);
-
-void aom_calc_vmaf_multi_frame(
- void *user_data, const char *model_path,
- int (*read_frame)(float *ref_data, float *main_data, float *temp_data,
- int stride_byte, void *user_data),
- int frame_width, int frame_height, int bit_depth, double *vmaf);
-#endif // CONFIG_USE_VMAF_RC
+ bool cal_vmaf_neg, double *vmaf);
+
+void aom_read_vmaf_image(VmafContext *vmaf_context,
+ const YV12_BUFFER_CONFIG *source,
+ const YV12_BUFFER_CONFIG *distorted, int bit_depth,
+ int frame_index);
+
+double aom_calc_vmaf_at_index(VmafContext *vmaf_context, VmafModel *vmaf_model,
+ int frame_index);
+
+void aom_flush_vmaf_context(VmafContext *vmaf_context);
#endif // AOM_AOM_DSP_VMAF_H_
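
Taken together, the declarations above describe the new per-frame VMAF workflow: load a model once, create a context, queue source/distorted pairs, flush, then pull scores by frame index. A minimal usage sketch built only from these wrappers (the frame arrays, their count, and the model path are assumed to come from the caller; this is an illustration, not encoder code):

#include <stdbool.h>
#include "aom_dsp/vmaf.h"

static void score_sequence(const YV12_BUFFER_CONFIG *const *src,
                           const YV12_BUFFER_CONFIG *const *dist,
                           int num_frames, int bit_depth,
                           const char *model_path, double *scores) {
  VmafModel *model = NULL;
  VmafContext *ctx = NULL;
  aom_init_vmaf_model(&model, model_path);
  aom_init_vmaf_context(&ctx, model, /*cal_vmaf_neg=*/false);
  for (int i = 0; i < num_frames; ++i) {
    // Queues one source/distorted pair at picture index i.
    aom_read_vmaf_image(ctx, src[i], dist[i], bit_depth, i);
  }
  aom_flush_vmaf_context(ctx);  // a NULL picture pair signals end of stream
  for (int i = 0; i < num_frames; ++i) {
    scores[i] = aom_calc_vmaf_at_index(ctx, model, i);
  }
  aom_close_vmaf_context(ctx);
  aom_close_vmaf_model(model);
}
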
diff --git a/third_party/libaom/source/libaom/aom_dsp/x86/highbd_sad_sse2.asm b/third_party/libaom/source/libaom/aom_dsp/x86/highbd_sad_sse2.asm
index 58f1ac964e..a2510d5e7f 100644
--- a/third_party/libaom/source/libaom/aom_dsp/x86/highbd_sad_sse2.asm
+++ b/third_party/libaom/source/libaom/aom_dsp/x86/highbd_sad_sse2.asm
@@ -20,20 +20,21 @@ SECTION .text
; Arg 2: Height
; Arg 3: Number of general purpose registers: 5 for 32-bit build, 6 for 64-bit
; Arg 4: Type of function: if 0, normal sad; if 1, avg; if 2, skip rows
-%macro HIGH_SAD_FN 4
+; Arg 5: Number of xmm registers. 8xh needs 8, others only need 7
+%macro HIGH_SAD_FN 4-5 7
%if %4 == 0
%if %3 == 5
-cglobal highbd_sad%1x%2, 4, %3, 7, src, src_stride, ref, ref_stride, n_rows
+cglobal highbd_sad%1x%2, 4, %3, %5, src, src_stride, ref, ref_stride, n_rows
%else ; %3 == 7
-cglobal highbd_sad%1x%2, 4, %3, 7, src, src_stride, ref, ref_stride, \
+cglobal highbd_sad%1x%2, 4, %3, %5, src, src_stride, ref, ref_stride, \
src_stride3, ref_stride3, n_rows
%endif ; %3 == 5/7
%elif %4 == 1 ; avg
%if %3 == 5
-cglobal highbd_sad%1x%2_avg, 5, 1 + %3, 7, src, src_stride, ref, ref_stride, \
+cglobal highbd_sad%1x%2_avg, 5, 1 + %3, %5, src, src_stride, ref, ref_stride, \
second_pred, n_rows
%else ; %3 == 7
-cglobal highbd_sad%1x%2_avg, 5, ARCH_X86_64 + %3, 7, src, src_stride, \
+cglobal highbd_sad%1x%2_avg, 5, ARCH_X86_64 + %3, %5, src, src_stride, \
ref, ref_stride, \
second_pred, \
src_stride3, ref_stride3
@@ -356,7 +357,7 @@ HIGH_SAD16XN 8, 2 ; highbd_sad_skip_16x8_sse2
; unsigned int aom_highbd_sad8x{4,8,16}_sse2(uint8_t *src, int src_stride,
; uint8_t *ref, int ref_stride);
%macro HIGH_SAD8XN 1-2 0
- HIGH_SAD_FN 8, %1, 7, %2
+ HIGH_SAD_FN 8, %1, 7, %2, 8
%if %2 == 2 ; skip rows, so divide number of rows by 2
mov n_rowsd, %1/8
%else
@@ -377,22 +378,30 @@ HIGH_SAD16XN 8, 2 ; highbd_sad_skip_16x8_sse2
pavgw m4, [second_predq+mmsize*3]
lea second_predq, [second_predq+mmsize*4]
%endif
- mova m5, [srcq]
- psubusw m5, m1
- psubusw m1, [srcq]
+ mova m7, m1
+ movu m5, [srcq]
+ psubusw m1, m5
+ psubusw m5, m7
por m1, m5
- mova m5, [srcq+src_strideq*2]
- psubusw m5, m2
- psubusw m2, [srcq+src_strideq*2]
+
+ mova m7, m2
+ movu m5, [srcq+src_strideq*2]
+ psubusw m2, m5
+ psubusw m5, m7
por m2, m5
- mova m5, [srcq+src_strideq*4]
- psubusw m5, m3
- psubusw m3, [srcq+src_strideq*4]
+
+ mova m7, m3
+ movu m5, [srcq+src_strideq*4]
+ psubusw m3, m5
+ psubusw m5, m7
por m3, m5
- mova m5, [srcq+src_stride3q*2]
- psubusw m5, m4
- psubusw m4, [srcq+src_stride3q*2]
+
+ mova m7, m4
+ movu m5, [srcq+src_stride3q*2]
+ psubusw m4, m5
+ psubusw m5, m7
por m4, m5
+
paddw m1, m2
paddw m3, m4
movhlps m2, m1
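
The assembly change above keeps a copy of the reference row in m7 and loads the source row with movu so it need not be 16-byte aligned; the psubusw/psubusw/por sequence then forms the per-lane absolute difference of unsigned 16-bit samples, using the identity |a - b| = sat_sub(a, b) | sat_sub(b, a), since one of the two saturating differences is always zero. A scalar sketch of that identity, independent of the SIMD code:

#include <stdint.h>

static inline uint16_t sat_sub_u16(uint16_t a, uint16_t b) {
  return (uint16_t)(a > b ? a - b : 0);  // what psubusw does per lane
}

static uint32_t sad_row_u16(const uint16_t *src, const uint16_t *ref, int n) {
  uint32_t sad = 0;
  for (int i = 0; i < n; ++i) {
    // OR of the two saturating differences equals the absolute difference.
    sad += (uint16_t)(sat_sub_u16(src[i], ref[i]) |
                      sat_sub_u16(ref[i], src[i]));
  }
  return sad;
}
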
diff --git a/third_party/libaom/source/libaom/aom_dsp/x86/variance_impl_avx2.c b/third_party/libaom/source/libaom/aom_dsp/x86/variance_impl_avx2.c
index f779270ae3..163e4cc566 100644
--- a/third_party/libaom/source/libaom/aom_dsp/x86/variance_impl_avx2.c
+++ b/third_party/libaom/source/libaom/aom_dsp/x86/variance_impl_avx2.c
@@ -616,7 +616,7 @@ unsigned int aom_sub_pixel_avg_variance32xh_avx2(
src += src_stride;
dst += dst_stride;
}
- } else if (y_offset == 8) {
+ } else if (y_offset == 4) {
__m256i src_next_reg;
for (i = 0; i < height; i++) {
LOAD_SRC_DST
@@ -652,8 +652,8 @@ unsigned int aom_sub_pixel_avg_variance32xh_avx2(
dst += dst_stride;
}
}
- // x_offset = 8 and y_offset = 0
- } else if (x_offset == 8) {
+ // x_offset = 4 and y_offset = 0
+ } else if (x_offset == 4) {
if (y_offset == 0) {
__m256i src_next_reg;
for (i = 0; i < height; i++) {
@@ -668,8 +668,8 @@ unsigned int aom_sub_pixel_avg_variance32xh_avx2(
src += src_stride;
dst += dst_stride;
}
- // x_offset = 8 and y_offset = 8
- } else if (y_offset == 8) {
+ // x_offset = 4 and y_offset = 4
+ } else if (y_offset == 4) {
__m256i src_next_reg, src_avg;
// load source and another source starting from the next
// following byte
@@ -691,7 +691,7 @@ unsigned int aom_sub_pixel_avg_variance32xh_avx2(
CALC_SUM_SSE_INSIDE_LOOP
dst += dst_stride;
}
- // x_offset = 8 and y_offset = bilin interpolation
+ // x_offset = 4 and y_offset = bilin interpolation
} else {
__m256i filter, pw8, src_next_reg, src_avg;
y_offset <<= 5;
@@ -741,8 +741,8 @@ unsigned int aom_sub_pixel_avg_variance32xh_avx2(
src += src_stride;
dst += dst_stride;
}
- // x_offset = bilin interpolation and y_offset = 8
- } else if (y_offset == 8) {
+ // x_offset = bilin interpolation and y_offset = 4
+ } else if (y_offset == 4) {
__m256i filter, pw8, src_next_reg, src_pack;
x_offset <<= 5;
filter = _mm256_load_si256(
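
The comment and comparison updates above reflect that these sub-pixel helpers now take offsets on a 0..7 scale, so the half-sample position that can be handled with a simple average is 4 rather than 8. A rough scalar sketch of the three paths the AVX2 code special-cases (the weights below are an illustrative 3-bit equivalent of the real bilinear filter table, not the libaom coefficients):

#include <stdint.h>

static uint8_t interp_pixel(uint8_t a, uint8_t b, int offset /* 0..7 */) {
  if (offset == 0) return a;                             // integer position
  if (offset == 4) return (uint8_t)((a + b + 1) >> 1);   // half-pel: average
  const int w1 = 8 - offset, w2 = offset;                // general bilinear
  return (uint8_t)((a * w1 + b * w2 + 4) >> 3);
}
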
diff --git a/third_party/libaom/source/libaom/apps/aomenc.c b/third_party/libaom/source/libaom/apps/aomenc.c
index 11035bf129..c09c3ca9c2 100644
--- a/third_party/libaom/source/libaom/apps/aomenc.c
+++ b/third_party/libaom/source/libaom/apps/aomenc.c
@@ -227,6 +227,8 @@ static const int av1_arg_ctrl_map[] = { AOME_SET_CPUUSED,
#if CONFIG_TUNE_VMAF
AV1E_SET_VMAF_MODEL_PATH,
#endif
+ AV1E_SET_DV_COST_UPD_FREQ,
+ AV1E_SET_PARTITION_INFO_PATH,
0 };
const arg_def_t *main_args[] = { &g_av1_codec_arg_defs.help,
@@ -422,6 +424,8 @@ const arg_def_t *av1_ctrl_args[] = {
#if CONFIG_TUNE_VMAF
&g_av1_codec_arg_defs.vmaf_model_path,
#endif
+ &g_av1_codec_arg_defs.dv_cost_upd_freq,
+ &g_av1_codec_arg_defs.partition_info_path,
NULL,
};
@@ -505,6 +509,7 @@ struct stream_config {
#if CONFIG_TUNE_VMAF
const char *vmaf_model_path;
#endif
+ const char *partition_info_path;
aom_color_range_t color_range;
};
@@ -681,6 +686,8 @@ static void parse_global_config(struct AvxEncoderConfig *global, char ***argv) {
if (global->usage == AOM_USAGE_REALTIME && global->passes > 1) {
warn("Enforcing one-pass encoding in realtime mode\n");
+ if (global->pass > 1)
+ die("Error: Invalid --pass=%d for one-pass encoding\n", global->pass);
global->passes = 1;
}
@@ -853,9 +860,9 @@ static void set_config_arg_key_vals(struct stream_config *config,
}
/* Point either to the next free element or the first instance of this
- * control.
+ * option.
*/
- for (j = 0; j < config->arg_ctrl_cnt; j++)
+ for (j = 0; j < config->arg_key_val_cnt; j++)
if (strcmp(name, config->arg_key_vals[j][0]) == 0) break;
/* Update/insert */
@@ -1071,6 +1078,9 @@ static int parse_stream_params(struct AvxEncoderConfig *global,
} else if (arg_match(&arg, &g_av1_codec_arg_defs.vmaf_model_path, argi)) {
config->vmaf_model_path = arg.val;
#endif
+ } else if (arg_match(&arg, &g_av1_codec_arg_defs.partition_info_path,
+ argi)) {
+ config->partition_info_path = arg.val;
} else if (arg_match(&arg, &g_av1_codec_arg_defs.use_fixed_qp_offsets,
argi)) {
config->cfg.use_fixed_qp_offsets = arg_parse_uint(&arg);
@@ -1078,9 +1088,14 @@ static int parse_stream_params(struct AvxEncoderConfig *global,
const int fixed_qp_offset_count = arg_parse_list(
&arg, config->cfg.fixed_qp_offsets, FIXED_QP_OFFSET_COUNT);
if (fixed_qp_offset_count < FIXED_QP_OFFSET_COUNT) {
- die("Option --fixed_qp_offsets requires %d comma-separated values, but "
- "only %d values were provided.\n",
- FIXED_QP_OFFSET_COUNT, fixed_qp_offset_count);
+ if (fixed_qp_offset_count < 2) {
+ die("Option --fixed_qp_offsets requires at least 2 comma-separated "
+ "values for kf and arf, but only %d were provided.\n",
+ fixed_qp_offset_count);
+ }
+ for (int k = fixed_qp_offset_count; k < FIXED_QP_OFFSET_COUNT; ++k)
+ config->cfg.fixed_qp_offsets[k] =
+ (config->cfg.fixed_qp_offsets[k - 1] + 1) / 2;
}
config->cfg.use_fixed_qp_offsets = 1;
} else if (global->usage == AOM_USAGE_REALTIME &&
@@ -1301,7 +1316,6 @@ static void show_stream_config(struct stream_state *stream,
SHOW_PARAMS(disable_intrabc);
SHOW_PARAMS(disable_cfl);
SHOW_PARAMS(disable_smooth_intra);
- SHOW_PARAMS(disable_diagonal_intra);
SHOW_PARAMS(disable_filter_intra);
SHOW_PARAMS(disable_dual_filter);
SHOW_PARAMS(disable_intra_angle_delta);
@@ -1437,6 +1451,11 @@ static void initialize_encoder(struct stream_state *stream,
stream->config.vmaf_model_path);
}
#endif
+ if (stream->config.partition_info_path) {
+ AOM_CODEC_CONTROL_TYPECHECKED(&stream->encoder,
+ AV1E_SET_PARTITION_INFO_PATH,
+ stream->config.partition_info_path);
+ }
if (stream->config.film_grain_filename) {
AOM_CODEC_CONTROL_TYPECHECKED(&stream->encoder, AV1E_SET_FILM_GRAIN_TABLE,
@@ -1473,6 +1492,33 @@ static void initialize_encoder(struct stream_state *stream,
#endif
}
+// Convert the input image 'img' to a monochrome image. The Y plane of the
+// output image is a shallow copy of the Y plane of the input image, therefore
+// the input image must remain valid for the lifetime of the output image. The U
+// and V planes of the output image are set to null pointers. The output image
+// format is AOM_IMG_FMT_I420 because libaom does not have AOM_IMG_FMT_I400.
+static void convert_image_to_monochrome(const struct aom_image *img,
+ struct aom_image *monochrome_img) {
+ *monochrome_img = *img;
+ monochrome_img->fmt = AOM_IMG_FMT_I420;
+ if (img->fmt & AOM_IMG_FMT_HIGHBITDEPTH) {
+ monochrome_img->fmt |= AOM_IMG_FMT_HIGHBITDEPTH;
+ }
+ monochrome_img->monochrome = 1;
+ monochrome_img->csp = AOM_CSP_UNKNOWN;
+ monochrome_img->x_chroma_shift = 1;
+ monochrome_img->y_chroma_shift = 1;
+ monochrome_img->planes[AOM_PLANE_U] = NULL;
+ monochrome_img->planes[AOM_PLANE_V] = NULL;
+ monochrome_img->stride[AOM_PLANE_U] = 0;
+ monochrome_img->stride[AOM_PLANE_V] = 0;
+ monochrome_img->sz = 0;
+ monochrome_img->bps = (img->fmt & AOM_IMG_FMT_HIGHBITDEPTH) ? 16 : 8;
+ monochrome_img->img_data = NULL;
+ monochrome_img->img_data_owner = 0;
+ monochrome_img->self_allocd = 0;
+}
+
static void encode_frame(struct stream_state *stream,
struct AvxEncoderConfig *global, struct aom_image *img,
unsigned int frames_in) {
@@ -1552,6 +1598,12 @@ static void encode_frame(struct stream_state *stream,
#endif
}
+ struct aom_image monochrome_img;
+ if (img && cfg->monochrome) {
+ convert_image_to_monochrome(img, &monochrome_img);
+ img = &monochrome_img;
+ }
+
aom_usec_timer_start(&timer);
aom_codec_encode(&stream->encoder, img, frame_start,
(uint32_t)(next_frame_start - frame_start), 0);
@@ -1941,8 +1993,10 @@ int main(int argc, const char **argv_) {
stream->config.cfg.g_profile = 1;
profile_updated = 1;
}
- } else if (input.bit_depth == 12 || input.fmt == AOM_IMG_FMT_I422 ||
- input.fmt == AOM_IMG_FMT_I42216) {
+ } else if (input.bit_depth == 12 ||
+ ((input.fmt == AOM_IMG_FMT_I422 ||
+ input.fmt == AOM_IMG_FMT_I42216) &&
+ !stream->config.cfg.monochrome)) {
stream->config.cfg.g_profile = 2;
profile_updated = 1;
}
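
The relaxed --fixed_qp_offsets parsing above requires only the keyframe and ARF offsets; each remaining pyramid level is then filled with half of the previous level, rounded up. A small self-contained sketch with a worked example (six levels, matching the FIXED_QP_OFFSET_COUNT used elsewhere in this change; offsets are shown as plain ints here):

#include <stdio.h>

#define NUM_QP_OFFSET_LEVELS 6  // stand-in for FIXED_QP_OFFSET_COUNT

static void fill_remaining_qp_offsets(int *offsets, int provided) {
  // Same rule as the hunk above: each missing level gets half of the
  // previous one, rounded up.
  for (int k = provided; k < NUM_QP_OFFSET_LEVELS; ++k)
    offsets[k] = (offsets[k - 1] + 1) / 2;
}

int main(void) {
  int offsets[NUM_QP_OFFSET_LEVELS] = { 16, 10 };  // kf and arf given on CLI
  fill_remaining_qp_offsets(offsets, 2);
  for (int k = 0; k < NUM_QP_OFFSET_LEVELS; ++k) printf("%d ", offsets[k]);
  printf("\n");  // prints: 16 10 5 3 2 1
  return 0;
}
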
diff --git a/third_party/libaom/source/libaom/av1/arg_defs.c b/third_party/libaom/source/libaom/av1/arg_defs.c
index e79f9b2934..8646b09c9d 100644
--- a/third_party/libaom/source/libaom/av1/arg_defs.c
+++ b/third_party/libaom/source/libaom/av1/arg_defs.c
@@ -271,7 +271,9 @@ const av1_codec_arg_definitions_t g_av1_codec_arg_defs = {
.noise_sens = ARG_DEF(NULL, "noise-sensitivity", 1,
"Noise sensitivity (frames to blur)"),
.sharpness = ARG_DEF(NULL, "sharpness", 1,
- "Loop filter sharpness (0..7), default is 0"),
+ "Bias towards block sharpness in rate-distortion "
+ "optimization of transform coefficients "
+ "(0..7), default is 0"),
.static_thresh =
ARG_DEF(NULL, "static-thresh", 1, "Motion detection threshold"),
.auto_altref =
@@ -448,13 +450,16 @@ const av1_codec_arg_definitions_t g_av1_codec_arg_defs = {
"Use Default-transform only for INTRA modes"),
.quant_b_adapt = ARG_DEF(NULL, "quant-b-adapt", 1, "Use adaptive quantize_b"),
.coeff_cost_upd_freq = ARG_DEF(NULL, "coeff-cost-upd-freq", 1,
- "Update freq for coeff costs"
+ "Update freq for coeff costs. "
"0: SB, 1: SB Row per Tile, 2: Tile, 3: Off"),
.mode_cost_upd_freq = ARG_DEF(NULL, "mode-cost-upd-freq", 1,
- "Update freq for mode costs"
+ "Update freq for mode costs. "
"0: SB, 1: SB Row per Tile, 2: Tile, 3: Off"),
.mv_cost_upd_freq = ARG_DEF(NULL, "mv-cost-upd-freq", 1,
- "Update freq for mv costs"
+ "Update freq for mv costs. "
+ "0: SB, 1: SB Row per Tile, 2: Tile, 3: Off"),
+ .dv_cost_upd_freq = ARG_DEF(NULL, "dv-cost-upd-freq", 1,
+ "Update freq for dv costs. "
"0: SB, 1: SB Row per Tile, 2: Tile, 3: Off"),
.num_tg = ARG_DEF(NULL, "num-tile-groups", 1,
"Maximum number of tile groups, default is 1"),
@@ -471,6 +476,8 @@ const av1_codec_arg_definitions_t g_av1_codec_arg_defs = {
.vmaf_model_path =
ARG_DEF(NULL, "vmaf-model-path", 1, "Path to the VMAF model file"),
#endif
+ .partition_info_path = ARG_DEF(NULL, "partition-info-path", 1,
+ "Partition information read and write path"),
.film_grain_test = ARG_DEF(
NULL, "film-grain-test", 1,
"Film grain test vectors (0: none (default), 1: test-1 2: test-2, "
@@ -592,7 +599,9 @@ const av1_codec_arg_definitions_t g_av1_codec_arg_defs = {
"pyramid. Selected automatically from --cq-level if "
"--fixed-qp-offsets is not provided. If this option is not "
"specified (default), offsets are adaptively chosen by the "
- "encoder."),
+ "encoder. Further, if this option is specified, at least two "
+ "comma-separated values corresponding to kf and arf offsets "
+ "must be provided, while the rest are chosen by the encoder"),
.fixed_qp_offsets = ARG_DEF(
NULL, "fixed-qp-offsets", 1,
@@ -605,6 +614,6 @@ const av1_codec_arg_definitions_t g_av1_codec_arg_defs = {
.vbr_corpus_complexity_lap = ARG_DEF(
NULL, "vbr-corpus-complexity-lap", 1,
"Set average corpus complexity per mb for single pass VBR using lap. "
- "(0..10000), default is 0")
+ "(0..10000), default is 0"),
#endif // CONFIG_AV1_ENCODER
};
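
The new dv-cost-upd-freq option above uses the same 0..3 frequency scale as the existing coeff/mode/mv cost-update options. A tiny sketch of the mapping the help strings describe (the names below are illustrative; the encoder's real enum is COST_UPDATE_TYPE with values such as COST_UPD_SB and COST_UPD_OFF, as seen later in av1_cx_iface.c):

typedef enum {
  UPD_EVERY_SB = 0,      // 0: update per superblock
  UPD_EVERY_SB_ROW = 1,  // 1: update per SB row within a tile
  UPD_EVERY_TILE = 2,    // 2: update per tile
  UPD_OFF = 3            // 3: off
} cost_update_freq_sketch;
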
diff --git a/third_party/libaom/source/libaom/av1/arg_defs.h b/third_party/libaom/source/libaom/av1/arg_defs.h
index f86e91551c..6a8d0d47cf 100644
--- a/third_party/libaom/source/libaom/av1/arg_defs.h
+++ b/third_party/libaom/source/libaom/av1/arg_defs.h
@@ -173,12 +173,14 @@ typedef struct av1_codec_arg_definitions {
arg_def_t coeff_cost_upd_freq;
arg_def_t mode_cost_upd_freq;
arg_def_t mv_cost_upd_freq;
+ arg_def_t dv_cost_upd_freq;
arg_def_t num_tg;
arg_def_t mtu_size;
arg_def_t timing_info;
#if CONFIG_TUNE_VMAF
arg_def_t vmaf_model_path;
#endif
+ arg_def_t partition_info_path;
arg_def_t film_grain_test;
arg_def_t film_grain_table;
#if CONFIG_DENOISE
diff --git a/third_party/libaom/source/libaom/av1/av1_cx_iface.c b/third_party/libaom/source/libaom/av1/av1_cx_iface.c
index 123bb1dc41..11c47bca24 100644
--- a/third_party/libaom/source/libaom/av1/av1_cx_iface.c
+++ b/third_party/libaom/source/libaom/av1/av1_cx_iface.c
@@ -26,6 +26,7 @@
#include "av1/encoder/bitstream.h"
#include "av1/encoder/encoder.h"
#include "av1/encoder/ethread.h"
+#include "av1/encoder/external_partition.h"
#include "av1/encoder/firstpass.h"
#include "av1/arg_defs.h"
@@ -51,6 +52,7 @@ struct av1_extracfg {
unsigned int gf_max_pyr_height;
aom_tune_metric tuning;
const char *vmaf_model_path;
+ const char *partition_info_path;
unsigned int cq_level; // constrained quality level
unsigned int rc_max_intra_bitrate_pct;
unsigned int rc_max_inter_bitrate_pct;
@@ -154,12 +156,26 @@ struct av1_extracfg {
COST_UPDATE_TYPE coeff_cost_upd_freq;
COST_UPDATE_TYPE mode_cost_upd_freq;
COST_UPDATE_TYPE mv_cost_upd_freq;
+ COST_UPDATE_TYPE dv_cost_upd_freq;
unsigned int ext_tile_debug;
unsigned int sb_multipass_unit_test;
};
+#if CONFIG_REALTIME_ONLY
+// Settings changed for realtime only build:
+// cpu_used: 7
+// enable_tpl_model: 0
+// enable_restoration: 0
+// enable_obmc: 0
+// deltaq_mode: NO_DELTA_Q
+// enable_global_motion usage: 0
+// enable_warped_motion at sequence level: 0
+// allow_warped_motion at frame level: 0
+// coeff_cost_upd_freq: COST_UPD_OFF
+// mode_cost_upd_freq: COST_UPD_OFF
+// mv_cost_upd_freq: COST_UPD_OFF
static struct av1_extracfg default_extra_cfg = {
- 0, // cpu_used
+ 7, // cpu_used
1, // enable_auto_alt_ref
0, // enable_auto_bwd_ref
0, // noise_sensitivity
@@ -168,7 +184,7 @@ static struct av1_extracfg default_extra_cfg = {
1, // row_mt
0, // tile_columns
0, // tile_rows
- 1, // enable_tpl_model
+ 0, // enable_tpl_model
1, // enable_keyframe_filtering
7, // arnr_max_frames
5, // arnr_strength
@@ -177,31 +193,32 @@ static struct av1_extracfg default_extra_cfg = {
0, // gf_min_pyr_height
5, // gf_max_pyr_height
AOM_TUNE_PSNR, // tuning
- "/usr/local/share/model/vmaf_v0.6.1.pkl", // VMAF model path
- 10, // cq_level
- 0, // rc_max_intra_bitrate_pct
- 0, // rc_max_inter_bitrate_pct
- 0, // gf_cbr_boost_pct
- 0, // lossless
- 1, // enable_cdef
- 1, // enable_restoration
- 0, // force_video_mode
- 1, // enable_obmc
- 3, // disable_trellis_quant
- 0, // enable_qm
- DEFAULT_QM_Y, // qm_y
- DEFAULT_QM_U, // qm_u
- DEFAULT_QM_V, // qm_v
- DEFAULT_QM_FIRST, // qm_min
- DEFAULT_QM_LAST, // qm_max
- 1, // max number of tile groups
- 0, // mtu_size
+ "/usr/local/share/model/vmaf_v0.6.1.json", // VMAF model path
+ ".", // partition info path
+ 10, // cq_level
+ 0, // rc_max_intra_bitrate_pct
+ 0, // rc_max_inter_bitrate_pct
+ 0, // gf_cbr_boost_pct
+ 0, // lossless
+ 1, // enable_cdef
+ 0, // enable_restoration
+ 0, // force_video_mode
+ 0, // enable_obmc
+ 3, // disable_trellis_quant
+ 0, // enable_qm
+ DEFAULT_QM_Y, // qm_y
+ DEFAULT_QM_U, // qm_u
+ DEFAULT_QM_V, // qm_v
+ DEFAULT_QM_FIRST, // qm_min
+ DEFAULT_QM_LAST, // qm_max
+ 1, // max number of tile groups
+ 0, // mtu_size
AOM_TIMING_UNSPECIFIED, // No picture timing signaling in bitstream
0, // frame_parallel_decoding_mode
1, // enable dual filter
0, // enable delta quant in chroma planes
NO_AQ, // aq_mode
- DELTA_Q_OBJECTIVE, // deltaq_mode
+ NO_DELTA_Q, // deltaq_mode
0, // delta lf mode
0, // frame_periodic_boost
AOM_BITS_8, // Bit depth
@@ -243,9 +260,9 @@ static struct av1_extracfg default_extra_cfg = {
1, // enable difference-weighted compound
1, // enable interinter wedge compound
1, // enable interintra wedge compound
- 1, // enable_global_motion usage
- 1, // enable_warped_motion at sequence level
- 1, // allow_warped_motion at frame level
+ 0, // enable_global_motion usage
+ 0, // enable_warped_motion at sequence level
+ 0, // allow_warped_motion at frame level
1, // enable filter intra at sequence level
1, // enable smooth intra modes usage for sequence
1, // enable Paeth intra mode usage for sequence
@@ -277,15 +294,148 @@ static struct av1_extracfg default_extra_cfg = {
SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
+ }, // target_seq_level_idx
+ 0, // tier_mask
+ 0, // min_cr
+ COST_UPD_OFF, // coeff_cost_upd_freq
+ COST_UPD_OFF, // mode_cost_upd_freq
+ COST_UPD_OFF, // mv_cost_upd_freq
+ COST_UPD_OFF, // dv_cost_upd_freq
+ 0, // ext_tile_debug
+ 0, // sb_multipass_unit_test
+};
+#else
+static struct av1_extracfg default_extra_cfg = {
+ 0, // cpu_used
+ 1, // enable_auto_alt_ref
+ 0, // enable_auto_bwd_ref
+ 0, // noise_sensitivity
+ 0, // sharpness
+ 0, // static_thresh
+ 1, // row_mt
+ 0, // tile_columns
+ 0, // tile_rows
+ 1, // enable_tpl_model
+ 1, // enable_keyframe_filtering
+ 7, // arnr_max_frames
+ 5, // arnr_strength
+ 0, // min_gf_interval; 0 -> default decision
+ 0, // max_gf_interval; 0 -> default decision
+ 0, // gf_min_pyr_height
+ 5, // gf_max_pyr_height
+ AOM_TUNE_PSNR, // tuning
+ "/usr/local/share/model/vmaf_v0.6.1.json", // VMAF model path
+ ".", // partition info path
+ 10, // cq_level
+ 0, // rc_max_intra_bitrate_pct
+ 0, // rc_max_inter_bitrate_pct
+ 0, // gf_cbr_boost_pct
+ 0, // lossless
+ 1, // enable_cdef
+ 1, // enable_restoration
+ 0, // force_video_mode
+ 1, // enable_obmc
+ 3, // disable_trellis_quant
+ 0, // enable_qm
+ DEFAULT_QM_Y, // qm_y
+ DEFAULT_QM_U, // qm_u
+ DEFAULT_QM_V, // qm_v
+ DEFAULT_QM_FIRST, // qm_min
+ DEFAULT_QM_LAST, // qm_max
+ 1, // max number of tile groups
+ 0, // mtu_size
+ AOM_TIMING_UNSPECIFIED, // No picture timing signaling in bitstream
+ 0, // frame_parallel_decoding_mode
+ 1, // enable dual filter
+ 0, // enable delta quant in chroma planes
+ NO_AQ, // aq_mode
+ DELTA_Q_OBJECTIVE, // deltaq_mode
+ 0, // delta lf mode
+ 0, // frame_periodic_boost
+ AOM_BITS_8, // Bit depth
+ AOM_CONTENT_DEFAULT, // content
+ AOM_CICP_CP_UNSPECIFIED, // CICP color primaries
+ AOM_CICP_TC_UNSPECIFIED, // CICP transfer characteristics
+ AOM_CICP_MC_UNSPECIFIED, // CICP matrix coefficients
+ AOM_CSP_UNKNOWN, // chroma sample position
+ 0, // color range
+ 0, // render width
+ 0, // render height
+ AOM_SUPERBLOCK_SIZE_DYNAMIC, // superblock_size
+ 1, // this depends on large_scale_tile.
+ 0, // error_resilient_mode off by default.
+ 0, // s_frame_mode off by default.
+ 0, // film_grain_test_vector
+ 0, // film_grain_table_filename
+ 0, // motion_vector_unit_test
+ 1, // CDF update mode
+ 1, // enable rectangular partitions
+ 1, // enable ab shape partitions
+ 1, // enable 1:4 and 4:1 partitions
+ 4, // min_partition_size
+ 128, // max_partition_size
+ 1, // enable intra edge filter
+ 1, // frame order hint
+ 1, // enable 64-pt transform usage
+ 1, // enable flip and identity transform
+ 1, // enable rectangular transform usage
+ 1, // dist-wtd compound
+ 7, // max_reference_frames
+ 0, // enable_reduced_reference_set
+ 1, // enable_ref_frame_mvs sequence level
+ 1, // allow ref_frame_mvs frame level
+ 1, // enable masked compound at sequence level
+ 1, // enable one sided compound at sequence level
+ 1, // enable interintra compound at sequence level
+ 1, // enable smooth interintra mode
+ 1, // enable difference-weighted compound
+ 1, // enable interinter wedge compound
+ 1, // enable interintra wedge compound
+ 1, // enable_global_motion usage
+ 1, // enable_warped_motion at sequence level
+ 1, // allow_warped_motion at frame level
+ 1, // enable filter intra at sequence level
+ 1, // enable smooth intra modes usage for sequence
+ 1, // enable Paeth intra mode usage for sequence
+ 1, // enable CFL uv intra mode usage for sequence
+ 1, // enable D45 to D203 intra mode usage for sequence
+ 1, // superres
+ 1, // enable overlay
+ 1, // enable palette
+ !CONFIG_SHARP_SETTINGS, // enable intrabc
+ 1, // enable angle delta
+#if CONFIG_DENOISE
+ 0, // noise_level
+ 32, // noise_block_size
+ 1, // enable_dnl_denoising
+#endif
+ 0, // chroma_subsampling_x
+ 0, // chroma_subsampling_y
+ 0, // reduced_tx_type_set
+ 0, // use_intra_dct_only
+ 0, // use_inter_dct_only
+ 0, // use_intra_default_tx_only
+ 0, // quant_b_adapt
+ 0, // vbr_corpus_complexity_lap
+ {
+ SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
+ SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
+ SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
+ SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
+ SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
+ SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
+ SEQ_LEVEL_MAX, SEQ_LEVEL_MAX,
}, // target_seq_level_idx
0, // tier_mask
0, // min_cr
COST_UPD_SB, // coeff_cost_upd_freq
COST_UPD_SB, // mode_cost_upd_freq
COST_UPD_SB, // mv_cost_upd_freq
+ COST_UPD_SB, // dv_cost_upd_freq
0, // ext_tile_debug
0, // sb_multipass_unit_test
};
+#endif
struct aom_codec_alg_priv {
aom_codec_priv_t base;
@@ -380,7 +530,11 @@ static aom_codec_err_t validate_config(aom_codec_alg_priv_t *ctx,
RANGE_CHECK_HI(extra_cfg, deltaq_mode, DELTA_Q_MODE_COUNT - 1);
RANGE_CHECK_HI(extra_cfg, deltalf_mode, 1);
RANGE_CHECK_HI(extra_cfg, frame_periodic_boost, 1);
- RANGE_CHECK_HI(cfg, g_usage, 2);
+#if CONFIG_REALTIME_ONLY
+ RANGE_CHECK(cfg, g_usage, AOM_USAGE_REALTIME, AOM_USAGE_REALTIME);
+#else
+ RANGE_CHECK_HI(cfg, g_usage, AOM_USAGE_ALL_INTRA);
+#endif
RANGE_CHECK_HI(cfg, g_threads, MAX_NUM_THREADS);
RANGE_CHECK(cfg, rc_end_usage, AOM_VBR, AOM_Q);
RANGE_CHECK_HI(cfg, rc_undershoot_pct, 100);
@@ -540,15 +694,6 @@ static aom_codec_err_t validate_config(aom_codec_alg_priv_t *ctx,
}
#endif
-#if !CONFIG_USE_VMAF_RC
- if (extra_cfg->tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN) {
- ERROR(
- "This error may be related to the wrong configuration options: try to "
- "set -DCONFIG_TUNE_VMAF=1 and -DCONFIG_USE_VMAF_RC=1 at the time CMake"
- " is run.");
- }
-#endif
-
RANGE_CHECK(extra_cfg, tuning, AOM_TUNE_PSNR, AOM_TUNE_BUTTERAUGLI);
RANGE_CHECK(extra_cfg, timing_info_type, AOM_TIMING_UNSPECIFIED,
@@ -572,6 +717,7 @@ static aom_codec_err_t validate_config(aom_codec_alg_priv_t *ctx,
RANGE_CHECK(extra_cfg, coeff_cost_upd_freq, 0, 3);
RANGE_CHECK(extra_cfg, mode_cost_upd_freq, 0, 3);
RANGE_CHECK(extra_cfg, mv_cost_upd_freq, 0, 3);
+ RANGE_CHECK(extra_cfg, dv_cost_upd_freq, 0, 3);
RANGE_CHECK(extra_cfg, min_partition_size, 4, 128);
RANGE_CHECK(extra_cfg, max_partition_size, 4, 128);
@@ -619,13 +765,14 @@ static aom_codec_err_t validate_img(aom_codec_alg_priv_t *ctx,
#if CONFIG_TUNE_BUTTERAUGLI
if (ctx->extra_cfg.tuning == AOM_TUNE_BUTTERAUGLI) {
- if (img->x_chroma_shift != 1 || img->y_chroma_shift != 1) {
- ERROR("Only YV12/I420 images supported in tune=butteraugli mode.");
+ if (img->bit_depth > 8) {
+ ERROR("Only 8 bit depth images supported in tune=butteraugli mode.");
}
- if ((img->cp != 0 && img->cp != AOM_CICP_CP_BT_709) ||
- (img->tc != 0 && img->tc != AOM_CICP_TC_BT_709) ||
- (img->mc != 0 && img->mc != AOM_CICP_MC_BT_709)) {
- ERROR("Only BT.709 images supported in tune=butteraugli mode.");
+ if (img->mc != 0 && img->mc != AOM_CICP_MC_BT_709 &&
+ img->mc != AOM_CICP_MC_BT_601 && img->mc != AOM_CICP_MC_BT_470_B_G) {
+ ERROR(
+ "Only BT.709 and BT.601 matrix coefficients supported in "
+ "tune=butteraugli mode. Identity matrix is treated as BT.601.");
}
}
#endif
@@ -689,7 +836,6 @@ static void update_default_encoder_config(const cfg_options_t *cfg,
extra_cfg->enable_smooth_intra = (cfg->disable_smooth_intra == 0);
extra_cfg->enable_paeth_intra = (cfg->disable_paeth_intra == 0);
extra_cfg->enable_cfl_intra = (cfg->disable_cfl == 0);
- extra_cfg->enable_diagonal_intra = (cfg->disable_diagonal_intra == 0);
extra_cfg->enable_obmc = (cfg->disable_obmc == 0);
extra_cfg->enable_palette = (cfg->disable_palette == 0);
extra_cfg->enable_intrabc = (cfg->disable_intrabc == 0);
@@ -709,12 +855,12 @@ static double convert_qp_offset(int cq_level, int q_offset, int bit_depth) {
return (base_q_val - new_q_val);
}
-static double get_modeled_qp_offset(int cq_level, int level, int bit_depth) {
- // 80% for keyframe was derived empirically.
- // 40% similar to rc_pick_q_and_bounds_one_pass_vbr() for Q mode ARF.
+static double get_modeled_qp_offset(int qp, int level, int bit_depth) {
+ // 76% for keyframe was derived empirically.
+ // 60% similar to rc_pick_q_and_bounds_one_pass_vbr() for Q mode ARF.
// Rest derived similar to rc_pick_q_and_bounds_two_pass()
- static const int percents[FIXED_QP_OFFSET_COUNT] = { 76, 60, 30, 15, 8 };
- const double q_val = av1_convert_qindex_to_q(cq_level, bit_depth);
+ static const int percents[FIXED_QP_OFFSET_COUNT] = { 76, 60, 30, 15, 8, 4 };
+ const double q_val = av1_convert_qindex_to_q(qp, bit_depth);
return q_val * percents[level] / 100;
}
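
With the widened percentage table above, the modeled QP offset for a pyramid level is simply a fixed fraction of the real quantizer step size at the given qindex. A minimal sketch (q_step stands in for the value returned by av1_convert_qindex_to_q(), which depends on bit depth):

static const int kQpOffsetPercents[6] = { 76, 60, 30, 15, 8, 4 };

static double modeled_qp_offset(double q_step, int pyramid_level) {
  return q_step * kQpOffsetPercents[pyramid_level] / 100.0;
}
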
@@ -916,6 +1062,7 @@ static aom_codec_err_t set_encoder_config(AV1EncoderConfig *oxcf,
oxcf->cost_upd_freq.coeff = (COST_UPDATE_TYPE)extra_cfg->coeff_cost_upd_freq;
oxcf->cost_upd_freq.mode = (COST_UPDATE_TYPE)extra_cfg->mode_cost_upd_freq;
oxcf->cost_upd_freq.mv = (COST_UPDATE_TYPE)extra_cfg->mv_cost_upd_freq;
+ oxcf->cost_upd_freq.dv = (COST_UPDATE_TYPE)extra_cfg->dv_cost_upd_freq;
// Set frame resize mode configuration.
resize_cfg->resize_mode = (RESIZE_MODE)cfg->rc_resize_mode;
@@ -1044,7 +1191,7 @@ static aom_codec_err_t set_encoder_config(AV1EncoderConfig *oxcf,
oxcf->motion_mode_cfg.enable_obmc = extra_cfg->enable_obmc;
oxcf->motion_mode_cfg.enable_warped_motion = extra_cfg->enable_warped_motion;
oxcf->motion_mode_cfg.allow_warped_motion =
- (cfg->g_usage == AOM_USAGE_REALTIME)
+ (cfg->g_usage == AOM_USAGE_REALTIME && oxcf->speed >= 7)
? false
: (extra_cfg->allow_warped_motion & extra_cfg->enable_warped_motion);
@@ -1141,6 +1288,8 @@ static aom_codec_err_t set_encoder_config(AV1EncoderConfig *oxcf,
sizeof(oxcf->target_seq_level_idx));
oxcf->tier_mask = extra_cfg->tier_mask;
+ oxcf->partition_info_path = extra_cfg->partition_info_path;
+
return AOM_CODEC_OK;
}
@@ -1179,10 +1328,20 @@ static aom_codec_err_t encoder_set_config(aom_codec_alg_priv_t *ctx,
ctx->cfg = *cfg;
set_encoder_config(&ctx->oxcf, &ctx->cfg, &ctx->extra_cfg);
// On profile change, request a key frame
- force_key |= ctx->ppi->cpi->common.seq_params.profile != ctx->oxcf.profile;
- av1_change_config(ctx->ppi->cpi, &ctx->oxcf);
+ force_key |= ctx->ppi->seq_params.profile != ctx->oxcf.profile;
+ bool is_sb_size_changed = false;
+ av1_change_config_seq(ctx->ppi, &ctx->oxcf, &is_sb_size_changed);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ int i;
+ for (i = 0; i < ctx->ppi->num_fp_contexts; i++) {
+ av1_change_config(ctx->ppi->parallel_cpi[i], &ctx->oxcf,
+ is_sb_size_changed);
+ }
+#else
+ av1_change_config(ctx->ppi->cpi, &ctx->oxcf, is_sb_size_changed);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
if (ctx->ppi->cpi_lap != NULL) {
- av1_change_config(ctx->ppi->cpi_lap, &ctx->oxcf);
+ av1_change_config(ctx->ppi->cpi_lap, &ctx->oxcf, is_sb_size_changed);
}
}
@@ -1192,7 +1351,7 @@ static aom_codec_err_t encoder_set_config(aom_codec_alg_priv_t *ctx,
}
static aom_fixed_buf_t *encoder_get_global_headers(aom_codec_alg_priv_t *ctx) {
- return av1_get_global_headers(ctx->ppi->cpi);
+ return av1_get_global_headers(ctx->ppi);
}
static aom_codec_err_t ctrl_get_quantizer(aom_codec_alg_priv_t *ctx,
@@ -1215,7 +1374,7 @@ static aom_codec_err_t ctrl_get_baseline_gf_interval(aom_codec_alg_priv_t *ctx,
va_list args) {
int *const arg = va_arg(args, int *);
if (arg == NULL) return AOM_CODEC_INVALID_PARAM;
- *arg = ctx->ppi->cpi->rc.baseline_gf_interval;
+ *arg = ctx->ppi->p_rc.baseline_gf_interval;
return AOM_CODEC_OK;
}
@@ -1225,9 +1384,19 @@ static aom_codec_err_t update_extra_cfg(aom_codec_alg_priv_t *ctx,
if (res == AOM_CODEC_OK) {
ctx->extra_cfg = *extra_cfg;
set_encoder_config(&ctx->oxcf, &ctx->cfg, &ctx->extra_cfg);
- av1_change_config(ctx->ppi->cpi, &ctx->oxcf);
+ bool is_sb_size_changed = false;
+ av1_change_config_seq(ctx->ppi, &ctx->oxcf, &is_sb_size_changed);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ int i;
+ for (i = 0; i < ctx->ppi->num_fp_contexts; i++) {
+ av1_change_config(ctx->ppi->parallel_cpi[i], &ctx->oxcf,
+ is_sb_size_changed);
+ }
+#else
+ av1_change_config(ctx->ppi->cpi, &ctx->oxcf, is_sb_size_changed);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
if (ctx->ppi->cpi_lap != NULL) {
- av1_change_config(ctx->ppi->cpi_lap, &ctx->oxcf);
+ av1_change_config(ctx->ppi->cpi_lap, &ctx->oxcf, is_sb_size_changed);
}
}
return res;
@@ -1299,7 +1468,13 @@ static aom_codec_err_t ctrl_set_tile_rows(aom_codec_alg_priv_t *ctx,
static aom_codec_err_t ctrl_set_enable_tpl_model(aom_codec_alg_priv_t *ctx,
va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
- extra_cfg.enable_tpl_model = CAST(AV1E_SET_ENABLE_TPL_MODEL, args);
+ const unsigned int tpl_model_arg = CAST(AV1E_SET_ENABLE_TPL_MODEL, args);
+#if CONFIG_REALTIME_ONLY
+ if (tpl_model_arg) {
+ ERROR("TPL model can't be turned on in realtime only build.");
+ }
+#endif
+ extra_cfg.enable_tpl_model = tpl_model_arg;
return update_extra_cfg(ctx, &extra_cfg);
}
@@ -1379,7 +1554,13 @@ static aom_codec_err_t ctrl_set_enable_cdef(aom_codec_alg_priv_t *ctx,
static aom_codec_err_t ctrl_set_enable_restoration(aom_codec_alg_priv_t *ctx,
va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
- extra_cfg.enable_restoration = CAST(AV1E_SET_ENABLE_RESTORATION, args);
+ const unsigned int restoration_arg = CAST(AV1E_SET_ENABLE_RESTORATION, args);
+#if CONFIG_REALTIME_ONLY
+ if (restoration_arg) {
+ ERROR("Restoration can't be turned on in realtime only build.");
+ }
+#endif
+ extra_cfg.enable_restoration = restoration_arg;
return update_extra_cfg(ctx, &extra_cfg);
}
@@ -1393,7 +1574,13 @@ static aom_codec_err_t ctrl_set_force_video_mode(aom_codec_alg_priv_t *ctx,
static aom_codec_err_t ctrl_set_enable_obmc(aom_codec_alg_priv_t *ctx,
va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
- extra_cfg.enable_obmc = CAST(AV1E_SET_ENABLE_OBMC, args);
+ const unsigned int obmc_arg = CAST(AV1E_SET_ENABLE_OBMC, args);
+#if CONFIG_REALTIME_ONLY
+ if (obmc_arg) {
+ ERROR("OBMC can't be enabled in realtime only build.");
+ }
+#endif
+ extra_cfg.enable_obmc = obmc_arg;
return update_extra_cfg(ctx, &extra_cfg);
}
@@ -1637,14 +1824,26 @@ static aom_codec_err_t ctrl_set_enable_interintra_wedge(
static aom_codec_err_t ctrl_set_enable_global_motion(aom_codec_alg_priv_t *ctx,
va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
- extra_cfg.enable_global_motion = CAST(AV1E_SET_ENABLE_GLOBAL_MOTION, args);
+ const int global_motion_arg = CAST(AV1E_SET_ENABLE_GLOBAL_MOTION, args);
+#if CONFIG_REALTIME_ONLY
+ if (global_motion_arg) {
+ ERROR("Global motion can't be enabled in realtime only build.");
+ }
+#endif
+ extra_cfg.enable_global_motion = global_motion_arg;
return update_extra_cfg(ctx, &extra_cfg);
}
static aom_codec_err_t ctrl_set_enable_warped_motion(aom_codec_alg_priv_t *ctx,
va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
- extra_cfg.enable_warped_motion = CAST(AV1E_SET_ENABLE_WARPED_MOTION, args);
+ const int warped_motion_arg = CAST(AV1E_SET_ENABLE_WARPED_MOTION, args);
+#if CONFIG_REALTIME_ONLY
+ if (warped_motion_arg) {
+ ERROR("Warped motion can't be enabled in realtime only build.");
+ }
+#endif
+ extra_cfg.enable_warped_motion = warped_motion_arg;
return update_extra_cfg(ctx, &extra_cfg);
}
@@ -1825,6 +2024,13 @@ static aom_codec_err_t ctrl_set_mv_cost_upd_freq(aom_codec_alg_priv_t *ctx,
return update_extra_cfg(ctx, &extra_cfg);
}
+static aom_codec_err_t ctrl_set_dv_cost_upd_freq(aom_codec_alg_priv_t *ctx,
+ va_list args) {
+ struct av1_extracfg extra_cfg = ctx->extra_cfg;
+ extra_cfg.dv_cost_upd_freq = CAST(AV1E_SET_DV_COST_UPD_FREQ, args);
+ return update_extra_cfg(ctx, &extra_cfg);
+}
+
static aom_codec_err_t ctrl_set_vmaf_model_path(aom_codec_alg_priv_t *ctx,
va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
@@ -1832,6 +2038,13 @@ static aom_codec_err_t ctrl_set_vmaf_model_path(aom_codec_alg_priv_t *ctx,
return update_extra_cfg(ctx, &extra_cfg);
}
+static aom_codec_err_t ctrl_set_partition_info_path(aom_codec_alg_priv_t *ctx,
+ va_list args) {
+ struct av1_extracfg extra_cfg = ctx->extra_cfg;
+ extra_cfg.partition_info_path = CAST(AV1E_SET_PARTITION_INFO_PATH, args);
+ return update_extra_cfg(ctx, &extra_cfg);
+}
+
static aom_codec_err_t ctrl_set_film_grain_test_vector(
aom_codec_alg_priv_t *ctx, va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
@@ -1890,7 +2103,13 @@ static aom_codec_err_t ctrl_set_enable_dnl_denoising(aom_codec_alg_priv_t *ctx,
static aom_codec_err_t ctrl_set_deltaq_mode(aom_codec_alg_priv_t *ctx,
va_list args) {
struct av1_extracfg extra_cfg = ctx->extra_cfg;
- extra_cfg.deltaq_mode = CAST(AV1E_SET_DELTAQ_MODE, args);
+ const DELTAQ_MODE deltaq_arg = CAST(AV1E_SET_DELTAQ_MODE, args);
+#if CONFIG_REALTIME_ONLY
+ if (deltaq_arg > NO_DELTA_Q) {
+ ERROR("Delta Q mode can't be enabled in realtime only build.");
+ }
+#endif
+ extra_cfg.deltaq_mode = deltaq_arg;
return update_extra_cfg(ctx, &extra_cfg);
}
@@ -1986,6 +2205,18 @@ static aom_codec_err_t ctrl_enable_sb_multipass_unit_test(
return update_extra_cfg(ctx, &extra_cfg);
}
+static aom_codec_err_t ctrl_set_external_partition(aom_codec_alg_priv_t *ctx,
+ va_list args) {
+ AV1_COMP *const cpi = ctx->ppi->cpi;
+ aom_ext_part_funcs_t funcs = *CAST(AV1E_SET_EXTERNAL_PARTITION, args);
+ aom_ext_part_config_t config;
+ // TODO(chengchen): verify the sb_size has been set at this point.
+ config.superblock_size = cpi->common.seq_params->sb_size;
+ const aom_codec_err_t status =
+ av1_ext_part_create(funcs, config, &cpi->ext_part_controller);
+ return status;
+}
+
#if !CONFIG_REALTIME_ONLY
static aom_codec_err_t create_stats_buffer(FIRSTPASS_STATS **frame_stats_buffer,
STATS_BUFFER_CTX *stats_buf_context,
@@ -2014,27 +2245,22 @@ static aom_codec_err_t create_stats_buffer(FIRSTPASS_STATS **frame_stats_buffer,
static aom_codec_err_t create_context_and_bufferpool(
AV1_PRIMARY *ppi, AV1_COMP **p_cpi, BufferPool **p_buffer_pool,
- AV1EncoderConfig *oxcf, struct aom_codec_pkt_list *pkt_list_head,
- FIRSTPASS_STATS *frame_stats_buf, COMPRESSOR_STAGE stage,
- int num_lap_buffers, int lap_lag_in_frames,
- STATS_BUFFER_CTX *stats_buf_context) {
+ AV1EncoderConfig *oxcf, COMPRESSOR_STAGE stage, int lap_lag_in_frames) {
aom_codec_err_t res = AOM_CODEC_OK;
- *p_buffer_pool = (BufferPool *)aom_calloc(1, sizeof(BufferPool));
- if (*p_buffer_pool == NULL) return AOM_CODEC_MEM_ERROR;
+ if (*p_buffer_pool == NULL) {
+ *p_buffer_pool = (BufferPool *)aom_calloc(1, sizeof(BufferPool));
+ if (*p_buffer_pool == NULL) return AOM_CODEC_MEM_ERROR;
#if CONFIG_MULTITHREAD
- if (pthread_mutex_init(&((*p_buffer_pool)->pool_mutex), NULL)) {
- return AOM_CODEC_MEM_ERROR;
- }
+ if (pthread_mutex_init(&((*p_buffer_pool)->pool_mutex), NULL)) {
+ return AOM_CODEC_MEM_ERROR;
+ }
#endif
- *p_cpi = av1_create_compressor(ppi, oxcf, *p_buffer_pool, frame_stats_buf,
- stage, num_lap_buffers, lap_lag_in_frames,
- stats_buf_context);
- if (*p_cpi == NULL)
- res = AOM_CODEC_MEM_ERROR;
- else
- (*p_cpi)->output_pkt_list = pkt_list_head;
+ }
+ *p_cpi = av1_create_compressor(ppi, oxcf, *p_buffer_pool, stage,
+ lap_lag_in_frames);
+ if (*p_cpi == NULL) res = AOM_CODEC_MEM_ERROR;
return res;
}
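
The refactor above makes the buffer-pool allocation lazy, so create_context_and_bufferpool() can be called once per frame-parallel encoder context against the same pool pointer, and the matching destroy path further down nulls the pointer after freeing so the shared pool is released only once. A minimal sketch of that allocate-once / free-once pattern with stand-in types (not the libaom structs):

#include <stdlib.h>

typedef struct SketchPool { void *frames; } SketchPool;

// First caller allocates the pool; later callers reuse the same pointer.
// Returns 0 on success, -1 on allocation failure.
static int ensure_shared_pool(SketchPool **p_pool) {
  if (*p_pool == NULL) {
    *p_pool = (SketchPool *)calloc(1, sizeof(**p_pool));
    if (*p_pool == NULL) return -1;
  }
  return 0;
}

// Matching teardown: free once and NULL the pointer so repeated calls across
// the parallel contexts are harmless.
static void release_shared_pool(SketchPool **p_pool) {
  if (*p_pool != NULL) {
    free(*p_pool);
    *p_pool = NULL;
  }
}
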
@@ -2084,27 +2310,48 @@ static aom_codec_err_t encoder_init(aom_codec_ctx_t *ctx) {
priv->oxcf.use_highbitdepth =
(ctx->init_flags & AOM_CODEC_USE_HIGHBITDEPTH) ? 1 : 0;
- priv->ppi = av1_create_primary_compressor();
+ priv->ppi = av1_create_primary_compressor(&priv->pkt_list.head,
+ *num_lap_buffers, &priv->oxcf);
if (!priv->ppi) return AOM_CODEC_MEM_ERROR;
#if !CONFIG_REALTIME_ONLY
res = create_stats_buffer(&priv->frame_stats_buffer,
&priv->stats_buf_context, *num_lap_buffers);
if (res != AOM_CODEC_OK) return AOM_CODEC_MEM_ERROR;
+
+ assert(MAX_LAP_BUFFERS >= MAX_LAG_BUFFERS);
+ int size = get_stats_buf_size(*num_lap_buffers, MAX_LAG_BUFFERS);
+ for (int i = 0; i < size; i++)
+ priv->ppi->twopass.frame_stats_arr[i] = &priv->frame_stats_buffer[i];
+
+ priv->ppi->twopass.stats_buf_ctx = &priv->stats_buf_context;
+ priv->ppi->twopass.stats_in =
+ priv->ppi->twopass.stats_buf_ctx->stats_in_start;
#endif
- res = create_context_and_bufferpool(
- priv->ppi, &priv->ppi->cpi, &priv->buffer_pool, &priv->oxcf,
- &priv->pkt_list.head, priv->frame_stats_buffer, ENCODE_STAGE,
- *num_lap_buffers, -1, &priv->stats_buf_context);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ assert(priv->ppi->num_fp_contexts >= 1);
+ int i;
+ for (i = 0; i < priv->ppi->num_fp_contexts; i++) {
+ res = create_context_and_bufferpool(
+ priv->ppi, &priv->ppi->parallel_cpi[i], &priv->buffer_pool,
+ &priv->oxcf, ENCODE_STAGE, -1);
+ if (res != AOM_CODEC_OK) {
+ return res;
+ }
+ }
+ priv->ppi->cpi = priv->ppi->parallel_cpi[0];
+#else
+ res = create_context_and_bufferpool(priv->ppi, &priv->ppi->cpi,
+ &priv->buffer_pool, &priv->oxcf,
+ ENCODE_STAGE, -1);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
// Create another compressor if look ahead is enabled
if (res == AOM_CODEC_OK && *num_lap_buffers) {
res = create_context_and_bufferpool(
priv->ppi, &priv->ppi->cpi_lap, &priv->buffer_pool_lap, &priv->oxcf,
- NULL, priv->frame_stats_buffer, LAP_STAGE, *num_lap_buffers,
- clamp(lap_lag_in_frames, 0, MAX_LAG_BUFFERS),
- &priv->stats_buf_context);
+ LAP_STAGE, clamp(lap_lag_in_frames, 0, MAX_LAG_BUFFERS));
}
}
}
@@ -2113,12 +2360,16 @@ static aom_codec_err_t encoder_init(aom_codec_ctx_t *ctx) {
}
static void destroy_context_and_bufferpool(AV1_COMP *cpi,
- BufferPool *buffer_pool) {
+ BufferPool **p_buffer_pool) {
av1_remove_compressor(cpi);
+ if (*p_buffer_pool) {
+ av1_free_ref_frame_buffers(*p_buffer_pool);
#if CONFIG_MULTITHREAD
- if (buffer_pool) pthread_mutex_destroy(&buffer_pool->pool_mutex);
+ pthread_mutex_destroy(&(*p_buffer_pool)->pool_mutex);
#endif
- aom_free(buffer_pool);
+ aom_free(*p_buffer_pool);
+ *p_buffer_pool = NULL;
+ }
}
static void destroy_stats_buffer(STATS_BUFFER_CTX *stats_buf_context,
@@ -2133,9 +2384,30 @@ static aom_codec_err_t encoder_destroy(aom_codec_alg_priv_t *ctx) {
if (ctx->ppi) {
AV1_PRIMARY *ppi = ctx->ppi;
- destroy_context_and_bufferpool(ppi->cpi, ctx->buffer_pool);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ for (int i = 0; i < ppi->num_fp_contexts - 1; i++) {
+ if (ppi->parallel_frames_data[i].cx_data_frame) {
+ free(ppi->parallel_frames_data[i].cx_data_frame);
+ }
+ }
+#endif
+#if CONFIG_ENTROPY_STATS
+ print_entropy_stats(ppi);
+#endif
+#if CONFIG_INTERNAL_STATS
+ print_internal_stats(ppi);
+#endif
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ int i;
+ for (i = 0; i < ppi->num_fp_contexts; i++) {
+ destroy_context_and_bufferpool(ppi->parallel_cpi[i], &ctx->buffer_pool);
+ }
+ ppi->cpi = NULL;
+#else
+ destroy_context_and_bufferpool(ppi->cpi, &ctx->buffer_pool);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
if (ppi->cpi_lap) {
- destroy_context_and_bufferpool(ppi->cpi_lap, ctx->buffer_pool_lap);
+ destroy_context_and_bufferpool(ppi->cpi_lap, &ctx->buffer_pool_lap);
}
av1_remove_primary_compressor(ppi);
}
@@ -2151,7 +2423,7 @@ static aom_codec_frame_flags_t get_frame_pkt_flags(const AV1_COMP *cpi,
aom_codec_frame_flags_t flags = lib_flags << 16;
if (lib_flags & FRAMEFLAGS_KEY ||
- (cpi->use_svc &&
+ (cpi->ppi->use_svc &&
svc->layer_context[svc->spatial_layer_id * svc->number_temporal_layers +
svc->temporal_layer_id]
.is_key_frame))
@@ -2182,7 +2454,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
AV1_COMP *cpi_lap = ppi->cpi_lap;
if (cpi == NULL) return AOM_CODEC_INVALID_PARAM;
- if (cpi->lap_enabled && cpi_lap == NULL && cpi->oxcf.pass == 0)
+ if (cpi->ppi->lap_enabled && cpi_lap == NULL && cpi->oxcf.pass == 0)
return AOM_CODEC_INVALID_PARAM;
if (img != NULL) {
@@ -2216,6 +2488,22 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
return AOM_CODEC_MEM_ERROR;
}
}
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ for (int i = 0; i < cpi->ppi->num_fp_contexts - 1; i++) {
+ if (cpi->ppi->parallel_frames_data[i].cx_data_frame == NULL) {
+ cpi->ppi->parallel_frames_data[i].cx_data_sz = uncompressed_frame_sz;
+ cpi->ppi->parallel_frames_data[i].frame_display_order_hint = -1;
+ cpi->ppi->parallel_frames_data[i].frame_size = 0;
+ cpi->ppi->parallel_frames_data[i].cx_data_frame =
+ (unsigned char *)malloc(
+ cpi->ppi->parallel_frames_data[i].cx_data_sz);
+ if (cpi->ppi->parallel_frames_data[i].cx_data_frame == NULL) {
+ cpi->ppi->parallel_frames_data[i].cx_data_sz = 0;
+ return AOM_CODEC_MEM_ERROR;
+ }
+ }
+ }
+#endif
}
}
@@ -2226,22 +2514,16 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
// The jmp_buf is valid only for the duration of the function that calls
// setjmp(). Therefore, this function must reset the 'setjmp' field to 0
// before it returns.
- if (setjmp(cpi->common.error.jmp)) {
- cpi->common.error.setjmp = 0;
- res = update_error_state(ctx, &cpi->common.error);
+ if (setjmp(ppi->error.jmp)) {
+ ppi->error.setjmp = 0;
+ res = update_error_state(ctx, &ppi->error);
aom_clear_system_state();
return res;
}
- cpi->common.error.setjmp = 1;
- if (cpi_lap != NULL) {
- if (setjmp(cpi_lap->common.error.jmp)) {
- cpi_lap->common.error.setjmp = 0;
- res = update_error_state(ctx, &cpi_lap->common.error);
- aom_clear_system_state();
- return res;
- }
- cpi_lap->common.error.setjmp = 1;
- }
+ ppi->error.setjmp = 1;
+
+ if (cpi->ppi->use_svc && cpi->svc.use_flexible_mode == 0 && flags == 0)
+ av1_set_svc_fixed_mode(cpi);
// Note(yunqing): While applying encoding flags, always start from enabling
// all, and then modifying according to the flags. Previous frame's flags are
@@ -2251,9 +2533,12 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
av1_apply_encoding_flags(cpi_lap, flags);
}
-#if CONFIG_USE_VMAF_RC
- aom_init_vmaf_model_rc(&cpi->vmaf_info.vmaf_model,
- cpi->oxcf.tune_cfg.vmaf_model_path);
+#if CONFIG_TUNE_VMAF
+ if (ctx->extra_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING &&
+ ctx->extra_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN) {
+ aom_init_vmaf_model(&cpi->vmaf_info.vmaf_model,
+ cpi->oxcf.tune_cfg.vmaf_model_path);
+ }
#endif
// Handle fixed keyframe intervals
@@ -2270,7 +2555,8 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
if (res == AOM_CODEC_OK) {
// Set up internal flags
- if (ctx->base.init_flags & AOM_CODEC_USE_PSNR) cpi->b_calculate_psnr = 1;
+ if (ctx->base.init_flags & AOM_CODEC_USE_PSNR)
+ cpi->ppi->b_calculate_psnr = 1;
if (img != NULL) {
if (!ctx->pts_offset_initialized) {
@@ -2306,11 +2592,18 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
cpi->oxcf.tool_cfg.enable_global_motion);
}
if (!ppi->lookahead)
- aom_internal_error(&cpi->common.error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&ppi->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate lag buffers");
-
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ int i;
+ for (i = 0; i < ppi->num_fp_contexts; i++) {
+ av1_check_initial_width(ppi->parallel_cpi[i], use_highbitdepth,
+ subsampling_x, subsampling_y);
+ }
+#else
av1_check_initial_width(cpi, use_highbitdepth, subsampling_x,
subsampling_y);
+#endif
if (cpi_lap != NULL) {
av1_check_initial_width(cpi_lap, use_highbitdepth, subsampling_x,
subsampling_y);
@@ -2320,7 +2613,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
// key frame flag when we actually encode this frame.
if (av1_receive_raw_frame(cpi, flags | ctx->next_frame_flags, &sd,
src_time_stamp, src_end_time_stamp)) {
- res = update_error_state(ctx, &cpi->common.error);
+ res = update_error_state(ctx, &ppi->error);
}
ctx->next_frame_flags = 0;
}
@@ -2337,7 +2630,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
* the buffer size anyway.
*/
if (cx_data_sz < ctx->cx_data_sz / 2) {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR,
+ aom_internal_error(&ppi->error, AOM_CODEC_ERROR,
"Compressed data buffer too small");
}
}
@@ -2358,6 +2651,12 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
}
if ((num_workers > 1) && (cpi->mt_info.num_workers == 0)) {
av1_create_workers(cpi, num_workers);
+#if CONFIG_MULTITHREAD
+ av1_init_mt_sync(cpi, cpi->oxcf.pass == 1);
+ if (cpi_lap != NULL) {
+ av1_init_mt_sync(cpi_lap, 1);
+ }
+#endif // CONFIG_MULTITHREAD
if (cpi->oxcf.pass != 1) {
av1_create_second_pass_workers(cpi, num_workers);
}
@@ -2373,13 +2672,12 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
}
cpi_lap->mt_info.num_workers = cpi->mt_info.num_workers;
const int status = av1_get_compressed_data(
- cpi_lap, &lib_flags, &frame_size, NULL, &dst_time_stamp_la,
- &dst_end_time_stamp_la, !img, timestamp_ratio);
+ cpi_lap, &lib_flags, &frame_size, cx_data_sz, NULL,
+ &dst_time_stamp_la, &dst_end_time_stamp_la, !img, timestamp_ratio);
if (status != -1) {
if (status != AOM_CODEC_OK) {
- aom_internal_error(&cpi_lap->common.error, AOM_CODEC_ERROR, NULL);
+ aom_internal_error(&ppi->error, AOM_CODEC_ERROR, NULL);
}
- cpi_lap->seq_params_locked = 1;
}
lib_flags = 0;
frame_size = 0;
@@ -2390,15 +2688,39 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
int64_t dst_time_stamp;
int64_t dst_end_time_stamp;
while (cx_data_sz >= ctx->cx_data_sz / 2 && !is_frame_visible) {
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cpi->do_frame_data_update = true;
+ if (ppi->num_fp_contexts > 1 && ppi->gf_group.size > 1) {
+ if (cpi->gf_frame_index < ppi->gf_group.size) {
+ calc_frame_data_update_flag(&ppi->gf_group, cpi->gf_frame_index,
+ &cpi->do_frame_data_update);
+ }
+ }
+#endif
const int status = av1_get_compressed_data(
- cpi, &lib_flags, &frame_size, cx_data, &dst_time_stamp,
+ cpi, &lib_flags, &frame_size, cx_data_sz, cx_data, &dst_time_stamp,
&dst_end_time_stamp, !img, timestamp_ratio);
if (status == -1) break;
if (status != AOM_CODEC_OK) {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR, NULL);
+ aom_internal_error(&ppi->error, AOM_CODEC_ERROR, NULL);
}
- cpi->seq_params_locked = 1;
+#if CONFIG_ENTROPY_STATS
+ if (ppi->cpi->oxcf.pass != 1 && !cpi->common.show_existing_frame)
+ av1_accumulate_frame_counts(&ppi->aggregate_fc, &cpi->counts);
+#endif
+#if CONFIG_INTERNAL_STATS
+ if (ppi->cpi->oxcf.pass != 1) {
+ ppi->total_time_compress_data += cpi->time_compress_data;
+ ppi->total_recode_hits += cpi->frame_recode_hits;
+ ppi->total_bytes += cpi->bytes;
+ for (int i = 0; i < MAX_MODES; i++) {
+ ppi->total_mode_chosen_counts[i] += cpi->mode_chosen_counts[i];
+ }
+ }
+#endif // CONFIG_INTERNAL_STATS
+
+ cpi->ppi->seq_params_locked = 1;
if (!frame_size) continue;
assert(cx_data != NULL && cx_data_sz != 0);
const int write_temporal_delimiter =
@@ -2413,12 +2735,13 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
const size_t move_offset = obu_header_size + length_field_size;
memmove(ctx->cx_data + move_offset, ctx->cx_data, frame_size);
obu_header_size = av1_write_obu_header(
- &cpi->level_params, OBU_TEMPORAL_DELIMITER, 0, ctx->cx_data);
+ &cpi->ppi->level_params, &cpi->frame_header_count,
+ OBU_TEMPORAL_DELIMITER, 0, ctx->cx_data);
// OBUs are preceded/succeeded by an unsigned leb128 coded integer.
if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size,
ctx->cx_data) != AOM_CODEC_OK) {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR, NULL);
+ aom_internal_error(&ppi->error, AOM_CODEC_ERROR, NULL);
}
frame_size += obu_header_size + obu_payload_size + length_field_size;
@@ -2428,7 +2751,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
size_t curr_frame_size = frame_size;
if (av1_convert_sect5obus_to_annexb(cx_data, &curr_frame_size) !=
AOM_CODEC_OK) {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR, NULL);
+ aom_internal_error(&ppi->error, AOM_CODEC_ERROR, NULL);
}
frame_size = curr_frame_size;
@@ -2437,7 +2760,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
memmove(cx_data + length_field_size, cx_data, frame_size);
if (av1_write_uleb_obu_size(0, (uint32_t)frame_size, cx_data) !=
AOM_CODEC_OK) {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR, NULL);
+ aom_internal_error(&ppi->error, AOM_CODEC_ERROR, NULL);
}
frame_size += length_field_size;
}
@@ -2458,7 +2781,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
aom_codec_cx_pkt_t pkt;
// decrement frames_left counter
- cpi->frames_left = AOMMAX(0, cpi->frames_left - 1);
+ cpi->ppi->frames_left = AOMMAX(0, cpi->ppi->frames_left - 1);
if (ctx->oxcf.save_as_annexb) {
// B_PRIME (add TU size)
size_t tu_size = ctx->pending_cx_data_sz;
@@ -2466,7 +2789,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
memmove(ctx->cx_data + length_field_size, ctx->cx_data, tu_size);
if (av1_write_uleb_obu_size(0, (uint32_t)tu_size, ctx->cx_data) !=
AOM_CODEC_OK) {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR, NULL);
+ aom_internal_error(&ppi->error, AOM_CODEC_ERROR, NULL);
}
ctx->pending_cx_data_sz += length_field_size;
}
@@ -2496,7 +2819,7 @@ static aom_codec_err_t encoder_encode(aom_codec_alg_priv_t *ctx,
}
}
- cpi->common.error.setjmp = 0;
+ ppi->error.setjmp = 0;
return res;
}
@@ -2674,7 +2997,7 @@ static aom_codec_err_t ctrl_set_number_spatial_layers(aom_codec_alg_priv_t *ctx,
const int number_spatial_layers = va_arg(args, int);
if (number_spatial_layers > MAX_NUM_SPATIAL_LAYERS)
return AOM_CODEC_INVALID_PARAM;
- ctx->ppi->cpi->common.number_spatial_layers = number_spatial_layers;
+ ctx->ppi->number_spatial_layers = number_spatial_layers;
return AOM_CODEC_OK;
}
@@ -2690,19 +3013,20 @@ static aom_codec_err_t ctrl_set_layer_id(aom_codec_alg_priv_t *ctx,
static aom_codec_err_t ctrl_set_svc_params(aom_codec_alg_priv_t *ctx,
va_list args) {
- AV1_COMP *const cpi = ctx->ppi->cpi;
+ AV1_PRIMARY *const ppi = ctx->ppi;
+ AV1_COMP *const cpi = ppi->cpi;
AV1_COMMON *const cm = &cpi->common;
aom_svc_params_t *const params = va_arg(args, aom_svc_params_t *);
- cm->number_spatial_layers = params->number_spatial_layers;
- cm->number_temporal_layers = params->number_temporal_layers;
+ ppi->number_spatial_layers = params->number_spatial_layers;
+ ppi->number_temporal_layers = params->number_temporal_layers;
cpi->svc.number_spatial_layers = params->number_spatial_layers;
cpi->svc.number_temporal_layers = params->number_temporal_layers;
- if (cm->number_spatial_layers > 1 || cm->number_temporal_layers > 1) {
+ if (ppi->number_spatial_layers > 1 || ppi->number_temporal_layers > 1) {
unsigned int sl, tl;
- cpi->use_svc = 1;
- for (sl = 0; sl < cm->number_spatial_layers; ++sl) {
- for (tl = 0; tl < cm->number_temporal_layers; ++tl) {
- const int layer = LAYER_IDS_TO_IDX(sl, tl, cm->number_temporal_layers);
+ ctx->ppi->use_svc = 1;
+ for (sl = 0; sl < ppi->number_spatial_layers; ++sl) {
+ for (tl = 0; tl < ppi->number_temporal_layers; ++tl) {
+ const int layer = LAYER_IDS_TO_IDX(sl, tl, ppi->number_temporal_layers);
LAYER_CONTEXT *lc = &cpi->svc.layer_context[layer];
lc->max_q = params->max_quantizers[layer];
lc->min_q = params->min_quantizers[layer];
@@ -2713,11 +3037,11 @@ static aom_codec_err_t ctrl_set_svc_params(aom_codec_alg_priv_t *ctx,
}
}
if (cm->current_frame.frame_number == 0) {
- if (!cpi->seq_params_locked) {
- SequenceHeader *const seq_params = &cm->seq_params;
+ if (!cpi->ppi->seq_params_locked) {
+ SequenceHeader *const seq_params = &ppi->seq_params;
seq_params->operating_points_cnt_minus_1 =
- cm->number_spatial_layers * cm->number_temporal_layers - 1;
- av1_init_seq_coding_tools(&cm->seq_params, cm, &cpi->oxcf, 1);
+ ppi->number_spatial_layers * ppi->number_temporal_layers - 1;
+ av1_init_seq_coding_tools(ppi, &cpi->oxcf, 1);
}
av1_init_layer_context(cpi);
}
@@ -2732,13 +3056,15 @@ static aom_codec_err_t ctrl_set_svc_ref_frame_config(aom_codec_alg_priv_t *ctx,
AV1_COMP *const cpi = ctx->ppi->cpi;
aom_svc_ref_frame_config_t *const data =
va_arg(args, aom_svc_ref_frame_config_t *);
- cpi->svc.external_ref_frame_config = 1;
+ cpi->svc.set_ref_frame_config = 1;
for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
cpi->svc.reference[i] = data->reference[i];
cpi->svc.ref_idx[i] = data->ref_idx[i];
}
for (unsigned int i = 0; i < REF_FRAMES; ++i)
cpi->svc.refresh[i] = data->refresh[i];
+ cpi->svc.use_flexible_mode = 1;
+ cpi->svc.ksvc_fixed_mode = 0;
return AOM_CODEC_OK;
}
@@ -2831,18 +3157,17 @@ static aom_codec_err_t encoder_set_option(aom_codec_alg_priv_t *ctx,
// Used to mock the argv with just one string "--{name}={value}"
char *argv[2] = { NULL, "" };
size_t len = strlen(name) + strlen(value) + 4;
- char *err_string = ctx->ppi->cpi->common.error.detail;
+ char *err_string = ctx->ppi->error.detail;
#if __STDC_VERSION__ >= 201112L
// We use the keyword _Static_assert because clang-cl does not allow the
// convenience macro static_assert to be used in function scope. See
// https://bugs.llvm.org/show_bug.cgi?id=48904.
- _Static_assert(
- sizeof(ctx->ppi->cpi->common.error.detail) >= ARG_ERR_MSG_MAX_LEN,
- "The size of the err_msg buffer for arg_match_helper must be "
- "at least ARG_ERR_MSG_MAX_LEN");
+ _Static_assert(sizeof(ctx->ppi->error.detail) >= ARG_ERR_MSG_MAX_LEN,
+ "The size of the err_msg buffer for arg_match_helper must be "
+ "at least ARG_ERR_MSG_MAX_LEN");
#else
- assert(sizeof(ctx->ppi->cpi->common.error.detail) >= ARG_ERR_MSG_MAX_LEN);
+ assert(sizeof(ctx->ppi->error.detail) >= ARG_ERR_MSG_MAX_LEN);
#endif
argv[0] = aom_malloc(len * sizeof(argv[1][0]));
@@ -2909,8 +3234,11 @@ static aom_codec_err_t encoder_set_option(aom_codec_alg_priv_t *ctx,
extra_cfg.vmaf_model_path = value;
}
#endif
- else if (arg_match_helper(&arg, &g_av1_codec_arg_defs.cq_level, argv,
- err_string)) {
+ else if (arg_match_helper(&arg, &g_av1_codec_arg_defs.partition_info_path,
+ argv, err_string)) {
+ extra_cfg.partition_info_path = value;
+ } else if (arg_match_helper(&arg, &g_av1_codec_arg_defs.cq_level, argv,
+ err_string)) {
extra_cfg.cq_level = arg_parse_uint_helper(&arg, err_string);
} else if (arg_match_helper(&arg, &g_av1_codec_arg_defs.max_intra_rate_pct,
argv, err_string)) {
@@ -3161,6 +3489,9 @@ static aom_codec_err_t encoder_set_option(aom_codec_alg_priv_t *ctx,
} else if (arg_match_helper(&arg, &g_av1_codec_arg_defs.mv_cost_upd_freq,
argv, err_string)) {
extra_cfg.mv_cost_upd_freq = arg_parse_uint_helper(&arg, err_string);
+ } else if (arg_match_helper(&arg, &g_av1_codec_arg_defs.dv_cost_upd_freq,
+ argv, err_string)) {
+ extra_cfg.dv_cost_upd_freq = arg_parse_uint_helper(&arg, err_string);
}
#if CONFIG_DENOISE
else if (arg_match_helper(&arg, &g_av1_codec_arg_defs.denoise_noise_level,
@@ -3215,9 +3546,8 @@ static aom_codec_err_t encoder_set_option(aom_codec_alg_priv_t *ctx,
static aom_codec_err_t ctrl_get_seq_level_idx(aom_codec_alg_priv_t *ctx,
va_list args) {
int *const arg = va_arg(args, int *);
- const AV1_COMP *const cpi = ctx->ppi->cpi;
if (arg == NULL) return AOM_CODEC_INVALID_PARAM;
- return av1_get_seq_level_idx(&cpi->common.seq_params, &cpi->level_params,
+ return av1_get_seq_level_idx(&ctx->ppi->seq_params, &ctx->ppi->level_params,
arg);
}
@@ -3332,6 +3662,7 @@ static aom_codec_ctrl_fn_map_t encoder_ctrl_maps[] = {
{ AV1E_SET_SUPERBLOCK_SIZE, ctrl_set_superblock_size },
{ AV1E_SET_SINGLE_TILE_DECODING, ctrl_set_single_tile_decoding },
{ AV1E_SET_VMAF_MODEL_PATH, ctrl_set_vmaf_model_path },
+ { AV1E_SET_PARTITION_INFO_PATH, ctrl_set_partition_info_path },
{ AV1E_SET_FILM_GRAIN_TEST_VECTOR, ctrl_set_film_grain_test_vector },
{ AV1E_SET_FILM_GRAIN_TABLE, ctrl_set_film_grain_table },
{ AV1E_SET_DENOISE_NOISE_LEVEL, ctrl_set_denoise_noise_level },
@@ -3347,6 +3678,8 @@ static aom_codec_ctrl_fn_map_t encoder_ctrl_maps[] = {
{ AV1E_SET_SVC_REF_FRAME_CONFIG, ctrl_set_svc_ref_frame_config },
{ AV1E_SET_VBR_CORPUS_COMPLEXITY_LAP, ctrl_set_vbr_corpus_complexity_lap },
{ AV1E_ENABLE_SB_MULTIPASS_UNIT_TEST, ctrl_enable_sb_multipass_unit_test },
+ { AV1E_SET_DV_COST_UPD_FREQ, ctrl_set_dv_cost_upd_freq },
+ { AV1E_SET_EXTERNAL_PARTITION, ctrl_set_external_partition },
// Getters
{ AOME_GET_LAST_QUANTIZER, ctrl_get_quantizer },
@@ -3364,6 +3697,7 @@ static aom_codec_ctrl_fn_map_t encoder_ctrl_maps[] = {
};
static const aom_codec_enc_cfg_t encoder_usage_cfg[] = {
+#if !CONFIG_REALTIME_ONLY
{
// NOLINT
AOM_USAGE_GOOD_QUALITY, // g_usage - non-realtime usage
@@ -3415,25 +3749,26 @@ static const aom_codec_enc_cfg_t encoder_usage_cfg[] = {
2000, // rc_two_pass_vbrmax_section
// keyframing settings (kf)
- 0, // fwd_kf_enabled
- AOM_KF_AUTO, // kf_mode
- 0, // kf_min_dist
- 9999, // kf_max_dist
- 0, // sframe_dist
- 1, // sframe_mode
- 0, // large_scale_tile
- 0, // monochrome
- 0, // full_still_picture_hdr
- 0, // save_as_annexb
- 0, // tile_width_count
- 0, // tile_height_count
- { 0 }, // tile_widths
- { 0 }, // tile_heights
- 0, // use_fixed_qp_offsets
- { -1, -1, -1, -1, -1 }, // fixed_qp_offsets
+ 0, // fwd_kf_enabled
+ AOM_KF_AUTO, // kf_mode
+ 0, // kf_min_dist
+ 9999, // kf_max_dist
+ 0, // sframe_dist
+ 1, // sframe_mode
+ 0, // large_scale_tile
+ 0, // monochrome
+ 0, // full_still_picture_hdr
+ 0, // save_as_annexb
+ 0, // tile_width_count
+ 0, // tile_height_count
+ { 0 }, // tile_widths
+ { 0 }, // tile_heights
+ 0, // use_fixed_qp_offsets
+ { -1, -1, -1, -1, -1, -1 }, // fixed_qp_offsets
{ 0, 128, 128, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // cfg
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // cfg
},
+#endif // !CONFIG_REALTIME_ONLY
{
// NOLINT
AOM_USAGE_REALTIME, // g_usage - real-time usage
@@ -3485,25 +3820,26 @@ static const aom_codec_enc_cfg_t encoder_usage_cfg[] = {
2000, // rc_two_pass_vbrmax_section
// keyframing settings (kf)
- 0, // fwd_kf_enabled
- AOM_KF_AUTO, // kf_mode
- 0, // kf_min_dist
- 9999, // kf_max_dist
- 0, // sframe_dist
- 1, // sframe_mode
- 0, // large_scale_tile
- 0, // monochrome
- 0, // full_still_picture_hdr
- 0, // save_as_annexb
- 0, // tile_width_count
- 0, // tile_height_count
- { 0 }, // tile_widths
- { 0 }, // tile_heights
- 0, // use_fixed_qp_offsets
- { -1, -1, -1, -1, -1 }, // fixed_qp_offsets
+ 0, // fwd_kf_enabled
+ AOM_KF_AUTO, // kf_mode
+ 0, // kf_min_dist
+ 9999, // kf_max_dist
+ 0, // sframe_dist
+ 1, // sframe_mode
+ 0, // large_scale_tile
+ 0, // monochrome
+ 0, // full_still_picture_hdr
+ 0, // save_as_annexb
+ 0, // tile_width_count
+ 0, // tile_height_count
+ { 0 }, // tile_widths
+ { 0 }, // tile_heights
+ 0, // use_fixed_qp_offsets
+ { -1, -1, -1, -1, -1, -1 }, // fixed_qp_offsets
{ 0, 128, 128, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // cfg
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // cfg
},
+#if !CONFIG_REALTIME_ONLY
{
// NOLINT
AOM_USAGE_ALL_INTRA, // g_usage - all intra usage
@@ -3572,8 +3908,9 @@ static const aom_codec_enc_cfg_t encoder_usage_cfg[] = {
0, // use_fixed_qp_offsets
{ -1, -1, -1, -1, -1 }, // fixed_qp_offsets
{ 0, 128, 128, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // cfg
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // cfg
},
+#endif // !CONFIG_REALTIME_ONLY
};
// This data structure and function are exported in aom/aomcx.h
@@ -3598,13 +3935,13 @@ aom_codec_iface_t aom_codec_av1_cx_algo = {
},
{
// NOLINT
- 3, // 3 cfg
- encoder_usage_cfg, // aom_codec_enc_cfg_t
- encoder_encode, // aom_codec_encode_fn_t
- encoder_get_cxdata, // aom_codec_get_cx_data_fn_t
- encoder_set_config, // aom_codec_enc_config_set_fn_t
- encoder_get_global_headers, // aom_codec_get_global_headers_fn_t
- encoder_get_preview // aom_codec_get_preview_frame_fn_t
+ NELEMENTS(encoder_usage_cfg), // cfg_count
+ encoder_usage_cfg, // aom_codec_enc_cfg_t
+ encoder_encode, // aom_codec_encode_fn_t
+ encoder_get_cxdata, // aom_codec_get_cx_data_fn_t
+ encoder_set_config, // aom_codec_enc_config_set_fn_t
+ encoder_get_global_headers, // aom_codec_get_global_headers_fn_t
+ encoder_get_preview // aom_codec_get_preview_frame_fn_t
},
encoder_set_option // aom_codec_set_option_fn_t
};
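Throughout the encoder hunks above, error reporting moves from the per-compressor cpi->common.error to the single ppi->error owned by the primary encoder, and encoder_encode now arms one setjmp for all parallel compressors instead of one per cpi. The snippet below is a minimal, self-contained illustration of that setjmp/longjmp error-path pattern under hypothetical names (error_info_sketch, report_error_sketch); it is not the libaom implementation. The detail carried over from the patch is that the setjmp flag must be cleared before the guarded function returns, because the jmp_buf is only valid while that stack frame is live.

#include <setjmp.h>
#include <stdio.h>

/* Stand-in for an error context guarded by setjmp (hypothetical names). */
struct error_info_sketch {
  jmp_buf jmp;      /* longjmp target, valid only while the guarding frame is live */
  int setjmp_armed; /* mirrors the 'setjmp' flag that is reset before returning */
  int code;
};

/* A deep callee reports an error by jumping back to the guarded frame. */
static void report_error_sketch(struct error_info_sketch *err, int code) {
  err->code = code;
  if (err->setjmp_armed) longjmp(err->jmp, 1);
}

static void encode_call_sketch(struct error_info_sketch *err, int fail) {
  if (fail) report_error_sketch(err, -1); /* unwinds straight to setjmp */
}

int main(void) {
  struct error_info_sketch err = { .setjmp_armed = 0, .code = 0 };
  if (setjmp(err.jmp)) {
    err.setjmp_armed = 0; /* must be cleared before leaving this frame */
    printf("caught error %d\n", err.code);
    return 1;
  }
  err.setjmp_armed = 1;
  encode_call_sketch(&err, 1);
  err.setjmp_armed = 0;
  return 0;
}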
diff --git a/third_party/libaom/source/libaom/av1/av1_dx_iface.c b/third_party/libaom/source/libaom/av1/av1_dx_iface.c
index 1ee8a576d3..02968abd16 100644
--- a/third_party/libaom/source/libaom/av1/av1_dx_iface.c
+++ b/third_party/libaom/source/libaom/av1/av1_dx_iface.c
@@ -115,14 +115,18 @@ static aom_codec_err_t decoder_destroy(aom_codec_alg_priv_t *ctx) {
if (ctx->frame_worker != NULL) {
AVxWorker *const worker = ctx->frame_worker;
FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
+ AV1Decoder *const pbi = frame_worker_data->pbi;
aom_get_worker_interface()->end(worker);
- aom_free(frame_worker_data->pbi->common.tpl_mvs);
- frame_worker_data->pbi->common.tpl_mvs = NULL;
+ aom_free(pbi->common.tpl_mvs);
+ pbi->common.tpl_mvs = NULL;
av1_remove_common(&frame_worker_data->pbi->common);
+ av1_free_cdef_buffers(&pbi->common, &pbi->cdef_worker, &pbi->cdef_sync,
+ pbi->num_workers);
+ av1_free_cdef_sync(&pbi->cdef_sync);
#if !CONFIG_REALTIME_ONLY
- av1_free_restoration_buffers(&frame_worker_data->pbi->common);
+ av1_free_restoration_buffers(&pbi->common);
#endif
- av1_decoder_remove(frame_worker_data->pbi);
+ av1_decoder_remove(pbi);
aom_free(frame_worker_data);
#if CONFIG_MULTITHREAD
pthread_mutex_destroy(&ctx->buffer_pool->pool_mutex);
@@ -392,7 +396,7 @@ static void init_buffer_callbacks(aom_codec_alg_priv_t *ctx) {
pool->release_fb_cb = av1_release_frame_buffer;
if (av1_alloc_internal_frame_buffers(&pool->int_frame_buffers))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Failed to initialize internal frame buffers");
pool->cb_priv = &pool->int_frame_buffers;
@@ -527,7 +531,7 @@ static aom_codec_err_t decode_one(aom_codec_alg_priv_t *ctx,
*data = frame_worker_data->data_end;
if (worker->had_error)
- return update_error_state(ctx, &frame_worker_data->pbi->common.error);
+ return update_error_state(ctx, &frame_worker_data->pbi->error);
check_resync(ctx, frame_worker_data->pbi);
@@ -558,7 +562,7 @@ static aom_codec_err_t decoder_inspect(aom_codec_alg_priv_t *ctx,
check_resync(ctx, frame_worker_data->pbi);
if (ctx->frame_worker->had_error)
- return update_error_state(ctx, &frame_worker_data->pbi->common.error);
+ return update_error_state(ctx, &frame_worker_data->pbi->error);
// Allow extra zero bytes after the frame end
while (data < data_end) {
@@ -823,7 +827,7 @@ static aom_image_t *decoder_get_frame(aom_codec_alg_priv_t *ctx,
aom_image_t *res =
add_grain_if_needed(ctx, img, &ctx->image_with_grain, grain_params);
if (!res) {
- aom_internal_error(&pbi->common.error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Grain systhesis failed\n");
}
*index += 1; // Advance the iterator to point to the next image
@@ -1091,10 +1095,9 @@ static aom_codec_err_t ctrl_get_still_picture(aom_codec_alg_priv_t *ctx,
FrameWorkerData *const frame_worker_data =
(FrameWorkerData *)worker->data1;
const AV1Decoder *pbi = frame_worker_data->pbi;
- still_picture_info->is_still_picture =
- (int)pbi->common.seq_params.still_picture;
+ still_picture_info->is_still_picture = (int)pbi->seq_params.still_picture;
still_picture_info->is_reduced_still_picture_hdr =
- (int)(pbi->common.seq_params.reduced_still_picture_hdr);
+ (int)(pbi->seq_params.reduced_still_picture_hdr);
return AOM_CODEC_OK;
} else {
return AOM_CODEC_ERROR;
@@ -1112,7 +1115,7 @@ static aom_codec_err_t ctrl_get_sb_size(aom_codec_alg_priv_t *ctx,
FrameWorkerData *const frame_worker_data =
(FrameWorkerData *)worker->data1;
const AV1Decoder *pbi = frame_worker_data->pbi;
- if (pbi->common.seq_params.sb_size == BLOCK_128X128) {
+ if (pbi->seq_params.sb_size == BLOCK_128X128) {
*sb_size = AOM_SUPERBLOCK_SIZE_128X128;
} else {
*sb_size = AOM_SUPERBLOCK_SIZE_64X64;
@@ -1291,7 +1294,7 @@ static aom_codec_err_t ctrl_get_bit_depth(aom_codec_alg_priv_t *ctx,
FrameWorkerData *const frame_worker_data =
(FrameWorkerData *)worker->data1;
const AV1_COMMON *const cm = &frame_worker_data->pbi->common;
- *bit_depth = cm->seq_params.bit_depth;
+ *bit_depth = cm->seq_params->bit_depth;
return AOM_CODEC_OK;
} else {
return AOM_CODEC_ERROR;
@@ -1327,9 +1330,9 @@ static aom_codec_err_t ctrl_get_img_format(aom_codec_alg_priv_t *ctx,
(FrameWorkerData *)worker->data1;
const AV1_COMMON *const cm = &frame_worker_data->pbi->common;
- *img_fmt = get_img_format(cm->seq_params.subsampling_x,
- cm->seq_params.subsampling_y,
- cm->seq_params.use_highbitdepth);
+ *img_fmt = get_img_format(cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y,
+ cm->seq_params->use_highbitdepth);
return AOM_CODEC_OK;
} else {
return AOM_CODEC_ERROR;
diff --git a/third_party/libaom/source/libaom/av1/common/alloccommon.c b/third_party/libaom/source/libaom/av1/common/alloccommon.c
index cd997cd875..8624255218 100644
--- a/third_party/libaom/source/libaom/av1/common/alloccommon.c
+++ b/third_party/libaom/source/libaom/av1/common/alloccommon.c
@@ -17,8 +17,10 @@
#include "av1/common/alloccommon.h"
#include "av1/common/av1_common_int.h"
#include "av1/common/blockd.h"
+#include "av1/common/cdef_block.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
+#include "av1/common/thread_common.h"
int av1_get_MBs(int width, int height) {
const int aligned_width = ALIGN_POWER_OF_TWO(width, 3);
@@ -51,6 +53,227 @@ void av1_free_ref_frame_buffers(BufferPool *pool) {
}
}
+static INLINE void free_cdef_linebuf_conditional(
+ AV1_COMMON *const cm, const size_t *new_linebuf_size) {
+ CdefInfo *cdef_info = &cm->cdef_info;
+ for (int plane = 0; plane < MAX_MB_PLANE; plane++) {
+ if (new_linebuf_size[plane] != cdef_info->allocated_linebuf_size[plane]) {
+ aom_free(cdef_info->linebuf[plane]);
+ cdef_info->linebuf[plane] = NULL;
+ }
+ }
+}
+
+static INLINE void free_cdef_bufs_conditional(AV1_COMMON *const cm,
+ uint16_t **colbuf,
+ uint16_t **srcbuf,
+ const size_t *new_colbuf_size,
+ const size_t new_srcbuf_size) {
+ CdefInfo *cdef_info = &cm->cdef_info;
+ if (new_srcbuf_size != cdef_info->allocated_srcbuf_size) {
+ aom_free(*srcbuf);
+ *srcbuf = NULL;
+ }
+ for (int plane = 0; plane < MAX_MB_PLANE; plane++) {
+ if (new_colbuf_size[plane] != cdef_info->allocated_colbuf_size[plane]) {
+ aom_free(colbuf[plane]);
+ colbuf[plane] = NULL;
+ }
+ }
+}
+
+static INLINE void free_cdef_bufs(uint16_t **colbuf, uint16_t **srcbuf) {
+ aom_free(*srcbuf);
+ *srcbuf = NULL;
+ for (int plane = 0; plane < MAX_MB_PLANE; plane++) {
+ aom_free(colbuf[plane]);
+ colbuf[plane] = NULL;
+ }
+}
+
+static INLINE void free_cdef_row_sync(AV1CdefRowSync **cdef_row_mt,
+ const int num_mi_rows) {
+ if (*cdef_row_mt == NULL) return;
+#if CONFIG_MULTITHREAD
+ for (int row_idx = 0; row_idx < num_mi_rows; row_idx++) {
+ pthread_mutex_destroy((*cdef_row_mt)[row_idx].row_mutex_);
+ pthread_cond_destroy((*cdef_row_mt)[row_idx].row_cond_);
+ aom_free((*cdef_row_mt)[row_idx].row_mutex_);
+ aom_free((*cdef_row_mt)[row_idx].row_cond_);
+ }
+#else
+ (void)num_mi_rows;
+#endif // CONFIG_MULTITHREAD
+ aom_free(*cdef_row_mt);
+ *cdef_row_mt = NULL;
+}
+
+void av1_free_cdef_buffers(AV1_COMMON *const cm,
+ AV1CdefWorkerData **cdef_worker,
+ AV1CdefSync *cdef_sync, int num_workers) {
+ CdefInfo *cdef_info = &cm->cdef_info;
+ const int num_mi_rows = cdef_info->allocated_mi_rows;
+
+ for (int plane = 0; plane < MAX_MB_PLANE; plane++) {
+ aom_free(cdef_info->linebuf[plane]);
+ cdef_info->linebuf[plane] = NULL;
+ }
+ // De-allocation of column buffer & source buffer (worker_0).
+ free_cdef_bufs(cdef_info->colbuf, &cdef_info->srcbuf);
+
+ if (num_workers < 2) return;
+ if (*cdef_worker != NULL) {
+ for (int idx = num_workers - 1; idx >= 1; idx--) {
+ // De-allocation of column buffer & source buffer for remaining workers.
+ free_cdef_bufs((*cdef_worker)[idx].colbuf, &(*cdef_worker)[idx].srcbuf);
+ }
+ aom_free(*cdef_worker);
+ *cdef_worker = NULL;
+ }
+ free_cdef_row_sync(&cdef_sync->cdef_row_mt, num_mi_rows);
+}
+
+static INLINE void alloc_cdef_linebuf(AV1_COMMON *const cm, uint16_t **linebuf,
+ const int num_planes) {
+ CdefInfo *cdef_info = &cm->cdef_info;
+ for (int plane = 0; plane < num_planes; plane++) {
+ if (linebuf[plane] == NULL)
+ CHECK_MEM_ERROR(cm, linebuf[plane],
+ aom_malloc(cdef_info->allocated_linebuf_size[plane]));
+ }
+}
+
+static INLINE void alloc_cdef_bufs(AV1_COMMON *const cm, uint16_t **colbuf,
+ uint16_t **srcbuf, const int num_planes) {
+ CdefInfo *cdef_info = &cm->cdef_info;
+ if (*srcbuf == NULL)
+ CHECK_MEM_ERROR(cm, *srcbuf,
+ aom_memalign(16, cdef_info->allocated_srcbuf_size));
+
+ for (int plane = 0; plane < num_planes; plane++) {
+ if (colbuf[plane] == NULL)
+ CHECK_MEM_ERROR(cm, colbuf[plane],
+ aom_malloc(cdef_info->allocated_colbuf_size[plane]));
+ }
+}
+
+static INLINE void alloc_cdef_row_sync(AV1_COMMON *const cm,
+ AV1CdefRowSync **cdef_row_mt,
+ const int num_mi_rows) {
+ if (*cdef_row_mt != NULL) return;
+
+ CHECK_MEM_ERROR(cm, *cdef_row_mt,
+ aom_malloc(sizeof(**cdef_row_mt) * num_mi_rows));
+#if CONFIG_MULTITHREAD
+ for (int row_idx = 0; row_idx < num_mi_rows; row_idx++) {
+ CHECK_MEM_ERROR(cm, (*cdef_row_mt)[row_idx].row_mutex_,
+ aom_malloc(sizeof(*(*cdef_row_mt)[row_idx].row_mutex_)));
+ pthread_mutex_init((*cdef_row_mt)[row_idx].row_mutex_, NULL);
+
+ CHECK_MEM_ERROR(cm, (*cdef_row_mt)[row_idx].row_cond_,
+ aom_malloc(sizeof(*(*cdef_row_mt)[row_idx].row_cond_)));
+ pthread_cond_init((*cdef_row_mt)[row_idx].row_cond_, NULL);
+
+ (*cdef_row_mt)[row_idx].is_row_done = 0;
+ }
+#endif // CONFIG_MULTITHREAD
+}
+
+void av1_alloc_cdef_buffers(AV1_COMMON *const cm,
+ AV1CdefWorkerData **cdef_worker,
+ AV1CdefSync *cdef_sync, int num_workers) {
+ const int num_planes = av1_num_planes(cm);
+ size_t new_linebuf_size[MAX_MB_PLANE] = { 0 };
+ size_t new_colbuf_size[MAX_MB_PLANE] = { 0 };
+ size_t new_srcbuf_size = 0;
+ CdefInfo *const cdef_info = &cm->cdef_info;
+ // Check for configuration change
+ const int num_mi_rows =
+ (cm->mi_params.mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
+ const int is_num_workers_changed =
+ cdef_info->allocated_num_workers != num_workers;
+ const int is_cdef_enabled =
+ cm->seq_params->enable_cdef && !cm->tiles.large_scale;
+
+  // num_bufs = 3 gives ping-pong buffers for the top line buffer, followed by
+  // the bottom line buffer. The ping-pong scheme prevents the top line buffer
+  // from being overwritten by the row that follows it.
+ int num_bufs = 3;
+ if (num_workers > 1)
+ num_bufs = (cm->mi_params.mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
+
+ if (is_cdef_enabled) {
+ // Calculate src buffer size
+ new_srcbuf_size = sizeof(*cdef_info->srcbuf) * CDEF_INBUF_SIZE;
+ for (int plane = 0; plane < num_planes; plane++) {
+ const int shift =
+ plane == AOM_PLANE_Y ? 0 : cm->seq_params->subsampling_x;
+ // Calculate top and bottom line buffer size
+ const int luma_stride =
+ ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols << MI_SIZE_LOG2, 4);
+ new_linebuf_size[plane] = sizeof(*cdef_info->linebuf) * num_bufs *
+ (CDEF_VBORDER << 1) * (luma_stride >> shift);
+ // Calculate column buffer size
+ const int block_height =
+ (CDEF_BLOCKSIZE << (MI_SIZE_LOG2 - shift)) * 2 * CDEF_VBORDER;
+ new_colbuf_size[plane] =
+ sizeof(*cdef_info->colbuf[plane]) * block_height * CDEF_HBORDER;
+ }
+ }
+
+ // Free src, line and column buffers for worker 0 in case of reallocation
+ free_cdef_linebuf_conditional(cm, new_linebuf_size);
+ free_cdef_bufs_conditional(cm, cdef_info->colbuf, &cdef_info->srcbuf,
+ new_colbuf_size, new_srcbuf_size);
+
+ if (*cdef_worker != NULL) {
+ if (is_num_workers_changed) {
+ // Free src and column buffers for remaining workers in case of change in
+ // num_workers
+ for (int idx = cdef_info->allocated_num_workers - 1; idx >= 1; idx--)
+ free_cdef_bufs((*cdef_worker)[idx].colbuf, &(*cdef_worker)[idx].srcbuf);
+ } else if (num_workers > 1) {
+ // Free src and column buffers for remaining workers in case of
+ // reallocation
+ for (int idx = num_workers - 1; idx >= 1; idx--)
+ free_cdef_bufs_conditional(cm, (*cdef_worker)[idx].colbuf,
+ &(*cdef_worker)[idx].srcbuf, new_colbuf_size,
+ new_srcbuf_size);
+ }
+ }
+
+ if (cdef_info->allocated_mi_rows != num_mi_rows)
+ free_cdef_row_sync(&cdef_sync->cdef_row_mt, cdef_info->allocated_mi_rows);
+
+ // Store allocated sizes for reallocation
+ cdef_info->allocated_srcbuf_size = new_srcbuf_size;
+ av1_copy(cdef_info->allocated_colbuf_size, new_colbuf_size);
+ av1_copy(cdef_info->allocated_linebuf_size, new_linebuf_size);
+ // Store configuration to check change in configuration
+ cdef_info->allocated_mi_rows = num_mi_rows;
+ cdef_info->allocated_num_workers = num_workers;
+
+ if (!is_cdef_enabled) return;
+
+ // Memory allocation of column buffer & source buffer (worker_0).
+ alloc_cdef_bufs(cm, cdef_info->colbuf, &cdef_info->srcbuf, num_planes);
+ alloc_cdef_linebuf(cm, cdef_info->linebuf, num_planes);
+
+ if (num_workers < 2) return;
+
+ if (*cdef_worker == NULL)
+ CHECK_MEM_ERROR(cm, *cdef_worker,
+ aom_calloc(num_workers, sizeof(**cdef_worker)));
+
+ // Memory allocation of column buffer & source buffer for remaining workers.
+ for (int idx = num_workers - 1; idx >= 1; idx--)
+ alloc_cdef_bufs(cm, (*cdef_worker)[idx].colbuf, &(*cdef_worker)[idx].srcbuf,
+ num_planes);
+
+ alloc_cdef_row_sync(cm, &cdef_sync->cdef_row_mt,
+ cdef_info->allocated_mi_rows);
+}
+
#if !CONFIG_REALTIME_ONLY
// Assumes cm->rst_info[p].restoration_unit_size is already initialized
void av1_alloc_restoration_buffers(AV1_COMMON *cm) {
@@ -86,11 +309,11 @@ void av1_alloc_restoration_buffers(AV1_COMMON *cm) {
// Now we need to allocate enough space to store the line buffers for the
// stripes
const int frame_w = cm->superres_upscaled_width;
- const int use_highbd = cm->seq_params.use_highbitdepth;
+ const int use_highbd = cm->seq_params->use_highbitdepth;
for (int p = 0; p < num_planes; ++p) {
const int is_uv = p > 0;
- const int ss_x = is_uv && cm->seq_params.subsampling_x;
+ const int ss_x = is_uv && cm->seq_params->subsampling_x;
const int plane_w = ((frame_w + ss_x) >> ss_x) + 2 * RESTORATION_EXTRA_HORZ;
const int stride = ALIGN_POWER_OF_TWO(plane_w, 5);
const int buf_size = num_stripes * stride * RESTORATION_CTX_VERT
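av1_alloc_cdef_buffers above recomputes the line, column and source buffer sizes on every call, frees a buffer only when its size no longer matches the recorded allocated_* value, and reallocates only buffers that are still NULL. A small, self-contained sketch of that reallocate-on-size-change idiom, using hypothetical names rather than the CdefInfo fields:

#include <stdlib.h>

/* Hypothetical buffer cache illustrating "free only on size change,
 * allocate only when missing". */
struct sized_buf_sketch {
  void *data;
  size_t allocated_size;
};

/* Returns 0 on success, -1 on allocation failure. */
static int ensure_buffer_sketch(struct sized_buf_sketch *buf, size_t new_size) {
  if (new_size != buf->allocated_size) { /* configuration changed: drop it */
    free(buf->data);
    buf->data = NULL;
  }
  buf->allocated_size = new_size; /* record the size for the next comparison */
  if (new_size == 0 || buf->data != NULL) return 0;
  buf->data = malloc(new_size);
  return buf->data ? 0 : -1;
}

Note how the patch likewise stores the newly computed sizes before its early return when CDEF is disabled, so the next call's comparisons remain valid.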
diff --git a/third_party/libaom/source/libaom/av1/common/alloccommon.h b/third_party/libaom/source/libaom/av1/common/alloccommon.h
index e75c226831..0b43889d20 100644
--- a/third_party/libaom/source/libaom/av1/common/alloccommon.h
+++ b/third_party/libaom/source/libaom/av1/common/alloccommon.h
@@ -24,6 +24,8 @@ struct AV1Common;
struct BufferPool;
struct CommonContexts;
struct CommonModeInfoParams;
+struct AV1CdefWorker;
+struct AV1CdefSyncData;
void av1_remove_common(struct AV1Common *cm);
@@ -36,6 +38,12 @@ void av1_init_mi_buffers(struct CommonModeInfoParams *mi_params);
void av1_free_context_buffers(struct AV1Common *cm);
void av1_free_ref_frame_buffers(struct BufferPool *pool);
+void av1_alloc_cdef_buffers(struct AV1Common *const cm,
+ struct AV1CdefWorker **cdef_worker,
+ struct AV1CdefSyncData *cdef_sync, int num_workers);
+void av1_free_cdef_buffers(struct AV1Common *const cm,
+ struct AV1CdefWorker **cdef_worker,
+ struct AV1CdefSyncData *cdef_sync, int num_workers);
#if !CONFIG_REALTIME_ONLY
void av1_alloc_restoration_buffers(struct AV1Common *cm);
void av1_free_restoration_buffers(struct AV1Common *cm);
diff --git a/third_party/libaom/source/libaom/av1/common/av1_common_int.h b/third_party/libaom/source/libaom/av1/common/av1_common_int.h
index 0a68cb5fd5..981a186579 100644
--- a/third_party/libaom/source/libaom/av1/common/av1_common_int.h
+++ b/third_party/libaom/source/libaom/av1/common/av1_common_int.h
@@ -135,7 +135,10 @@ typedef struct RefCntBuffer {
// distance when a very old frame is used as a reference.
unsigned int display_order_hint;
unsigned int ref_display_order_hint[INTER_REFS_PER_FRAME];
-
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Frame's level within the hierarchical structure.
+ unsigned int pyramid_level;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
MV_REF *mvs;
uint8_t *seg_map;
struct segmentation seg;
@@ -192,12 +195,32 @@ typedef struct BufferPool {
/*!\brief Parameters related to CDEF */
typedef struct {
- int cdef_damping; /*!< CDEF damping factor */
- int nb_cdef_strengths; /*!< Number of CDEF strength values */
- int cdef_strengths[CDEF_MAX_STRENGTHS]; /*!< CDEF strength values for luma */
- int cdef_uv_strengths[CDEF_MAX_STRENGTHS]; /*!< CDEF strength values for
- chroma */
- int cdef_bits; /*!< Number of CDEF strength values in bits */
+ //! CDEF column line buffer
+ uint16_t *colbuf[MAX_MB_PLANE];
+ //! CDEF top & bottom line buffer
+ uint16_t *linebuf[MAX_MB_PLANE];
+ //! CDEF intermediate buffer
+ uint16_t *srcbuf;
+ //! CDEF column line buffer sizes
+ size_t allocated_colbuf_size[MAX_MB_PLANE];
+ //! CDEF top and bottom line buffer sizes
+ size_t allocated_linebuf_size[MAX_MB_PLANE];
+ //! CDEF intermediate buffer size
+ size_t allocated_srcbuf_size;
+ //! CDEF damping factor
+ int cdef_damping;
+ //! Number of CDEF strength values
+ int nb_cdef_strengths;
+ //! CDEF strength values for luma
+ int cdef_strengths[CDEF_MAX_STRENGTHS];
+ //! CDEF strength values for chroma
+ int cdef_uv_strengths[CDEF_MAX_STRENGTHS];
+ //! Number of CDEF strength values in bits
+ int cdef_bits;
+  //! Number of 64x64 block rows in the frame (used for CDEF row sync allocation)
+ int allocated_mi_rows;
+ //! Number of CDEF workers
+ int allocated_num_workers;
} CdefInfo;
/*!\cond */
@@ -320,6 +343,10 @@ typedef struct {
unsigned int order_hint;
unsigned int display_order_hint;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Frame's level within the hierarchical structure.
+ unsigned int pyramid_level;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
unsigned int frame_number;
SkipModeInfo skip_mode_info;
int refresh_frame_flags; // Which ref frames are overwritten by this frame
@@ -602,12 +629,12 @@ struct CommonQuantParams {
/*!
* Delta of qindex (from base_qindex) for V plane DC coefficients.
- * Same as those for U plane if cm->seq_params.separate_uv_delta_q == 0.
+ * Same as those for U plane if cm->seq_params->separate_uv_delta_q == 0.
*/
int u_ac_delta_q;
/*!
* Delta of qindex (from base_qindex) for V plane AC coefficients.
- * Same as those for U plane if cm->seq_params.separate_uv_delta_q == 0.
+ * Same as those for U plane if cm->seq_params->separate_uv_delta_q == 0.
*/
int v_ac_delta_q;
@@ -728,7 +755,7 @@ typedef struct AV1Common {
/*!
* Code and details about current error status.
*/
- struct aom_internal_error_info error;
+ struct aom_internal_error_info *error;
/*!
* AV1 allows two types of frame scaling operations:
@@ -780,10 +807,6 @@ typedef struct AV1Common {
uint8_t superres_scale_denominator;
/*!
- * If true, buffer removal times are present.
- */
- bool buffer_removal_time_present;
- /*!
* buffer_removal_times[op_num] specifies the frame removal time in units of
* DecCT clock ticks counted from the removal time of the last random access
* point for operating point op_num.
@@ -950,7 +973,7 @@ typedef struct AV1Common {
* Elements part of the sequence header, that are applicable for all the
* frames in the video.
*/
- SequenceHeader seq_params;
+ SequenceHeader *seq_params;
/*!
* Current CDFs of all the symbols for the current frame.
@@ -982,7 +1005,7 @@ typedef struct AV1Common {
CommonContexts above_contexts;
/**
- * \name Signaled when cm->seq_params.frame_id_numbers_present_flag == 1
+ * \name Signaled when cm->seq_params->frame_id_numbers_present_flag == 1
*/
/**@{*/
int current_frame_id; /*!< frame ID for the current frame. */
@@ -1014,20 +1037,12 @@ typedef struct AV1Common {
int8_t ref_frame_side[REF_FRAMES];
/*!
- * Number of temporal layers: may be > 1 for SVC (scalable vector coding).
- */
- unsigned int number_temporal_layers;
- /*!
* Temporal layer ID of this frame
* (in the range 0 ... (number_temporal_layers - 1)).
*/
int temporal_layer_id;
/*!
- * Number of spatial layers: may be > 1 for SVC (scalable vector coding).
- */
- unsigned int number_spatial_layers;
- /*!
* Spatial layer ID of this frame
* (in the range 0 ... (number_spatial_layers - 1)).
*/
@@ -1192,15 +1207,15 @@ static INLINE RefCntBuffer *get_primary_ref_frame_buf(
// Returns 1 if this frame might allow mvs from some reference frame.
static INLINE int frame_might_allow_ref_frame_mvs(const AV1_COMMON *cm) {
return !cm->features.error_resilient_mode &&
- cm->seq_params.order_hint_info.enable_ref_frame_mvs &&
- cm->seq_params.order_hint_info.enable_order_hint &&
+ cm->seq_params->order_hint_info.enable_ref_frame_mvs &&
+ cm->seq_params->order_hint_info.enable_order_hint &&
!frame_is_intra_only(cm);
}
// Returns 1 if this frame might use warped_motion
static INLINE int frame_might_allow_warped_motion(const AV1_COMMON *cm) {
return !cm->features.error_resilient_mode && !frame_is_intra_only(cm) &&
- cm->seq_params.enable_warped_motion;
+ cm->seq_params->enable_warped_motion;
}
static INLINE void ensure_mv_buffer(RefCntBuffer *buf, AV1_COMMON *cm) {
@@ -1240,7 +1255,7 @@ static INLINE void ensure_mv_buffer(RefCntBuffer *buf, AV1_COMMON *cm) {
void cfl_init(CFL_CTX *cfl, const SequenceHeader *seq_params);
static INLINE int av1_num_planes(const AV1_COMMON *cm) {
- return cm->seq_params.monochrome ? 1 : MAX_MB_PLANE;
+ return cm->seq_params->monochrome ? 1 : MAX_MB_PLANE;
}
static INLINE void av1_init_above_context(CommonContexts *above_contexts,
@@ -1279,8 +1294,8 @@ static INLINE void av1_init_macroblockd(AV1_COMMON *cm, MACROBLOCKD *xd) {
}
}
xd->mi_stride = cm->mi_params.mi_stride;
- xd->error_info = &cm->error;
- cfl_init(&xd->cfl, &cm->seq_params);
+ xd->error_info = cm->error;
+ cfl_init(&xd->cfl, cm->seq_params);
}
static INLINE void set_entropy_context(MACROBLOCKD *xd, int mi_row, int mi_col,
@@ -1562,7 +1577,7 @@ static INLINE void av1_zero_above_context(AV1_COMMON *const cm,
const MACROBLOCKD *xd,
int mi_col_start, int mi_col_end,
const int tile_row) {
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int num_planes = av1_num_planes(cm);
const int width = mi_col_end - mi_col_start;
const int aligned_width =
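Much of the churn in this header comes from two ownership changes: cm->error becomes a pointer into the primary encoder/decoder's error info, and cm->seq_params becomes a SequenceHeader pointer shared by every AV1_COMMON, so former cm->seq_params.x accesses become cm->seq_params->x. A tiny stand-in sketch of that sharing; the _sketch types are illustrative, not libaom definitions.

/* Stand-in types showing why the diff rewrites cm->seq_params.x as
 * cm->seq_params->x: the sequence header lives once in the primary object
 * and each common struct only keeps a pointer to it. */
struct seq_header_sketch { int monochrome; int use_highbitdepth; };

struct primary_sketch { struct seq_header_sketch seq_params; };

struct common_sketch { const struct seq_header_sketch *seq_params; };

static void bind_sketch(struct common_sketch *cm, struct primary_sketch *ppi) {
  cm->seq_params = &ppi->seq_params; /* every cm shares one sequence header */
}

static int num_planes_sketch(const struct common_sketch *cm) {
  return cm->seq_params->monochrome ? 1 : 3; /* was: cm->seq_params.monochrome */
}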
diff --git a/third_party/libaom/source/libaom/av1/common/av1_loopfilter.c b/third_party/libaom/source/libaom/av1/common/av1_loopfilter.c
index caa15c21e2..18ae0f28f4 100644
--- a/third_party/libaom/source/libaom/av1/common/av1_loopfilter.c
+++ b/third_party/libaom/source/libaom/av1/common/av1_loopfilter.c
@@ -351,8 +351,14 @@ void av1_filter_block_plane_vert(const AV1_COMMON *const cm,
const uint32_t scale_vert = plane_ptr->subsampling_y;
uint8_t *const dst_ptr = plane_ptr->dst.buf;
const int dst_stride = plane_ptr->dst.stride;
- const int y_range = (MAX_MIB_SIZE >> scale_vert);
- const int x_range = (MAX_MIB_SIZE >> scale_horz);
+ const int plane_mi_rows =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_rows, scale_vert);
+ const int plane_mi_cols =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_cols, scale_horz);
+ const int y_range = AOMMIN((int)(plane_mi_rows - (mi_row >> scale_vert)),
+ (MAX_MIB_SIZE >> scale_vert));
+ const int x_range = AOMMIN((int)(plane_mi_cols - (mi_col >> scale_horz)),
+ (MAX_MIB_SIZE >> scale_horz));
for (int y = 0; y < y_range; y++) {
uint8_t *p = dst_ptr + y * MI_SIZE * dst_stride;
for (int x = 0; x < x_range;) {
@@ -376,8 +382,8 @@ void av1_filter_block_plane_vert(const AV1_COMMON *const cm,
}
#if CONFIG_AV1_HIGHBITDEPTH
- const int use_highbitdepth = cm->seq_params.use_highbitdepth;
- const aom_bit_depth_t bit_depth = cm->seq_params.bit_depth;
+ const int use_highbitdepth = cm->seq_params->use_highbitdepth;
+ const aom_bit_depth_t bit_depth = cm->seq_params->bit_depth;
switch (params.filter_length) {
// apply 4-tap filtering
case 4:
@@ -456,6 +462,84 @@ void av1_filter_block_plane_vert(const AV1_COMMON *const cm,
}
}
+void av1_filter_block_plane_vert_rt(const AV1_COMMON *const cm,
+ const MACROBLOCKD *const xd,
+ const int plane,
+ const MACROBLOCKD_PLANE *const plane_ptr,
+ const uint32_t mi_row,
+ const uint32_t mi_col) {
+ const uint32_t scale_horz = plane_ptr->subsampling_x;
+ const uint32_t scale_vert = plane_ptr->subsampling_y;
+ uint8_t *const dst_ptr = plane_ptr->dst.buf;
+ const int dst_stride = plane_ptr->dst.stride;
+ const int plane_mi_rows =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_rows, scale_vert);
+ const int plane_mi_cols =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_cols, scale_horz);
+ const int y_range = AOMMIN((int)(plane_mi_rows - (mi_row >> scale_vert)),
+ (MAX_MIB_SIZE >> scale_vert));
+ const int x_range = AOMMIN((int)(plane_mi_cols - (mi_col >> scale_horz)),
+ (MAX_MIB_SIZE >> scale_horz));
+ assert(!plane);
+ assert(!(y_range % 2));
+ for (int y = 0; y < y_range; y += 2) {
+ uint8_t *p = dst_ptr + y * MI_SIZE * dst_stride;
+ for (int x = 0; x < x_range;) {
+      // The inner loop always filters vertical edges in a MI block. If the
+      // MI size is 8x8, it filters the vertical edge aligned with an 8x8
+      // block. If a 4x4 transform is used, it then filters the internal edge
+      // aligned with a 4x4 block.
+ const uint32_t curr_x = ((mi_col * MI_SIZE) >> scale_horz) + x * MI_SIZE;
+ const uint32_t curr_y = ((mi_row * MI_SIZE) >> scale_vert) + y * MI_SIZE;
+ uint32_t advance_units;
+ TX_SIZE tx_size;
+ AV1_DEBLOCKING_PARAMETERS params;
+ memset(&params, 0, sizeof(params));
+
+ tx_size =
+ set_lpf_parameters(&params, ((ptrdiff_t)1 << scale_horz), cm, xd,
+ VERT_EDGE, curr_x, curr_y, plane, plane_ptr);
+ if (tx_size == TX_INVALID) {
+ params.filter_length = 0;
+ tx_size = TX_4X4;
+ }
+
+ switch (params.filter_length) {
+ // apply 4-tap filtering
+ case 4:
+ aom_lpf_vertical_4_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ case 6: // apply 6-tap filter for chroma plane only
+ assert(plane != 0);
+ aom_lpf_vertical_6_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ // apply 8-tap filtering
+ case 8:
+ aom_lpf_vertical_8_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ // apply 14-tap filtering
+ case 14:
+ aom_lpf_vertical_14_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ // no filtering
+ default: break;
+ }
+ // advance the destination pointer
+ advance_units = tx_size_wide_unit[tx_size];
+ x += advance_units;
+ p += advance_units * MI_SIZE;
+ }
+ }
+}
+
void av1_filter_block_plane_horz(const AV1_COMMON *const cm,
const MACROBLOCKD *const xd, const int plane,
const MACROBLOCKD_PLANE *const plane_ptr,
@@ -464,8 +548,14 @@ void av1_filter_block_plane_horz(const AV1_COMMON *const cm,
const uint32_t scale_vert = plane_ptr->subsampling_y;
uint8_t *const dst_ptr = plane_ptr->dst.buf;
const int dst_stride = plane_ptr->dst.stride;
- const int y_range = (MAX_MIB_SIZE >> scale_vert);
- const int x_range = (MAX_MIB_SIZE >> scale_horz);
+ const int plane_mi_rows =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_rows, scale_vert);
+ const int plane_mi_cols =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_cols, scale_horz);
+ const int y_range = AOMMIN((int)(plane_mi_rows - (mi_row >> scale_vert)),
+ (MAX_MIB_SIZE >> scale_vert));
+ const int x_range = AOMMIN((int)(plane_mi_cols - (mi_col >> scale_horz)),
+ (MAX_MIB_SIZE >> scale_horz));
for (int x = 0; x < x_range; x++) {
uint8_t *p = dst_ptr + x * MI_SIZE;
for (int y = 0; y < y_range;) {
@@ -489,8 +579,8 @@ void av1_filter_block_plane_horz(const AV1_COMMON *const cm,
}
#if CONFIG_AV1_HIGHBITDEPTH
- const int use_highbitdepth = cm->seq_params.use_highbitdepth;
- const aom_bit_depth_t bit_depth = cm->seq_params.bit_depth;
+ const int use_highbitdepth = cm->seq_params->use_highbitdepth;
+ const aom_bit_depth_t bit_depth = cm->seq_params->bit_depth;
switch (params.filter_length) {
// apply 4-tap filtering
case 4:
@@ -572,6 +662,84 @@ void av1_filter_block_plane_horz(const AV1_COMMON *const cm,
}
}
+void av1_filter_block_plane_horz_rt(const AV1_COMMON *const cm,
+ const MACROBLOCKD *const xd,
+ const int plane,
+ const MACROBLOCKD_PLANE *const plane_ptr,
+ const uint32_t mi_row,
+ const uint32_t mi_col) {
+ const uint32_t scale_horz = plane_ptr->subsampling_x;
+ const uint32_t scale_vert = plane_ptr->subsampling_y;
+ uint8_t *const dst_ptr = plane_ptr->dst.buf;
+ const int dst_stride = plane_ptr->dst.stride;
+ const int plane_mi_rows =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_rows, scale_vert);
+ const int plane_mi_cols =
+ ROUND_POWER_OF_TWO(cm->mi_params.mi_cols, scale_horz);
+ const int y_range = AOMMIN((int)(plane_mi_rows - (mi_row >> scale_vert)),
+ (MAX_MIB_SIZE >> scale_vert));
+ const int x_range = AOMMIN((int)(plane_mi_cols - (mi_col >> scale_horz)),
+ (MAX_MIB_SIZE >> scale_horz));
+ assert(!plane);
+ for (int x = 0; x < x_range; x += 2) {
+ uint8_t *p = dst_ptr + x * MI_SIZE;
+ for (int y = 0; y < y_range;) {
+      // The inner loop always filters horizontal edges in a MI block. If the
+      // MI size is 8x8, it first filters the horizontal edge aligned with an
+      // 8x8 block. If a 4x4 transform is used, it then filters the internal
+      // edge aligned with a 4x4 block.
+ const uint32_t curr_x = ((mi_col * MI_SIZE) >> scale_horz) + x * MI_SIZE;
+ const uint32_t curr_y = ((mi_row * MI_SIZE) >> scale_vert) + y * MI_SIZE;
+ uint32_t advance_units;
+ TX_SIZE tx_size;
+ AV1_DEBLOCKING_PARAMETERS params;
+ memset(&params, 0, sizeof(params));
+
+ tx_size = set_lpf_parameters(
+ &params, (cm->mi_params.mi_stride << scale_vert), cm, xd, HORZ_EDGE,
+ curr_x, curr_y, plane, plane_ptr);
+ if (tx_size == TX_INVALID) {
+ params.filter_length = 0;
+ tx_size = TX_4X4;
+ }
+
+ switch (params.filter_length) {
+ // apply 4-tap filtering
+ case 4:
+ aom_lpf_horizontal_4_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ // apply 6-tap filtering
+ case 6:
+ assert(plane != 0);
+ aom_lpf_horizontal_6_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ // apply 8-tap filtering
+ case 8:
+ aom_lpf_horizontal_8_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ // apply 14-tap filtering
+ case 14:
+ aom_lpf_horizontal_14_dual(p, dst_stride, params.mblim, params.lim,
+ params.hev_thr, params.mblim, params.lim,
+ params.hev_thr);
+ break;
+ // no filtering
+ default: break;
+ }
+ // advance the destination pointer
+ advance_units = tx_size_high_unit[tx_size];
+ y += advance_units;
+ p += advance_units * dst_stride * MI_SIZE;
+ }
+ }
+}
+
void av1_filter_block_plane_vert_test(const AV1_COMMON *const cm,
const MACROBLOCKD *const xd,
const int plane,
@@ -661,7 +829,7 @@ static void loop_filter_rows(YV12_BUFFER_CONFIG *frame_buffer, AV1_COMMON *cm,
#if CONFIG_LPF_MASK
int is_decoding,
#endif
- int plane_start, int plane_end) {
+ int plane_start, int plane_end, int is_realtime) {
struct macroblockd_plane *pd = xd->plane;
const int col_start = 0;
const int col_end = cm->mi_params.mi_cols;
@@ -679,7 +847,7 @@ static void loop_filter_rows(YV12_BUFFER_CONFIG *frame_buffer, AV1_COMMON *cm,
else if (plane == 2 && !(cm->lf.filter_level_v))
continue;
- av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, 0, 0,
+ av1_setup_dst_planes(pd, cm->seq_params->sb_size, frame_buffer, 0, 0,
plane, plane + 1);
av1_build_bitmask_vert_info(cm, &pd[plane], plane);
@@ -716,49 +884,106 @@ static void loop_filter_rows(YV12_BUFFER_CONFIG *frame_buffer, AV1_COMMON *cm,
continue;
else if (plane == 2 && !(cm->lf.filter_level_v))
continue;
-
if (cm->lf.combine_vert_horz_lf) {
// filter all vertical and horizontal edges in every 128x128 super block
for (mi_row = start; mi_row < stop; mi_row += MAX_MIB_SIZE) {
for (mi_col = col_start; mi_col < col_end; mi_col += MAX_MIB_SIZE) {
// filter vertical edges
- av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row,
- mi_col, plane, plane + 1);
+ av1_setup_dst_planes(pd, cm->seq_params->sb_size, frame_buffer,
+ mi_row, mi_col, plane, plane + 1);
+#if CONFIG_AV1_HIGHBITDEPTH
+ (void)is_realtime;
av1_filter_block_plane_vert(cm, xd, plane, &pd[plane], mi_row,
mi_col);
+#else
+ if (is_realtime && !plane) {
+ av1_filter_block_plane_vert_rt(cm, xd, plane, &pd[plane], mi_row,
+ mi_col);
+
+ } else {
+ av1_filter_block_plane_vert(cm, xd, plane, &pd[plane], mi_row,
+ mi_col);
+ }
+#endif
// filter horizontal edges
if (mi_col - MAX_MIB_SIZE >= 0) {
- av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer,
+ av1_setup_dst_planes(pd, cm->seq_params->sb_size, frame_buffer,
mi_row, mi_col - MAX_MIB_SIZE, plane,
plane + 1);
+#if CONFIG_AV1_HIGHBITDEPTH
+ (void)is_realtime;
av1_filter_block_plane_horz(cm, xd, plane, &pd[plane], mi_row,
mi_col - MAX_MIB_SIZE);
+#else
+ if (is_realtime && !plane) {
+ av1_filter_block_plane_horz_rt(cm, xd, plane, &pd[plane], mi_row,
+ mi_col - MAX_MIB_SIZE);
+ } else {
+ av1_filter_block_plane_horz(cm, xd, plane, &pd[plane], mi_row,
+ mi_col - MAX_MIB_SIZE);
+ }
+#endif
}
}
// filter horizontal edges
- av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row,
+ av1_setup_dst_planes(pd, cm->seq_params->sb_size, frame_buffer, mi_row,
mi_col - MAX_MIB_SIZE, plane, plane + 1);
+#if CONFIG_AV1_HIGHBITDEPTH
+ (void)is_realtime;
av1_filter_block_plane_horz(cm, xd, plane, &pd[plane], mi_row,
mi_col - MAX_MIB_SIZE);
+#else
+ if (is_realtime && !plane) {
+ av1_filter_block_plane_horz_rt(cm, xd, plane, &pd[plane], mi_row,
+ mi_col - MAX_MIB_SIZE);
+
+ } else {
+ av1_filter_block_plane_horz(cm, xd, plane, &pd[plane], mi_row,
+ mi_col - MAX_MIB_SIZE);
+ }
+#endif
}
} else {
// filter all vertical edges in every 128x128 super block
for (mi_row = start; mi_row < stop; mi_row += MAX_MIB_SIZE) {
for (mi_col = col_start; mi_col < col_end; mi_col += MAX_MIB_SIZE) {
- av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row,
- mi_col, plane, plane + 1);
+ av1_setup_dst_planes(pd, cm->seq_params->sb_size, frame_buffer,
+ mi_row, mi_col, plane, plane + 1);
+#if CONFIG_AV1_HIGHBITDEPTH
+ (void)is_realtime;
av1_filter_block_plane_vert(cm, xd, plane, &pd[plane], mi_row,
mi_col);
+#else
+ if (is_realtime && !plane) {
+ av1_filter_block_plane_vert_rt(cm, xd, plane, &pd[plane], mi_row,
+ mi_col);
+ } else {
+ av1_filter_block_plane_vert(cm, xd, plane, &pd[plane], mi_row,
+ mi_col);
+ }
+#endif
}
}
// filter all horizontal edges in every 128x128 super block
for (mi_row = start; mi_row < stop; mi_row += MAX_MIB_SIZE) {
for (mi_col = col_start; mi_col < col_end; mi_col += MAX_MIB_SIZE) {
- av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row,
- mi_col, plane, plane + 1);
+ av1_setup_dst_planes(pd, cm->seq_params->sb_size, frame_buffer,
+ mi_row, mi_col, plane, plane + 1);
+#if CONFIG_AV1_HIGHBITDEPTH
+ (void)is_realtime;
av1_filter_block_plane_horz(cm, xd, plane, &pd[plane], mi_row,
mi_col);
+#else
+ if (is_realtime && !plane) {
+ av1_filter_block_plane_horz_rt(cm, xd, plane, &pd[plane], mi_row,
+ mi_col);
+
+ } else {
+ av1_filter_block_plane_horz(cm, xd, plane, &pd[plane], mi_row,
+ mi_col);
+ }
+#endif
}
}
}
@@ -770,7 +995,8 @@ void av1_loop_filter_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
#if CONFIG_LPF_MASK
int is_decoding,
#endif
- int plane_start, int plane_end, int partial_frame) {
+ int plane_start, int plane_end, int partial_frame,
+ int is_realtime) {
int start_mi_row, end_mi_row, mi_rows_to_filter;
start_mi_row = 0;
@@ -786,5 +1012,5 @@ void av1_loop_filter_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
#if CONFIG_LPF_MASK
is_decoding,
#endif
- plane_start, plane_end);
+ plane_start, plane_end, is_realtime);
}
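loop_filter_rows now takes an is_realtime flag and, in low-bit-depth builds, routes the luma plane through the new av1_filter_block_plane_vert_rt/_horz_rt fast paths while everything else keeps the generic filters. A compact, self-contained sketch of that dispatch decision with stand-in function pointers (not the libaom call sites); the high-bit-depth part is modelled here as a runtime flag for brevity, whereas the patch decides it at compile time via CONFIG_AV1_HIGHBITDEPTH.

/* Stand-in edge-filter callback; in the patch these are the
 * av1_filter_block_plane_* functions. */
typedef void (*edge_filter_fn_sketch)(int plane);

/* The _rt fast path is only taken for 8-bit builds and the luma plane. */
static void filter_plane_sketch(int plane, int is_realtime, int high_bitdepth,
                                edge_filter_fn_sketch filter_generic,
                                edge_filter_fn_sketch filter_rt) {
  if (!high_bitdepth && is_realtime && plane == 0)
    filter_rt(plane);
  else
    filter_generic(plane);
}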
diff --git a/third_party/libaom/source/libaom/av1/common/av1_loopfilter.h b/third_party/libaom/source/libaom/av1/common/av1_loopfilter.h
index ca16bbe614..ed4453b2a7 100644
--- a/third_party/libaom/source/libaom/av1/common/av1_loopfilter.h
+++ b/third_party/libaom/source/libaom/av1/common/av1_loopfilter.h
@@ -151,7 +151,7 @@ void av1_loop_filter_frame(YV12_BUFFER_CONFIG *frame, struct AV1Common *cm,
#else
void av1_loop_filter_frame(YV12_BUFFER_CONFIG *frame, struct AV1Common *cm,
struct macroblockd *xd, int plane_start,
- int plane_end, int partial_frame);
+ int plane_end, int partial_frame, int is_realtime);
#endif
void av1_filter_block_plane_vert(const struct AV1Common *const cm,
@@ -164,6 +164,20 @@ void av1_filter_block_plane_horz(const struct AV1Common *const cm,
const MACROBLOCKD_PLANE *const plane_ptr,
const uint32_t mi_row, const uint32_t mi_col);
+void av1_filter_block_plane_vert_rt(const struct AV1Common *const cm,
+ const MACROBLOCKD *const xd,
+ const int plane,
+ const MACROBLOCKD_PLANE *const plane_ptr,
+ const uint32_t mi_row,
+ const uint32_t mi_col);
+
+void av1_filter_block_plane_horz_rt(const struct AV1Common *const cm,
+ const MACROBLOCKD *const xd,
+ const int plane,
+ const MACROBLOCKD_PLANE *const plane_ptr,
+ const uint32_t mi_row,
+ const uint32_t mi_col);
+
uint8_t av1_get_filter_level(const struct AV1Common *cm,
const loop_filter_info_n *lfi_n, const int dir_idx,
int plane, const MB_MODE_INFO *mbmi);
diff --git a/third_party/libaom/source/libaom/av1/common/blockd.h b/third_party/libaom/source/libaom/av1/common/blockd.h
index 1d1c381bca..5e535add2d 100644
--- a/third_party/libaom/source/libaom/av1/common/blockd.h
+++ b/third_party/libaom/source/libaom/av1/common/blockd.h
@@ -194,11 +194,6 @@ typedef struct RD_STATS {
int zero_rate;
#if CONFIG_RD_DEBUG
int txb_coeff_cost[MAX_MB_PLANE];
- // TODO(jingning): Temporary solution to silence stack over-size warning
- // in handle_inter_mode. This should be fixed after rate-distortion
- // optimization refactoring.
- int16_t txb_coeff_cost_map[MAX_MB_PLANE][TXB_COEFF_COST_MAP_SIZE]
- [TXB_COEFF_COST_MAP_SIZE];
#endif // CONFIG_RD_DEBUG
} RD_STATS;
@@ -325,6 +320,9 @@ typedef struct MB_MODE_INFO {
int8_t cdef_strength : 4;
/**@}*/
+ /*! \brief Skip CDEF for this superblock */
+ uint8_t skip_cdef_curr_sb;
+
#if CONFIG_RD_DEBUG
/*! \brief RD info used for debugging */
RD_STATS rd_stats;
@@ -552,10 +550,6 @@ typedef struct cfl_ctx {
// Whether the reconstructed luma pixels need to be stored
int store_y;
-
-#if CONFIG_DEBUG
- int rate;
-#endif // CONFIG_DEBUG
} CFL_CTX;
typedef struct dist_wtd_comp_params {
@@ -810,7 +804,7 @@ typedef struct macroblockd {
FRAME_CONTEXT *tile_ctx;
/*!
- * Bit depth: copied from cm->seq_params.bit_depth for convenience.
+ * Bit depth: copied from cm->seq_params->bit_depth for convenience.
*/
int bd;
@@ -893,7 +887,7 @@ typedef struct macroblockd {
/*!
* Mask for this block used for compound prediction.
*/
- DECLARE_ALIGNED(16, uint8_t, seg_mask[2 * MAX_SB_SQUARE]);
+ uint8_t *seg_mask;
/*!
* CFL (chroma from luma) related parameters.
@@ -937,13 +931,42 @@ typedef struct macroblockd {
/*!\cond */
static INLINE int is_cur_buf_hbd(const MACROBLOCKD *xd) {
+#if CONFIG_AV1_HIGHBITDEPTH
return xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH ? 1 : 0;
+#else
+ (void)xd;
+ return 0;
+#endif
}
static INLINE uint8_t *get_buf_by_bd(const MACROBLOCKD *xd, uint8_t *buf16) {
+#if CONFIG_AV1_HIGHBITDEPTH
return (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH)
? CONVERT_TO_BYTEPTR(buf16)
: buf16;
+#else
+ (void)xd;
+ return buf16;
+#endif
+}
+
+typedef struct BitDepthInfo {
+ int bit_depth;
+ /*! Is the image buffer high bit depth?
+ * Low bit depth buffer uses uint8_t.
+ * High bit depth buffer uses uint16_t.
+ * Equivalent to cm->seq_params->use_highbitdepth
+ */
+ int use_highbitdepth_buf;
+} BitDepthInfo;
+
+static INLINE BitDepthInfo get_bit_depth_info(const MACROBLOCKD *xd) {
+ BitDepthInfo bit_depth_info;
+ bit_depth_info.bit_depth = xd->bd;
+ bit_depth_info.use_highbitdepth_buf = is_cur_buf_hbd(xd);
+ assert(IMPLIES(!bit_depth_info.use_highbitdepth_buf,
+ bit_depth_info.bit_depth == 8));
+ return bit_depth_info;
}
static INLINE int get_sqr_bsize_idx(BLOCK_SIZE bsize) {
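The BitDepthInfo bundle introduced above lets helpers query the bit depth without reaching back into cm; a minimal usage sketch (the helper name is hypothetical):

/* Sketch only: per-sample storage size of the current buffer.
 * Assumes <stddef.h> and av1/common/blockd.h are included. */
static size_t cur_buf_bytes_per_sample(const MACROBLOCKD *xd) {
  const BitDepthInfo bd_info = get_bit_depth_info(xd);
  /* use_highbitdepth_buf means samples are stored as uint16_t. */
  return bd_info.use_highbitdepth_buf ? sizeof(uint16_t) : sizeof(uint8_t);
}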
diff --git a/third_party/libaom/source/libaom/av1/common/cdef.c b/third_party/libaom/source/libaom/av1/common/cdef.c
index d9b5a104e4..9ab7d4d235 100644
--- a/third_party/libaom/source/libaom/av1/common/cdef.c
+++ b/third_party/libaom/source/libaom/av1/common/cdef.c
@@ -21,35 +21,6 @@
#include "av1/common/cdef_block.h"
#include "av1/common/reconinter.h"
-enum { TOP, LEFT, BOTTOM, RIGHT, BOUNDARIES } UENUM1BYTE(BOUNDARY);
-
-/*!\brief Parameters related to CDEF Block */
-typedef struct {
- uint16_t *src;
- uint8_t *dst;
- uint16_t *colbuf[MAX_MB_PLANE];
- cdef_list dlist[MI_SIZE_64X64 * MI_SIZE_64X64];
-
- int xdec;
- int ydec;
- int mi_wide_l2;
- int mi_high_l2;
- int frame_boundary[BOUNDARIES];
-
- int damping;
- int coeff_shift;
- int level;
- int sec_strength;
- int cdef_count;
- int is_zero_level;
- int dir[CDEF_NBLOCKS][CDEF_NBLOCKS];
- int var[CDEF_NBLOCKS][CDEF_NBLOCKS];
-
- int dst_stride;
- int coffset;
- int roffset;
-} CdefBlockInfo;
-
static int is_8x8_block_skip(MB_MODE_INFO **grid, int mi_row, int mi_col,
int mi_stride) {
MB_MODE_INFO **mbmi = grid + mi_row * mi_stride + mi_col;
@@ -116,10 +87,10 @@ void cdef_copy_rect8_16bit_to_16bit_c(uint16_t *dst, int dstride,
}
}
-static void copy_sb8_16(AV1_COMMON *cm, uint16_t *dst, int dstride,
- const uint8_t *src, int src_voffset, int src_hoffset,
- int sstride, int vsize, int hsize) {
- if (cm->seq_params.use_highbitdepth) {
+void av1_cdef_copy_sb8_16(const AV1_COMMON *const cm, uint16_t *const dst,
+ int dstride, const uint8_t *src, int src_voffset,
+ int src_hoffset, int sstride, int vsize, int hsize) {
+ if (cm->seq_params->use_highbitdepth) {
const uint16_t *base =
&CONVERT_TO_SHORTPTR(src)[src_voffset * sstride + src_hoffset];
cdef_copy_rect8_16bit_to_16bit(dst, dstride, base, sstride, vsize, hsize);
@@ -151,29 +122,35 @@ static INLINE void copy_rect(uint16_t *dst, int dstride, const uint16_t *src,
// Inputs:
// cm: Pointer to common structure.
// fb_info: Pointer to the CDEF block-level parameter structure.
-// linebuf: Top feedback buffer for CDEF.
+// colbuf: Left column buffer for CDEF.
// cdef_left: Left block is filtered or not.
// fbc, fbr: col and row index of a block.
// plane: plane index Y/CB/CR.
-// prev_row_cdef: Top blocks are filtered or not.
// Returns:
// Nothing will be returned.
-static void cdef_prepare_fb(AV1_COMMON *cm, CdefBlockInfo *fb_info,
- uint16_t **linebuf, const int *cdef_left, int fbc,
- int fbr, uint8_t plane,
- unsigned char *prev_row_cdef) {
+static void cdef_prepare_fb(const AV1_COMMON *const cm, CdefBlockInfo *fb_info,
+ uint16_t **const colbuf, const int *cdef_left,
+ int fbc, int fbr, int plane) {
const CommonModeInfoParams *const mi_params = &cm->mi_params;
uint16_t *src = fb_info->src;
- const int stride = (mi_params->mi_cols << MI_SIZE_LOG2) + 2 * CDEF_HBORDER;
+ const int luma_stride =
+ ALIGN_POWER_OF_TWO(mi_params->mi_cols << MI_SIZE_LOG2, 4);
const int nvfb = (mi_params->mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
const int nhfb = (mi_params->mi_cols + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
int cstart = 0;
if (!*cdef_left) cstart = -CDEF_HBORDER;
int rend, cend;
- int nhb = AOMMIN(MI_SIZE_64X64, mi_params->mi_cols - MI_SIZE_64X64 * fbc);
- int nvb = AOMMIN(MI_SIZE_64X64, mi_params->mi_rows - MI_SIZE_64X64 * fbr);
- int hsize = nhb << fb_info->mi_wide_l2;
- int vsize = nvb << fb_info->mi_high_l2;
+ const int nhb =
+ AOMMIN(MI_SIZE_64X64, mi_params->mi_cols - MI_SIZE_64X64 * fbc);
+ const int nvb =
+ AOMMIN(MI_SIZE_64X64, mi_params->mi_rows - MI_SIZE_64X64 * fbr);
+ const int hsize = nhb << fb_info->mi_wide_l2;
+ const int vsize = nvb << fb_info->mi_high_l2;
+ const uint16_t *top_linebuf = fb_info->top_linebuf[plane];
+ const uint16_t *bot_linebuf = fb_info->bot_linebuf[plane];
+ const int bot_offset = (vsize + CDEF_VBORDER) * CDEF_BSTRIDE;
+ const int stride =
+ luma_stride >> (plane == AOM_PLANE_Y ? 0 : cm->seq_params->subsampling_x);
if (fbc == nhfb - 1)
cend = hsize;
@@ -185,54 +162,55 @@ static void cdef_prepare_fb(AV1_COMMON *cm, CdefBlockInfo *fb_info,
else
rend = vsize + CDEF_VBORDER;
- if (fbc == nhfb - 1) {
- /* On the last superblock column, fill in the right border with
- CDEF_VERY_LARGE to avoid filtering with the outside. */
- fill_rect(&src[cend + CDEF_HBORDER], CDEF_BSTRIDE, rend + CDEF_VBORDER,
- hsize + CDEF_HBORDER - cend, CDEF_VERY_LARGE);
- }
- if (fbr == nvfb - 1) {
- /* On the last superblock row, fill in the bottom border with
- CDEF_VERY_LARGE to avoid filtering with the outside. */
- fill_rect(&src[(rend + CDEF_VBORDER) * CDEF_BSTRIDE], CDEF_BSTRIDE,
- CDEF_VBORDER, hsize + 2 * CDEF_HBORDER, CDEF_VERY_LARGE);
- }
/* Copy in the pixels we need from the current superblock for
deringing.*/
- copy_sb8_16(cm, &src[CDEF_VBORDER * CDEF_BSTRIDE + CDEF_HBORDER + cstart],
- CDEF_BSTRIDE, fb_info->dst, fb_info->roffset,
- fb_info->coffset + cstart, fb_info->dst_stride, rend,
- cend - cstart);
- if (!prev_row_cdef[fbc]) {
- copy_sb8_16(cm, &src[CDEF_HBORDER], CDEF_BSTRIDE, fb_info->dst,
- fb_info->roffset - CDEF_VBORDER, fb_info->coffset,
- fb_info->dst_stride, CDEF_VBORDER, hsize);
- } else if (fbr > 0) {
- copy_rect(&src[CDEF_HBORDER], CDEF_BSTRIDE,
- &linebuf[plane][fb_info->coffset], stride, CDEF_VBORDER, hsize);
+ av1_cdef_copy_sb8_16(
+ cm, &src[CDEF_VBORDER * CDEF_BSTRIDE + CDEF_HBORDER + cstart],
+ CDEF_BSTRIDE, fb_info->dst, fb_info->roffset, fb_info->coffset + cstart,
+ fb_info->dst_stride, vsize, cend - cstart);
+
+ /* Copy in the pixels we need for the current superblock from bottom buffer.*/
+ if (fbr < nvfb - 1) {
+ copy_rect(&src[bot_offset + CDEF_HBORDER], CDEF_BSTRIDE,
+ &bot_linebuf[fb_info->coffset], stride, CDEF_VBORDER, hsize);
+ } else {
+ fill_rect(&src[bot_offset + CDEF_HBORDER], CDEF_BSTRIDE, CDEF_VBORDER,
+ hsize, CDEF_VERY_LARGE);
+ }
+ if (fbr < nvfb - 1 && fbc > 0) {
+ copy_rect(&src[bot_offset], CDEF_BSTRIDE,
+ &bot_linebuf[fb_info->coffset - CDEF_HBORDER], stride,
+ CDEF_VBORDER, CDEF_HBORDER);
+ } else {
+ fill_rect(&src[bot_offset], CDEF_BSTRIDE, CDEF_VBORDER, CDEF_HBORDER,
+ CDEF_VERY_LARGE);
+ }
+ if (fbr < nvfb - 1 && fbc < nhfb - 1) {
+ copy_rect(&src[bot_offset + hsize + CDEF_HBORDER], CDEF_BSTRIDE,
+ &bot_linebuf[fb_info->coffset + hsize], stride, CDEF_VBORDER,
+ CDEF_HBORDER);
+ } else {
+ fill_rect(&src[bot_offset + hsize + CDEF_HBORDER], CDEF_BSTRIDE,
+ CDEF_VBORDER, CDEF_HBORDER, CDEF_VERY_LARGE);
+ }
+
+ /* Copy in the pixels we need for the current superblock from the top buffer. */
+ if (fbr > 0) {
+ copy_rect(&src[CDEF_HBORDER], CDEF_BSTRIDE, &top_linebuf[fb_info->coffset],
+ stride, CDEF_VBORDER, hsize);
} else {
fill_rect(&src[CDEF_HBORDER], CDEF_BSTRIDE, CDEF_VBORDER, hsize,
CDEF_VERY_LARGE);
}
- if (!prev_row_cdef[fbc - 1]) {
- copy_sb8_16(cm, src, CDEF_BSTRIDE, fb_info->dst,
- fb_info->roffset - CDEF_VBORDER,
- fb_info->coffset - CDEF_HBORDER, fb_info->dst_stride,
- CDEF_VBORDER, CDEF_HBORDER);
- } else if (fbr > 0 && fbc > 0) {
- copy_rect(src, CDEF_BSTRIDE,
- &linebuf[plane][fb_info->coffset - CDEF_HBORDER], stride,
- CDEF_VBORDER, CDEF_HBORDER);
+ if (fbr > 0 && fbc > 0) {
+ copy_rect(src, CDEF_BSTRIDE, &top_linebuf[fb_info->coffset - CDEF_HBORDER],
+ stride, CDEF_VBORDER, CDEF_HBORDER);
} else {
fill_rect(src, CDEF_BSTRIDE, CDEF_VBORDER, CDEF_HBORDER, CDEF_VERY_LARGE);
}
- if (!prev_row_cdef[fbc + 1]) {
- copy_sb8_16(cm, &src[CDEF_HBORDER + hsize], CDEF_BSTRIDE, fb_info->dst,
- fb_info->roffset - CDEF_VBORDER, fb_info->coffset + hsize,
- fb_info->dst_stride, CDEF_VBORDER, CDEF_HBORDER);
- } else if (fbr > 0 && fbc < nhfb - 1) {
+ if (fbr > 0 && fbc < nhfb - 1) {
copy_rect(&src[hsize + CDEF_HBORDER], CDEF_BSTRIDE,
- &linebuf[plane][fb_info->coffset + hsize], stride, CDEF_VBORDER,
+ &top_linebuf[fb_info->coffset + hsize], stride, CDEF_VBORDER,
CDEF_HBORDER);
} else {
fill_rect(&src[hsize + CDEF_HBORDER], CDEF_BSTRIDE, CDEF_VBORDER,
@@ -241,36 +219,25 @@ static void cdef_prepare_fb(AV1_COMMON *cm, CdefBlockInfo *fb_info,
if (*cdef_left) {
/* If we deringed the superblock on the left then we need to copy in
saved pixels. */
- copy_rect(src, CDEF_BSTRIDE, fb_info->colbuf[plane], CDEF_HBORDER,
+ copy_rect(src, CDEF_BSTRIDE, colbuf[plane], CDEF_HBORDER,
rend + CDEF_VBORDER, CDEF_HBORDER);
}
/* Saving pixels in case we need to dering the superblock on the
right. */
- copy_rect(fb_info->colbuf[plane], CDEF_HBORDER, src + hsize, CDEF_BSTRIDE,
+ copy_rect(colbuf[plane], CDEF_HBORDER, src + hsize, CDEF_BSTRIDE,
rend + CDEF_VBORDER, CDEF_HBORDER);
- copy_sb8_16(cm, &linebuf[plane][fb_info->coffset], stride, fb_info->dst,
- (MI_SIZE_64X64 << fb_info->mi_high_l2) * (fbr + 1) - CDEF_VBORDER,
- fb_info->coffset, fb_info->dst_stride, CDEF_VBORDER, hsize);
- if (fb_info->frame_boundary[TOP]) {
- fill_rect(src, CDEF_BSTRIDE, CDEF_VBORDER, hsize + 2 * CDEF_HBORDER,
- CDEF_VERY_LARGE);
- }
if (fb_info->frame_boundary[LEFT]) {
fill_rect(src, CDEF_BSTRIDE, vsize + 2 * CDEF_VBORDER, CDEF_HBORDER,
CDEF_VERY_LARGE);
}
- if (fb_info->frame_boundary[BOTTOM]) {
- fill_rect(&src[(vsize + CDEF_VBORDER) * CDEF_BSTRIDE], CDEF_BSTRIDE,
- CDEF_VBORDER, hsize + 2 * CDEF_HBORDER, CDEF_VERY_LARGE);
- }
if (fb_info->frame_boundary[RIGHT]) {
fill_rect(&src[hsize + CDEF_HBORDER], CDEF_BSTRIDE,
vsize + 2 * CDEF_VBORDER, CDEF_HBORDER, CDEF_VERY_LARGE);
}
}
-static INLINE void cdef_filter_fb(CdefBlockInfo *fb_info, uint8_t plane,
+static INLINE void cdef_filter_fb(CdefBlockInfo *const fb_info, int plane,
uint8_t use_highbitdepth) {
int offset = fb_info->dst_stride * fb_info->roffset + fb_info->coffset;
if (use_highbitdepth) {
@@ -291,11 +258,11 @@ static INLINE void cdef_filter_fb(CdefBlockInfo *fb_info, uint8_t plane,
}
// Initializes block-level parameters for CDEF.
-static INLINE void cdef_init_fb_col(MACROBLOCKD *xd,
+static INLINE void cdef_init_fb_col(const MACROBLOCKD *const xd,
const CdefInfo *const cdef_info,
- CdefBlockInfo *fb_info,
- const int mbmi_cdef_strength, int fbc,
- int fbr, uint8_t plane) {
+ CdefBlockInfo *const fb_info,
+ int mbmi_cdef_strength, int fbc, int fbr,
+ int plane) {
if (plane == AOM_PLANE_Y) {
fb_info->level =
cdef_info->cdef_strengths[mbmi_cdef_strength] / CDEF_SEC_STRENGTHS;
@@ -328,9 +295,9 @@ static INLINE void cdef_init_fb_col(MACROBLOCKD *xd,
fb_info->coffset = MI_SIZE_64X64 * fbc << fb_info->mi_wide_l2;
}
-static bool cdef_fb_col(AV1_COMMON *cm, MACROBLOCKD *xd, CdefBlockInfo *fb_info,
- int fbc, int fbr, int *cdef_left, uint16_t **linebuf,
- unsigned char *prev_row_cdef) {
+static void cdef_fb_col(const AV1_COMMON *const cm, const MACROBLOCKD *const xd,
+ CdefBlockInfo *const fb_info, uint16_t **const colbuf,
+ int *cdef_left, int fbc, int fbr) {
const CommonModeInfoParams *const mi_params = &cm->mi_params;
const int mbmi_cdef_strength =
mi_params
@@ -343,9 +310,9 @@ static bool cdef_fb_col(AV1_COMMON *cm, MACROBLOCKD *xd, CdefBlockInfo *fb_info,
MI_SIZE_64X64 * fbc] == NULL ||
mbmi_cdef_strength == -1) {
*cdef_left = 0;
- return 0;
+ return;
}
- for (uint8_t plane = 0; plane < num_planes; plane++) {
+ for (int plane = 0; plane < num_planes; plane++) {
cdef_init_fb_col(xd, &cm->cdef_info, fb_info, mbmi_cdef_strength, fbc, fbr,
plane);
if (fb_info->is_zero_level ||
@@ -353,20 +320,26 @@ static bool cdef_fb_col(AV1_COMMON *cm, MACROBLOCKD *xd, CdefBlockInfo *fb_info,
mi_params, fbr * MI_SIZE_64X64, fbc * MI_SIZE_64X64,
fb_info->dlist, BLOCK_64X64)) == 0) {
*cdef_left = 0;
- return 0;
+ return;
}
- cdef_prepare_fb(cm, fb_info, linebuf, cdef_left, fbc, fbr, plane,
- prev_row_cdef);
- cdef_filter_fb(fb_info, plane, cm->seq_params.use_highbitdepth);
+ cdef_prepare_fb(cm, fb_info, colbuf, cdef_left, fbc, fbr, plane);
+ cdef_filter_fb(fb_info, plane, cm->seq_params->use_highbitdepth);
}
*cdef_left = 1;
- return 1;
}
-static INLINE void cdef_init_fb_row(CdefBlockInfo *fb_info, int mi_rows,
- int fbr) {
- const int nvfb = (mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
-
+// Initializes row-level parameters for CDEF frame.
+void av1_cdef_init_fb_row(const AV1_COMMON *const cm,
+ const MACROBLOCKD *const xd,
+ CdefBlockInfo *const fb_info,
+ uint16_t **const linebuf, uint16_t *const src,
+ struct AV1CdefSyncData *const cdef_sync, int fbr) {
+ (void)cdef_sync;
+ const int num_planes = av1_num_planes(cm);
+ const int nvfb = (cm->mi_params.mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
+ const int luma_stride =
+ ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols << MI_SIZE_LOG2, 4);
+ const bool ping_pong = fbr & 1;
// for the current filter block, its top-left corner mi structure (mi_tl)
// is first accessed to check whether the top and left boundaries are
// frame boundaries. Then bottom-left and top-right mi structures are
@@ -379,78 +352,58 @@ static INLINE void cdef_init_fb_row(CdefBlockInfo *fb_info, int mi_rows,
fb_info->frame_boundary[TOP] = (MI_SIZE_64X64 * fbr == 0) ? 1 : 0;
if (fbr != nvfb - 1)
fb_info->frame_boundary[BOTTOM] =
- (MI_SIZE_64X64 * (fbr + 1) == mi_rows) ? 1 : 0;
+ (MI_SIZE_64X64 * (fbr + 1) == cm->mi_params.mi_rows) ? 1 : 0;
else
fb_info->frame_boundary[BOTTOM] = 1;
+
+ fb_info->src = src;
+ fb_info->damping = cm->cdef_info.cdef_damping;
+ fb_info->coeff_shift = AOMMAX(cm->seq_params->bit_depth - 8, 0);
+ av1_zero(fb_info->dir);
+ av1_zero(fb_info->var);
+
+ for (int plane = 0; plane < num_planes; plane++) {
+ const int mi_high_l2 = MI_SIZE_LOG2 - xd->plane[plane].subsampling_y;
+ const int offset = MI_SIZE_64X64 * (fbr + 1) << mi_high_l2;
+ const int stride = luma_stride >> xd->plane[plane].subsampling_x;
+ // Ping-pong buffers are maintained for the top line buffer so that it is
+ // not overwritten by the next row's copy.
+ uint16_t *const top_linebuf =
+ &linebuf[plane][ping_pong * CDEF_VBORDER * stride];
+ fb_info->bot_linebuf[plane] = &linebuf[plane][(CDEF_VBORDER << 1) * stride];
+
+ if (fbr != nvfb - 1) // top line buffer copy
+ av1_cdef_copy_sb8_16(cm, top_linebuf, stride, xd->plane[plane].dst.buf,
+ offset - CDEF_VBORDER, 0,
+ xd->plane[plane].dst.stride, CDEF_VBORDER, stride);
+ fb_info->top_linebuf[plane] =
+ &linebuf[plane][(!ping_pong) * CDEF_VBORDER * stride];
+
+ if (fbr != nvfb - 1) // bottom line buffer copy
+ av1_cdef_copy_sb8_16(cm, fb_info->bot_linebuf[plane], stride,
+ xd->plane[plane].dst.buf, offset, 0,
+ xd->plane[plane].dst.stride, CDEF_VBORDER, stride);
+ }
}
-static void cdef_fb_row(AV1_COMMON *cm, MACROBLOCKD *xd, CdefBlockInfo *fb_info,
- uint16_t **linebuf, int fbr,
- unsigned char *curr_row_cdef,
- unsigned char *prev_row_cdef) {
+void av1_cdef_fb_row(const AV1_COMMON *const cm, MACROBLOCKD *xd,
+ uint16_t **const linebuf, uint16_t **const colbuf,
+ uint16_t *const src, int fbr,
+ cdef_init_fb_row_t cdef_init_fb_row_fn,
+ struct AV1CdefSyncData *const cdef_sync) {
+ CdefBlockInfo fb_info;
int cdef_left = 1;
const int nhfb = (cm->mi_params.mi_cols + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
- cdef_init_fb_row(fb_info, cm->mi_params.mi_rows, fbr);
+ cdef_init_fb_row_fn(cm, xd, &fb_info, linebuf, src, cdef_sync, fbr);
for (int fbc = 0; fbc < nhfb; fbc++) {
- fb_info->frame_boundary[LEFT] = (MI_SIZE_64X64 * fbc == 0) ? 1 : 0;
+ fb_info.frame_boundary[LEFT] = (MI_SIZE_64X64 * fbc == 0) ? 1 : 0;
if (fbc != nhfb - 1)
- fb_info->frame_boundary[RIGHT] =
+ fb_info.frame_boundary[RIGHT] =
(MI_SIZE_64X64 * (fbc + 1) == cm->mi_params.mi_cols) ? 1 : 0;
else
- fb_info->frame_boundary[RIGHT] = 1;
- curr_row_cdef[fbc] = cdef_fb_col(cm, xd, fb_info, fbc, fbr, &cdef_left,
- linebuf, prev_row_cdef);
- }
-}
-
-// Initialize the frame-level CDEF parameters.
-// Inputs:
-// frame: Pointer to input frame buffer.
-// cm: Pointer to common structure.
-// xd: Pointer to common current coding block structure.
-// fb_info: Pointer to the CDEF block-level parameter structure.
-// src: Intermediate input buffer for CDEF.
-// colbuf: Left feedback buffer for CDEF.
-// linebuf: Top feedback buffer for CDEF.
-// Returns:
-// Nothing will be returned.
-static void cdef_prepare_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
- MACROBLOCKD *xd, CdefBlockInfo *fb_info,
- uint16_t *src, uint16_t **colbuf,
- uint16_t **linebuf) {
- const int num_planes = av1_num_planes(cm);
- const int stride = (cm->mi_params.mi_cols << MI_SIZE_LOG2) + 2 * CDEF_HBORDER;
- av1_setup_dst_planes(xd->plane, cm->seq_params.sb_size, frame, 0, 0, 0,
- num_planes);
-
- for (uint8_t plane = 0; plane < num_planes; plane++) {
- linebuf[plane] = aom_malloc(sizeof(*linebuf) * CDEF_VBORDER * stride);
- const int mi_high_l2 = MI_SIZE_LOG2 - xd->plane[plane].subsampling_y;
- const int block_height = (MI_SIZE_64X64 << mi_high_l2) + 2 * CDEF_VBORDER;
- colbuf[plane] = aom_malloc(
- sizeof(*colbuf) *
- ((CDEF_BLOCKSIZE << (MI_SIZE_LOG2 - xd->plane[plane].subsampling_y)) +
- 2 * CDEF_VBORDER) *
- CDEF_HBORDER);
- fill_rect(colbuf[plane], CDEF_HBORDER, block_height, CDEF_HBORDER,
- CDEF_VERY_LARGE);
- fb_info->colbuf[plane] = colbuf[plane];
- }
-
- fb_info->src = src;
- fb_info->damping = cm->cdef_info.cdef_damping;
- fb_info->coeff_shift = AOMMAX(cm->seq_params.bit_depth - 8, 0);
- memset(fb_info->dir, 0, sizeof(fb_info->dir));
- memset(fb_info->var, 0, sizeof(fb_info->var));
-}
-
-static void cdef_free(unsigned char *row_cdef, uint16_t **colbuf,
- uint16_t **linebuf, const int num_planes) {
- aom_free(row_cdef);
- for (uint8_t plane = 0; plane < num_planes; plane++) {
- aom_free(colbuf[plane]);
- aom_free(linebuf[plane]);
+ fb_info.frame_boundary[RIGHT] = 1;
+ cdef_fb_col(cm, xd, &fb_info, colbuf, &cdef_left, fbc, fbr);
}
}
@@ -461,29 +414,15 @@ static void cdef_free(unsigned char *row_cdef, uint16_t **colbuf,
// xd: Pointer to common current coding block structure.
// Returns:
// Nothing will be returned.
-void av1_cdef_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
- MACROBLOCKD *xd) {
- DECLARE_ALIGNED(16, uint16_t, src[CDEF_INBUF_SIZE]);
- uint16_t *colbuf[MAX_MB_PLANE] = { NULL };
- uint16_t *linebuf[MAX_MB_PLANE] = { NULL };
- CdefBlockInfo fb_info;
- unsigned char *row_cdef, *prev_row_cdef, *curr_row_cdef;
+void av1_cdef_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *const cm,
+ MACROBLOCKD *xd, cdef_init_fb_row_t cdef_init_fb_row_fn) {
const int num_planes = av1_num_planes(cm);
const int nvfb = (cm->mi_params.mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
- const int nhfb = (cm->mi_params.mi_cols + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
- row_cdef = aom_malloc(sizeof(*row_cdef) * (nhfb + 2) * 2);
- memset(row_cdef, 1, sizeof(*row_cdef) * (nhfb + 2) * 2);
- prev_row_cdef = row_cdef + 1;
- curr_row_cdef = prev_row_cdef + nhfb + 2;
- cdef_prepare_frame(frame, cm, xd, &fb_info, src, colbuf, linebuf);
-
- for (int fbr = 0; fbr < nvfb; fbr++) {
- unsigned char *tmp;
- cdef_fb_row(cm, xd, &fb_info, linebuf, fbr, curr_row_cdef, prev_row_cdef);
- tmp = prev_row_cdef;
- prev_row_cdef = curr_row_cdef;
- curr_row_cdef = tmp;
- }
- cdef_free(row_cdef, colbuf, linebuf, num_planes);
+ av1_setup_dst_planes(xd->plane, cm->seq_params->sb_size, frame, 0, 0, 0,
+ num_planes);
+
+ for (int fbr = 0; fbr < nvfb; fbr++)
+ av1_cdef_fb_row(cm, xd, cm->cdef_info.linebuf, cm->cdef_info.colbuf,
+ cm->cdef_info.srcbuf, fbr, cdef_init_fb_row_fn, NULL);
}
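After this refactor the frame-level entry point no longer allocates its own scratch: the source, column, and line buffers live in cm->cdef_info and the per-row initializer is injected as a callback. A single-threaded call sketch (the wrapper name is hypothetical; av1_cdef_init_fb_row is the default initializer declared in the cdef.h hunk below):

/* Sketch only: apply CDEF to the reconstructed frame on one thread.
 * A multi-threaded path would pass its own cdef_init_fb_row_t callback. */
static void apply_cdef_st(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
                          MACROBLOCKD *xd) {
  av1_cdef_frame(frame, cm, xd, av1_cdef_init_fb_row);
}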
diff --git a/third_party/libaom/source/libaom/av1/common/cdef.h b/third_party/libaom/source/libaom/av1/common/cdef.h
index 4d6e60023b..194117884e 100644
--- a/third_party/libaom/source/libaom/av1/common/cdef.h
+++ b/third_party/libaom/source/libaom/av1/common/cdef.h
@@ -23,6 +23,40 @@
#include "av1/common/av1_common_int.h"
#include "av1/common/cdef_block.h"
+enum { TOP, LEFT, BOTTOM, RIGHT, BOUNDARIES } UENUM1BYTE(BOUNDARY);
+
+struct AV1CdefSyncData;
+
+/*!\brief Parameters related to CDEF Block */
+typedef struct {
+ uint16_t *src; /*!< CDEF intermediate buffer */
+ uint16_t *top_linebuf[MAX_MB_PLANE]; /*!< CDEF top line buffer */
+ uint16_t *bot_linebuf[MAX_MB_PLANE]; /*!< CDEF bottom line buffer */
+ uint8_t *dst; /*!< CDEF destination buffer */
+ cdef_list
+ dlist[MI_SIZE_64X64 * MI_SIZE_64X64]; /*!< CDEF 8x8 block positions */
+
+ int xdec; /*!< Sub-sampling X */
+ int ydec; /*!< Sub-sampling Y */
+ int mi_wide_l2; /*!< Pixels per mi unit in width */
+ int mi_high_l2; /*!< Pixels per mi unit in height */
+ int frame_boundary[BOUNDARIES]; /*!< frame boundaries */
+
+ int damping; /*!< CDEF damping factor */
+ int coeff_shift; /*!< Bit-depth based shift for calculating filter strength */
+ int level; /*!< CDEF filtering level */
+ int sec_strength; /*!< CDEF secondary strength */
+ int cdef_count; /*!< Number of CDEF sub-blocks in superblock */
+ int is_zero_level; /*!< CDEF filtering level ON/OFF */
+ int dir[CDEF_NBLOCKS]
+ [CDEF_NBLOCKS]; /*!< CDEF filter direction for all 8x8 sub-blocks*/
+ int var[CDEF_NBLOCKS][CDEF_NBLOCKS]; /*!< variance for all 8x8 sub-blocks */
+
+ int dst_stride; /*!< CDEF destination buffer stride */
+ int coffset; /*!< current superblock offset in a row */
+ int roffset; /*!< current row offset */
+} CdefBlockInfo;
+
static INLINE int sign(int i) { return i < 0 ? -1 : 1; }
static INLINE int constrain(int diff, int threshold, int damping) {
@@ -41,19 +75,36 @@ int av1_cdef_compute_sb_list(const CommonModeInfoParams *const mi_params,
int mi_row, int mi_col, cdef_list *dlist,
BLOCK_SIZE bsize);
+typedef void (*cdef_init_fb_row_t)(
+ const AV1_COMMON *const cm, const MACROBLOCKD *const xd,
+ CdefBlockInfo *const fb_info, uint16_t **const linebuf, uint16_t *const src,
+ struct AV1CdefSyncData *const cdef_sync, int fbr);
+
/*!\brief Function for applying CDEF to a frame
*
* \ingroup in_loop_cdef
* This function applies CDEF to a frame.
*
- * \param[in, out] frame Compressed frame buffer
- * \param[in, out] cm Pointer to top level common structure
- * \param[in] xd Pointer to common current coding block structure
+ * \param[in, out] frame Compressed frame buffer
+ * \param[in, out] cm Pointer to top level common structure
+ * \param[in] xd Pointer to common current coding block structure
+ * \param[in] cdef_init_fb_row_fn Function Pointer
*
* \return Nothing is returned. Instead, the filtered frame is output in
* \c frame.
*/
-void av1_cdef_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm, MACROBLOCKD *xd);
+void av1_cdef_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *const cm,
+ MACROBLOCKD *xd, cdef_init_fb_row_t cdef_init_fb_row_fn);
+void av1_cdef_fb_row(const AV1_COMMON *const cm, MACROBLOCKD *xd,
+ uint16_t **const linebuf, uint16_t **const colbuf,
+ uint16_t *const src, int fbr,
+ cdef_init_fb_row_t cdef_init_fb_row_fn,
+ struct AV1CdefSyncData *const cdef_sync);
+void av1_cdef_init_fb_row(const AV1_COMMON *const cm,
+ const MACROBLOCKD *const xd,
+ CdefBlockInfo *const fb_info,
+ uint16_t **const linebuf, uint16_t *const src,
+ struct AV1CdefSyncData *const cdef_sync, int fbr);
#ifdef __cplusplus
} // extern "C"
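Any custom row initializer handed to av1_cdef_frame or av1_cdef_fb_row must match cdef_init_fb_row_t above. A conforming pass-through stub, as a sketch (the function name is hypothetical; a real multi-threaded variant would also make use of cdef_sync):

/* Sketch only: row initializer with the required signature that defers to
 * the default implementation. */
static void my_cdef_init_fb_row(const AV1_COMMON *const cm,
                                const MACROBLOCKD *const xd,
                                CdefBlockInfo *const fb_info,
                                uint16_t **const linebuf, uint16_t *const src,
                                struct AV1CdefSyncData *const cdef_sync,
                                int fbr) {
  av1_cdef_init_fb_row(cm, xd, fb_info, linebuf, src, cdef_sync, fbr);
}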
diff --git a/third_party/libaom/source/libaom/av1/common/cdef_block.h b/third_party/libaom/source/libaom/av1/common/cdef_block.h
index 6b0ae0a9db..574df2d0de 100644
--- a/third_party/libaom/source/libaom/av1/common/cdef_block.h
+++ b/third_party/libaom/source/libaom/av1/common/cdef_block.h
@@ -19,8 +19,8 @@
#define CDEF_NBLOCKS ((1 << MAX_SB_SIZE_LOG2) / 8)
#define CDEF_SB_SHIFT (MAX_SB_SIZE_LOG2 - CDEF_BLOCKSIZE_LOG2)
-/* We need to buffer three vertical lines. */
-#define CDEF_VBORDER (3)
+/* We need to buffer two vertical lines. */
+#define CDEF_VBORDER (2)
/* We only need to buffer three horizontal pixels too, but let's align to
16 bytes (8 x 16 bits) to make vectorization easier. */
#define CDEF_HBORDER (8)
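With CDEF_VBORDER reduced to 2, the per-plane line buffer indexed in av1_cdef_init_fb_row earlier in this patch holds two ping-pong top strips plus one bottom strip. A sizing sketch under that assumption (the helper name and the allocation policy are assumptions, not taken from this patch):

/* Sketch only: minimum uint16_t footprint of linebuf[plane], given the
 * offsets 0, CDEF_VBORDER * stride and 2 * CDEF_VBORDER * stride used above. */
static size_t cdef_linebuf_min_size(int luma_stride, int subsampling_x) {
  const int stride = luma_stride >> subsampling_x;
  return (size_t)3 * CDEF_VBORDER * (size_t)stride * sizeof(uint16_t);
}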
diff --git a/third_party/libaom/source/libaom/av1/common/cfl.h b/third_party/libaom/source/libaom/av1/common/cfl.h
index 0062e9f7ba..0d53764f28 100644
--- a/third_party/libaom/source/libaom/av1/common/cfl.h
+++ b/third_party/libaom/source/libaom/av1/common/cfl.h
@@ -39,7 +39,7 @@ static INLINE CFL_ALLOWED_TYPE store_cfl_required(const AV1_COMMON *cm,
const MACROBLOCKD *xd) {
const MB_MODE_INFO *mbmi = xd->mi[0];
- if (cm->seq_params.monochrome) return CFL_DISALLOWED;
+ if (cm->seq_params->monochrome) return CFL_DISALLOWED;
if (!xd->is_chroma_ref) {
// For non-chroma-reference blocks, we should always store the luma pixels,
diff --git a/third_party/libaom/source/libaom/av1/common/common.h b/third_party/libaom/source/libaom/av1/common/common.h
index bed6083db2..cc2da98a16 100644
--- a/third_party/libaom/source/libaom/av1/common/common.h
+++ b/third_party/libaom/source/libaom/av1/common/common.h
@@ -50,7 +50,7 @@ static INLINE int get_unsigned_bits(unsigned int num_values) {
}
#define CHECK_MEM_ERROR(cm, lval, expr) \
- AOM_CHECK_MEM_ERROR(&cm->error, lval, expr)
+ AOM_CHECK_MEM_ERROR(cm->error, lval, expr)
#define AOM_FRAME_MARKER 0x2
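Call sites of the macro are unchanged; only its expansion now dereferences cm->error. A usage sketch (the helper name and the size argument are placeholders):

/* Sketch only: allocate a scratch buffer through the updated macro, which
 * now expands to AOM_CHECK_MEM_ERROR(cm->error, buf, aom_malloc(size)). */
static uint8_t *alloc_scratch(AV1_COMMON *cm, size_t size) {
  uint8_t *buf;
  CHECK_MEM_ERROR(cm, buf, aom_malloc(size));
  return buf;
}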
diff --git a/third_party/libaom/source/libaom/av1/common/common_data.h b/third_party/libaom/source/libaom/av1/common/common_data.h
index 402845cafe..38e14714c0 100644
--- a/third_party/libaom/source/libaom/av1/common/common_data.h
+++ b/third_party/libaom/source/libaom/av1/common/common_data.h
@@ -434,9 +434,12 @@ static const int intra_mode_context[INTRA_MODES] = {
static const int quant_dist_weight[4][2] = {
{ 2, 3 }, { 2, 5 }, { 2, 7 }, { 1, MAX_FRAME_DISTANCE }
};
-static const int quant_dist_lookup_table[2][4][2] = {
- { { 9, 7 }, { 11, 5 }, { 12, 4 }, { 13, 3 } },
- { { 7, 9 }, { 5, 11 }, { 4, 12 }, { 3, 13 } },
+
+static const int quant_dist_lookup_table[4][2] = {
+ { 9, 7 },
+ { 11, 5 },
+ { 12, 4 },
+ { 13, 3 },
};
#ifdef __cplusplus
diff --git a/third_party/libaom/source/libaom/av1/common/enums.h b/third_party/libaom/source/libaom/av1/common/enums.h
index 9c2976b08d..0e1e744daf 100644
--- a/third_party/libaom/source/libaom/av1/common/enums.h
+++ b/third_party/libaom/source/libaom/av1/common/enums.h
@@ -321,6 +321,7 @@ enum { PLANE_TYPE_Y, PLANE_TYPE_UV, PLANE_TYPES } UENUM1BYTE(PLANE_TYPE);
#define CFL_ALPHABET_SIZE_LOG2 4
#define CFL_ALPHABET_SIZE (1 << CFL_ALPHABET_SIZE_LOG2)
#define CFL_MAGS_SIZE ((2 << CFL_ALPHABET_SIZE_LOG2) + 1)
+#define CFL_INDEX_ZERO CFL_ALPHABET_SIZE
#define CFL_IDX_U(idx) (idx >> CFL_ALPHABET_SIZE_LOG2)
#define CFL_IDX_V(idx) (idx & (CFL_ALPHABET_SIZE - 1))
@@ -451,6 +452,14 @@ enum {
UV_MODE_INVALID, // For uv_mode in inter blocks
} UENUM1BYTE(UV_PREDICTION_MODE);
+// Number of top model RD values to store for pruning Y modes in intra mode decision
+#define TOP_INTRA_MODEL_COUNT 4
+// Total number of luma intra prediction modes (including both directional and
+// non-directional modes)
+// 61 = PAETH_PRED - DC_PRED + 1 + 6 * 8
+// Because there are 8 directional modes, each with 6 additional delta angles.
+#define LUMA_MODE_COUNT 61
+
enum {
SIMPLE_TRANSLATION,
OBMC_CAUSAL, // 2-sided OBMC
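The arithmetic behind the new constant can be checked at compile time; a minimal C11 sketch, assuming the PREDICTION_MODE enum defined earlier in this header is in scope:

/* 13 base modes (DC_PRED..PAETH_PRED) plus 8 directional modes with 6 extra
 * delta angles each. */
_Static_assert(LUMA_MODE_COUNT == (PAETH_PRED - DC_PRED + 1) + 8 * 6,
               "LUMA_MODE_COUNT must cover all luma intra modes");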
diff --git a/third_party/libaom/source/libaom/av1/common/loopfiltermask.c b/third_party/libaom/source/libaom/av1/common/loopfiltermask.c
index 1ae0b112ce..22ab0adf2c 100644
--- a/third_party/libaom/source/libaom/av1/common/loopfiltermask.c
+++ b/third_party/libaom/source/libaom/av1/common/loopfiltermask.c
@@ -1002,11 +1002,11 @@ void av1_filter_block_plane_bitmask_vert(
}
#if CONFIG_AV1_HIGHBITDEPTH
- if (cm->seq_params.use_highbitdepth)
+ if (cm->seq_params->use_highbitdepth)
highbd_filter_selectively_vert_row2(
ssx, CONVERT_TO_SHORTPTR(dst->buf), dst->stride, pl, mask_16x16_0,
mask_8x8_0, mask_4x4_0, mask_16x16_1, mask_8x8_1, mask_4x4_1,
- &cm->lf_info, lfl, lfl2, (int)cm->seq_params.bit_depth);
+ &cm->lf_info, lfl, lfl2, (int)cm->seq_params->bit_depth);
else
filter_selectively_vert_row2(
ssx, dst->buf, dst->stride, pl, mask_16x16_0, mask_8x8_0, mask_4x4_0,
@@ -1075,10 +1075,11 @@ void av1_filter_block_plane_bitmask_horz(
mask_4x4 = (mask_4x4 >> shift) & mask_cutoff;
#if CONFIG_AV1_HIGHBITDEPTH
- if (cm->seq_params.use_highbitdepth)
- highbd_filter_selectively_horiz(
- CONVERT_TO_SHORTPTR(dst->buf), dst->stride, pl, ssx, mask_16x16,
- mask_8x8, mask_4x4, &cm->lf_info, lfl, (int)cm->seq_params.bit_depth);
+ if (cm->seq_params->use_highbitdepth)
+ highbd_filter_selectively_horiz(CONVERT_TO_SHORTPTR(dst->buf),
+ dst->stride, pl, ssx, mask_16x16,
+ mask_8x8, mask_4x4, &cm->lf_info, lfl,
+ (int)cm->seq_params->bit_depth);
else
filter_selectively_horiz(dst->buf, dst->stride, pl, ssx, mask_16x16,
mask_8x8, mask_4x4, &cm->lf_info, lfl);
@@ -1109,10 +1110,10 @@ void av1_filter_block_plane_ver(AV1_COMMON *const cm,
uint8_t *lfl2;
// filter two rows at a time
- for (r = 0; r < cm->seq_params.mib_size &&
+ for (r = 0; r < cm->seq_params->mib_size &&
((mi_row + r) << MI_SIZE_LOG2 < cm->height);
r += r_step) {
- for (c = 0; c < cm->seq_params.mib_size &&
+ for (c = 0; c < cm->seq_params->mib_size &&
((mi_col + c) << MI_SIZE_LOG2 < cm->width);
c += MI_SIZE_64X64) {
dst->buf += ((c << MI_SIZE_LOG2) >> ssx);
@@ -1159,11 +1160,11 @@ void av1_filter_block_plane_ver(AV1_COMMON *const cm,
uint64_t mask_4x4_1 = (mask_4x4 >> shift_next) & mask_cutoff;
#if CONFIG_AV1_HIGHBITDEPTH
- if (cm->seq_params.use_highbitdepth)
+ if (cm->seq_params->use_highbitdepth)
highbd_filter_selectively_vert_row2(
ssx, CONVERT_TO_SHORTPTR(dst->buf), dst->stride, pl, mask_16x16_0,
mask_8x8_0, mask_4x4_0, mask_16x16_1, mask_8x8_1, mask_4x4_1,
- &cm->lf_info, lfl, lfl2, (int)cm->seq_params.bit_depth);
+ &cm->lf_info, lfl, lfl2, (int)cm->seq_params->bit_depth);
else
filter_selectively_vert_row2(ssx, dst->buf, dst->stride, pl,
mask_16x16_0, mask_8x8_0, mask_4x4_0,
@@ -1194,10 +1195,10 @@ void av1_filter_block_plane_hor(AV1_COMMON *const cm,
uint64_t mask_4x4 = 0;
uint8_t *lfl;
- for (r = 0; r < cm->seq_params.mib_size &&
+ for (r = 0; r < cm->seq_params->mib_size &&
((mi_row + r) << MI_SIZE_LOG2 < cm->height);
r += r_step) {
- for (c = 0; c < cm->seq_params.mib_size &&
+ for (c = 0; c < cm->seq_params->mib_size &&
((mi_col + c) << MI_SIZE_LOG2 < cm->width);
c += MI_SIZE_64X64) {
if (mi_row + r == 0) continue;
@@ -1235,11 +1236,11 @@ void av1_filter_block_plane_hor(AV1_COMMON *const cm,
mask_4x4 = (mask_4x4 >> shift) & mask_cutoff;
#if CONFIG_AV1_HIGHBITDEPTH
- if (cm->seq_params.use_highbitdepth)
+ if (cm->seq_params->use_highbitdepth)
highbd_filter_selectively_horiz(CONVERT_TO_SHORTPTR(dst->buf),
dst->stride, pl, ssx, mask_16x16,
mask_8x8, mask_4x4, &cm->lf_info, lfl,
- (int)cm->seq_params.bit_depth);
+ (int)cm->seq_params->bit_depth);
else
filter_selectively_horiz(dst->buf, dst->stride, pl, ssx, mask_16x16,
mask_8x8, mask_4x4, &cm->lf_info, lfl);
@@ -1260,9 +1261,11 @@ void av1_store_bitmask_vartx(AV1_COMMON *cm, int mi_row, int mi_col,
const TX_SIZE tx_size_y_vert = txsize_vert_map[tx_size];
const TX_SIZE tx_size_y_horz = txsize_horz_map[tx_size];
const TX_SIZE tx_size_uv_vert = txsize_vert_map[av1_get_max_uv_txsize(
- mbmi->bsize, cm->seq_params.subsampling_x, cm->seq_params.subsampling_y)];
+ mbmi->bsize, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y)];
const TX_SIZE tx_size_uv_horz = txsize_horz_map[av1_get_max_uv_txsize(
- mbmi->bsize, cm->seq_params.subsampling_x, cm->seq_params.subsampling_y)];
+ mbmi->bsize, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y)];
const int is_square_transform_size = tx_size <= TX_64X64;
int mask_id = 0;
int offset = 0;
@@ -1330,9 +1333,11 @@ void av1_store_bitmask_univariant_tx(AV1_COMMON *cm, int mi_row, int mi_col,
const TX_SIZE tx_size_y_vert = txsize_vert_map[mbmi->tx_size];
const TX_SIZE tx_size_y_horz = txsize_horz_map[mbmi->tx_size];
const TX_SIZE tx_size_uv_vert = txsize_vert_map[av1_get_max_uv_txsize(
- mbmi->bsize, cm->seq_params.subsampling_x, cm->seq_params.subsampling_y)];
+ mbmi->bsize, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y)];
const TX_SIZE tx_size_uv_horz = txsize_horz_map[av1_get_max_uv_txsize(
- mbmi->bsize, cm->seq_params.subsampling_x, cm->seq_params.subsampling_y)];
+ mbmi->bsize, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y)];
const int is_square_transform_size = mbmi->tx_size <= TX_64X64;
int mask_id = 0;
int offset = 0;
diff --git a/third_party/libaom/source/libaom/av1/common/mv.h b/third_party/libaom/source/libaom/av1/common/mv.h
index be539e8201..3203bf7278 100644
--- a/third_party/libaom/source/libaom/av1/common/mv.h
+++ b/third_party/libaom/source/libaom/av1/common/mv.h
@@ -12,6 +12,8 @@
#ifndef AOM_AV1_COMMON_MV_H_
#define AOM_AV1_COMMON_MV_H_
+#include <stdlib.h>
+
#include "av1/common/common.h"
#include "av1/common/common_data.h"
#include "aom_dsp/aom_filter.h"
diff --git a/third_party/libaom/source/libaom/av1/common/mvref_common.c b/third_party/libaom/source/libaom/av1/common/mvref_common.c
index 04e050a691..3431e7d6ad 100644
--- a/third_party/libaom/source/libaom/av1/common/mvref_common.c
+++ b/third_party/libaom/source/libaom/av1/common/mvref_common.c
@@ -258,7 +258,7 @@ static AOM_INLINE void scan_blk_mbmi(
static int has_top_right(const AV1_COMMON *cm, const MACROBLOCKD *xd,
int mi_row, int mi_col, int bs) {
- const int sb_mi_size = mi_size_wide[cm->seq_params.sb_size];
+ const int sb_mi_size = mi_size_wide[cm->seq_params->sb_size];
const int mask_row = mi_row & (sb_mi_size - 1);
const int mask_col = mi_col & (sb_mi_size - 1);
@@ -347,7 +347,7 @@ static int add_tpl_ref_mv(const AV1_COMMON *cm, const MACROBLOCKD *xd,
const int cur_frame_index = cm->cur_frame->order_hint;
const RefCntBuffer *const buf_0 = get_ref_frame_buf(cm, rf[0]);
const int frame0_index = buf_0->order_hint;
- const int cur_offset_0 = get_relative_dist(&cm->seq_params.order_hint_info,
+ const int cur_offset_0 = get_relative_dist(&cm->seq_params->order_hint_info,
cur_frame_index, frame0_index);
int idx;
const int allow_high_precision_mv = cm->features.allow_high_precision_mv;
@@ -380,7 +380,7 @@ static int add_tpl_ref_mv(const AV1_COMMON *cm, const MACROBLOCKD *xd,
// Process compound inter mode
const RefCntBuffer *const buf_1 = get_ref_frame_buf(cm, rf[1]);
const int frame1_index = buf_1->order_hint;
- const int cur_offset_1 = get_relative_dist(&cm->seq_params.order_hint_info,
+ const int cur_offset_1 = get_relative_dist(&cm->seq_params->order_hint_info,
cur_frame_index, frame1_index);
int_mv comp_refmv;
get_mv_projection(&comp_refmv.as_mv, prev_frame_mvs->mfmv0.as_mv,
@@ -838,7 +838,9 @@ void av1_find_best_ref_mvs(int allow_hp, int_mv *mvlist, int_mv *nearest_mv,
void av1_setup_frame_buf_refs(AV1_COMMON *cm) {
cm->cur_frame->order_hint = cm->current_frame.order_hint;
cm->cur_frame->display_order_hint = cm->current_frame.display_order_hint;
-
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cm->cur_frame->pyramid_level = cm->current_frame.pyramid_level;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
MV_REFERENCE_FRAME ref_frame;
for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
@@ -854,10 +856,10 @@ void av1_setup_frame_sign_bias(AV1_COMMON *cm) {
MV_REFERENCE_FRAME ref_frame;
for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
- if (cm->seq_params.order_hint_info.enable_order_hint && buf != NULL) {
+ if (cm->seq_params->order_hint_info.enable_order_hint && buf != NULL) {
const int ref_order_hint = buf->order_hint;
cm->ref_frame_sign_bias[ref_frame] =
- (get_relative_dist(&cm->seq_params.order_hint_info, ref_order_hint,
+ (get_relative_dist(&cm->seq_params->order_hint_info, ref_order_hint,
(int)cm->current_frame.order_hint) <= 0)
? 0
: 1;
@@ -930,10 +932,10 @@ static int motion_field_projection(AV1_COMMON *cm,
&start_frame_buf->ref_order_hints[0];
const int cur_order_hint = cm->cur_frame->order_hint;
int start_to_current_frame_offset = get_relative_dist(
- &cm->seq_params.order_hint_info, start_frame_order_hint, cur_order_hint);
+ &cm->seq_params->order_hint_info, start_frame_order_hint, cur_order_hint);
for (MV_REFERENCE_FRAME rf = LAST_FRAME; rf <= INTER_REFS_PER_FRAME; ++rf) {
- ref_offset[rf] = get_relative_dist(&cm->seq_params.order_hint_info,
+ ref_offset[rf] = get_relative_dist(&cm->seq_params->order_hint_info,
start_frame_order_hint,
ref_order_hints[rf - LAST_FRAME]);
}
@@ -981,7 +983,7 @@ static int motion_field_projection(AV1_COMMON *cm,
}
void av1_setup_motion_field(AV1_COMMON *cm) {
- const OrderHintInfo *const order_hint_info = &cm->seq_params.order_hint_info;
+ const OrderHintInfo *const order_hint_info = &cm->seq_params->order_hint_info;
memset(cm->ref_frame_side, 0, sizeof(cm->ref_frame_side));
if (!order_hint_info->enable_order_hint) return;
@@ -1219,7 +1221,7 @@ uint8_t av1_findSamples(const AV1_COMMON *cm, MACROBLOCKD *xd, int *pts,
}
void av1_setup_skip_mode_allowed(AV1_COMMON *cm) {
- const OrderHintInfo *const order_hint_info = &cm->seq_params.order_hint_info;
+ const OrderHintInfo *const order_hint_info = &cm->seq_params->order_hint_info;
SkipModeInfo *const skip_mode_info = &cm->current_frame.skip_mode_info;
skip_mode_info->skip_mode_allowed = 0;
@@ -1323,11 +1325,11 @@ void av1_set_frame_refs(AV1_COMMON *const cm, int *remapped_ref_idx,
int lst_frame_sort_idx = -1;
int gld_frame_sort_idx = -1;
- assert(cm->seq_params.order_hint_info.enable_order_hint);
- assert(cm->seq_params.order_hint_info.order_hint_bits_minus_1 >= 0);
+ assert(cm->seq_params->order_hint_info.enable_order_hint);
+ assert(cm->seq_params->order_hint_info.order_hint_bits_minus_1 >= 0);
const int cur_order_hint = (int)cm->current_frame.order_hint;
const int cur_frame_sort_idx =
- 1 << cm->seq_params.order_hint_info.order_hint_bits_minus_1;
+ 1 << cm->seq_params->order_hint_info.order_hint_bits_minus_1;
REF_FRAME_INFO ref_frame_info[REF_FRAMES];
int ref_flag_list[INTER_REFS_PER_FRAME] = { 0, 0, 0, 0, 0, 0, 0 };
@@ -1349,7 +1351,7 @@ void av1_set_frame_refs(AV1_COMMON *const cm, int *remapped_ref_idx,
ref_frame_info[i].sort_idx =
(offset == -1) ? -1
: cur_frame_sort_idx +
- get_relative_dist(&cm->seq_params.order_hint_info,
+ get_relative_dist(&cm->seq_params->order_hint_info,
offset, cur_order_hint);
assert(ref_frame_info[i].sort_idx >= -1);
@@ -1360,11 +1362,11 @@ void av1_set_frame_refs(AV1_COMMON *const cm, int *remapped_ref_idx,
// Confirm both LAST_FRAME and GOLDEN_FRAME are valid forward reference
// frames.
if (lst_frame_sort_idx == -1 || lst_frame_sort_idx >= cur_frame_sort_idx) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Inter frame requests a look-ahead frame as LAST");
}
if (gld_frame_sort_idx == -1 || gld_frame_sort_idx >= cur_frame_sort_idx) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Inter frame requests a look-ahead frame as GOLDEN");
}
diff --git a/third_party/libaom/source/libaom/av1/common/pred_common.h b/third_party/libaom/source/libaom/av1/common/pred_common.h
index 12bcce84f2..3db9dd69ef 100644
--- a/third_party/libaom/source/libaom/av1/common/pred_common.h
+++ b/third_party/libaom/source/libaom/av1/common/pred_common.h
@@ -107,9 +107,9 @@ static INLINE int get_comp_index_context(const AV1_COMMON *cm,
if (bck_buf != NULL) bck_frame_index = bck_buf->order_hint;
if (fwd_buf != NULL) fwd_frame_index = fwd_buf->order_hint;
- int fwd = abs(get_relative_dist(&cm->seq_params.order_hint_info,
+ int fwd = abs(get_relative_dist(&cm->seq_params->order_hint_info,
fwd_frame_index, cur_frame_index));
- int bck = abs(get_relative_dist(&cm->seq_params.order_hint_info,
+ int bck = abs(get_relative_dist(&cm->seq_params->order_hint_info,
cur_frame_index, bck_frame_index));
const MB_MODE_INFO *const above_mi = xd->above_mbmi;
diff --git a/third_party/libaom/source/libaom/av1/common/reconinter.c b/third_party/libaom/source/libaom/av1/common/reconinter.c
index ad155b26ae..70f4c6d5ee 100644
--- a/third_party/libaom/source/libaom/av1/common/reconinter.c
+++ b/third_party/libaom/source/libaom/av1/common/reconinter.c
@@ -713,8 +713,8 @@ void av1_build_one_inter_predictor(
}
void av1_dist_wtd_comp_weight_assign(const AV1_COMMON *cm,
- const MB_MODE_INFO *mbmi, int order_idx,
- int *fwd_offset, int *bck_offset,
+ const MB_MODE_INFO *mbmi, int *fwd_offset,
+ int *bck_offset,
int *use_dist_wtd_comp_avg,
int is_compound) {
assert(fwd_offset != NULL && bck_offset != NULL);
@@ -734,18 +734,18 @@ void av1_dist_wtd_comp_weight_assign(const AV1_COMMON *cm,
if (bck_buf != NULL) bck_frame_index = bck_buf->order_hint;
if (fwd_buf != NULL) fwd_frame_index = fwd_buf->order_hint;
- int d0 = clamp(abs(get_relative_dist(&cm->seq_params.order_hint_info,
+ int d0 = clamp(abs(get_relative_dist(&cm->seq_params->order_hint_info,
fwd_frame_index, cur_frame_index)),
0, MAX_FRAME_DISTANCE);
- int d1 = clamp(abs(get_relative_dist(&cm->seq_params.order_hint_info,
+ int d1 = clamp(abs(get_relative_dist(&cm->seq_params->order_hint_info,
cur_frame_index, bck_frame_index)),
0, MAX_FRAME_DISTANCE);
const int order = d0 <= d1;
if (d0 == 0 || d1 == 0) {
- *fwd_offset = quant_dist_lookup_table[order_idx][3][order];
- *bck_offset = quant_dist_lookup_table[order_idx][3][1 - order];
+ *fwd_offset = quant_dist_lookup_table[3][order];
+ *bck_offset = quant_dist_lookup_table[3][1 - order];
return;
}
@@ -758,8 +758,8 @@ void av1_dist_wtd_comp_weight_assign(const AV1_COMMON *cm,
if ((d0 > d1 && d0_c0 < d1_c1) || (d0 <= d1 && d0_c0 > d1_c1)) break;
}
- *fwd_offset = quant_dist_lookup_table[order_idx][i][order];
- *bck_offset = quant_dist_lookup_table[order_idx][i][1 - order];
+ *fwd_offset = quant_dist_lookup_table[i][order];
+ *bck_offset = quant_dist_lookup_table[i][1 - order];
}
// True if the following hold:
@@ -911,7 +911,7 @@ static void build_inter_predictors_8x8_and_bigger(
ref, plane, xd->tmp_conv_dst, MAX_SB_SIZE, is_compound, xd->bd);
av1_dist_wtd_comp_weight_assign(
- cm, mi, 0, &inter_pred_params.conv_params.fwd_offset,
+ cm, mi, &inter_pred_params.conv_params.fwd_offset,
&inter_pred_params.conv_params.bck_offset,
&inter_pred_params.conv_params.use_dist_wtd_comp_avg, is_compound);
@@ -1189,7 +1189,6 @@ void av1_build_obmc_inter_prediction(const AV1_COMMON *cm, MACROBLOCKD *xd,
void av1_setup_obmc_dst_bufs(MACROBLOCKD *xd, uint8_t **dst_buf1,
uint8_t **dst_buf2) {
-#if CONFIG_AV1_HIGHBITDEPTH
if (is_cur_buf_hbd(xd)) {
int len = sizeof(uint16_t);
dst_buf1[0] = CONVERT_TO_BYTEPTR(xd->tmp_obmc_bufs[0]);
@@ -1203,16 +1202,13 @@ void av1_setup_obmc_dst_bufs(MACROBLOCKD *xd, uint8_t **dst_buf1,
dst_buf2[2] =
CONVERT_TO_BYTEPTR(xd->tmp_obmc_bufs[1] + MAX_SB_SQUARE * 2 * len);
} else {
-#endif // CONFIG_AV1_HIGHBITDEPTH
dst_buf1[0] = xd->tmp_obmc_bufs[0];
dst_buf1[1] = xd->tmp_obmc_bufs[0] + MAX_SB_SQUARE;
dst_buf1[2] = xd->tmp_obmc_bufs[0] + MAX_SB_SQUARE * 2;
dst_buf2[0] = xd->tmp_obmc_bufs[1];
dst_buf2[1] = xd->tmp_obmc_bufs[1] + MAX_SB_SQUARE;
dst_buf2[2] = xd->tmp_obmc_bufs[1] + MAX_SB_SQUARE * 2;
-#if CONFIG_AV1_HIGHBITDEPTH
}
-#endif // CONFIG_AV1_HIGHBITDEPTH
}
void av1_setup_build_prediction_by_above_pred(
@@ -1363,10 +1359,12 @@ void av1_build_intra_predictors_for_interintra(const AV1_COMMON *cm,
assert(xd->mi[0]->angle_delta[PLANE_TYPE_UV] == 0);
assert(xd->mi[0]->filter_intra_mode_info.use_filter_intra == 0);
assert(xd->mi[0]->use_intrabc == 0);
+ const SequenceHeader *seq_params = cm->seq_params;
- av1_predict_intra_block(cm, xd, pd->width, pd->height,
- max_txsize_rect_lookup[plane_bsize], mode, 0, 0,
- FILTER_INTRA_MODES, ctx->plane[plane],
+ av1_predict_intra_block(xd, seq_params->sb_size,
+ seq_params->enable_intra_edge_filter, pd->width,
+ pd->height, max_txsize_rect_lookup[plane_bsize], mode,
+ 0, 0, FILTER_INTRA_MODES, ctx->plane[plane],
ctx->stride[plane], dst, dst_stride, 0, 0, plane);
}
diff --git a/third_party/libaom/source/libaom/av1/common/reconinter.h b/third_party/libaom/source/libaom/av1/common/reconinter.h
index c8696160b6..056dc67d07 100644
--- a/third_party/libaom/source/libaom/av1/common/reconinter.h
+++ b/third_party/libaom/source/libaom/av1/common/reconinter.h
@@ -368,8 +368,8 @@ static INLINE const uint8_t *av1_get_contiguous_soft_mask(int8_t wedge_index,
}
void av1_dist_wtd_comp_weight_assign(const AV1_COMMON *cm,
- const MB_MODE_INFO *mbmi, int order_idx,
- int *fwd_offset, int *bck_offset,
+ const MB_MODE_INFO *mbmi, int *fwd_offset,
+ int *bck_offset,
int *use_dist_wtd_comp_avg,
int is_compound);
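Callers simply drop the old order_idx argument, as in build_inter_predictors_8x8_and_bigger above; a thin wrapper sketch against the new declaration (the wrapper name is hypothetical):

/* Sketch only: fetch distance-weighted compound prediction offsets with the
 * new six-argument signature. */
static void get_dist_wtd_offsets(const AV1_COMMON *cm,
                                 const MB_MODE_INFO *mbmi, int is_compound,
                                 int *fwd_offset, int *bck_offset,
                                 int *use_dist_wtd_comp_avg) {
  av1_dist_wtd_comp_weight_assign(cm, mbmi, fwd_offset, bck_offset,
                                  use_dist_wtd_comp_avg, is_compound);
}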
diff --git a/third_party/libaom/source/libaom/av1/common/reconintra.c b/third_party/libaom/source/libaom/av1/common/reconintra.c
index 0c01f92183..51b01786ea 100644
--- a/third_party/libaom/source/libaom/av1/common/reconintra.c
+++ b/third_party/libaom/source/libaom/av1/common/reconintra.c
@@ -193,7 +193,7 @@ static const uint8_t *get_has_tr_table(PARTITION_TYPE partition,
return ret;
}
-static int has_top_right(const AV1_COMMON *cm, BLOCK_SIZE bsize, int mi_row,
+static int has_top_right(BLOCK_SIZE sb_size, BLOCK_SIZE bsize, int mi_row,
int mi_col, int top_available, int right_available,
PARTITION_TYPE partition, TX_SIZE txsz, int row_off,
int col_off, int ss_x, int ss_y) {
@@ -223,7 +223,7 @@ static int has_top_right(const AV1_COMMON *cm, BLOCK_SIZE bsize, int mi_row,
const int bw_in_mi_log2 = mi_size_wide_log2[bsize];
const int bh_in_mi_log2 = mi_size_high_log2[bsize];
- const int sb_mi_size = mi_size_high[cm->seq_params.sb_size];
+ const int sb_mi_size = mi_size_high[sb_size];
const int blk_row_in_sb = (mi_row & (sb_mi_size - 1)) >> bh_in_mi_log2;
const int blk_col_in_sb = (mi_col & (sb_mi_size - 1)) >> bw_in_mi_log2;
@@ -378,7 +378,7 @@ static const uint8_t *get_has_bl_table(PARTITION_TYPE partition,
return ret;
}
-static int has_bottom_left(const AV1_COMMON *cm, BLOCK_SIZE bsize, int mi_row,
+static int has_bottom_left(BLOCK_SIZE sb_size, BLOCK_SIZE bsize, int mi_row,
int mi_col, int bottom_available, int left_available,
PARTITION_TYPE partition, TX_SIZE txsz, int row_off,
int col_off, int ss_x, int ss_y) {
@@ -415,7 +415,7 @@ static int has_bottom_left(const AV1_COMMON *cm, BLOCK_SIZE bsize, int mi_row,
const int bw_in_mi_log2 = mi_size_wide_log2[bsize];
const int bh_in_mi_log2 = mi_size_high_log2[bsize];
- const int sb_mi_size = mi_size_high[cm->seq_params.sb_size];
+ const int sb_mi_size = mi_size_high[sb_size];
const int blk_row_in_sb = (mi_row & (sb_mi_size - 1)) >> bh_in_mi_log2;
const int blk_col_in_sb = (mi_col & (sb_mi_size - 1)) >> bw_in_mi_log2;
@@ -971,7 +971,7 @@ static int is_smooth(const MB_MODE_INFO *mbmi, int plane) {
}
}
-static int get_filt_type(const MACROBLOCKD *xd, int plane) {
+static int get_intra_edge_filter_type(const MACROBLOCKD *xd, int plane) {
int ab_sm, le_sm;
if (plane == 0) {
@@ -1144,11 +1144,11 @@ void av1_upsample_intra_edge_high_c(uint16_t *p, int sz, int bd) {
}
#if CONFIG_AV1_HIGHBITDEPTH
static void build_intra_predictors_high(
- const MACROBLOCKD *xd, const uint8_t *ref8, int ref_stride, uint8_t *dst8,
- int dst_stride, PREDICTION_MODE mode, int angle_delta,
- FILTER_INTRA_MODE filter_intra_mode, TX_SIZE tx_size,
- int disable_edge_filter, int n_top_px, int n_topright_px, int n_left_px,
- int n_bottomleft_px, int plane) {
+ const uint8_t *ref8, int ref_stride, uint8_t *dst8, int dst_stride,
+ PREDICTION_MODE mode, int angle_delta, FILTER_INTRA_MODE filter_intra_mode,
+ TX_SIZE tx_size, int disable_edge_filter, int n_top_px, int n_topright_px,
+ int n_left_px, int n_bottomleft_px, int intra_edge_filter_type,
+ int bit_depth) {
int i;
uint16_t *dst = CONVERT_TO_SHORTPTR(dst8);
uint16_t *ref = CONVERT_TO_SHORTPTR(ref8);
@@ -1166,7 +1166,7 @@ static void build_intra_predictors_high(
int p_angle = 0;
const int is_dr_mode = av1_is_directional_mode(mode);
const int use_filter_intra = filter_intra_mode != FILTER_INTRA_MODES;
- int base = 128 << (xd->bd - 8);
+ int base = 128 << (bit_depth - 8);
// The left_data, above_data buffers must be zeroed to fix some intermittent
// valgrind errors. Uninitialized reads in intra pred modules (e.g. width = 4
// path in av1_highbd_dr_prediction_z2_avx2()) from left_data, above_data are
@@ -1270,7 +1270,7 @@ static void build_intra_predictors_high(
if (use_filter_intra) {
highbd_filter_intra_predictor(dst, dst_stride, tx_size, above_row, left_col,
- filter_intra_mode, xd->bd);
+ filter_intra_mode, bit_depth);
return;
}
@@ -1280,61 +1280,57 @@ static void build_intra_predictors_high(
if (!disable_edge_filter) {
const int need_right = p_angle < 90;
const int need_bottom = p_angle > 180;
- const int filt_type = get_filt_type(xd, plane);
if (p_angle != 90 && p_angle != 180) {
const int ab_le = need_above_left ? 1 : 0;
if (need_above && need_left && (txwpx + txhpx >= 24)) {
filter_intra_edge_corner_high(above_row, left_col);
}
if (need_above && n_top_px > 0) {
- const int strength =
- intra_edge_filter_strength(txwpx, txhpx, p_angle - 90, filt_type);
+ const int strength = intra_edge_filter_strength(
+ txwpx, txhpx, p_angle - 90, intra_edge_filter_type);
const int n_px = n_top_px + ab_le + (need_right ? txhpx : 0);
av1_filter_intra_edge_high(above_row - ab_le, n_px, strength);
}
if (need_left && n_left_px > 0) {
const int strength = intra_edge_filter_strength(
- txhpx, txwpx, p_angle - 180, filt_type);
+ txhpx, txwpx, p_angle - 180, intra_edge_filter_type);
const int n_px = n_left_px + ab_le + (need_bottom ? txwpx : 0);
av1_filter_intra_edge_high(left_col - ab_le, n_px, strength);
}
}
- upsample_above =
- av1_use_intra_edge_upsample(txwpx, txhpx, p_angle - 90, filt_type);
+ upsample_above = av1_use_intra_edge_upsample(txwpx, txhpx, p_angle - 90,
+ intra_edge_filter_type);
if (need_above && upsample_above) {
const int n_px = txwpx + (need_right ? txhpx : 0);
- av1_upsample_intra_edge_high(above_row, n_px, xd->bd);
+ av1_upsample_intra_edge_high(above_row, n_px, bit_depth);
}
- upsample_left =
- av1_use_intra_edge_upsample(txhpx, txwpx, p_angle - 180, filt_type);
+ upsample_left = av1_use_intra_edge_upsample(txhpx, txwpx, p_angle - 180,
+ intra_edge_filter_type);
if (need_left && upsample_left) {
const int n_px = txhpx + (need_bottom ? txwpx : 0);
- av1_upsample_intra_edge_high(left_col, n_px, xd->bd);
+ av1_upsample_intra_edge_high(left_col, n_px, bit_depth);
}
}
highbd_dr_predictor(dst, dst_stride, tx_size, above_row, left_col,
- upsample_above, upsample_left, p_angle, xd->bd);
+ upsample_above, upsample_left, p_angle, bit_depth);
return;
}
// predict
if (mode == DC_PRED) {
dc_pred_high[n_left_px > 0][n_top_px > 0][tx_size](
- dst, dst_stride, above_row, left_col, xd->bd);
+ dst, dst_stride, above_row, left_col, bit_depth);
} else {
- pred_high[mode][tx_size](dst, dst_stride, above_row, left_col, xd->bd);
+ pred_high[mode][tx_size](dst, dst_stride, above_row, left_col, bit_depth);
}
}
#endif // CONFIG_AV1_HIGHBITDEPTH
-static void build_intra_predictors(const MACROBLOCKD *xd, const uint8_t *ref,
- int ref_stride, uint8_t *dst, int dst_stride,
- PREDICTION_MODE mode, int angle_delta,
- FILTER_INTRA_MODE filter_intra_mode,
- TX_SIZE tx_size, int disable_edge_filter,
- int n_top_px, int n_topright_px,
- int n_left_px, int n_bottomleft_px,
- int plane) {
+static void build_intra_predictors(
+ const uint8_t *ref, int ref_stride, uint8_t *dst, int dst_stride,
+ PREDICTION_MODE mode, int angle_delta, FILTER_INTRA_MODE filter_intra_mode,
+ TX_SIZE tx_size, int disable_edge_filter, int n_top_px, int n_topright_px,
+ int n_left_px, int n_bottomleft_px, int intra_edge_filter_type) {
int i;
const uint8_t *above_ref = ref - ref_stride;
const uint8_t *left_ref = ref - 1;
@@ -1462,33 +1458,32 @@ static void build_intra_predictors(const MACROBLOCKD *xd, const uint8_t *ref,
if (!disable_edge_filter) {
const int need_right = p_angle < 90;
const int need_bottom = p_angle > 180;
- const int filt_type = get_filt_type(xd, plane);
if (p_angle != 90 && p_angle != 180) {
const int ab_le = need_above_left ? 1 : 0;
if (need_above && need_left && (txwpx + txhpx >= 24)) {
filter_intra_edge_corner(above_row, left_col);
}
if (need_above && n_top_px > 0) {
- const int strength =
- intra_edge_filter_strength(txwpx, txhpx, p_angle - 90, filt_type);
+ const int strength = intra_edge_filter_strength(
+ txwpx, txhpx, p_angle - 90, intra_edge_filter_type);
const int n_px = n_top_px + ab_le + (need_right ? txhpx : 0);
av1_filter_intra_edge(above_row - ab_le, n_px, strength);
}
if (need_left && n_left_px > 0) {
const int strength = intra_edge_filter_strength(
- txhpx, txwpx, p_angle - 180, filt_type);
+ txhpx, txwpx, p_angle - 180, intra_edge_filter_type);
const int n_px = n_left_px + ab_le + (need_bottom ? txwpx : 0);
av1_filter_intra_edge(left_col - ab_le, n_px, strength);
}
}
- upsample_above =
- av1_use_intra_edge_upsample(txwpx, txhpx, p_angle - 90, filt_type);
+ upsample_above = av1_use_intra_edge_upsample(txwpx, txhpx, p_angle - 90,
+ intra_edge_filter_type);
if (need_above && upsample_above) {
const int n_px = txwpx + (need_right ? txhpx : 0);
av1_upsample_intra_edge(above_row, n_px);
}
- upsample_left =
- av1_use_intra_edge_upsample(txhpx, txwpx, p_angle - 180, filt_type);
+ upsample_left = av1_use_intra_edge_upsample(txhpx, txwpx, p_angle - 180,
+ intra_edge_filter_type);
if (need_left && upsample_left) {
const int n_px = txhpx + (need_bottom ? txwpx : 0);
av1_upsample_intra_edge(left_col, n_px);
@@ -1559,11 +1554,14 @@ static INLINE BLOCK_SIZE scale_chroma_bsize(BLOCK_SIZE bsize, int subsampling_x,
return bs;
}
-void av1_predict_intra_block(
- const AV1_COMMON *cm, const MACROBLOCKD *xd, int wpx, int hpx,
- TX_SIZE tx_size, PREDICTION_MODE mode, int angle_delta, int use_palette,
- FILTER_INTRA_MODE filter_intra_mode, const uint8_t *ref, int ref_stride,
- uint8_t *dst, int dst_stride, int col_off, int row_off, int plane) {
+void av1_predict_intra_block(const MACROBLOCKD *xd, BLOCK_SIZE sb_size,
+ int enable_intra_edge_filter, int wpx, int hpx,
+ TX_SIZE tx_size, PREDICTION_MODE mode,
+ int angle_delta, int use_palette,
+ FILTER_INTRA_MODE filter_intra_mode,
+ const uint8_t *ref, int ref_stride, uint8_t *dst,
+ int dst_stride, int col_off, int row_off,
+ int plane) {
const MB_MODE_INFO *const mbmi = xd->mi[0];
const int txwpx = tx_size_wide[tx_size];
const int txhpx = tx_size_high[tx_size];
@@ -1626,32 +1624,32 @@ void av1_predict_intra_block(
}
const int have_top_right =
- has_top_right(cm, bsize, mi_row, mi_col, have_top, right_available,
+ has_top_right(sb_size, bsize, mi_row, mi_col, have_top, right_available,
partition, tx_size, row_off, col_off, ss_x, ss_y);
- const int have_bottom_left =
- has_bottom_left(cm, bsize, mi_row, mi_col, bottom_available, have_left,
- partition, tx_size, row_off, col_off, ss_x, ss_y);
+ const int have_bottom_left = has_bottom_left(
+ sb_size, bsize, mi_row, mi_col, bottom_available, have_left, partition,
+ tx_size, row_off, col_off, ss_x, ss_y);
- const int disable_edge_filter = !cm->seq_params.enable_intra_edge_filter;
+ const int disable_edge_filter = !enable_intra_edge_filter;
+ const int intra_edge_filter_type = get_intra_edge_filter_type(xd, plane);
#if CONFIG_AV1_HIGHBITDEPTH
if (is_cur_buf_hbd(xd)) {
build_intra_predictors_high(
- xd, ref, ref_stride, dst, dst_stride, mode, angle_delta,
- filter_intra_mode, tx_size, disable_edge_filter,
- have_top ? AOMMIN(txwpx, xr + txwpx) : 0,
+ ref, ref_stride, dst, dst_stride, mode, angle_delta, filter_intra_mode,
+ tx_size, disable_edge_filter, have_top ? AOMMIN(txwpx, xr + txwpx) : 0,
have_top_right ? AOMMIN(txwpx, xr) : 0,
have_left ? AOMMIN(txhpx, yd + txhpx) : 0,
- have_bottom_left ? AOMMIN(txhpx, yd) : 0, plane);
+ have_bottom_left ? AOMMIN(txhpx, yd) : 0, intra_edge_filter_type,
+ xd->bd);
return;
}
#endif
- build_intra_predictors(xd, ref, ref_stride, dst, dst_stride, mode,
- angle_delta, filter_intra_mode, tx_size,
- disable_edge_filter,
- have_top ? AOMMIN(txwpx, xr + txwpx) : 0,
- have_top_right ? AOMMIN(txwpx, xr) : 0,
- have_left ? AOMMIN(txhpx, yd + txhpx) : 0,
- have_bottom_left ? AOMMIN(txhpx, yd) : 0, plane);
+ build_intra_predictors(
+ ref, ref_stride, dst, dst_stride, mode, angle_delta, filter_intra_mode,
+ tx_size, disable_edge_filter, have_top ? AOMMIN(txwpx, xr + txwpx) : 0,
+ have_top_right ? AOMMIN(txwpx, xr) : 0,
+ have_left ? AOMMIN(txhpx, yd + txhpx) : 0,
+ have_bottom_left ? AOMMIN(txhpx, yd) : 0, intra_edge_filter_type);
}
void av1_predict_intra_block_facade(const AV1_COMMON *cm, MACROBLOCKD *xd,
@@ -1669,6 +1667,7 @@ void av1_predict_intra_block_facade(const AV1_COMMON *cm, MACROBLOCKD *xd,
? mbmi->filter_intra_mode_info.filter_intra_mode
: FILTER_INTRA_MODES;
const int angle_delta = mbmi->angle_delta[plane != AOM_PLANE_Y] * ANGLE_STEP;
+ const SequenceHeader *seq_params = cm->seq_params;
if (plane != AOM_PLANE_Y && mbmi->uv_mode == UV_CFL_PRED) {
#if CONFIG_DEBUG
@@ -1687,10 +1686,11 @@ void av1_predict_intra_block_facade(const AV1_COMMON *cm, MACROBLOCKD *xd,
CFL_CTX *const cfl = &xd->cfl;
CFL_PRED_TYPE pred_plane = get_cfl_pred_type(plane);
if (cfl->dc_pred_is_cached[pred_plane] == 0) {
- av1_predict_intra_block(cm, xd, pd->width, pd->height, tx_size, mode,
- angle_delta, use_palette, filter_intra_mode, dst,
- dst_stride, dst, dst_stride, blk_col, blk_row,
- plane);
+ av1_predict_intra_block(xd, seq_params->sb_size,
+ seq_params->enable_intra_edge_filter, pd->width,
+ pd->height, tx_size, mode, angle_delta,
+ use_palette, filter_intra_mode, dst, dst_stride,
+ dst, dst_stride, blk_col, blk_row, plane);
if (cfl->use_dc_pred_cache) {
cfl_store_dc_pred(xd, dst, pred_plane, tx_size_wide[tx_size]);
cfl->dc_pred_is_cached[pred_plane] = 1;
@@ -1701,9 +1701,10 @@ void av1_predict_intra_block_facade(const AV1_COMMON *cm, MACROBLOCKD *xd,
cfl_predict_block(xd, dst, dst_stride, tx_size, plane);
return;
}
- av1_predict_intra_block(cm, xd, pd->width, pd->height, tx_size, mode,
- angle_delta, use_palette, filter_intra_mode, dst,
- dst_stride, dst, dst_stride, blk_col, blk_row, plane);
+ av1_predict_intra_block(
+ xd, seq_params->sb_size, seq_params->enable_intra_edge_filter, pd->width,
+ pd->height, tx_size, mode, angle_delta, use_palette, filter_intra_mode,
+ dst, dst_stride, dst, dst_stride, blk_col, blk_row, plane);
}
void av1_init_intra_predictors(void) {
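For reference, a minimal sketch of how a caller now invokes the refactored av1_predict_intra_block(): the AV1_COMMON pointer is gone, and the two pieces of sequence-level state the function used to read through it (superblock size and the intra-edge-filter flag) are passed explicitly. The wrapper below is hypothetical and only illustrates the call shape; FILTER_INTRA_MODES is used as the usual "no filter intra" sentinel.

// Sketch only (not part of the patch): hypothetical helper showing the new
// call shape. seq is assumed to be cm->seq_params at the call site.
static void predict_intra_example(const MACROBLOCKD *xd,
                                  const SequenceHeader *seq, int wpx, int hpx,
                                  TX_SIZE tx_size, PREDICTION_MODE mode,
                                  const uint8_t *ref, int ref_stride,
                                  uint8_t *dst, int dst_stride, int plane) {
  av1_predict_intra_block(xd, seq->sb_size, seq->enable_intra_edge_filter, wpx,
                          hpx, tx_size, mode, /*angle_delta=*/0,
                          /*use_palette=*/0, FILTER_INTRA_MODES, ref,
                          ref_stride, dst, dst_stride, /*col_off=*/0,
                          /*row_off=*/0, plane);
}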
diff --git a/third_party/libaom/source/libaom/av1/common/reconintra.h b/third_party/libaom/source/libaom/av1/common/reconintra.h
index 907db5daf8..fa66ccd541 100644
--- a/third_party/libaom/source/libaom/av1/common/reconintra.h
+++ b/third_party/libaom/source/libaom/av1/common/reconintra.h
@@ -26,11 +26,14 @@ void av1_init_intra_predictors(void);
void av1_predict_intra_block_facade(const AV1_COMMON *cm, MACROBLOCKD *xd,
int plane, int blk_col, int blk_row,
TX_SIZE tx_size);
-void av1_predict_intra_block(
- const AV1_COMMON *cm, const MACROBLOCKD *xd, int wpx, int hpx,
- TX_SIZE tx_size, PREDICTION_MODE mode, int angle_delta, int use_palette,
- FILTER_INTRA_MODE filter_intra_mode, const uint8_t *ref, int ref_stride,
- uint8_t *dst, int dst_stride, int col_off, int row_off, int plane);
+void av1_predict_intra_block(const MACROBLOCKD *xd, BLOCK_SIZE sb_size,
+ int enable_intra_edge_filter, int wpx, int hpx,
+ TX_SIZE tx_size, PREDICTION_MODE mode,
+ int angle_delta, int use_palette,
+ FILTER_INTRA_MODE filter_intra_mode,
+ const uint8_t *ref, int ref_stride, uint8_t *dst,
+ int dst_stride, int col_off, int row_off,
+ int plane);
// Mapping of interintra to intra mode for use in the intra component
static const PREDICTION_MODE interintra_to_intra_mode[INTERINTRA_MODES] = {
@@ -64,7 +67,7 @@ static INLINE int av1_allow_intrabc(const AV1_COMMON *const cm) {
static INLINE int av1_filter_intra_allowed_bsize(const AV1_COMMON *const cm,
BLOCK_SIZE bs) {
- if (!cm->seq_params.enable_filter_intra || bs == BLOCK_INVALID) return 0;
+ if (!cm->seq_params->enable_filter_intra || bs == BLOCK_INVALID) return 0;
return block_size_wide[bs] <= 32 && block_size_high[bs] <= 32;
}
diff --git a/third_party/libaom/source/libaom/av1/common/resize.c b/third_party/libaom/source/libaom/av1/common/resize.c
index 0cfb5a29b8..112a08a539 100644
--- a/third_party/libaom/source/libaom/av1/common/resize.c
+++ b/third_party/libaom/source/libaom/av1/common/resize.c
@@ -1263,7 +1263,7 @@ void av1_upscale_normative_rows(const AV1_COMMON *cm, const uint8_t *src,
int src_stride, uint8_t *dst, int dst_stride,
int plane, int rows) {
const int is_uv = (plane > 0);
- const int ss_x = is_uv && cm->seq_params.subsampling_x;
+ const int ss_x = is_uv && cm->seq_params->subsampling_x;
const int downscaled_plane_width = ROUND_POWER_OF_TWO(cm->width, ss_x);
const int upscaled_plane_width =
ROUND_POWER_OF_TWO(cm->superres_upscaled_width, ss_x);
@@ -1305,11 +1305,11 @@ void av1_upscale_normative_rows(const AV1_COMMON *cm, const uint8_t *src,
const int pad_right = (j == cm->tiles.cols - 1);
#if CONFIG_AV1_HIGHBITDEPTH
- if (cm->seq_params.use_highbitdepth)
+ if (cm->seq_params->use_highbitdepth)
highbd_upscale_normative_rect(src_ptr, rows, src_width, src_stride,
dst_ptr, rows, dst_width, dst_stride,
x_step_qn, x0_qn, pad_left, pad_right,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
else
upscale_normative_rect(src_ptr, rows, src_width, src_stride, dst_ptr,
rows, dst_width, dst_stride, x_step_qn, x0_qn,
@@ -1354,18 +1354,18 @@ YV12_BUFFER_CONFIG *av1_scale_if_required(
if (scaling_required) {
const int num_planes = av1_num_planes(cm);
#if CONFIG_AV1_HIGHBITDEPTH
- if (use_optimized_scaler && cm->seq_params.bit_depth == AOM_BITS_8) {
+ if (use_optimized_scaler && cm->seq_params->bit_depth == AOM_BITS_8) {
av1_resize_and_extend_frame(unscaled, scaled, filter, phase, num_planes);
} else {
av1_resize_and_extend_frame_nonnormative(
- unscaled, scaled, (int)cm->seq_params.bit_depth, num_planes);
+ unscaled, scaled, (int)cm->seq_params->bit_depth, num_planes);
}
#else
if (use_optimized_scaler) {
av1_resize_and_extend_frame(unscaled, scaled, filter, phase, num_planes);
} else {
av1_resize_and_extend_frame_nonnormative(
- unscaled, scaled, (int)cm->seq_params.bit_depth, num_planes);
+ unscaled, scaled, (int)cm->seq_params->bit_depth, num_planes);
}
#endif
return scaled;
@@ -1432,7 +1432,7 @@ static void copy_buffer_config(const YV12_BUFFER_CONFIG *const src,
void av1_superres_upscale(AV1_COMMON *cm, BufferPool *const pool) {
const int num_planes = av1_num_planes(cm);
if (!av1_superres_scaled(cm)) return;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int byte_alignment = cm->features.byte_alignment;
YV12_BUFFER_CONFIG copy_buffer;
@@ -1445,7 +1445,7 @@ void av1_superres_upscale(AV1_COMMON *cm, BufferPool *const pool) {
&copy_buffer, aligned_width, cm->height, seq_params->subsampling_x,
seq_params->subsampling_y, seq_params->use_highbitdepth,
AOM_BORDER_IN_PIXELS, byte_alignment))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate copy buffer for superres upscaling");
// Copy function assumes the frames are the same size.
@@ -1468,7 +1468,7 @@ void av1_superres_upscale(AV1_COMMON *cm, BufferPool *const pool) {
if (release_fb_cb(cb_priv, fb)) {
unlock_buffer_pool(pool);
aom_internal_error(
- &cm->error, AOM_CODEC_MEM_ERROR,
+ cm->error, AOM_CODEC_MEM_ERROR,
"Failed to free current frame buffer before superres upscaling");
}
// aom_realloc_frame_buffer() leaves config data for frame_to_show intact
@@ -1479,7 +1479,7 @@ void av1_superres_upscale(AV1_COMMON *cm, BufferPool *const pool) {
AOM_BORDER_IN_PIXELS, byte_alignment, fb, cb, cb_priv, 0)) {
unlock_buffer_pool(pool);
aom_internal_error(
- &cm->error, AOM_CODEC_MEM_ERROR,
+ cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate current frame buffer for superres upscaling");
}
unlock_buffer_pool(pool);
@@ -1495,7 +1495,7 @@ void av1_superres_upscale(AV1_COMMON *cm, BufferPool *const pool) {
seq_params->subsampling_y, seq_params->use_highbitdepth,
AOM_BORDER_IN_PIXELS, byte_alignment))
aom_internal_error(
- &cm->error, AOM_CODEC_MEM_ERROR,
+ cm->error, AOM_CODEC_MEM_ERROR,
"Failed to reallocate current frame buffer for superres upscaling");
// Restore config data back to frame_to_show
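Most of the remaining churn in this patch stems from two AV1_COMMON members becoming pointer-accessed in upstream libaom: seq_params is now dereferenced as cm->seq_params-> (previously an embedded struct accessed with '.'), and the error context is passed as cm->error rather than &cm->error. A minimal sketch of the new access pattern, using only fields that appear in the hunks above:

// Illustrative sketch only, not applied code.
static int example_access_pattern(const AV1_COMMON *cm) {
  const SequenceHeader *const seq_params = cm->seq_params;  // was &cm->seq_params
  if (cm->width <= 0 || cm->height <= 0)                    // hypothetical guard
    aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,  // was &cm->error
                       "Invalid frame size");
  return seq_params->bit_depth;  // was cm->seq_params.bit_depth
}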
diff --git a/third_party/libaom/source/libaom/av1/common/restoration.c b/third_party/libaom/source/libaom/av1/common/restoration.c
index 41d0e22501..202953c889 100644
--- a/third_party/libaom/source/libaom/av1/common/restoration.c
+++ b/third_party/libaom/source/libaom/av1/common/restoration.c
@@ -42,8 +42,8 @@ const sgr_params_type av1_sgr_params[SGRPROJ_PARAMS] = {
AV1PixelRect av1_whole_frame_rect(const AV1_COMMON *cm, int is_uv) {
AV1PixelRect rect;
- int ss_x = is_uv && cm->seq_params.subsampling_x;
- int ss_y = is_uv && cm->seq_params.subsampling_y;
+ int ss_x = is_uv && cm->seq_params->subsampling_x;
+ int ss_y = is_uv && cm->seq_params->subsampling_y;
rect.top = 0;
rect.bottom = ROUND_POWER_OF_TWO(cm->height, ss_y);
@@ -1107,7 +1107,7 @@ void av1_loop_restoration_filter_frame_init(AV1LrStruct *lr_ctxt,
YV12_BUFFER_CONFIG *frame,
AV1_COMMON *cm, int optimized_lr,
int num_planes) {
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int bit_depth = seq_params->bit_depth;
const int highbd = seq_params->use_highbitdepth;
lr_ctxt->dst = &cm->rst_frame;
@@ -1118,7 +1118,7 @@ void av1_loop_restoration_filter_frame_init(AV1LrStruct *lr_ctxt,
lr_ctxt->dst, frame_width, frame_height, seq_params->subsampling_x,
seq_params->subsampling_y, highbd, AOM_RESTORATION_FRAME_BORDER,
cm->features.byte_alignment, NULL, NULL, NULL, 0) < 0)
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate restoration dst buffer");
lr_ctxt->on_rest_unit = filter_frame_on_unit;
@@ -1299,7 +1299,7 @@ void av1_foreach_rest_unit_in_plane(const struct AV1Common *cm, int plane,
int32_t *tmpbuf,
RestorationLineBuffers *rlbs) {
const int is_uv = plane > 0;
- const int ss_y = is_uv && cm->seq_params.subsampling_y;
+ const int ss_y = is_uv && cm->seq_params->subsampling_y;
const RestorationInfo *rsi = &cm->rst_info[plane];
@@ -1315,7 +1315,7 @@ int av1_loop_restoration_corners_in_sb(const struct AV1Common *cm, int plane,
int *rrow1) {
assert(rcol0 && rcol1 && rrow0 && rrow1);
- if (bsize != cm->seq_params.sb_size) return 0;
+ if (bsize != cm->seq_params->sb_size) return 0;
if (cm->rst_info[plane].frame_restoration_type == RESTORE_NONE) return 0;
assert(!cm->features.all_lossless);
@@ -1345,8 +1345,8 @@ int av1_loop_restoration_corners_in_sb(const struct AV1Common *cm, int plane,
const int vert_units = av1_lr_count_units_in_tile(size, tile_h);
// The size of an MI-unit on this plane of the image
- const int ss_x = is_uv && cm->seq_params.subsampling_x;
- const int ss_y = is_uv && cm->seq_params.subsampling_y;
+ const int ss_x = is_uv && cm->seq_params->subsampling_x;
+ const int ss_y = is_uv && cm->seq_params->subsampling_y;
const int mi_size_x = MI_SIZE >> ss_x;
const int mi_size_y = MI_SIZE >> ss_y;
@@ -1427,7 +1427,7 @@ static void save_deblock_boundary_lines(
int upscaled_width;
int line_bytes;
if (av1_superres_scaled(cm)) {
- const int ss_x = is_uv && cm->seq_params.subsampling_x;
+ const int ss_x = is_uv && cm->seq_params->subsampling_x;
upscaled_width = (cm->superres_upscaled_width + ss_x) >> ss_x;
line_bytes = upscaled_width << use_highbd;
if (use_highbd)
@@ -1474,7 +1474,7 @@ static void save_cdef_boundary_lines(const YV12_BUFFER_CONFIG *frame,
// At the point where this function is called, we've already applied
// superres. So we don't need to extend the lines here, we can just
// pull directly from the topmost row of the upscaled frame.
- const int ss_x = is_uv && cm->seq_params.subsampling_x;
+ const int ss_x = is_uv && cm->seq_params->subsampling_x;
const int upscaled_width = av1_superres_scaled(cm)
? (cm->superres_upscaled_width + ss_x) >> ss_x
: src_width;
@@ -1494,7 +1494,7 @@ static void save_tile_row_boundary_lines(const YV12_BUFFER_CONFIG *frame,
int use_highbd, int plane,
AV1_COMMON *cm, int after_cdef) {
const int is_uv = plane > 0;
- const int ss_y = is_uv && cm->seq_params.subsampling_y;
+ const int ss_y = is_uv && cm->seq_params->subsampling_y;
const int stripe_height = RESTORATION_PROC_UNIT_SIZE >> ss_y;
const int stripe_off = RESTORATION_UNIT_OFFSET >> ss_y;
@@ -1559,7 +1559,7 @@ static void save_tile_row_boundary_lines(const YV12_BUFFER_CONFIG *frame,
void av1_loop_restoration_save_boundary_lines(const YV12_BUFFER_CONFIG *frame,
AV1_COMMON *cm, int after_cdef) {
const int num_planes = av1_num_planes(cm);
- const int use_highbd = cm->seq_params.use_highbitdepth;
+ const int use_highbd = cm->seq_params->use_highbitdepth;
for (int p = 0; p < num_planes; ++p) {
save_tile_row_boundary_lines(frame, use_highbd, p, cm, after_cdef);
}
diff --git a/third_party/libaom/source/libaom/av1/common/thread_common.c b/third_party/libaom/source/libaom/av1/common/thread_common.c
index 638dc4c951..0c45749de1 100644
--- a/third_party/libaom/source/libaom/av1/common/thread_common.c
+++ b/third_party/libaom/source/libaom/av1/common/thread_common.c
@@ -152,6 +152,61 @@ static void loop_filter_data_reset(LFWorkerData *lf_data,
}
}
+void av1_alloc_cdef_sync(AV1_COMMON *const cm, AV1CdefSync *cdef_sync,
+ int num_workers) {
+ if (num_workers < 1) return;
+#if CONFIG_MULTITHREAD
+ if (cdef_sync->mutex_ == NULL) {
+ CHECK_MEM_ERROR(cm, cdef_sync->mutex_,
+ aom_malloc(sizeof(*(cdef_sync->mutex_))));
+ if (cdef_sync->mutex_) pthread_mutex_init(cdef_sync->mutex_, NULL);
+ }
+#else
+ (void)cm;
+ (void)cdef_sync;
+#endif // CONFIG_MULTITHREAD
+}
+
+void av1_free_cdef_sync(AV1CdefSync *cdef_sync) {
+ if (cdef_sync == NULL) return;
+#if CONFIG_MULTITHREAD
+ if (cdef_sync->mutex_ != NULL) {
+ pthread_mutex_destroy(cdef_sync->mutex_);
+ aom_free(cdef_sync->mutex_);
+ }
+#endif // CONFIG_MULTITHREAD
+}
+
+static INLINE void cdef_row_mt_sync_read(AV1CdefSync *const cdef_sync,
+ int row) {
+ if (!row) return;
+#if CONFIG_MULTITHREAD
+ AV1CdefRowSync *const cdef_row_mt = cdef_sync->cdef_row_mt;
+ pthread_mutex_lock(cdef_row_mt[row - 1].row_mutex_);
+ while (cdef_row_mt[row - 1].is_row_done != 1)
+ pthread_cond_wait(cdef_row_mt[row - 1].row_cond_,
+ cdef_row_mt[row - 1].row_mutex_);
+ cdef_row_mt[row - 1].is_row_done = 0;
+ pthread_mutex_unlock(cdef_row_mt[row - 1].row_mutex_);
+#else
+ (void)cdef_sync;
+#endif // CONFIG_MULTITHREAD
+}
+
+static INLINE void cdef_row_mt_sync_write(AV1CdefSync *const cdef_sync,
+ int row) {
+#if CONFIG_MULTITHREAD
+ AV1CdefRowSync *const cdef_row_mt = cdef_sync->cdef_row_mt;
+ pthread_mutex_lock(cdef_row_mt[row].row_mutex_);
+ pthread_cond_signal(cdef_row_mt[row].row_cond_);
+ cdef_row_mt[row].is_row_done = 1;
+ pthread_mutex_unlock(cdef_row_mt[row].row_mutex_);
+#else
+ (void)cdef_sync;
+ (void)row;
+#endif // CONFIG_MULTITHREAD
+}
+
static INLINE void sync_read(AV1LfSync *const lf_sync, int r, int c,
int plane) {
#if CONFIG_MULTITHREAD
@@ -211,7 +266,7 @@ static void enqueue_lf_jobs(AV1LfSync *lf_sync, AV1_COMMON *cm, int start,
#if CONFIG_LPF_MASK
int is_decoding,
#endif
- int plane_start, int plane_end) {
+ int plane_start, int plane_end, int is_realtime) {
int mi_row, plane, dir;
AV1LfMTInfo *lf_job_queue = lf_sync->job_queue;
lf_sync->jobs_enqueued = 0;
@@ -238,6 +293,7 @@ static void enqueue_lf_jobs(AV1LfSync *lf_sync, AV1_COMMON *cm, int start,
lf_job_queue->mi_row = mi_row;
lf_job_queue->plane = plane;
lf_job_queue->dir = dir;
+ lf_job_queue->is_realtime = is_realtime;
lf_job_queue++;
lf_sync->jobs_enqueued++;
}
@@ -272,7 +328,7 @@ static INLINE void thread_loop_filter_rows(
const int sb_cols =
ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols, MAX_MIB_SIZE_LOG2) >>
MAX_MIB_SIZE_LOG2;
- int mi_row, mi_col, plane, dir;
+ int mi_row, mi_col, plane, dir, is_realtime;
int r, c;
while (1) {
@@ -283,17 +339,29 @@ static INLINE void thread_loop_filter_rows(
plane = cur_job_info->plane;
dir = cur_job_info->dir;
r = mi_row >> MAX_MIB_SIZE_LOG2;
+ is_realtime = cur_job_info->is_realtime && !plane;
if (dir == 0) {
for (mi_col = 0; mi_col < cm->mi_params.mi_cols;
mi_col += MAX_MIB_SIZE) {
c = mi_col >> MAX_MIB_SIZE_LOG2;
- av1_setup_dst_planes(planes, cm->seq_params.sb_size, frame_buffer,
+ av1_setup_dst_planes(planes, cm->seq_params->sb_size, frame_buffer,
mi_row, mi_col, plane, plane + 1);
-
+#if CONFIG_AV1_HIGHBITDEPTH
+ (void)is_realtime;
av1_filter_block_plane_vert(cm, xd, plane, &planes[plane], mi_row,
mi_col);
+#else
+ if (is_realtime) {
+ av1_filter_block_plane_vert_rt(cm, xd, plane, &planes[plane],
+ mi_row, mi_col);
+
+ } else {
+ av1_filter_block_plane_vert(cm, xd, plane, &planes[plane], mi_row,
+ mi_col);
+ }
+#endif
sync_write(lf_sync, r, c, sb_cols, plane);
}
} else if (dir == 1) {
@@ -309,10 +377,21 @@ static INLINE void thread_loop_filter_rows(
// completed
sync_read(lf_sync, r + 1, c, plane);
- av1_setup_dst_planes(planes, cm->seq_params.sb_size, frame_buffer,
+ av1_setup_dst_planes(planes, cm->seq_params->sb_size, frame_buffer,
mi_row, mi_col, plane, plane + 1);
+#if CONFIG_AV1_HIGHBITDEPTH
+ (void)is_realtime;
av1_filter_block_plane_horz(cm, xd, plane, &planes[plane], mi_row,
mi_col);
+#else
+ if (is_realtime) {
+ av1_filter_block_plane_horz_rt(cm, xd, plane, &planes[plane],
+ mi_row, mi_col);
+ } else {
+ av1_filter_block_plane_horz(cm, xd, plane, &planes[plane], mi_row,
+ mi_col);
+ }
+#endif
}
}
} else {
@@ -405,7 +484,7 @@ static void loop_filter_rows_mt(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
int is_decoding,
#endif
AVxWorker *workers, int nworkers,
- AV1LfSync *lf_sync) {
+ AV1LfSync *lf_sync, int is_realtime) {
const AVxWorkerInterface *const winterface = aom_get_worker_interface();
#if CONFIG_LPF_MASK
int sb_rows;
@@ -441,7 +520,7 @@ static void loop_filter_rows_mt(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
#if CONFIG_LPF_MASK
is_decoding,
#endif
- plane_start, plane_end);
+ plane_start, plane_end, is_realtime);
// Set up loopfilter thread data.
for (i = num_workers - 1; i >= 0; --i) {
@@ -484,7 +563,7 @@ void av1_loop_filter_frame_mt(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
int is_decoding,
#endif
AVxWorker *workers, int num_workers,
- AV1LfSync *lf_sync) {
+ AV1LfSync *lf_sync, int is_realtime) {
int start_mi_row, end_mi_row, mi_rows_to_filter;
start_mi_row = 0;
@@ -512,7 +591,7 @@ void av1_loop_filter_frame_mt(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
// TODO(chengchen): can we remove this?
struct macroblockd_plane *pd = xd->plane;
- av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame, 0, 0, plane,
+ av1_setup_dst_planes(pd, cm->seq_params->sb_size, frame, 0, 0, plane,
plane + 1);
av1_build_bitmask_vert_info(cm, &pd[plane], plane);
@@ -526,7 +605,7 @@ void av1_loop_filter_frame_mt(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
}
#else
loop_filter_rows_mt(frame, cm, xd, start_mi_row, end_mi_row, plane_start,
- plane_end, workers, num_workers, lf_sync);
+ plane_end, workers, num_workers, lf_sync, is_realtime);
#endif
}
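Restated for clarity, the worker-side effect of the is_realtime plumbing added above: the flag rides along in each AV1LfMTInfo job and is honored only for the luma plane, and only on builds without CONFIG_AV1_HIGHBITDEPTH (high-bitdepth builds ignore it and keep the existing filters). Below is a self-contained sketch of the vertical-pass dispatch; the horizontal pass is symmetric with the _horz variants.

// Sketch only (assumes a non-CONFIG_AV1_HIGHBITDEPTH build, where the _rt
// loop-filter variants are compiled in).
static void filter_vert_job(AV1_COMMON *cm, MACROBLOCKD *xd,
                            struct macroblockd_plane *planes,
                            const AV1LfMTInfo *cur_job_info, int plane,
                            int mi_row, int mi_col) {
  const int use_rt_filter = cur_job_info->is_realtime && !plane;  // luma only
  if (use_rt_filter) {
    av1_filter_block_plane_vert_rt(cm, xd, plane, &planes[plane], mi_row,
                                   mi_col);
  } else {
    av1_filter_block_plane_vert(cm, xd, plane, &planes[plane], mi_row, mi_col);
  }
}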
@@ -720,7 +799,7 @@ static void enqueue_lr_jobs(AV1LrSync *lr_sync, AV1LrStruct *lr_ctxt,
for (int plane = 0; plane < num_planes; plane++) {
if (cm->rst_info[plane].frame_restoration_type == RESTORE_NONE) continue;
const int is_uv = plane > 0;
- const int ss_y = is_uv && cm->seq_params.subsampling_y;
+ const int ss_y = is_uv && cm->seq_params->subsampling_y;
AV1PixelRect tile_rect = ctxt[plane].tile_rect;
const int unit_size = ctxt[plane].rsi->restoration_unit_size;
@@ -932,3 +1011,198 @@ void av1_loop_restoration_filter_frame_mt(YV12_BUFFER_CONFIG *frame,
cm);
}
#endif
+
+// Initializes cdef_sync parameters.
+static AOM_INLINE void reset_cdef_job_info(AV1CdefSync *const cdef_sync) {
+ cdef_sync->end_of_frame = 0;
+ cdef_sync->fbr = 0;
+ cdef_sync->fbc = 0;
+}
+
+static AOM_INLINE void launch_cdef_workers(AVxWorker *const workers,
+ int num_workers) {
+ const AVxWorkerInterface *const winterface = aom_get_worker_interface();
+ for (int i = num_workers - 1; i >= 0; i--) {
+ AVxWorker *const worker = &workers[i];
+ if (i == 0)
+ winterface->execute(worker);
+ else
+ winterface->launch(worker);
+ }
+}
+
+static AOM_INLINE void sync_cdef_workers(AVxWorker *const workers,
+ AV1_COMMON *const cm,
+ int num_workers) {
+ const AVxWorkerInterface *const winterface = aom_get_worker_interface();
+ int had_error = 0;
+
+ // Wait for completion of Cdef frame.
+ for (int i = num_workers - 1; i >= 0; i--) {
+ AVxWorker *const worker = &workers[i];
+ had_error |= !winterface->sync(worker);
+ }
+ if (had_error)
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
+ "Failed to process cdef frame");
+}
+
+// Updates the row index of the next job to be processed.
+// Also updates end_of_frame flag when the processing of all rows is complete.
+static void update_cdef_row_next_job_info(AV1CdefSync *const cdef_sync,
+ const int nvfb) {
+ cdef_sync->fbr++;
+ if (cdef_sync->fbr == nvfb) {
+ cdef_sync->end_of_frame = 1;
+ }
+}
+
+// Checks if a job is available. If job is available,
+// populates next job information and returns 1, else returns 0.
+static AOM_INLINE int get_cdef_row_next_job(AV1CdefSync *const cdef_sync,
+ int *cur_fbr, const int nvfb) {
+#if CONFIG_MULTITHREAD
+ pthread_mutex_lock(cdef_sync->mutex_);
+#endif // CONFIG_MULTITHREAD
+ int do_next_row = 0;
+ // Populates information needed for the current job and updates the row

+ // index of the next row to be processed.
+ if (cdef_sync->end_of_frame == 0) {
+ do_next_row = 1;
+ *cur_fbr = cdef_sync->fbr;
+ update_cdef_row_next_job_info(cdef_sync, nvfb);
+ }
+#if CONFIG_MULTITHREAD
+ pthread_mutex_unlock(cdef_sync->mutex_);
+#endif // CONFIG_MULTITHREAD
+ return do_next_row;
+}
+
+// Hook function for each thread in CDEF multi-threading.
+static int cdef_sb_row_worker_hook(void *arg1, void *arg2) {
+ AV1CdefSync *const cdef_sync = (AV1CdefSync *)arg1;
+ AV1CdefWorkerData *const cdef_worker = (AV1CdefWorkerData *)arg2;
+ const int nvfb =
+ (cdef_worker->cm->mi_params.mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
+ int cur_fbr;
+ while (get_cdef_row_next_job(cdef_sync, &cur_fbr, nvfb)) {
+ av1_cdef_fb_row(cdef_worker->cm, cdef_worker->xd, cdef_worker->linebuf,
+ cdef_worker->colbuf, cdef_worker->srcbuf, cur_fbr,
+ cdef_worker->cdef_init_fb_row_fn, cdef_sync);
+ }
+ return 1;
+}
+
+// Assigns CDEF hook function and thread data to each worker.
+static void prepare_cdef_frame_workers(
+ AV1_COMMON *const cm, MACROBLOCKD *xd, AV1CdefWorkerData *const cdef_worker,
+ AVxWorkerHook hook, AVxWorker *const workers, AV1CdefSync *const cdef_sync,
+ int num_workers, cdef_init_fb_row_t cdef_init_fb_row_fn) {
+ const int num_planes = av1_num_planes(cm);
+
+ cdef_worker[0].srcbuf = cm->cdef_info.srcbuf;
+ for (int plane = 0; plane < num_planes; plane++)
+ cdef_worker[0].colbuf[plane] = cm->cdef_info.colbuf[plane];
+ for (int i = num_workers - 1; i >= 0; i--) {
+ AVxWorker *const worker = &workers[i];
+ cdef_worker[i].cm = cm;
+ cdef_worker[i].xd = xd;
+ cdef_worker[i].cdef_init_fb_row_fn = cdef_init_fb_row_fn;
+ for (int plane = 0; plane < num_planes; plane++)
+ cdef_worker[i].linebuf[plane] = cm->cdef_info.linebuf[plane];
+
+ worker->hook = hook;
+ worker->data1 = cdef_sync;
+ worker->data2 = &cdef_worker[i];
+ }
+}
+
+// Initializes row-level parameters for CDEF frame.
+void av1_cdef_init_fb_row_mt(const AV1_COMMON *const cm,
+ const MACROBLOCKD *const xd,
+ CdefBlockInfo *const fb_info,
+ uint16_t **const linebuf, uint16_t *const src,
+ struct AV1CdefSyncData *const cdef_sync, int fbr) {
+ const int num_planes = av1_num_planes(cm);
+ const int nvfb = (cm->mi_params.mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
+ const int luma_stride =
+ ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols << MI_SIZE_LOG2, 4);
+
+ // For the current filter block, its top-left corner mi structure (mi_tl)
+ // is first accessed to check whether the top and left boundaries are
+ // frame boundaries. Then bottom-left and top-right mi structures are
+ // accessed to check whether the bottom and right boundaries
+ // (respectively) are frame boundaries.
+ //
+ // Note that we can't just check the bottom-right mi structure - e.g. if
+ // we're at the right-hand edge of the frame but not the bottom, then
+ // the bottom-right mi is NULL but the bottom-left is not.
+ fb_info->frame_boundary[TOP] = (MI_SIZE_64X64 * fbr == 0) ? 1 : 0;
+ if (fbr != nvfb - 1)
+ fb_info->frame_boundary[BOTTOM] =
+ (MI_SIZE_64X64 * (fbr + 1) == cm->mi_params.mi_rows) ? 1 : 0;
+ else
+ fb_info->frame_boundary[BOTTOM] = 1;
+
+ fb_info->src = src;
+ fb_info->damping = cm->cdef_info.cdef_damping;
+ fb_info->coeff_shift = AOMMAX(cm->seq_params->bit_depth - 8, 0);
+ av1_zero(fb_info->dir);
+ av1_zero(fb_info->var);
+
+ for (int plane = 0; plane < num_planes; plane++) {
+ const int stride = luma_stride >> xd->plane[plane].subsampling_x;
+ uint16_t *top_linebuf = &linebuf[plane][0];
+ uint16_t *bot_linebuf = &linebuf[plane][nvfb * CDEF_VBORDER * stride];
+ {
+ const int mi_high_l2 = MI_SIZE_LOG2 - xd->plane[plane].subsampling_y;
+ const int top_offset = MI_SIZE_64X64 * (fbr + 1) << mi_high_l2;
+ const int bot_offset = MI_SIZE_64X64 * (fbr + 1) << mi_high_l2;
+
+ if (fbr != nvfb - 1) // if (fbr != 0) // top line buffer copy
+ av1_cdef_copy_sb8_16(
+ cm, &top_linebuf[(fbr + 1) * CDEF_VBORDER * stride], stride,
+ xd->plane[plane].dst.buf, top_offset - CDEF_VBORDER, 0,
+ xd->plane[plane].dst.stride, CDEF_VBORDER, stride);
+ if (fbr != nvfb - 1) // bottom line buffer copy
+ av1_cdef_copy_sb8_16(cm, &bot_linebuf[fbr * CDEF_VBORDER * stride],
+ stride, xd->plane[plane].dst.buf, bot_offset, 0,
+ xd->plane[plane].dst.stride, CDEF_VBORDER, stride);
+ }
+
+ fb_info->top_linebuf[plane] = &linebuf[plane][fbr * CDEF_VBORDER * stride];
+ fb_info->bot_linebuf[plane] =
+ &linebuf[plane]
+ [nvfb * CDEF_VBORDER * stride + (fbr * CDEF_VBORDER * stride)];
+ }
+
+ cdef_row_mt_sync_write(cdef_sync, fbr);
+ cdef_row_mt_sync_read(cdef_sync, fbr);
+}
+
+// Implements multi-threading for CDEF.
+// Performs CDEF on the input frame (taken from cm->cur_frame->buf).
+// Inputs:
+// cm: Pointer to common structure.
+// xd: Pointer to common current coding block structure.
+// cdef_worker, workers, cdef_sync: Per-worker data, worker pool, and sync state.
+// Returns:
+// Nothing will be returned.
+void av1_cdef_frame_mt(AV1_COMMON *const cm, MACROBLOCKD *const xd,
+ AV1CdefWorkerData *const cdef_worker,
+ AVxWorker *const workers, AV1CdefSync *const cdef_sync,
+ int num_workers,
+ cdef_init_fb_row_t cdef_init_fb_row_fn) {
+ YV12_BUFFER_CONFIG *frame = &cm->cur_frame->buf;
+ const int num_planes = av1_num_planes(cm);
+
+ av1_setup_dst_planes(xd->plane, cm->seq_params->sb_size, frame, 0, 0, 0,
+ num_planes);
+
+ reset_cdef_job_info(cdef_sync);
+ prepare_cdef_frame_workers(cm, xd, cdef_worker, cdef_sb_row_worker_hook,
+ workers, cdef_sync, num_workers,
+ cdef_init_fb_row_fn);
+ launch_cdef_workers(workers, num_workers);
+ sync_cdef_workers(workers, cm, num_workers);
+}
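Taken together, the functions above give the decoder a complete CDEF multi-threading path. A minimal sketch of the driver sequence follows; it assumes the caller already owns the worker pool and the AV1CdefWorkerData array, that av1_cdef_init_fb_row_mt matches the cdef_init_fb_row_t callback type (as its declaration in the header below suggests), and it frees the sync object immediately only for brevity, whereas a real decoder would allocate once and tear down at destruction.

// Sketch only: one way to drive the new CDEF MT entry points.
static void run_cdef_frame_mt(AV1_COMMON *cm, MACROBLOCKD *xd,
                              AV1CdefWorkerData *cdef_worker,
                              AVxWorker *workers, AV1CdefSync *cdef_sync,
                              int num_workers) {
  av1_alloc_cdef_sync(cm, cdef_sync, num_workers);  // lazily creates the mutex
  av1_cdef_frame_mt(cm, xd, cdef_worker, workers, cdef_sync, num_workers,
                    av1_cdef_init_fb_row_mt);       // filter the whole frame
  av1_free_cdef_sync(cdef_sync);                    // illustrative teardown
}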
diff --git a/third_party/libaom/source/libaom/av1/common/thread_common.h b/third_party/libaom/source/libaom/av1/common/thread_common.h
index 97b8abcff6..bcb4b879c1 100644
--- a/third_party/libaom/source/libaom/av1/common/thread_common.h
+++ b/third_party/libaom/source/libaom/av1/common/thread_common.h
@@ -15,6 +15,7 @@
#include "config/aom_config.h"
#include "av1/common/av1_loopfilter.h"
+#include "av1/common/cdef.h"
#include "aom_util/aom_thread.h"
#ifdef __cplusplus
@@ -27,6 +28,7 @@ typedef struct AV1LfMTInfo {
int mi_row;
int plane;
int dir;
+ int is_realtime;
} AV1LfMTInfo;
// Loopfilter row synchronization
@@ -97,6 +99,55 @@ typedef struct AV1LrSyncData {
int jobs_dequeued;
} AV1LrSync;
+typedef struct AV1CdefWorker {
+ AV1_COMMON *cm;
+ MACROBLOCKD *xd;
+ uint16_t *colbuf[MAX_MB_PLANE];
+ uint16_t *srcbuf;
+ uint16_t *linebuf[MAX_MB_PLANE];
+ cdef_init_fb_row_t cdef_init_fb_row_fn;
+} AV1CdefWorkerData;
+
+typedef struct AV1CdefRowSync {
+#if CONFIG_MULTITHREAD
+ pthread_mutex_t *row_mutex_;
+ pthread_cond_t *row_cond_;
+#endif // CONFIG_MULTITHREAD
+ int is_row_done;
+} AV1CdefRowSync;
+
+// Data related to CDEF search multi-thread synchronization.
+typedef struct AV1CdefSyncData {
+#if CONFIG_MULTITHREAD
+ // Mutex lock used while dispatching jobs.
+ pthread_mutex_t *mutex_;
+#endif // CONFIG_MULTITHREAD
+ // Data related to CDEF row mt sync information
+ AV1CdefRowSync *cdef_row_mt;
+ // Flag to indicate all blocks are processed and end of frame is reached
+ int end_of_frame;
+ // Row index in units of 64x64 block
+ int fbr;
+ // Column index in units of 64x64 block
+ int fbc;
+} AV1CdefSync;
+
+void av1_cdef_frame_mt(AV1_COMMON *const cm, MACROBLOCKD *const xd,
+ AV1CdefWorkerData *const cdef_worker,
+ AVxWorker *const workers, AV1CdefSync *const cdef_sync,
+ int num_workers, cdef_init_fb_row_t cdef_init_fb_row_fn);
+void av1_cdef_init_fb_row_mt(const AV1_COMMON *const cm,
+ const MACROBLOCKD *const xd,
+ CdefBlockInfo *const fb_info,
+ uint16_t **const linebuf, uint16_t *const src,
+ struct AV1CdefSyncData *const cdef_sync, int fbr);
+void av1_cdef_copy_sb8_16(const AV1_COMMON *const cm, uint16_t *const dst,
+ int dstride, const uint8_t *src, int src_voffset,
+ int src_hoffset, int sstride, int vsize, int hsize);
+void av1_alloc_cdef_sync(AV1_COMMON *const cm, AV1CdefSync *cdef_sync,
+ int num_workers);
+void av1_free_cdef_sync(AV1CdefSync *cdef_sync);
+
// Deallocate loopfilter synchronization related mutex and data.
void av1_loop_filter_dealloc(AV1LfSync *lf_sync);
@@ -107,7 +158,7 @@ void av1_loop_filter_frame_mt(YV12_BUFFER_CONFIG *frame, struct AV1Common *cm,
int is_decoding,
#endif
AVxWorker *workers, int num_workers,
- AV1LfSync *lf_sync);
+ AV1LfSync *lf_sync, int is_realtime);
#if !CONFIG_REALTIME_ONLY
void av1_loop_restoration_filter_frame_mt(YV12_BUFFER_CONFIG *frame,
diff --git a/third_party/libaom/source/libaom/av1/common/tile_common.c b/third_party/libaom/source/libaom/av1/common/tile_common.c
index 1b11bd7606..8f5d2a6316 100644
--- a/third_party/libaom/source/libaom/av1/common/tile_common.c
+++ b/third_party/libaom/source/libaom/av1/common/tile_common.c
@@ -28,7 +28,7 @@ static int tile_log2(int blk_size, int target) {
}
void av1_get_tile_limits(AV1_COMMON *const cm) {
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
CommonTileParams *const tiles = &cm->tiles;
const int mi_cols =
ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols, seq_params->mib_size_log2);
@@ -130,9 +130,9 @@ void av1_calculate_tile_rows(const SequenceHeader *const seq_params,
void av1_tile_set_row(TileInfo *tile, const AV1_COMMON *cm, int row) {
assert(row < cm->tiles.rows);
int mi_row_start = cm->tiles.row_start_sb[row]
- << cm->seq_params.mib_size_log2;
+ << cm->seq_params->mib_size_log2;
int mi_row_end = cm->tiles.row_start_sb[row + 1]
- << cm->seq_params.mib_size_log2;
+ << cm->seq_params->mib_size_log2;
tile->tile_row = row;
tile->mi_row_start = mi_row_start;
tile->mi_row_end = AOMMIN(mi_row_end, cm->mi_params.mi_rows);
@@ -142,9 +142,9 @@ void av1_tile_set_row(TileInfo *tile, const AV1_COMMON *cm, int row) {
void av1_tile_set_col(TileInfo *tile, const AV1_COMMON *cm, int col) {
assert(col < cm->tiles.cols);
int mi_col_start = cm->tiles.col_start_sb[col]
- << cm->seq_params.mib_size_log2;
+ << cm->seq_params->mib_size_log2;
int mi_col_end = cm->tiles.col_start_sb[col + 1]
- << cm->seq_params.mib_size_log2;
+ << cm->seq_params->mib_size_log2;
tile->tile_col = col;
tile->mi_col_start = mi_col_start;
tile->mi_col_end = AOMMIN(mi_col_end, cm->mi_params.mi_cols);
@@ -153,16 +153,16 @@ void av1_tile_set_col(TileInfo *tile, const AV1_COMMON *cm, int col) {
int av1_get_sb_rows_in_tile(AV1_COMMON *cm, TileInfo tile) {
int mi_rows_aligned_to_sb = ALIGN_POWER_OF_TWO(
- tile.mi_row_end - tile.mi_row_start, cm->seq_params.mib_size_log2);
- int sb_rows = mi_rows_aligned_to_sb >> cm->seq_params.mib_size_log2;
+ tile.mi_row_end - tile.mi_row_start, cm->seq_params->mib_size_log2);
+ int sb_rows = mi_rows_aligned_to_sb >> cm->seq_params->mib_size_log2;
return sb_rows;
}
int av1_get_sb_cols_in_tile(AV1_COMMON *cm, TileInfo tile) {
int mi_cols_aligned_to_sb = ALIGN_POWER_OF_TWO(
- tile.mi_col_end - tile.mi_col_start, cm->seq_params.mib_size_log2);
- int sb_cols = mi_cols_aligned_to_sb >> cm->seq_params.mib_size_log2;
+ tile.mi_col_end - tile.mi_col_start, cm->seq_params->mib_size_log2);
+ int sb_cols = mi_cols_aligned_to_sb >> cm->seq_params->mib_size_log2;
return sb_cols;
}
@@ -195,8 +195,8 @@ AV1PixelRect av1_get_tile_rect(const TileInfo *tile_info, const AV1_COMMON *cm,
r.bottom = AOMMIN(r.bottom, frame_h);
// Convert to coordinates in the appropriate plane
- const int ss_x = is_uv && cm->seq_params.subsampling_x;
- const int ss_y = is_uv && cm->seq_params.subsampling_y;
+ const int ss_x = is_uv && cm->seq_params->subsampling_x;
+ const int ss_y = is_uv && cm->seq_params->subsampling_y;
r.left = ROUND_POWER_OF_TWO(r.left, ss_x);
r.right = ROUND_POWER_OF_TWO(r.right, ss_x);
@@ -215,7 +215,7 @@ void av1_get_uniform_tile_size(const AV1_COMMON *cm, int *w, int *h) {
for (int i = 0; i < tiles->cols; ++i) {
const int tile_width_sb =
tiles->col_start_sb[i + 1] - tiles->col_start_sb[i];
- const int tile_w = tile_width_sb * cm->seq_params.mib_size;
+ const int tile_w = tile_width_sb * cm->seq_params->mib_size;
assert(i == 0 || tile_w == *w); // ensure all tiles have same dimension
*w = tile_w;
}
@@ -223,7 +223,7 @@ void av1_get_uniform_tile_size(const AV1_COMMON *cm, int *w, int *h) {
for (int i = 0; i < tiles->rows; ++i) {
const int tile_height_sb =
tiles->row_start_sb[i + 1] - tiles->row_start_sb[i];
- const int tile_h = tile_height_sb * cm->seq_params.mib_size;
+ const int tile_h = tile_height_sb * cm->seq_params->mib_size;
assert(i == 0 || tile_h == *h); // ensure all tiles have same dimension
*h = tile_h;
}
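The tile helpers above all share the same superblock-unit arithmetic, now reading mib_size_log2 through the seq_params pointer: align the tile's mi range up to a whole superblock, then shift down by log2 of the superblock size in mi units. A compact sketch mirroring av1_get_sb_rows_in_tile():

// Sketch only; mirrors the row computation shown above.
static int sb_rows_in_tile_example(const AV1_COMMON *cm, const TileInfo *tile) {
  const int mib_size_log2 = cm->seq_params->mib_size_log2;
  const int mi_rows_aligned =
      ALIGN_POWER_OF_TWO(tile->mi_row_end - tile->mi_row_start, mib_size_log2);
  return mi_rows_aligned >> mib_size_log2;
}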
diff --git a/third_party/libaom/source/libaom/av1/decoder/decodeframe.c b/third_party/libaom/source/libaom/av1/decoder/decodeframe.c
index b364714e0a..9ca7d3cd35 100644
--- a/third_party/libaom/source/libaom/av1/decoder/decodeframe.c
+++ b/third_party/libaom/source/libaom/av1/decoder/decodeframe.c
@@ -76,12 +76,11 @@
// Checks that the remaining bits start with a 1 and ends with 0s.
// It consumes an additional byte, if already byte aligned before the check.
int av1_check_trailing_bits(AV1Decoder *pbi, struct aom_read_bit_buffer *rb) {
- AV1_COMMON *const cm = &pbi->common;
// bit_offset is set to 0 (mod 8) when the reader is already byte aligned
int bits_before_alignment = 8 - rb->bit_offset % 8;
int trailing = aom_rb_read_literal(rb, bits_before_alignment);
if (trailing != (1 << (bits_before_alignment - 1))) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
return 0;
@@ -304,16 +303,18 @@ static AOM_INLINE void decode_reconstruct_tx(
const int bsw = tx_size_wide_unit[sub_txs];
const int bsh = tx_size_high_unit[sub_txs];
const int sub_step = bsw * bsh;
+ const int row_end =
+ AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
+ const int col_end =
+ AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
assert(bsw > 0 && bsh > 0);
- for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh) {
- for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
- const int offsetr = blk_row + row;
+ for (int row = 0; row < row_end; row += bsh) {
+ const int offsetr = blk_row + row;
+ for (int col = 0; col < col_end; col += bsw) {
const int offsetc = blk_col + col;
- if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
-
decode_reconstruct_tx(cm, td, r, mbmi, plane, plane_bsize, offsetr,
offsetc, block, sub_txs, eob_total);
block += sub_step;
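The decode_reconstruct_tx change above is a pure loop restructuring: rather than testing every sub-block offset against the frame edge inside the loops, the iteration bounds are clamped once up front. A self-contained sketch of the pattern, with visit() standing in for the recursive call:

// Sketch of the bound hoisting applied above (names mirror the hunk).
static void visit_sub_blocks(TX_SIZE tx_size, TX_SIZE sub_txs, int blk_row,
                             int blk_col, int max_blocks_high,
                             int max_blocks_wide,
                             void (*visit)(int offsetr, int offsetc)) {
  const int bsw = tx_size_wide_unit[sub_txs];
  const int bsh = tx_size_high_unit[sub_txs];
  const int row_end =
      AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
  const int col_end =
      AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
  for (int row = 0; row < row_end; row += bsh) {
    const int offsetr = blk_row + row;
    for (int col = 0; col < col_end; col += bsw) {
      visit(offsetr, blk_col + col);  // no per-iteration bounds check needed
    }
  }
}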
@@ -362,7 +363,7 @@ static AOM_INLINE void decode_mbmi_block(AV1Decoder *const pbi,
PARTITION_TYPE partition,
BLOCK_SIZE bsize) {
AV1_COMMON *const cm = &pbi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int bw = mi_size_wide[bsize];
const int bh = mi_size_high[bsize];
const int x_mis = AOMMIN(bw, cm->mi_params.mi_cols - mi_col);
@@ -914,6 +915,16 @@ static AOM_INLINE void decode_token_recon_block(AV1Decoder *const pbi,
if (plane && !xd->is_chroma_ref) break;
const struct macroblockd_plane *const pd = &xd->plane[plane];
const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
+#if CONFIG_REALTIME_ONLY
+ // Realtime only build doesn't support 4x rectangular txfm sizes.
+ if (tx_size == TX_4X16 || tx_size == TX_16X4 || tx_size == TX_8X32 ||
+ tx_size == TX_32X8 || tx_size == TX_16X64 ||
+ tx_size == TX_64X16) {
+ aom_internal_error(
+ xd->error_info, AOM_CODEC_UNSUP_FEATURE,
+ "Realtime only build doesn't support rectangular txfm sizes");
+ }
+#endif
const int stepr = tx_size_high_unit[tx_size];
const int stepc = tx_size_wide_unit[tx_size];
@@ -1219,9 +1230,9 @@ static AOM_INLINE void parse_decode_block(AV1Decoder *const pbi,
: (j == 1 ? quant_params->u_ac_delta_q
: quant_params->v_ac_delta_q);
xd->plane[j].seg_dequant_QTX[i][0] = av1_dc_quant_QTX(
- current_qindex, dc_delta_q, cm->seq_params.bit_depth);
+ current_qindex, dc_delta_q, cm->seq_params->bit_depth);
xd->plane[j].seg_dequant_QTX[i][1] = av1_ac_quant_QTX(
- current_qindex, ac_delta_q, cm->seq_params.bit_depth);
+ current_qindex, ac_delta_q, cm->seq_params->bit_depth);
}
}
}
@@ -1554,9 +1565,9 @@ static AOM_INLINE void decode_restoration_mode(AV1_COMMON *cm,
}
}
if (!all_none) {
- assert(cm->seq_params.sb_size == BLOCK_64X64 ||
- cm->seq_params.sb_size == BLOCK_128X128);
- const int sb_size = cm->seq_params.sb_size == BLOCK_128X128 ? 128 : 64;
+ assert(cm->seq_params->sb_size == BLOCK_64X64 ||
+ cm->seq_params->sb_size == BLOCK_128X128);
+ const int sb_size = cm->seq_params->sb_size == BLOCK_128X128 ? 128 : 64;
for (int p = 0; p < num_planes; ++p)
cm->rst_info[p].restoration_unit_size = sb_size;
@@ -1576,7 +1587,8 @@ static AOM_INLINE void decode_restoration_mode(AV1_COMMON *cm,
}
if (num_planes > 1) {
- int s = AOMMIN(cm->seq_params.subsampling_x, cm->seq_params.subsampling_y);
+ int s =
+ AOMMIN(cm->seq_params->subsampling_x, cm->seq_params->subsampling_y);
if (s && !chroma_none) {
cm->rst_info[1].restoration_unit_size =
cm->rst_info[0].restoration_unit_size >> (aom_rb_read_bit(rb) * s);
@@ -1847,7 +1859,7 @@ static AOM_INLINE void setup_quantization(CommonQuantParams *quant_params,
// Build y/uv dequant values based on segmentation.
static AOM_INLINE void setup_segmentation_dequant(AV1_COMMON *const cm,
MACROBLOCKD *const xd) {
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
// When segmentation is disabled, only the first value is used. The
// remaining are don't cares.
const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
@@ -1909,7 +1921,7 @@ static AOM_INLINE void setup_superres(AV1_COMMON *const cm,
cm->superres_upscaled_width = *width;
cm->superres_upscaled_height = *height;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
if (!seq_params->enable_superres) return;
if (aom_rb_read_bit(rb)) {
@@ -1930,7 +1942,7 @@ static AOM_INLINE void resize_context_buffers(AV1_COMMON *cm, int width,
int height) {
#if CONFIG_SIZE_LIMIT
if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Dimensions of %dx%d beyond allowed size of %dx%d.",
width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
#endif
@@ -1950,7 +1962,7 @@ static AOM_INLINE void resize_context_buffers(AV1_COMMON *cm, int width,
// consistent and to force a realloc next time.
cm->width = 0;
cm->height = 0;
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate context buffers");
}
} else {
@@ -1968,7 +1980,7 @@ static AOM_INLINE void resize_context_buffers(AV1_COMMON *cm, int width,
static AOM_INLINE void setup_buffer_pool(AV1_COMMON *cm) {
BufferPool *const pool = cm->buffer_pool;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
lock_buffer_pool(pool);
if (aom_realloc_frame_buffer(
@@ -1978,7 +1990,7 @@ static AOM_INLINE void setup_buffer_pool(AV1_COMMON *cm) {
&cm->cur_frame->raw_frame_buffer, pool->get_fb_cb, pool->cb_priv,
0)) {
unlock_buffer_pool(pool);
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate frame buffer");
}
unlock_buffer_pool(pool);
@@ -1999,7 +2011,7 @@ static AOM_INLINE void setup_buffer_pool(AV1_COMMON *cm) {
static AOM_INLINE void setup_frame_size(AV1_COMMON *cm,
int frame_size_override_flag,
struct aom_read_bit_buffer *rb) {
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
int width, height;
if (frame_size_override_flag) {
@@ -2008,7 +2020,7 @@ static AOM_INLINE void setup_frame_size(AV1_COMMON *cm,
av1_read_frame_size(rb, num_bits_width, num_bits_height, &width, &height);
if (width > seq_params->max_frame_width ||
height > seq_params->max_frame_height) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Frame dimensions are larger than the maximum values");
}
} else {
@@ -2049,7 +2061,7 @@ static AOM_INLINE void setup_frame_size_with_refs(
// the middle of a stream, and static analysis will error if we don't do
// a null check here.
if (ref_buf == NULL) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Invalid condition: invalid reference buffer");
} else {
const YV12_BUFFER_CONFIG *const buf = &ref_buf->buf;
@@ -2065,7 +2077,7 @@ static AOM_INLINE void setup_frame_size_with_refs(
}
}
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
if (!found) {
int num_bits_width = seq_params->num_bits_width;
int num_bits_height = seq_params->num_bits_height;
@@ -2077,7 +2089,7 @@ static AOM_INLINE void setup_frame_size_with_refs(
}
if (width <= 0 || height <= 0)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Invalid frame size");
// Check to make sure at least one of frames that this frame references
@@ -2089,7 +2101,7 @@ static AOM_INLINE void setup_frame_size_with_refs(
ref_frame->buf.y_crop_height, width, height);
}
if (!has_valid_ref_frame)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Referenced frame has invalid size");
for (int i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
const RefCntBuffer *const ref_frame = get_ref_frame_buf(cm, i);
@@ -2097,7 +2109,7 @@ static AOM_INLINE void setup_frame_size_with_refs(
ref_frame->buf.bit_depth, ref_frame->buf.subsampling_x,
ref_frame->buf.subsampling_y, seq_params->bit_depth,
seq_params->subsampling_x, seq_params->subsampling_y))
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Referenced frame has incompatible color format");
}
setup_buffer_pool(cm);
@@ -2117,7 +2129,7 @@ static int rb_read_uniform(struct aom_read_bit_buffer *const rb, int n) {
static AOM_INLINE void read_tile_info_max_tile(
AV1_COMMON *const cm, struct aom_read_bit_buffer *const rb) {
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
CommonTileParams *const tiles = &cm->tiles;
int width_mi =
ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols, seq_params->mib_size_log2);
@@ -2213,7 +2225,7 @@ static AOM_INLINE void read_tile_info(AV1Decoder *const pbi,
pbi->context_update_tile_id =
aom_rb_read_literal(rb, cm->tiles.log2_rows + cm->tiles.log2_cols);
if (pbi->context_update_tile_id >= cm->tiles.rows * cm->tiles.cols) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Invalid context_update_tile_id");
}
// tile size magnitude
@@ -2366,7 +2378,7 @@ static const uint8_t *get_ls_tile_buffers(
// Get the whole of the last column, otherwise stop at the required tile.
for (int r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
- get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
+ get_ls_tile_buffer(tile_col_data_end[c], &pbi->error, &data,
tile_buffers, tile_size_bytes, c, r, tile_copy_mode);
}
}
@@ -2378,7 +2390,7 @@ static const uint8_t *get_ls_tile_buffers(
data = tile_col_data_end[c - 1];
for (int r = 0; r < tile_rows; ++r) {
- get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
+ get_ls_tile_buffer(tile_col_data_end[c], &pbi->error, &data,
tile_buffers, tile_size_bytes, c, r, tile_copy_mode);
}
}
@@ -2446,11 +2458,11 @@ static AOM_INLINE void get_tile_buffers(
if (tc < start_tile || tc > end_tile) continue;
if (data + hdr_offset >= data_end)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Data ended before all tiles were read.");
data += hdr_offset;
- get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
- &pbi->common.error, &data, buf);
+ get_tile_buffer(data_end, pbi->tile_size_bytes, is_last, &pbi->error,
+ &data, buf);
}
}
}
@@ -2460,7 +2472,7 @@ static AOM_INLINE void set_cb_buffer(AV1Decoder *pbi, DecoderCodingBlock *dcb,
const int num_planes, int mi_row,
int mi_col) {
AV1_COMMON *const cm = &pbi->common;
- int mib_size_log2 = cm->seq_params.mib_size_log2;
+ int mib_size_log2 = cm->seq_params->mib_size_log2;
int stride = (cm->mi_params.mi_cols >> mib_size_log2) + 1;
int offset = (mi_row >> mib_size_log2) * stride + (mi_col >> mib_size_log2);
CB_BUFFER *cb_buffer = cb_buffer_base + offset;
@@ -2629,11 +2641,11 @@ static AOM_INLINE void decode_tile_sb_row(AV1Decoder *pbi, ThreadData *const td,
pbi->tile_data + tile_info.tile_row * cm->tiles.cols + tile_info.tile_col;
const int sb_cols_in_tile = av1_get_sb_cols_in_tile(cm, tile_info);
const int sb_row_in_tile =
- (mi_row - tile_info.mi_row_start) >> cm->seq_params.mib_size_log2;
+ (mi_row - tile_info.mi_row_start) >> cm->seq_params->mib_size_log2;
int sb_col_in_tile = 0;
for (int mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
- mi_col += cm->seq_params.mib_size, sb_col_in_tile++) {
+ mi_col += cm->seq_params->mib_size, sb_col_in_tile++) {
set_cb_buffer(pbi, &td->dcb, pbi->cb_buffer_base, num_planes, mi_row,
mi_col);
@@ -2641,7 +2653,7 @@ static AOM_INLINE void decode_tile_sb_row(AV1Decoder *pbi, ThreadData *const td,
// Decoding of the super-block
decode_partition(pbi, td, mi_row, mi_col, td->bit_reader,
- cm->seq_params.sb_size, 0x2);
+ cm->seq_params->sb_size, 0x2);
sync_write(&tile_data->dec_row_mt_sync, sb_row_in_tile, sb_col_in_tile,
sb_cols_in_tile);
@@ -2711,16 +2723,16 @@ static AOM_INLINE void decode_tile(AV1Decoder *pbi, ThreadData *const td,
av1_reset_loop_restoration(xd, num_planes);
for (int mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
- mi_row += cm->seq_params.mib_size) {
+ mi_row += cm->seq_params->mib_size) {
av1_zero_left_context(xd);
for (int mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
- mi_col += cm->seq_params.mib_size) {
+ mi_col += cm->seq_params->mib_size) {
set_cb_buffer(pbi, dcb, &td->cb_buffer_base, num_planes, 0, 0);
// Bit-stream parsing and decoding of the superblock
decode_partition(pbi, td, mi_row, mi_col, td->bit_reader,
- cm->seq_params.sb_size, 0x3);
+ cm->seq_params->sb_size, 0x3);
if (aom_reader_has_overflowed(td->bit_reader)) {
aom_merge_corrupted_flag(&dcb->corrupted, 1);
@@ -2801,6 +2813,10 @@ static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
decoder_alloc_tile_data(pbi, n_tiles);
}
+ if (pbi->dcb.xd.seg_mask == NULL)
+ CHECK_MEM_ERROR(cm, pbi->dcb.xd.seg_mask,
+ (uint8_t *)aom_memalign(
+ 16, 2 * MAX_SB_SQUARE * sizeof(*pbi->dcb.xd.seg_mask)));
#if CONFIG_ACCOUNTING
if (pbi->acct_enabled) {
aom_accounting_reset(&pbi->accounting);
@@ -2837,7 +2853,7 @@ static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
av1_tile_init(&td->dcb.xd.tile, cm, row, col);
td->dcb.xd.current_base_qindex = cm->quant_params.base_qindex;
setup_bool_decoder(tile_bs_buf->data, data_end, tile_bs_buf->size,
- &cm->error, td->bit_reader, allow_update_cdf);
+ &pbi->error, td->bit_reader, allow_update_cdf);
#if CONFIG_ACCOUNTING
if (pbi->acct_enabled) {
td->bit_reader->accounting = &pbi->accounting;
@@ -2859,7 +2875,7 @@ static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
decode_tile(pbi, td, row, col);
aom_merge_corrupted_flag(&pbi->dcb.corrupted, td->dcb.corrupted);
if (pbi->dcb.corrupted)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Failed to decode tile data");
}
}
@@ -3017,7 +3033,7 @@ static int get_next_job_info(AV1Decoder *const pbi,
const int tile_cols_end = frame_row_mt_info->tile_cols_end;
const int start_tile = frame_row_mt_info->start_tile;
const int end_tile = frame_row_mt_info->end_tile;
- const int sb_mi_size = mi_size_wide[cm->seq_params.sb_size];
+ const int sb_mi_size = mi_size_wide[cm->seq_params->sb_size];
int num_mis_to_decode, num_threads_working;
int num_mis_waiting_for_decode;
int min_threads_working = INT_MAX;
@@ -3135,7 +3151,7 @@ static INLINE void signal_parse_sb_row_done(AV1Decoder *const pbi,
static AOM_INLINE void parse_tile_row_mt(AV1Decoder *pbi, ThreadData *const td,
TileDataDec *const tile_data) {
AV1_COMMON *const cm = &pbi->common;
- const int sb_mi_size = mi_size_wide[cm->seq_params.sb_size];
+ const int sb_mi_size = mi_size_wide[cm->seq_params->sb_size];
const int num_planes = av1_num_planes(cm);
TileInfo tile_info = tile_data->tile_info;
int tile_row = tile_info.tile_row;
@@ -3148,16 +3164,16 @@ static AOM_INLINE void parse_tile_row_mt(AV1Decoder *pbi, ThreadData *const td,
av1_reset_loop_restoration(xd, num_planes);
for (int mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
- mi_row += cm->seq_params.mib_size) {
+ mi_row += cm->seq_params->mib_size) {
av1_zero_left_context(xd);
for (int mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
- mi_col += cm->seq_params.mib_size) {
+ mi_col += cm->seq_params->mib_size) {
set_cb_buffer(pbi, dcb, pbi->cb_buffer_base, num_planes, mi_row, mi_col);
// Bit-stream parsing of the superblock
decode_partition(pbi, td, mi_row, mi_col, td->bit_reader,
- cm->seq_params.sb_size, 0x1);
+ cm->seq_params->sb_size, 0x1);
if (aom_reader_has_overflowed(td->bit_reader)) {
aom_merge_corrupted_flag(&dcb->corrupted, 1);
@@ -3357,6 +3373,8 @@ void av1_free_mc_tmp_buf(ThreadData *thread_data) {
aom_free(thread_data->tmp_conv_dst);
thread_data->tmp_conv_dst = NULL;
+ aom_free(thread_data->seg_mask);
+ thread_data->seg_mask = NULL;
for (int i = 0; i < 2; ++i) {
aom_free(thread_data->tmp_obmc_bufs[i]);
thread_data->tmp_obmc_bufs[i] = NULL;
@@ -3389,6 +3407,10 @@ static AOM_INLINE void allocate_mc_tmp_buf(AV1_COMMON *const cm,
CHECK_MEM_ERROR(cm, thread_data->tmp_conv_dst,
aom_memalign(32, MAX_SB_SIZE * MAX_SB_SIZE *
sizeof(*thread_data->tmp_conv_dst)));
+ CHECK_MEM_ERROR(cm, thread_data->seg_mask,
+ (uint8_t *)aom_memalign(
+ 16, 2 * MAX_SB_SQUARE * sizeof(*thread_data->seg_mask)));
+
for (int i = 0; i < 2; ++i) {
CHECK_MEM_ERROR(
cm, thread_data->tmp_obmc_bufs[i],
@@ -3411,6 +3433,8 @@ static AOM_INLINE void reset_dec_workers(AV1Decoder *pbi,
thread_data->td->dcb.mc_buf[0] = thread_data->td->mc_buf[0];
thread_data->td->dcb.mc_buf[1] = thread_data->td->mc_buf[1];
thread_data->td->dcb.xd.tmp_conv_dst = thread_data->td->tmp_conv_dst;
+ if (worker_idx)
+ thread_data->td->dcb.xd.seg_mask = thread_data->td->seg_mask;
for (int j = 0; j < 2; ++j) {
thread_data->td->dcb.xd.tmp_obmc_bufs[j] =
thread_data->td->tmp_obmc_bufs[j];
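The seg_mask additions spread through this file amount to one change: the main decoder's coding-block descriptor and every worker thread now own a dedicated 16-byte-aligned seg_mask buffer of 2 * MAX_SB_SQUARE bytes, rather than sharing one. The main decoder allocates lazily before tile decode (the helper name below is hypothetical), each worker allocates in allocate_mc_tmp_buf() and frees in av1_free_mc_tmp_buf(), and reset_dec_workers() points workers other than worker 0 at their thread-local buffer.

// Sketch only: the lazy main-decoder allocation repeated in the hunks above.
static void ensure_main_seg_mask(AV1Decoder *pbi, AV1_COMMON *cm) {
  if (pbi->dcb.xd.seg_mask == NULL)
    CHECK_MEM_ERROR(cm, pbi->dcb.xd.seg_mask,
                    (uint8_t *)aom_memalign(
                        16, 2 * MAX_SB_SQUARE * sizeof(*pbi->dcb.xd.seg_mask)));
}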
@@ -3481,7 +3505,7 @@ static AOM_INLINE void decode_mt_init(AV1Decoder *pbi) {
winterface->init(worker);
worker->thread_name = "aom tile worker";
if (worker_idx != 0 && !winterface->reset(worker)) {
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_ERROR,
"Tile decoder thread creation failed");
}
@@ -3498,7 +3522,7 @@ static AOM_INLINE void decode_mt_init(AV1Decoder *pbi) {
thread_data->error_info.setjmp = 0;
}
}
- const int use_highbd = cm->seq_params.use_highbitdepth;
+ const int use_highbd = cm->seq_params->use_highbitdepth;
const int buf_size = MC_TEMP_BUF_PELS << use_highbd;
for (worker_idx = 1; worker_idx < pbi->max_threads; ++worker_idx) {
DecWorkerData *const thread_data = pbi->thread_data + worker_idx;
@@ -3590,6 +3614,10 @@ static const uint8_t *decode_tiles_mt(AV1Decoder *pbi, const uint8_t *data,
if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
decoder_alloc_tile_data(pbi, n_tiles);
}
+ if (pbi->dcb.xd.seg_mask == NULL)
+ CHECK_MEM_ERROR(cm, pbi->dcb.xd.seg_mask,
+ (uint8_t *)aom_memalign(
+ 16, 2 * MAX_SB_SQUARE * sizeof(*pbi->dcb.xd.seg_mask)));
for (int row = 0; row < tile_rows; row++) {
for (int col = 0; col < tile_cols; col++) {
@@ -3606,7 +3634,7 @@ static const uint8_t *decode_tiles_mt(AV1Decoder *pbi, const uint8_t *data,
sync_dec_workers(pbi, num_workers);
if (pbi->dcb.corrupted)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Failed to decode tile data");
if (tiles->large_scale) {
@@ -3624,8 +3652,8 @@ static const uint8_t *decode_tiles_mt(AV1Decoder *pbi, const uint8_t *data,
static AOM_INLINE void dec_alloc_cb_buf(AV1Decoder *pbi) {
AV1_COMMON *const cm = &pbi->common;
- int size = ((cm->mi_params.mi_rows >> cm->seq_params.mib_size_log2) + 1) *
- ((cm->mi_params.mi_cols >> cm->seq_params.mib_size_log2) + 1);
+ int size = ((cm->mi_params.mi_rows >> cm->seq_params->mib_size_log2) + 1) *
+ ((cm->mi_params.mi_cols >> cm->seq_params->mib_size_log2) + 1);
if (pbi->cb_buffer_alloc_size < size) {
av1_dec_free_cb_buf(pbi);
@@ -3669,10 +3697,10 @@ static AOM_INLINE void row_mt_frame_init(AV1Decoder *pbi, int tile_rows_start,
tile_data->dec_row_mt_sync.num_threads_working = 0;
tile_data->dec_row_mt_sync.mi_rows =
ALIGN_POWER_OF_TWO(tile_info.mi_row_end - tile_info.mi_row_start,
- cm->seq_params.mib_size_log2);
+ cm->seq_params->mib_size_log2);
tile_data->dec_row_mt_sync.mi_cols =
ALIGN_POWER_OF_TWO(tile_info.mi_col_end - tile_info.mi_col_start,
- cm->seq_params.mib_size_log2);
+ cm->seq_params->mib_size_log2);
frame_row_mt_info->mi_rows_to_decode +=
tile_data->dec_row_mt_sync.mi_rows;
@@ -3776,6 +3804,10 @@ static const uint8_t *decode_tiles_row_mt(AV1Decoder *pbi, const uint8_t *data,
}
decoder_alloc_tile_data(pbi, n_tiles);
}
+ if (pbi->dcb.xd.seg_mask == NULL)
+ CHECK_MEM_ERROR(cm, pbi->dcb.xd.seg_mask,
+ (uint8_t *)aom_memalign(
+ 16, 2 * MAX_SB_SQUARE * sizeof(*pbi->dcb.xd.seg_mask)));
for (int row = 0; row < tile_rows; row++) {
for (int col = 0; col < tile_cols; col++) {
@@ -3811,7 +3843,7 @@ static const uint8_t *decode_tiles_row_mt(AV1Decoder *pbi, const uint8_t *data,
sync_dec_workers(pbi, num_workers);
if (pbi->dcb.corrupted)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Failed to decode tile data");
if (tiles->large_scale) {
@@ -3829,7 +3861,7 @@ static const uint8_t *decode_tiles_row_mt(AV1Decoder *pbi, const uint8_t *data,
static AOM_INLINE void error_handler(void *data) {
AV1_COMMON *const cm = (AV1_COMMON *)data;
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
}
// Reads the high_bitdepth and twelve_bit fields in color_config() and sets
@@ -3860,7 +3892,7 @@ static AOM_INLINE void read_bitdepth(
void av1_read_film_grain_params(AV1_COMMON *cm,
struct aom_read_bit_buffer *rb) {
aom_film_grain_t *pars = &cm->film_grain_params;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
pars->apply_grain = aom_rb_read_bit(rb);
if (!pars->apply_grain) {
@@ -3890,7 +3922,7 @@ void av1_read_film_grain_params(AV1_COMMON *cm,
}
}
if (!found) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Invalid film grain reference idx %d. ref_frame_idx = "
"{%d, %d, %d, %d, %d, %d, %d}",
film_grain_params_ref_idx, cm->remapped_ref_idx[0],
@@ -3900,11 +3932,11 @@ void av1_read_film_grain_params(AV1_COMMON *cm,
}
RefCntBuffer *const buf = cm->ref_frame_map[film_grain_params_ref_idx];
if (buf == NULL) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Invalid Film grain reference idx");
}
if (!buf->film_grain_params_present) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Film grain reference parameters not available");
}
uint16_t random_seed = pars->random_seed;
@@ -3916,13 +3948,13 @@ void av1_read_film_grain_params(AV1_COMMON *cm,
// Scaling functions parameters
pars->num_y_points = aom_rb_read_literal(rb, 4); // max 14
if (pars->num_y_points > 14)
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Number of points for film grain luma scaling function "
"exceeds the maximum value.");
for (int i = 0; i < pars->num_y_points; i++) {
pars->scaling_points_y[i][0] = aom_rb_read_literal(rb, 8);
if (i && pars->scaling_points_y[i - 1][0] >= pars->scaling_points_y[i][0])
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"First coordinate of the scaling function points "
"shall be increasing.");
pars->scaling_points_y[i][1] = aom_rb_read_literal(rb, 8);
@@ -3941,14 +3973,14 @@ void av1_read_film_grain_params(AV1_COMMON *cm,
} else {
pars->num_cb_points = aom_rb_read_literal(rb, 4); // max 10
if (pars->num_cb_points > 10)
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Number of points for film grain cb scaling function "
"exceeds the maximum value.");
for (int i = 0; i < pars->num_cb_points; i++) {
pars->scaling_points_cb[i][0] = aom_rb_read_literal(rb, 8);
if (i &&
pars->scaling_points_cb[i - 1][0] >= pars->scaling_points_cb[i][0])
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"First coordinate of the scaling function points "
"shall be increasing.");
pars->scaling_points_cb[i][1] = aom_rb_read_literal(rb, 8);
@@ -3956,14 +3988,14 @@ void av1_read_film_grain_params(AV1_COMMON *cm,
pars->num_cr_points = aom_rb_read_literal(rb, 4); // max 10
if (pars->num_cr_points > 10)
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Number of points for film grain cr scaling function "
"exceeds the maximum value.");
for (int i = 0; i < pars->num_cr_points; i++) {
pars->scaling_points_cr[i][0] = aom_rb_read_literal(rb, 8);
if (i &&
pars->scaling_points_cr[i - 1][0] >= pars->scaling_points_cr[i][0])
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"First coordinate of the scaling function points "
"shall be increasing.");
pars->scaling_points_cr[i][1] = aom_rb_read_literal(rb, 8);
@@ -3972,7 +4004,7 @@ void av1_read_film_grain_params(AV1_COMMON *cm,
if ((seq_params->subsampling_x == 1) && (seq_params->subsampling_y == 1) &&
(((pars->num_cb_points == 0) && (pars->num_cr_points != 0)) ||
((pars->num_cb_points != 0) && (pars->num_cr_points == 0))))
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"In YCbCr 4:2:0, film grain shall be applied "
"to both chroma components or neither.");
}
@@ -4024,13 +4056,13 @@ void av1_read_film_grain_params(AV1_COMMON *cm,
static AOM_INLINE void read_film_grain(AV1_COMMON *cm,
struct aom_read_bit_buffer *rb) {
- if (cm->seq_params.film_grain_params_present &&
+ if (cm->seq_params->film_grain_params_present &&
(cm->show_frame || cm->showable_frame)) {
av1_read_film_grain_params(cm, rb);
} else {
memset(&cm->film_grain_params, 0, sizeof(cm->film_grain_params));
}
- cm->film_grain_params.bit_depth = cm->seq_params.bit_depth;
+ cm->film_grain_params.bit_depth = cm->seq_params->bit_depth;
memcpy(&cm->cur_frame->film_grain_params, &cm->film_grain_params,
sizeof(aom_film_grain_t));
}
@@ -4164,7 +4196,7 @@ void av1_read_op_parameters_info(aom_dec_model_op_parameters_t *op_params,
static AOM_INLINE void read_temporal_point_info(
AV1_COMMON *const cm, struct aom_read_bit_buffer *rb) {
cm->frame_presentation_time = aom_rb_read_unsigned_literal(
- rb, cm->seq_params.decoder_model_info.frame_presentation_time_length);
+ rb, cm->seq_params->decoder_model_info.frame_presentation_time_length);
}
void av1_read_sequence_header(AV1_COMMON *cm, struct aom_read_bit_buffer *rb,
@@ -4192,7 +4224,7 @@ void av1_read_sequence_header(AV1_COMMON *cm, struct aom_read_bit_buffer *rb,
seq_params->frame_id_length =
aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
if (seq_params->frame_id_length > 16)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(cm->error, AOM_CODEC_CORRUPT_FRAME,
"Invalid frame_id_length");
}
@@ -4446,7 +4478,7 @@ static INLINE void reset_frame_buffers(AV1_COMMON *cm) {
static int read_uncompressed_header(AV1Decoder *pbi,
struct aom_read_bit_buffer *rb) {
AV1_COMMON *const cm = &pbi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
CurrentFrame *const current_frame = &cm->current_frame;
FeatureFlags *const features = &cm->features;
MACROBLOCKD *const xd = &pbi->dcb.xd;
@@ -4457,7 +4489,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
sframe_info->is_s_frame_at_altref = 0;
if (!pbi->sequence_header_ready) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"No sequence header");
}
@@ -4479,14 +4511,14 @@ static int read_uncompressed_header(AV1Decoder *pbi,
if (cm->show_existing_frame) {
if (pbi->sequence_header_changed) {
aom_internal_error(
- &cm->error, AOM_CODEC_CORRUPT_FRAME,
+ &pbi->error, AOM_CODEC_CORRUPT_FRAME,
"New sequence header starts with a show_existing_frame.");
}
// Show an existing frame directly.
const int existing_frame_idx = aom_rb_read_literal(rb, 3);
RefCntBuffer *const frame_to_show = cm->ref_frame_map[existing_frame_idx];
if (frame_to_show == NULL) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(&pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"Buffer does not contain a decoded frame");
}
if (seq_params->decoder_model_info_present_flag &&
@@ -4500,7 +4532,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
* referencing */
if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
pbi->valid_for_referencing[existing_frame_idx] == 0)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Reference buffer frame ID mismatch");
}
lock_buffer_pool(pool);
@@ -4526,7 +4558,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
// show_existing_frame is used to show a previous frame, that the value
// of showable_frame for the previous frame was equal to 1.
if (!frame_to_show->showable_frame) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(&pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"Buffer does not contain a showable frame");
}
// Section 6.8.2: It is a requirement of bitstream conformance that when
@@ -4554,7 +4586,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
pbi->decoding_first_frame = 1;
reset_frame_buffers(cm);
} else {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Sequence header has changed without a keyframe.");
}
}
@@ -4569,7 +4601,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
}
if (seq_params->still_picture &&
(current_frame->frame_type != KEY_FRAME || !cm->show_frame)) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Still pictures must be coded as shown keyframes");
}
cm->showable_frame = current_frame->frame_type != KEY_FRAME;
@@ -4641,7 +4673,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
/* Check current_frame_id for conformance */
if (prev_frame_id == cm->current_frame_id ||
diff_frame_id >= (1 << (frame_id_length - 1))) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Invalid value of current_frame_id");
}
}
@@ -4672,18 +4704,18 @@ static int read_uncompressed_header(AV1Decoder *pbi,
}
if (seq_params->decoder_model_info_present_flag) {
- cm->buffer_removal_time_present = aom_rb_read_bit(rb);
- if (cm->buffer_removal_time_present) {
+ pbi->buffer_removal_time_present = aom_rb_read_bit(rb);
+ if (pbi->buffer_removal_time_present) {
for (int op_num = 0;
op_num < seq_params->operating_points_cnt_minus_1 + 1; op_num++) {
if (seq_params->op_params[op_num].decoder_model_param_present_flag) {
- if ((((seq_params->operating_point_idc[op_num] >>
+ if (seq_params->operating_point_idc[op_num] == 0 ||
+ (((seq_params->operating_point_idc[op_num] >>
cm->temporal_layer_id) &
0x1) &&
((seq_params->operating_point_idc[op_num] >>
(cm->spatial_layer_id + 8)) &
- 0x1)) ||
- seq_params->operating_point_idc[op_num] == 0) {
+ 0x1))) {
cm->buffer_removal_times[op_num] = aom_rb_read_unsigned_literal(
rb, seq_params->decoder_model_info.buffer_removal_time_length);
} else {
@@ -4713,7 +4745,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
if (current_frame->frame_type == INTRA_ONLY_FRAME) {
current_frame->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
if (current_frame->refresh_frame_flags == 0xFF) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(&pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"Intra only frames cannot have refresh flags 0xFF");
}
if (pbi->need_resync) {
@@ -4747,7 +4779,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
// pixels set to neutral grey.
int buf_idx = get_free_fb(cm);
if (buf_idx == INVALID_IDX) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Unable to find free frame buffer");
}
buf = &frame_bufs[buf_idx];
@@ -4760,7 +4792,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
&buf->raw_frame_buffer, pool->get_fb_cb, pool->cb_priv, 0)) {
decrease_ref_count(buf, pool);
unlock_buffer_pool(pool);
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate frame buffer");
}
unlock_buffer_pool(pool);
@@ -4827,10 +4859,10 @@ static int read_uncompressed_header(AV1Decoder *pbi,
// reference to a slot that hasn't been set yet. That's what we are
// checking here.
if (lst_buf == NULL)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Inter frame requests nonexistent reference");
if (gld_buf == NULL)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Inter frame requests nonexistent reference");
av1_set_frame_refs(cm, cm->remapped_ref_idx, lst_ref, gld_ref);
@@ -4848,7 +4880,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
// reference to a slot that hasn't been set yet. That's what we are
// checking here.
if (cm->ref_frame_map[ref] == NULL)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Inter frame requests nonexistent reference");
cm->remapped_ref_idx[i] = ref;
} else {
@@ -4856,7 +4888,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
}
// Check valid for referencing
if (pbi->valid_for_referencing[ref] == 0)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Reference frame not valid for referencing");
cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;
@@ -4872,7 +4904,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
// Compare values derived from delta_frame_id_minus_1 and
// refresh_frame_flags.
if (ref_frame_id != cm->ref_frame_id[ref])
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Reference buffer frame ID mismatch");
}
}
@@ -4895,7 +4927,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
cm->prev_frame = get_primary_ref_frame_buf(cm);
if (features->primary_ref_frame != PRIMARY_REF_NONE &&
get_primary_ref_frame_buf(cm) == NULL) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Reference frame containing this frame's initial "
"frame context is unavailable.");
}
@@ -4915,7 +4947,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
ref_scale_factors, ref_buf->buf.y_crop_width,
ref_buf->buf.y_crop_height, cm->width, cm->height);
if ((!av1_is_valid_scale(ref_scale_factors)))
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(&pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"Reference frame has invalid dimensions");
}
}
@@ -4952,7 +4984,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
cm->cur_frame->buf.render_height = cm->render_height;
if (pbi->need_resync) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Keyframe / intra-only frame required to reset decoder"
" state");
}
@@ -4973,13 +5005,13 @@ static int read_uncompressed_header(AV1Decoder *pbi,
read_tile_info(pbi, rb);
if (!av1_is_min_tile_width_satisfied(cm)) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Minimum tile width requirement not satisfied");
}
CommonQuantParams *const quant_params = &cm->quant_params;
setup_quantization(quant_params, av1_num_planes(cm),
- cm->seq_params.separate_uv_delta_q, rb);
+ cm->seq_params->separate_uv_delta_q, rb);
xd->bd = (int)seq_params->bit_depth;
CommonContexts *const above_contexts = &cm->above_contexts;
@@ -4990,7 +5022,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
if (av1_alloc_above_context_buffers(above_contexts, cm->tiles.rows,
cm->mi_params.mi_cols,
av1_num_planes(cm))) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate context buffers");
}
}
@@ -5070,7 +5102,7 @@ static int read_uncompressed_header(AV1Decoder *pbi,
features->reduced_tx_set_used = aom_rb_read_bit(rb);
if (features->allow_ref_frame_mvs && !frame_might_allow_ref_frame_mvs(cm)) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Frame wrongly requests reference frame MVs");
}
@@ -5170,7 +5202,7 @@ uint32_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi,
// Use the default frame context values.
*cm->fc = *cm->default_frame_context;
if (!cm->fc->initialized)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Uninitialized entropy context.");
}
return uncomp_hdr_size;
@@ -5180,8 +5212,8 @@ uint32_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi,
av1_setup_motion_field(cm);
- av1_setup_block_planes(xd, cm->seq_params.subsampling_x,
- cm->seq_params.subsampling_y, num_planes);
+ av1_setup_block_planes(xd, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y, num_planes);
if (cm->features.primary_ref_frame == PRIMARY_REF_NONE) {
// use the default frame context values
*cm->fc = *cm->default_frame_context;
@@ -5189,7 +5221,7 @@ uint32_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi,
*cm->fc = get_primary_ref_frame_buf(cm)->frame_context;
}
if (!cm->fc->initialized)
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Uninitialized entropy context.");
pbi->dcb.corrupted = 0;
@@ -5207,7 +5239,7 @@ static AOM_INLINE void setup_frame_info(AV1Decoder *pbi) {
av1_alloc_restoration_buffers(cm);
}
#endif
- const int use_highbd = cm->seq_params.use_highbitdepth;
+ const int use_highbd = cm->seq_params->use_highbitdepth;
const int buf_size = MC_TEMP_BUF_PELS << use_highbd;
if (pbi->td.mc_buf_size != buf_size) {
av1_free_mc_tmp_buf(&pbi->td);
@@ -5242,13 +5274,17 @@ void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
// If the bit stream is monochrome, set the U and V buffers to a constant.
if (num_planes < 3) {
- set_planes_to_neutral_grey(&cm->seq_params, xd->cur_buf, 1);
+ set_planes_to_neutral_grey(cm->seq_params, xd->cur_buf, 1);
}
if (end_tile != tiles->rows * tiles->cols - 1) {
return;
}
+ av1_alloc_cdef_buffers(cm, &pbi->cdef_worker, &pbi->cdef_sync,
+ pbi->num_workers);
+ av1_alloc_cdef_sync(cm, &pbi->cdef_sync, pbi->num_workers);
+
if (!cm->features.allow_intrabc && !tiles->single_tile_decoding) {
if (cm->lf.filter_level[0] || cm->lf.filter_level[1]) {
if (pbi->num_workers > 1) {
@@ -5257,13 +5293,13 @@ void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
#if CONFIG_LPF_MASK
1,
#endif
- pbi->tile_workers, pbi->num_workers, &pbi->lf_row_sync);
+ pbi->tile_workers, pbi->num_workers, &pbi->lf_row_sync, 0);
} else {
av1_loop_filter_frame(&cm->cur_frame->buf, cm, &pbi->dcb.xd,
#if CONFIG_LPF_MASK
1,
#endif
- 0, num_planes, 0);
+ 0, num_planes, 0, 0);
}
}
@@ -5285,7 +5321,14 @@ void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
cm, 0);
if (do_cdef) {
- av1_cdef_frame(&pbi->common.cur_frame->buf, cm, &pbi->dcb.xd);
+ if (pbi->num_workers > 1) {
+ av1_cdef_frame_mt(cm, &pbi->dcb.xd, pbi->cdef_worker,
+ pbi->tile_workers, &pbi->cdef_sync,
+ pbi->num_workers, av1_cdef_init_fb_row_mt);
+ } else {
+ av1_cdef_frame(&pbi->common.cur_frame->buf, cm, &pbi->dcb.xd,
+ av1_cdef_init_fb_row);
+ }
}
superres_post_decode(pbi);
@@ -5323,7 +5366,14 @@ void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
#else
if (!optimized_loop_restoration) {
if (do_cdef) {
- av1_cdef_frame(&pbi->common.cur_frame->buf, cm, &pbi->dcb.xd);
+ if (pbi->num_workers > 1) {
+ av1_cdef_frame_mt(cm, &pbi->dcb.xd, pbi->cdef_worker,
+ pbi->tile_workers, &pbi->cdef_sync,
+ pbi->num_workers, av1_cdef_init_fb_row_mt);
+ } else {
+ av1_cdef_frame(&pbi->common.cur_frame->buf, cm, &pbi->dcb.xd,
+ av1_cdef_init_fb_row);
+ }
}
}
#endif // !CONFIG_REALTIME_ONLY
@@ -5339,7 +5389,7 @@ void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
av1_reset_cdf_symbol_counters(cm->fc);
}
} else {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Decode failed. Frame data is corrupted.");
}
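
Editor's note on the two CDEF hunks above: the patch adds a row-multithreaded CDEF path next to the existing single-threaded call. A minimal sketch of that dispatch follows; it is an editorial illustration, not part of the patch. It assumes libaom's internal decoder headers, the wrapper name apply_cdef is hypothetical, and the av1_cdef_frame / av1_cdef_frame_mt calls mirror the ones shown in the hunks.

// Editor's sketch: take the row-multithreaded CDEF path when worker threads
// are available, otherwise filter the frame on the calling thread.
static void apply_cdef(AV1Decoder *pbi, AV1_COMMON *cm) {
  if (pbi->num_workers > 1) {
    // Workers pull 64x64 filter-block rows; pbi->cdef_sync guards the shared
    // row cursor and pbi->cdef_worker holds per-worker scratch buffers.
    av1_cdef_frame_mt(cm, &pbi->dcb.xd, pbi->cdef_worker, pbi->tile_workers,
                      &pbi->cdef_sync, pbi->num_workers,
                      av1_cdef_init_fb_row_mt);
  } else {
    av1_cdef_frame(&cm->cur_frame->buf, cm, &pbi->dcb.xd,
                   av1_cdef_init_fb_row);
  }
}
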
diff --git a/third_party/libaom/source/libaom/av1/decoder/decodemv.c b/third_party/libaom/source/libaom/av1/decoder/decodemv.c
index 412be86989..839bda2be6 100644
--- a/third_party/libaom/source/libaom/av1/decoder/decodemv.c
+++ b/third_party/libaom/source/libaom/av1/decoder/decodemv.c
@@ -46,7 +46,7 @@ static void read_cdef(AV1_COMMON *cm, aom_reader *r, MACROBLOCKD *const xd) {
// At the start of a superblock, mark that we haven't yet read CDEF strengths
// for any of the CDEF units contained in this superblock.
- const int sb_mask = (cm->seq_params.mib_size - 1);
+ const int sb_mask = (cm->seq_params->mib_size - 1);
const int mi_row_in_sb = (xd->mi_row & sb_mask);
const int mi_col_in_sb = (xd->mi_col & sb_mask);
if (mi_row_in_sb == 0 && mi_col_in_sb == 0) {
@@ -61,7 +61,7 @@ static void read_cdef(AV1_COMMON *cm, aom_reader *r, MACROBLOCKD *const xd) {
const int index_mask = cdef_size;
const int cdef_unit_row_in_sb = ((xd->mi_row & index_mask) != 0);
const int cdef_unit_col_in_sb = ((xd->mi_col & index_mask) != 0);
- const int index = (cm->seq_params.sb_size == BLOCK_128X128)
+ const int index = (cm->seq_params->sb_size == BLOCK_128X128)
? cdef_unit_col_in_sb + 2 * cdef_unit_row_in_sb
: 0;
@@ -85,12 +85,12 @@ static int read_delta_qindex(AV1_COMMON *cm, const MACROBLOCKD *xd,
aom_reader *r, MB_MODE_INFO *const mbmi) {
int sign, abs, reduced_delta_qindex = 0;
BLOCK_SIZE bsize = mbmi->bsize;
- const int b_col = xd->mi_col & (cm->seq_params.mib_size - 1);
- const int b_row = xd->mi_row & (cm->seq_params.mib_size - 1);
+ const int b_col = xd->mi_col & (cm->seq_params->mib_size - 1);
+ const int b_row = xd->mi_row & (cm->seq_params->mib_size - 1);
const int read_delta_q_flag = (b_col == 0 && b_row == 0);
FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
- if ((bsize != cm->seq_params.sb_size || mbmi->skip_txfm == 0) &&
+ if ((bsize != cm->seq_params->sb_size || mbmi->skip_txfm == 0) &&
read_delta_q_flag) {
abs = aom_read_symbol(r, ec_ctx->delta_q_cdf, DELTA_Q_PROBS + 1, ACCT_STR);
const int smallval = (abs < DELTA_Q_SMALL);
@@ -117,11 +117,11 @@ static int read_delta_lflevel(const AV1_COMMON *const cm, aom_reader *r,
int mi_row) {
int reduced_delta_lflevel = 0;
const BLOCK_SIZE bsize = mbmi->bsize;
- const int b_col = mi_col & (cm->seq_params.mib_size - 1);
- const int b_row = mi_row & (cm->seq_params.mib_size - 1);
+ const int b_col = mi_col & (cm->seq_params->mib_size - 1);
+ const int b_row = mi_row & (cm->seq_params->mib_size - 1);
const int read_delta_lf_flag = (b_col == 0 && b_row == 0);
- if ((bsize != cm->seq_params.sb_size || mbmi->skip_txfm == 0) &&
+ if ((bsize != cm->seq_params->sb_size || mbmi->skip_txfm == 0) &&
read_delta_lf_flag) {
int abs = aom_read_symbol(r, cdf, DELTA_LF_PROBS + 1, ACCT_STR);
const int smallval = (abs < DELTA_LF_SMALL);
@@ -579,7 +579,7 @@ static void read_palette_mode_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
aom_read_symbol(r, xd->tile_ctx->palette_y_size_cdf[bsize_ctx],
PALETTE_SIZES, ACCT_STR) +
2;
- read_palette_colors_y(xd, cm->seq_params.bit_depth, pmi, r);
+ read_palette_colors_y(xd, cm->seq_params->bit_depth, pmi, r);
}
}
if (num_planes > 1 && mbmi->uv_mode == UV_DC_PRED && xd->is_chroma_ref) {
@@ -591,7 +591,7 @@ static void read_palette_mode_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
aom_read_symbol(r, xd->tile_ctx->palette_uv_size_cdf[bsize_ctx],
PALETTE_SIZES, ACCT_STR) +
2;
- read_palette_colors_uv(xd, cm->seq_params.bit_depth, pmi, r);
+ read_palette_colors_uv(xd, cm->seq_params->bit_depth, pmi, r);
}
}
}
@@ -682,7 +682,7 @@ static INLINE int assign_dv(AV1_COMMON *cm, MACROBLOCKD *xd, int_mv *mv,
mv->as_mv.row = (mv->as_mv.row >> 3) * 8;
int valid = is_mv_valid(&mv->as_mv) &&
av1_is_dv_valid(mv->as_mv, cm, xd, mi_row, mi_col, bsize,
- cm->seq_params.mib_size_log2);
+ cm->seq_params->mib_size_log2);
return valid;
}
@@ -711,7 +711,7 @@ static void read_intrabc_info(AV1_COMMON *const cm, DecoderCodingBlock *dcb,
av1_find_best_ref_mvs(0, ref_mvs[INTRA_FRAME], &nearestmv, &nearmv, 0);
int_mv dv_ref = nearestmv.as_int == 0 ? nearmv : nearestmv;
if (dv_ref.as_int == 0)
- av1_find_ref_dv(&dv_ref, &xd->tile, cm->seq_params.mib_size, xd->mi_row);
+ av1_find_ref_dv(&dv_ref, &xd->tile, cm->seq_params->mib_size, xd->mi_row);
// Ref DV should not have sub-pel.
int valid_dv = (dv_ref.as_mv.col & 7) == 0 && (dv_ref.as_mv.row & 7) == 0;
dv_ref.as_mv.col = (dv_ref.as_mv.col >> 3) * 8;
@@ -816,7 +816,7 @@ static void read_intra_frame_mode_info(AV1_COMMON *const cm,
? read_angle_delta(r, ec_ctx->angle_delta_cdf[mbmi->mode - V_PRED])
: 0;
- if (!cm->seq_params.monochrome && xd->is_chroma_ref) {
+ if (!cm->seq_params->monochrome && xd->is_chroma_ref) {
mbmi->uv_mode =
read_intra_mode_uv(ec_ctx, r, is_cfl_allowed(xd), mbmi->mode);
if (mbmi->uv_mode == UV_CFL_PRED) {
@@ -1076,7 +1076,7 @@ static void read_intra_block_mode_info(AV1_COMMON *const cm,
use_angle_delta && av1_is_directional_mode(mbmi->mode)
? read_angle_delta(r, ec_ctx->angle_delta_cdf[mbmi->mode - V_PRED])
: 0;
- if (!cm->seq_params.monochrome && xd->is_chroma_ref) {
+ if (!cm->seq_params->monochrome && xd->is_chroma_ref) {
mbmi->uv_mode =
read_intra_mode_uv(ec_ctx, r, is_cfl_allowed(xd), mbmi->mode);
if (mbmi->uv_mode == UV_CFL_PRED) {
@@ -1375,7 +1375,7 @@ static void read_inter_block_mode_info(AV1Decoder *const pbi,
aom_merge_corrupted_flag(&dcb->corrupted, mv_corrupted_flag);
mbmi->use_wedge_interintra = 0;
- if (cm->seq_params.enable_interintra_compound && !mbmi->skip_mode &&
+ if (cm->seq_params->enable_interintra_compound && !mbmi->skip_mode &&
is_interintra_allowed(mbmi)) {
const int bsize_group = size_group_lookup[bsize];
const int interintra =
@@ -1423,7 +1423,7 @@ static void read_inter_block_mode_info(AV1Decoder *const pbi,
if (has_second_ref(mbmi) && !mbmi->skip_mode) {
// Read idx to indicate current compound inter prediction mode group
const int masked_compound_used = is_any_masked_compound_used(bsize) &&
- cm->seq_params.enable_masked_compound;
+ cm->seq_params->enable_masked_compound;
if (masked_compound_used) {
const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
@@ -1432,7 +1432,7 @@ static void read_inter_block_mode_info(AV1Decoder *const pbi,
}
if (mbmi->comp_group_idx == 0) {
- if (cm->seq_params.order_hint_info.enable_dist_wtd_comp) {
+ if (cm->seq_params->order_hint_info.enable_dist_wtd_comp) {
const int comp_index_ctx = get_comp_index_context(cm, xd);
mbmi->compound_idx = (uint8_t)aom_read_symbol(
r, ec_ctx->compound_index_cdf[comp_index_ctx], 2, ACCT_STR);
@@ -1473,7 +1473,7 @@ static void read_inter_block_mode_info(AV1Decoder *const pbi,
}
read_mb_interp_filter(xd, features->interp_filter,
- cm->seq_params.enable_dual_filter, mbmi, r);
+ cm->seq_params->enable_dual_filter, mbmi, r);
#if !CONFIG_REALTIME_ONLY
if (mbmi->motion_mode == WARPED_CAUSAL) {
@@ -1573,11 +1573,11 @@ void av1_read_mode_info(AV1Decoder *const pbi, DecoderCodingBlock *dcb,
if (frame_is_intra_only(cm)) {
read_intra_frame_mode_info(cm, dcb, r);
- if (cm->seq_params.order_hint_info.enable_ref_frame_mvs)
+ if (cm->seq_params->order_hint_info.enable_ref_frame_mvs)
intra_copy_frame_mvs(cm, xd->mi_row, xd->mi_col, x_mis, y_mis);
} else {
read_inter_frame_mode_info(pbi, dcb, r);
- if (cm->seq_params.order_hint_info.enable_ref_frame_mvs)
+ if (cm->seq_params->order_hint_info.enable_ref_frame_mvs)
av1_copy_frame_mvs(cm, mi, xd->mi_row, xd->mi_col, x_mis, y_mis);
}
}
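
Editor's note: the decodemv.c hunks above only switch cm->seq_params from an embedded struct to a pointer, but the surrounding logic is worth spelling out. Delta-Q and delta-LF values are coded at most once per superblock, detected by masking the mode-info position with (mib_size - 1); the code above additionally requires that the block is not a skipped whole-superblock block. Only the position test is sketched below, in a self-contained form with a hypothetical helper name.

#include <stdbool.h>

// mib_size is the superblock size in 4x4 mode-info units and is a power of
// two, so the mask yields the block's offset inside its superblock. The delta
// syntax is read only for the block at the superblock origin.
static bool is_first_block_in_sb(int mi_row, int mi_col, int mib_size) {
  const int b_col = mi_col & (mib_size - 1);
  const int b_row = mi_row & (mib_size - 1);
  return b_col == 0 && b_row == 0;
}
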
diff --git a/third_party/libaom/source/libaom/av1/decoder/decoder.c b/third_party/libaom/source/libaom/av1/decoder/decoder.c
index 1680734a09..40dd71cea2 100644
--- a/third_party/libaom/source/libaom/av1/decoder/decoder.c
+++ b/third_party/libaom/source/libaom/av1/decoder/decoder.c
@@ -97,17 +97,19 @@ AV1Decoder *av1_decoder_create(BufferPool *const pool) {
av1_zero(*pbi);
AV1_COMMON *volatile const cm = &pbi->common;
+ cm->seq_params = &pbi->seq_params;
+ cm->error = &pbi->error;
// The jmp_buf is valid only for the duration of the function that calls
// setjmp(). Therefore, this function must reset the 'setjmp' field to 0
// before it returns.
- if (setjmp(cm->error.jmp)) {
- cm->error.setjmp = 0;
+ if (setjmp(pbi->error.jmp)) {
+ pbi->error.setjmp = 0;
av1_decoder_remove(pbi);
return NULL;
}
- cm->error.setjmp = 1;
+ pbi->error.setjmp = 1;
CHECK_MEM_ERROR(cm, cm->fc,
(FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
@@ -129,7 +131,7 @@ AV1Decoder *av1_decoder_create(BufferPool *const pool) {
pbi->decoding_first_frame = 1;
pbi->common.buffer_pool = pool;
- cm->seq_params.bit_depth = AOM_BITS_8;
+ cm->seq_params->bit_depth = AOM_BITS_8;
cm->mi_params.free_mi = dec_free_mi;
cm->mi_params.setup_mi = dec_setup_mi;
@@ -146,7 +148,7 @@ AV1Decoder *av1_decoder_create(BufferPool *const pool) {
aom_accounting_init(&pbi->accounting);
#endif
- cm->error.setjmp = 0;
+ pbi->error.setjmp = 0;
aom_get_worker_interface()->init(&pbi->lf_worker);
pbi->lf_worker.thread_name = "aom lf worker";
@@ -194,6 +196,7 @@ void av1_decoder_remove(AV1Decoder *pbi) {
}
aom_free(pbi->thread_data);
}
+ aom_free(pbi->dcb.xd.seg_mask);
for (i = 0; i < pbi->num_workers; ++i) {
AVxWorker *const worker = &pbi->tile_workers[i];
@@ -261,16 +264,16 @@ aom_codec_err_t av1_copy_reference_dec(AV1Decoder *pbi, int idx,
const YV12_BUFFER_CONFIG *const cfg = get_ref_frame(cm, idx);
if (cfg == NULL) {
- aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame");
+ aom_internal_error(&pbi->error, AOM_CODEC_ERROR, "No reference frame");
return AOM_CODEC_ERROR;
}
if (!equal_dimensions(cfg, sd))
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_ERROR,
"Incorrect buffer dimensions");
else
aom_yv12_copy_frame(cfg, sd, num_planes);
- return cm->error.error_code;
+ return pbi->error.error_code;
}
static int equal_dimensions_and_border(const YV12_BUFFER_CONFIG *a,
@@ -293,13 +296,13 @@ aom_codec_err_t av1_set_reference_dec(AV1_COMMON *cm, int idx,
ref_buf = get_ref_frame(cm, idx);
if (ref_buf == NULL) {
- aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame");
+ aom_internal_error(cm->error, AOM_CODEC_ERROR, "No reference frame");
return AOM_CODEC_ERROR;
}
if (!use_external_ref) {
if (!equal_dimensions(ref_buf, sd)) {
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Incorrect buffer dimensions");
} else {
// Overwrite the reference frame buffer.
@@ -307,7 +310,7 @@ aom_codec_err_t av1_set_reference_dec(AV1_COMMON *cm, int idx,
}
} else {
if (!equal_dimensions_and_border(ref_buf, sd)) {
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Incorrect buffer dimensions");
} else {
// Overwrite the reference frame buffer pointers.
@@ -323,7 +326,7 @@ aom_codec_err_t av1_set_reference_dec(AV1_COMMON *cm, int idx,
}
}
- return cm->error.error_code;
+ return cm->error->error_code;
}
aom_codec_err_t av1_copy_new_frame_dec(AV1_COMMON *cm,
@@ -332,12 +335,12 @@ aom_codec_err_t av1_copy_new_frame_dec(AV1_COMMON *cm,
const int num_planes = av1_num_planes(cm);
if (!equal_dimensions_and_border(new_frame, sd))
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Incorrect buffer dimensions");
else
aom_yv12_copy_frame(new_frame, sd, num_planes);
- return cm->error.error_code;
+ return cm->error->error_code;
}
static void release_current_frame(AV1Decoder *pbi) {
@@ -355,7 +358,7 @@ static void release_current_frame(AV1Decoder *pbi) {
// Consumes a reference to cm->cur_frame.
//
// This function returns void. It reports failure by setting
-// cm->error.error_code.
+// pbi->error.error_code.
static void update_frame_buffers(AV1Decoder *pbi, int frame_decoded) {
int ref_index = 0, mask;
AV1_COMMON *const cm = &pbi->common;
@@ -388,7 +391,7 @@ static void update_frame_buffers(AV1Decoder *pbi, int frame_decoded) {
// error
cm->cur_frame->buf.corrupted = 1;
decrease_ref_count(cm->cur_frame, pool);
- cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
+ pbi->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
} else {
pbi->output_frames[pbi->num_output_frames] = cm->cur_frame;
pbi->num_output_frames++;
@@ -427,8 +430,8 @@ int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
const uint8_t **psource) {
AV1_COMMON *volatile const cm = &pbi->common;
const uint8_t *source = *psource;
- cm->error.error_code = AOM_CODEC_OK;
- cm->error.has_detail = 0;
+ pbi->error.error_code = AOM_CODEC_OK;
+ pbi->error.has_detail = 0;
if (size == 0) {
// This is used to signal that we are missing frames.
@@ -444,18 +447,18 @@ int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
}
if (assign_cur_frame_new_fb(cm) == NULL) {
- cm->error.error_code = AOM_CODEC_MEM_ERROR;
+ pbi->error.error_code = AOM_CODEC_MEM_ERROR;
return 1;
}
// The jmp_buf is valid only for the duration of the function that calls
// setjmp(). Therefore, this function must reset the 'setjmp' field to 0
// before it returns.
- if (setjmp(cm->error.jmp)) {
+ if (setjmp(pbi->error.jmp)) {
const AVxWorkerInterface *const winterface = aom_get_worker_interface();
int i;
- cm->error.setjmp = 0;
+ pbi->error.setjmp = 0;
// Synchronize all threads immediately as a subsequent decode call may
// cause a resize invalidating some allocations.
@@ -469,15 +472,15 @@ int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
return -1;
}
- cm->error.setjmp = 1;
+ pbi->error.setjmp = 1;
int frame_decoded =
aom_decode_frame_from_obus(pbi, source, source + size, psource);
if (frame_decoded < 0) {
- assert(cm->error.error_code != AOM_CODEC_OK);
+ assert(pbi->error.error_code != AOM_CODEC_OK);
release_current_frame(pbi);
- cm->error.setjmp = 0;
+ pbi->error.setjmp = 0;
return 1;
}
@@ -498,8 +501,8 @@ int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
pbi->decoding_first_frame = 0;
}
- if (cm->error.error_code != AOM_CODEC_OK) {
- cm->error.setjmp = 0;
+ if (pbi->error.error_code != AOM_CODEC_OK) {
+ pbi->error.setjmp = 0;
return 1;
}
@@ -518,7 +521,7 @@ int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
}
// Update progress in frame parallel decode.
- cm->error.setjmp = 0;
+ pbi->error.setjmp = 0;
return 0;
}
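
Editor's note: the decoder.c hunks above move the error state from AV1_COMMON into AV1Decoder and re-anchor the setjmp()-based error handling on pbi->error. The sketch below condenses the pattern used in av1_receive_compressed_data(); it is an editorial illustration that assumes <setjmp.h> plus the decoder's internal headers, and the wrapper name decode_one_unit is hypothetical. Field names follow aom_internal_error_info.

// Editor's sketch: aom_internal_error() longjmp()s back here while
// pbi->error.setjmp is set, so the flag must be cleared on every exit path;
// the jmp_buf is only valid while this stack frame is live.
static int decode_one_unit(AV1Decoder *pbi, const uint8_t *data, size_t size,
                           const uint8_t **psource) {
  pbi->error.error_code = AOM_CODEC_OK;
  if (setjmp(pbi->error.jmp)) {
    pbi->error.setjmp = 0;
    return 1;  // error_code was filled in before the longjmp.
  }
  pbi->error.setjmp = 1;
  const int frame_decoded =
      aom_decode_frame_from_obus(pbi, data, data + size, psource);
  pbi->error.setjmp = 0;
  return frame_decoded < 0;
}
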
diff --git a/third_party/libaom/source/libaom/av1/decoder/decoder.h b/third_party/libaom/source/libaom/av1/decoder/decoder.h
index b20e9c1dda..226b9dca85 100644
--- a/third_party/libaom/source/libaom/av1/decoder/decoder.h
+++ b/third_party/libaom/source/libaom/av1/decoder/decoder.h
@@ -112,6 +112,8 @@ typedef struct ThreadData {
// Motion compensation buffer used to get a prediction buffer with extended
// borders. One buffer for each of the two possible references.
uint8_t *mc_buf[2];
+ // Mask for this block used for compound prediction.
+ uint8_t *seg_mask;
// Allocated size of 'mc_buf'.
int32_t mc_buf_size;
// If true, the pointers in 'mc_buf' were converted from highbd pointers.
@@ -227,6 +229,8 @@ typedef struct AV1Decoder {
AV1LfSync lf_row_sync;
AV1LrSync lr_row_sync;
AV1LrStruct lr_ctxt;
+ AV1CdefSync cdef_sync;
+ AV1CdefWorkerData *cdef_worker;
AVxWorker *tile_workers;
int num_workers;
DecWorkerData *thread_data;
@@ -330,6 +334,32 @@ typedef struct AV1Decoder {
int is_arf_frame_present;
int num_tile_groups;
aom_s_frame_info sframe_info;
+
+ /*!
+ * Elements part of the sequence header, that are applicable for all the
+ * frames in the video.
+ */
+ SequenceHeader seq_params;
+
+ /*!
+ * If true, buffer removal times are present.
+ */
+ bool buffer_removal_time_present;
+
+ /*!
+ * Code and details about current error status.
+ */
+ struct aom_internal_error_info error;
+
+ /*!
+ * Number of temporal layers: may be > 1 for SVC (scalable video coding).
+ */
+ unsigned int number_temporal_layers;
+
+ /*!
+ * Number of spatial layers: may be > 1 for SVC (scalable video coding).
+ */
+ unsigned int number_spatial_layers;
} AV1Decoder;
// Returns 0 on success. Sets pbi->common.error.error_code to a nonzero error
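
Editor's note: the new AV1Decoder fields above (seq_params, error, buffer_removal_time_present, layer counts) take over state that previously lived in AV1_COMMON; the av1_decoder_create() hunk earlier wires AV1_COMMON back to them through pointers. The sketch below isolates that wiring purely for illustration; the helper name is hypothetical and the two assignments mirror the patch.

// Editor's sketch: AV1_COMMON no longer owns the sequence header or the error
// info; it points at decoder-owned copies, so shared common code keeps working
// while per-instance state stays with AV1Decoder.
static void wire_common_to_decoder(AV1Decoder *pbi) {
  AV1_COMMON *const cm = &pbi->common;
  cm->seq_params = &pbi->seq_params;  // per-video parameters (sequence header)
  cm->error = &pbi->error;            // per-instance error reporting
}
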
diff --git a/third_party/libaom/source/libaom/av1/decoder/obu.c b/third_party/libaom/source/libaom/av1/decoder/obu.c
index d3d1f0e8be..6c80148cc9 100644
--- a/third_party/libaom/source/libaom/av1/decoder/obu.c
+++ b/third_party/libaom/source/libaom/av1/decoder/obu.c
@@ -69,7 +69,7 @@ static int byte_alignment(AV1_COMMON *const cm,
struct aom_read_bit_buffer *const rb) {
while (rb->bit_offset & 7) {
if (aom_rb_read_bit(rb)) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ cm->error->error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
}
@@ -110,12 +110,12 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
// Use a local variable to store the information as we decode. At the end,
// if no errors have occurred, cm->seq_params is updated.
- SequenceHeader sh = cm->seq_params;
+ SequenceHeader sh = *cm->seq_params;
SequenceHeader *const seq_params = &sh;
seq_params->profile = av1_read_profile(rb);
if (seq_params->profile > CONFIG_MAX_DECODE_PROFILE) {
- cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
+ pbi->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
return 0;
}
@@ -124,7 +124,7 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
seq_params->reduced_still_picture_hdr = aom_rb_read_bit(rb);
// Video must have reduced_still_picture_hdr = 0
if (!seq_params->still_picture && seq_params->reduced_still_picture_hdr) {
- cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
+ pbi->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
return 0;
}
@@ -135,7 +135,7 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
seq_params->operating_points_cnt_minus_1 = 0;
seq_params->operating_point_idc[0] = 0;
if (!read_bitstream_level(&seq_params->seq_level_idx[0], rb)) {
- cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
+ pbi->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
return 0;
}
seq_params->tier[0] = 0;
@@ -144,7 +144,7 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
} else {
seq_params->timing_info_present = aom_rb_read_bit(rb);
if (seq_params->timing_info_present) {
- av1_read_timing_info_header(&seq_params->timing_info, &cm->error, rb);
+ av1_read_timing_info_header(&seq_params->timing_info, &pbi->error, rb);
seq_params->decoder_model_info_present_flag = aom_rb_read_bit(rb);
if (seq_params->decoder_model_info_present_flag)
@@ -159,7 +159,7 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
seq_params->operating_point_idc[i] =
aom_rb_read_literal(rb, OP_POINTS_IDC_BITS);
if (!read_bitstream_level(&seq_params->seq_level_idx[i], rb)) {
- cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
+ pbi->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
return 0;
}
// This is the seq_level_idx[i] > 7 check in the spec. seq_level_idx 7
@@ -188,7 +188,7 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
// Level with seq_level_idx = 31 returns a high "dummy" bitrate to pass
// the check
if (seq_params->op_params[i].bitrate == 0)
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(&pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"AV1 does not support this combination of "
"profile, level, and tier.");
// Buffer size in bits/s is bitrate in bits/s * 1 s
@@ -212,7 +212,7 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
aom_rb_read_literal(rb, 4) + 1;
if (seq_params->op_params[i].initial_display_delay > 10)
aom_internal_error(
- &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ &pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"AV1 does not support more than 10 decoded frames delay");
} else {
seq_params->op_params[i].initial_display_delay = 10;
@@ -232,19 +232,19 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
pbi->current_operating_point =
seq_params->operating_point_idc[operating_point];
if (aom_get_num_layers_from_operating_point_idc(
- pbi->current_operating_point, &cm->number_spatial_layers,
- &cm->number_temporal_layers) != AOM_CODEC_OK) {
- cm->error.error_code = AOM_CODEC_ERROR;
+ pbi->current_operating_point, &pbi->number_spatial_layers,
+ &pbi->number_temporal_layers) != AOM_CODEC_OK) {
+ pbi->error.error_code = AOM_CODEC_ERROR;
return 0;
}
av1_read_sequence_header(cm, rb, seq_params);
- av1_read_color_config(rb, pbi->allow_lowbitdepth, seq_params, &cm->error);
+ av1_read_color_config(rb, pbi->allow_lowbitdepth, seq_params, &pbi->error);
if (!(seq_params->subsampling_x == 0 && seq_params->subsampling_y == 0) &&
!(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 1) &&
!(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 0)) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(&pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"Only 4:4:4, 4:2:2 and 4:2:0 are currently supported, "
"%d %d subsampling is not supported.\n",
seq_params->subsampling_x, seq_params->subsampling_y);
@@ -253,18 +253,18 @@ static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
seq_params->film_grain_params_present = aom_rb_read_bit(rb);
if (av1_check_trailing_bits(pbi, rb) != 0) {
- // cm->error.error_code is already set.
+ // pbi->error.error_code is already set.
return 0;
}
// If a sequence header has been decoded before, we check if the new
// one is consistent with the old one.
if (pbi->sequence_header_ready) {
- if (!are_seq_headers_consistent(&cm->seq_params, seq_params))
+ if (!are_seq_headers_consistent(cm->seq_params, seq_params))
pbi->sequence_header_changed = 1;
}
- cm->seq_params = *seq_params;
+ *cm->seq_params = *seq_params;
pbi->sequence_header_ready = 1;
return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
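
Editor's note: read_sequence_header_obu() above parses into a local SequenceHeader and commits it only after the OBU validates, flagging sequence_header_changed when a later header is inconsistent with the one already in use. The sketch below isolates that commit step; it is an editorial illustration, the helper name is hypothetical, and are_seq_headers_consistent() is the call used in the hunk.

// Editor's sketch of the copy/validate/commit tail: compare the freshly parsed
// header against the decoder's current one, then overwrite the header that
// cm->seq_params points at.
static void commit_sequence_header(AV1Decoder *pbi, const SequenceHeader *sh) {
  AV1_COMMON *const cm = &pbi->common;
  if (pbi->sequence_header_ready &&
      !are_seq_headers_consistent(cm->seq_params, sh)) {
    pbi->sequence_header_changed = 1;
  }
  *cm->seq_params = *sh;
  pbi->sequence_header_ready = 1;
}
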
@@ -303,7 +303,7 @@ static int32_t read_tile_group_header(AV1Decoder *pbi,
tile_start_and_end_present_flag = aom_rb_read_bit(rb);
if (tile_start_implicit && tile_start_and_end_present_flag) {
aom_internal_error(
- &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ &pbi->error, AOM_CODEC_UNSUP_BITSTREAM,
"For OBU_FRAME type obu tile_start_and_end_present_flag must be 0");
return -1;
}
@@ -318,20 +318,20 @@ static int32_t read_tile_group_header(AV1Decoder *pbi,
*end_tile = aom_rb_read_literal(rb, tile_bits);
}
if (*start_tile != pbi->next_start_tile) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"tg_start (%d) must be equal to %d", *start_tile,
pbi->next_start_tile);
return -1;
}
if (*start_tile > *end_tile) {
aom_internal_error(
- &cm->error, AOM_CODEC_CORRUPT_FRAME,
+ &pbi->error, AOM_CODEC_CORRUPT_FRAME,
"tg_end (%d) must be greater than or equal to tg_start (%d)", *end_tile,
*start_tile);
return -1;
}
if (*end_tile >= num_tiles) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"tg_end (%d) must be less than NumTiles (%d)", *end_tile,
num_tiles);
return -1;
@@ -388,15 +388,16 @@ static void alloc_tile_list_buffer(AV1Decoder *pbi) {
(pbi->output_frame_height_in_tiles_minus_1 + 1));
// Allocate the tile list output buffer.
- // Note: if cm->seq_params.use_highbitdepth is 1 and cm->seq_params.bit_depth
- // is 8, we could allocate less memory, namely, 8 bits/pixel.
+ // Note: if cm->seq_params->use_highbitdepth is 1 and
+ // cm->seq_params->bit_depth is 8, we could allocate less memory, namely, 8
+ // bits/pixel.
if (aom_alloc_frame_buffer(&pbi->tile_list_outbuf, output_frame_width,
- output_frame_height, cm->seq_params.subsampling_x,
- cm->seq_params.subsampling_y,
- (cm->seq_params.use_highbitdepth &&
- (cm->seq_params.bit_depth > AOM_BITS_8)),
+ output_frame_height, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y,
+ (cm->seq_params->use_highbitdepth &&
+ (cm->seq_params->bit_depth > AOM_BITS_8)),
0, cm->features.byte_alignment))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate the tile list output buffer");
}
@@ -430,8 +431,8 @@ static void copy_decoded_tile_to_tile_list_buffer(AV1Decoder *pbi,
av1_get_uniform_tile_size(cm, &tile_width, &tile_height);
const int tile_width_in_pixels = tile_width * MI_SIZE;
const int tile_height_in_pixels = tile_height * MI_SIZE;
- const int ssy = cm->seq_params.subsampling_y;
- const int ssx = cm->seq_params.subsampling_x;
+ const int ssy = cm->seq_params->subsampling_y;
+ const int ssx = cm->seq_params->subsampling_x;
const int num_planes = av1_num_planes(cm);
YV12_BUFFER_CONFIG *cur_frame = &cm->cur_frame->buf;
@@ -455,8 +456,8 @@ static void copy_decoded_tile_to_tile_list_buffer(AV1Decoder *pbi,
int vstart2 = tr * h;
int hstart2 = tc * w;
- if (cm->seq_params.use_highbitdepth &&
- cm->seq_params.bit_depth == AOM_BITS_8) {
+ if (cm->seq_params->use_highbitdepth &&
+ cm->seq_params->bit_depth == AOM_BITS_8) {
yv12_tile_copy(cur_frame, hstart1, hend1, vstart1, vend1,
&pbi->tile_list_outbuf, hstart2, vstart2, plane);
} else {
@@ -501,7 +502,7 @@ static uint32_t read_and_decode_one_tile_list(AV1Decoder *pbi,
pbi->output_frame_height_in_tiles_minus_1 = aom_rb_read_literal(rb, 8);
pbi->tile_count_minus_1 = aom_rb_read_literal(rb, 16);
if (pbi->tile_count_minus_1 > MAX_TILES - 1) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
@@ -524,7 +525,7 @@ static uint32_t read_and_decode_one_tile_list(AV1Decoder *pbi,
// Set reference for each tile.
int ref_idx = aom_rb_read_literal(rb, 8);
if (ref_idx >= MAX_EXTERNAL_REFERENCES) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
av1_set_reference_dec(cm, cm->remapped_ref_idx[0], 1,
@@ -535,14 +536,14 @@ static uint32_t read_and_decode_one_tile_list(AV1Decoder *pbi,
if (pbi->dec_tile_row < 0 || pbi->dec_tile_col < 0 ||
pbi->dec_tile_row >= cm->tiles.rows ||
pbi->dec_tile_col >= cm->tiles.cols) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
pbi->coded_tile_data_size = aom_rb_read_literal(rb, 16) + 1;
data += tile_info_bytes;
if ((size_t)(data_end - data) < pbi->coded_tile_data_size) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
@@ -581,18 +582,17 @@ static void alloc_read_metadata(AV1Decoder *const pbi,
OBU_METADATA_TYPE metadata_type,
const uint8_t *data, size_t sz,
aom_metadata_insert_flags_t insert_flag) {
- AV1_COMMON *const cm = &pbi->common;
if (!pbi->metadata) {
pbi->metadata = aom_img_metadata_array_alloc(0);
if (!pbi->metadata) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate metadata array");
}
}
aom_metadata_t *metadata =
aom_img_metadata_alloc(metadata_type, data, sz, insert_flag);
if (!metadata) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Error allocating metadata");
}
aom_metadata_t **metadata_array =
@@ -600,7 +600,7 @@ static void alloc_read_metadata(AV1Decoder *const pbi,
(pbi->metadata->sz + 1) * sizeof(metadata));
if (!metadata_array) {
aom_img_metadata_free(metadata);
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(&pbi->error, AOM_CODEC_MEM_ERROR,
"Error growing metadata array");
}
pbi->metadata->metadata_array = metadata_array;
@@ -611,22 +611,21 @@ static void alloc_read_metadata(AV1Decoder *const pbi,
// On failure, calls aom_internal_error() and does not return.
static void read_metadata_itut_t35(AV1Decoder *const pbi, const uint8_t *data,
size_t sz) {
- AV1_COMMON *const cm = &pbi->common;
if (sz == 0) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"itu_t_t35_country_code is missing");
}
int country_code_size = 1;
if (*data == 0xFF) {
if (sz == 1) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"itu_t_t35_country_code_extension_byte is missing");
}
++country_code_size;
}
int end_index = get_last_nonzero_byte_index(data, sz);
if (end_index < country_code_size) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"No trailing bits found in ITU-T T.35 metadata OBU");
}
// itu_t_t35_payload_bytes is byte aligned. Section 6.7.2 of the spec says:
@@ -634,7 +633,7 @@ static void read_metadata_itut_t35(AV1Decoder *const pbi, const uint8_t *data,
// specified in Recommendation ITU-T T.35.
// Therefore the first trailing byte should be 0x80.
if (data[end_index] != 0x80) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"The last nonzero byte of the ITU-T T.35 metadata OBU "
"is 0x%02x, should be 0x80.",
data[end_index]);
@@ -648,9 +647,8 @@ static void read_metadata_itut_t35(AV1Decoder *const pbi, const uint8_t *data,
static size_t read_metadata_hdr_cll(AV1Decoder *const pbi, const uint8_t *data,
size_t sz) {
const size_t kHdrCllPayloadSize = 4;
- AV1_COMMON *const cm = &pbi->common;
if (sz < kHdrCllPayloadSize) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Incorrect HDR CLL metadata payload size");
}
alloc_read_metadata(pbi, OBU_METADATA_TYPE_HDR_CLL, data, kHdrCllPayloadSize,
@@ -663,9 +661,8 @@ static size_t read_metadata_hdr_cll(AV1Decoder *const pbi, const uint8_t *data,
static size_t read_metadata_hdr_mdcv(AV1Decoder *const pbi, const uint8_t *data,
size_t sz) {
const size_t kMdcvPayloadSize = 24;
- AV1_COMMON *const cm = &pbi->common;
if (sz < kMdcvPayloadSize) {
- aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
+ aom_internal_error(&pbi->error, AOM_CODEC_CORRUPT_FRAME,
"Incorrect HDR MDCV metadata payload size");
}
alloc_read_metadata(pbi, OBU_METADATA_TYPE_HDR_MDCV, data, kMdcvPayloadSize,
@@ -770,11 +767,10 @@ static uint8_t get_last_nonzero_byte(const uint8_t *data, size_t sz) {
// pbi->common.error.error_code and returns 0, or calls aom_internal_error()
// and does not return.
static size_t read_metadata(AV1Decoder *pbi, const uint8_t *data, size_t sz) {
- AV1_COMMON *const cm = &pbi->common;
size_t type_length;
uint64_t type_value;
if (aom_uleb_decode(data, sz, &type_value, &type_length) < 0) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
const OBU_METADATA_TYPE metadata_type = (OBU_METADATA_TYPE)type_value;
@@ -782,7 +778,7 @@ static size_t read_metadata(AV1Decoder *pbi, const uint8_t *data, size_t sz) {
// If metadata_type is reserved for future use or a user private value,
// ignore the entire OBU and just check trailing bits.
if (get_last_nonzero_byte(data + type_length, sz - type_length) == 0) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
return sz;
@@ -796,7 +792,7 @@ static size_t read_metadata(AV1Decoder *pbi, const uint8_t *data, size_t sz) {
type_length +
read_metadata_hdr_cll(pbi, data + type_length, sz - type_length);
if (get_last_nonzero_byte(data + bytes_read, sz - bytes_read) != 0x80) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
return sz;
@@ -805,7 +801,7 @@ static size_t read_metadata(AV1Decoder *pbi, const uint8_t *data, size_t sz) {
type_length +
read_metadata_hdr_mdcv(pbi, data + type_length, sz - type_length);
if (get_last_nonzero_byte(data + bytes_read, sz - bytes_read) != 0x80) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
return sz;
@@ -820,7 +816,7 @@ static size_t read_metadata(AV1Decoder *pbi, const uint8_t *data, size_t sz) {
read_metadata_timecode(&rb);
}
if (av1_check_trailing_bits(pbi, &rb) != 0) {
- // cm->error.error_code is already set.
+ // pbi->error.error_code is already set.
return 0;
}
assert((rb.bit_offset & 7) == 0);
@@ -838,7 +834,7 @@ static size_t read_padding(AV1_COMMON *const cm, const uint8_t *data,
// trailing byte should be 0x80. See https://crbug.com/aomedia/2393.
const uint8_t last_nonzero_byte = get_last_nonzero_byte(data, sz);
if (last_nonzero_byte != 0x80) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ cm->error->error_code = AOM_CODEC_CORRUPT_FRAME;
return 0;
}
}
@@ -846,7 +842,7 @@ static size_t read_padding(AV1_COMMON *const cm, const uint8_t *data,
}
// On success, returns a boolean that indicates whether the decoding of the
-// current frame is finished. On failure, sets cm->error.error_code and
+// current frame is finished. On failure, sets pbi->error.error_code and
// returns -1.
int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
const uint8_t *data_end,
@@ -872,7 +868,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
pbi->num_tile_groups = 0;
if (data_end < data) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
@@ -880,7 +876,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
if (!cm->tiles.large_scale) pbi->camera_frame_header_ready = 0;
// decode frame as a series of OBUs
- while (!frame_decoding_finished && cm->error.error_code == AOM_CODEC_OK) {
+ while (!frame_decoding_finished && pbi->error.error_code == AOM_CODEC_OK) {
struct aom_read_bit_buffer rb;
size_t payload_size = 0;
size_t decoded_payload_size = 0;
@@ -890,7 +886,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
if (bytes_available == 0 && !pbi->seen_frame_header) {
*p_data_end = data;
- cm->error.error_code = AOM_CODEC_OK;
+ pbi->error.error_code = AOM_CODEC_OK;
break;
}
@@ -899,7 +895,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
&obu_header, &payload_size, &bytes_read);
if (status != AOM_CODEC_OK) {
- cm->error.error_code = status;
+ pbi->error.error_code = status;
return -1;
}
@@ -912,7 +908,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
data += bytes_read;
if ((size_t)(data_end - data) < payload_size) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
@@ -936,16 +932,16 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
if (pbi->seen_frame_header) {
// A new temporal unit has started, but the frame in the previous
// temporal unit is incomplete.
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
break;
case OBU_SEQUENCE_HEADER:
decoded_payload_size = read_sequence_header_obu(pbi, &rb);
- if (cm->error.error_code != AOM_CODEC_OK) return -1;
+ if (pbi->error.error_code != AOM_CODEC_OK) return -1;
// The sequence header should not change in the middle of a frame.
if (pbi->sequence_header_changed && pbi->seen_frame_header) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
break;
@@ -954,13 +950,13 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
case OBU_FRAME:
if (obu_header.type == OBU_REDUNDANT_FRAME_HEADER) {
if (!pbi->seen_frame_header) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
} else {
// OBU_FRAME_HEADER or OBU_FRAME.
if (pbi->seen_frame_header) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
}
@@ -978,7 +974,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
// frame_header_obu.
if (frame_header_size > payload_size ||
memcmp(data, frame_header, frame_header_size) != 0) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
assert(rb.bit_offset == 0);
@@ -990,7 +986,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
if (cm->show_existing_frame) {
if (obu_header.type == OBU_FRAME) {
- cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
+ pbi->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
return -1;
}
frame_decoding_finished = 1;
@@ -1012,23 +1008,23 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
if (obu_header.type != OBU_FRAME) break;
obu_payload_offset = frame_header_size;
// Byte align the reader before reading the tile group.
- // byte_alignment() has set cm->error.error_code if it returns -1.
+ // byte_alignment() has set pbi->error.error_code if it returns -1.
if (byte_alignment(cm, &rb)) return -1;
AOM_FALLTHROUGH_INTENDED; // fall through to read tile group.
case OBU_TILE_GROUP:
if (!pbi->seen_frame_header) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
if (obu_payload_offset > payload_size) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
decoded_payload_size += read_one_tile_group_obu(
pbi, &rb, is_first_tg_obu_received, data + obu_payload_offset,
data + payload_size, p_data_end, &frame_decoding_finished,
obu_header.type == OBU_FRAME);
- if (cm->error.error_code != AOM_CODEC_OK) return -1;
+ if (pbi->error.error_code != AOM_CODEC_OK) return -1;
is_first_tg_obu_received = 0;
if (frame_decoding_finished) {
pbi->seen_frame_header = 0;
@@ -1038,18 +1034,18 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
break;
case OBU_METADATA:
decoded_payload_size = read_metadata(pbi, data, payload_size);
- if (cm->error.error_code != AOM_CODEC_OK) return -1;
+ if (pbi->error.error_code != AOM_CODEC_OK) return -1;
break;
case OBU_TILE_LIST:
if (CONFIG_NORMAL_TILE_MODE) {
- cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
+ pbi->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
return -1;
}
// This OBU type is purely for the large scale tile coding mode.
// The common camera frame header has to be already decoded.
if (!pbi->camera_frame_header_ready) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
@@ -1058,17 +1054,17 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
decoded_payload_size =
read_and_decode_one_tile_list(pbi, &rb, data, data + payload_size,
p_data_end, &frame_decoding_finished);
- if (cm->error.error_code != AOM_CODEC_OK) return -1;
+ if (pbi->error.error_code != AOM_CODEC_OK) return -1;
break;
case OBU_PADDING:
decoded_payload_size = read_padding(cm, data, payload_size);
- if (cm->error.error_code != AOM_CODEC_OK) return -1;
+ if (pbi->error.error_code != AOM_CODEC_OK) return -1;
break;
default:
// Skip unrecognized OBUs
if (payload_size > 0 &&
get_last_nonzero_byte(data, payload_size) == 0) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
decoded_payload_size = payload_size;
@@ -1077,7 +1073,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
// Check that the signalled OBU size matches the actual amount of data read
if (decoded_payload_size > payload_size) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
@@ -1085,7 +1081,7 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
while (decoded_payload_size < payload_size) {
uint8_t padding_byte = data[decoded_payload_size++];
if (padding_byte != 0) {
- cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
+ pbi->error.error_code = AOM_CODEC_CORRUPT_FRAME;
return -1;
}
}
@@ -1093,6 +1089,6 @@ int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
data += payload_size;
}
- if (cm->error.error_code != AOM_CODEC_OK) return -1;
+ if (pbi->error.error_code != AOM_CODEC_OK) return -1;
return frame_decoding_finished;
}
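
Editor's note: an earlier decodeframe.c hunk reorders the operating_point_idc test so the idc == 0 (whole-stream) case is checked first. That test relies on the bit layout of operating_point_idc: the low 8 bits select temporal layers and the bits above them select spatial layers. The self-contained illustration below restates the condition; the function name is hypothetical.

// Returns nonzero if the given operating point covers the current layer ids.
// idc == 0 means the operating point applies to the entire bitstream.
static int op_point_covers_layer(int operating_point_idc,
                                 int temporal_layer_id, int spatial_layer_id) {
  if (operating_point_idc == 0) return 1;
  const int temporal_ok = (operating_point_idc >> temporal_layer_id) & 1;
  const int spatial_ok = (operating_point_idc >> (spatial_layer_id + 8)) & 1;
  return temporal_ok && spatial_ok;
}
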
diff --git a/third_party/libaom/source/libaom/av1/encoder/aq_complexity.c b/third_party/libaom/source/libaom/av1/encoder/aq_complexity.c
index 3ea5f63020..278e1ca92f 100644
--- a/third_party/libaom/source/libaom/av1/encoder/aq_complexity.c
+++ b/third_party/libaom/source/libaom/av1/encoder/aq_complexity.c
@@ -81,7 +81,7 @@ void av1_setup_in_frame_q_adj(AV1_COMP *cpi) {
if (is_frame_aq_enabled(cpi)) {
int segment;
const int aq_strength =
- get_aq_c_strength(base_qindex, cm->seq_params.bit_depth);
+ get_aq_c_strength(base_qindex, cm->seq_params->bit_depth);
// Clear down the segment map.
memset(cpi->enc_seg.map, DEFAULT_AQ2_SEG,
@@ -108,7 +108,7 @@ void av1_setup_in_frame_q_adj(AV1_COMP *cpi) {
qindex_delta = av1_compute_qdelta_by_rate(
&cpi->rc, cm->current_frame.frame_type, base_qindex,
aq_c_q_adj_factor[aq_strength][segment], cpi->is_screen_content_type,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
// For AQ complexity mode, we dont allow Q0 in a segment if the base
// Q is not 0. Q0 (lossless) implies 4x4 only and in AQ mode 2 a segment
@@ -150,17 +150,17 @@ void av1_caq_select_segment(const AV1_COMP *cpi, MACROBLOCK *mb, BLOCK_SIZE bs,
// It is converted to bits << AV1_PROB_COST_SHIFT units.
const int64_t num = (int64_t)(cpi->rc.sb64_target_rate * xmis * ymis)
<< AV1_PROB_COST_SHIFT;
- const int denom = cm->seq_params.mib_size * cm->seq_params.mib_size;
+ const int denom = cm->seq_params->mib_size * cm->seq_params->mib_size;
const int target_rate = (int)(num / denom);
double logvar;
double low_var_thresh;
const int aq_strength = get_aq_c_strength(cm->quant_params.base_qindex,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
aom_clear_system_state();
low_var_thresh =
(is_stat_consumption_stage_twopass(cpi))
- ? AOMMAX(exp(cpi->twopass.mb_av_energy), MIN_DEFAULT_LV_THRESH)
+ ? AOMMAX(exp(cpi->ppi->twopass.mb_av_energy), MIN_DEFAULT_LV_THRESH)
: DEFAULT_LV_THRESH;
av1_setup_src_planes(mb, cpi->source, mi_row, mi_col, num_planes, bs);
diff --git a/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.c b/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.c
index c7abe43c87..40b8c254d4 100644
--- a/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.c
+++ b/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.c
@@ -12,6 +12,7 @@
#include <limits.h>
#include <math.h>
+#include "av1/common/pred_common.h"
#include "av1/common/seg_common.h"
#include "av1/encoder/aq_cyclicrefresh.h"
#include "av1/encoder/ratectrl.h"
@@ -82,7 +83,7 @@ static int compute_deltaq(const AV1_COMP *cpi, int q, double rate_factor) {
const RATE_CONTROL *const rc = &cpi->rc;
int deltaq = av1_compute_qdelta_by_rate(
rc, cpi->common.current_frame.frame_type, q, rate_factor,
- cpi->is_screen_content_type, cpi->common.seq_params.bit_depth);
+ cpi->is_screen_content_type, cpi->common.seq_params->bit_depth);
if ((-deltaq) > cr->max_qdelta_perc * q / 100) {
deltaq = -cr->max_qdelta_perc * q / 100;
}
@@ -94,7 +95,7 @@ int av1_cyclic_refresh_estimate_bits_at_q(const AV1_COMP *cpi,
const AV1_COMMON *const cm = &cpi->common;
const FRAME_TYPE frame_type = cm->current_frame.frame_type;
const int base_qindex = cm->quant_params.base_qindex;
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
const int mbs = cm->mi_params.MBs;
const int num4x4bl = mbs << 4;
@@ -138,15 +139,51 @@ int av1_cyclic_refresh_rc_bits_per_mb(const AV1_COMP *cpi, int i,
bits_per_mb =
(int)((1.0 - weight_segment) *
av1_rc_bits_per_mb(cm->current_frame.frame_type, i,
- correction_factor, cm->seq_params.bit_depth,
+ correction_factor, cm->seq_params->bit_depth,
cpi->is_screen_content_type) +
weight_segment * av1_rc_bits_per_mb(cm->current_frame.frame_type,
i + deltaq, correction_factor,
- cm->seq_params.bit_depth,
+ cm->seq_params->bit_depth,
cpi->is_screen_content_type));
return bits_per_mb;
}
+void av1_cyclic_reset_segment_skip(const AV1_COMP *cpi, MACROBLOCK *const x,
+ int mi_row, int mi_col, BLOCK_SIZE bsize) {
+ int cdf_num;
+ const AV1_COMMON *const cm = &cpi->common;
+ MACROBLOCKD *const xd = &x->e_mbd;
+ MB_MODE_INFO *const mbmi = xd->mi[0];
+ const int prev_segment_id = mbmi->segment_id;
+ mbmi->segment_id = av1_get_spatial_seg_pred(cm, xd, &cdf_num);
+ if (prev_segment_id != mbmi->segment_id) {
+ CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
+ const int bw = mi_size_wide[bsize];
+ const int bh = mi_size_high[bsize];
+ const int xmis = AOMMIN(cm->mi_params.mi_cols - mi_col, bw);
+ const int ymis = AOMMIN(cm->mi_params.mi_rows - mi_row, bh);
+ const int block_index = mi_row * cm->mi_params.mi_cols + mi_col;
+ for (int mi_y = 0; mi_y < ymis; mi_y++) {
+ for (int mi_x = 0; mi_x < xmis; mi_x++) {
+ const int map_offset =
+ block_index + mi_y * cm->mi_params.mi_cols + mi_x;
+ cr->map[map_offset] = 0;
+ cpi->enc_seg.map[map_offset] = mbmi->segment_id;
+ cm->cur_frame->seg_map[map_offset] = mbmi->segment_id;
+ }
+ }
+ if (cyclic_refresh_segment_id(prev_segment_id) == CR_SEGMENT_ID_BOOST1)
+ x->actual_num_seg1_blocks -= xmis * ymis;
+ else if (cyclic_refresh_segment_id(prev_segment_id) == CR_SEGMENT_ID_BOOST2)
+ x->actual_num_seg2_blocks -= xmis * ymis;
+ if (cyclic_refresh_segment_id(mbmi->segment_id) == CR_SEGMENT_ID_BOOST1)
+ x->actual_num_seg1_blocks += xmis * ymis;
+ else if (cyclic_refresh_segment_id(mbmi->segment_id) ==
+ CR_SEGMENT_ID_BOOST2)
+ x->actual_num_seg2_blocks += xmis * ymis;
+ }
+}
+
void av1_cyclic_refresh_update_segment(const AV1_COMP *cpi, MACROBLOCK *const x,
int mi_row, int mi_col, BLOCK_SIZE bsize,
int64_t rate, int64_t dist, int skip,
@@ -191,22 +228,21 @@ void av1_cyclic_refresh_update_segment(const AV1_COMP *cpi, MACROBLOCK *const x,
// Update entries in the cyclic refresh map with new_map_value, and
// copy mbmi->segment_id into global segmentation map.
- // 8x8 is smallest coding block size for non-key frames.
- const int sh = bw << 1;
- for (int mi_y = 0; mi_y < ymis; mi_y += 2) {
- for (int mi_x = 0; mi_x < xmis; mi_x += 2) {
- int map_offset = block_index + mi_y * cm->mi_params.mi_cols + mi_x;
+ for (int mi_y = 0; mi_y < ymis; mi_y++) {
+ for (int mi_x = 0; mi_x < xmis; mi_x++) {
+ const int map_offset = block_index + mi_y * cm->mi_params.mi_cols + mi_x;
cr->map[map_offset] = new_map_value;
cpi->enc_seg.map[map_offset] = mbmi->segment_id;
+ cm->cur_frame->seg_map[map_offset] = mbmi->segment_id;
}
- // Accumulate cyclic refresh update counters.
- if (!dry_run && !frame_is_intra_only(cm)) {
- if (cyclic_refresh_segment_id(mbmi->segment_id) == CR_SEGMENT_ID_BOOST1)
- x->actual_num_seg1_blocks += sh;
- else if (cyclic_refresh_segment_id(mbmi->segment_id) ==
- CR_SEGMENT_ID_BOOST2)
- x->actual_num_seg2_blocks += sh;
- }
+ }
+ // Accumulate cyclic refresh update counters.
+ if (!dry_run) {
+ if (cyclic_refresh_segment_id(mbmi->segment_id) == CR_SEGMENT_ID_BOOST1)
+ x->actual_num_seg1_blocks += xmis * ymis;
+ else if (cyclic_refresh_segment_id(mbmi->segment_id) ==
+ CR_SEGMENT_ID_BOOST2)
+ x->actual_num_seg2_blocks += xmis * ymis;
}
}
@@ -234,15 +270,15 @@ void av1_cyclic_refresh_postencode(AV1_COMP *const cpi) {
const int avg_cnt_zeromv =
100 * cr->cnt_zeromv / (mi_params->mi_rows * mi_params->mi_cols);
- if (!cpi->use_svc ||
- (cpi->use_svc &&
+ if (!cpi->ppi->use_svc ||
+ (cpi->ppi->use_svc &&
!cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame &&
cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1)) {
rc->avg_frame_low_motion =
(3 * rc->avg_frame_low_motion + avg_cnt_zeromv) / 4;
// For SVC: set avg_frame_low_motion (only computed on top spatial layer)
// to all lower spatial layers.
- if (cpi->use_svc &&
+ if (cpi->ppi->use_svc &&
svc->spatial_layer_id == svc->number_spatial_layers - 1) {
for (int i = 0; i < svc->number_spatial_layers - 1; ++i) {
const int layer = LAYER_IDS_TO_IDX(i, svc->temporal_layer_id,
@@ -257,15 +293,16 @@ void av1_cyclic_refresh_postencode(AV1_COMP *const cpi) {
void av1_cyclic_refresh_set_golden_update(AV1_COMP *const cpi) {
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
// Set minimum gf_interval for GF update to a multiple of the refresh period,
// with some max limit. Depending on past encoding stats, GF flag may be
// reset and update may not occur until next baseline_gf_interval.
if (cr->percent_refresh > 0)
- rc->baseline_gf_interval = AOMMIN(2 * (100 / cr->percent_refresh), 40);
+ p_rc->baseline_gf_interval = AOMMIN(2 * (100 / cr->percent_refresh), 40);
else
- rc->baseline_gf_interval = 20;
- if (rc->avg_frame_low_motion < 40) rc->baseline_gf_interval = 8;
+ p_rc->baseline_gf_interval = 20;
+ if (rc->avg_frame_low_motion < 40) p_rc->baseline_gf_interval = 8;
}
// Update the segmentation map, and related quantities: cyclic refresh map,
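
As a quick, hypothetical illustration of the interval arithmetic in the av1_cyclic_refresh_set_golden_update() hunk above (the percent_refresh value is made up; this standalone sketch is not part of the patch):

#include <stdio.h>
#define AOMMIN(x, y) (((x) < (y)) ? (x) : (y)) /* same min macro libaom uses */
int main(void) {
  const int percent_refresh = 10; /* hypothetical cr->percent_refresh */
  /* Mirrors: p_rc->baseline_gf_interval = AOMMIN(2 * (100 / percent_refresh), 40); */
  printf("baseline_gf_interval = %d\n", AOMMIN(2 * (100 / percent_refresh), 40)); /* prints 20 */
  return 0;
}
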
@@ -282,10 +319,10 @@ static void cyclic_refresh_update_map(AV1_COMP *const cpi) {
int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
int xmis, ymis, x, y;
memset(seg_map, CR_SEGMENT_ID_BASE, mi_params->mi_rows * mi_params->mi_cols);
- sb_cols = (mi_params->mi_cols + cm->seq_params.mib_size - 1) /
- cm->seq_params.mib_size;
- sb_rows = (mi_params->mi_rows + cm->seq_params.mib_size - 1) /
- cm->seq_params.mib_size;
+ sb_cols = (mi_params->mi_cols + cm->seq_params->mib_size - 1) /
+ cm->seq_params->mib_size;
+ sb_rows = (mi_params->mi_rows + cm->seq_params->mib_size - 1) /
+ cm->seq_params->mib_size;
sbs_in_frame = sb_cols * sb_rows;
// Number of target blocks to get the q delta (segment 1).
block_count =
@@ -302,8 +339,8 @@ static void cyclic_refresh_update_map(AV1_COMP *const cpi) {
// Get the mi_row/mi_col corresponding to superblock index i.
int sb_row_index = (i / sb_cols);
int sb_col_index = i - sb_row_index * sb_cols;
- int mi_row = sb_row_index * cm->seq_params.mib_size;
- int mi_col = sb_col_index * cm->seq_params.mib_size;
+ int mi_row = sb_row_index * cm->seq_params->mib_size;
+ int mi_col = sb_col_index * cm->seq_params->mib_size;
// TODO(any): Ensure the population of
// cpi->common.features.allow_screen_content_tools and use the same instead
// of cpi->oxcf.tune_cfg.content == AOM_CONTENT_SCREEN
@@ -315,8 +352,8 @@ static void cyclic_refresh_update_map(AV1_COMP *const cpi) {
assert(mi_col >= 0 && mi_col < mi_params->mi_cols);
bl_index = mi_row * mi_params->mi_cols + mi_col;
// Loop through all MI blocks in superblock and update map.
- xmis = AOMMIN(mi_params->mi_cols - mi_col, cm->seq_params.mib_size);
- ymis = AOMMIN(mi_params->mi_rows - mi_row, cm->seq_params.mib_size);
+ xmis = AOMMIN(mi_params->mi_cols - mi_col, cm->seq_params->mib_size);
+ ymis = AOMMIN(mi_params->mi_rows - mi_row, cm->seq_params->mib_size);
// cr_map only needed at 8x8 blocks.
for (y = 0; y < ymis; y += 2) {
for (x = 0; x < xmis; x += 2) {
@@ -361,11 +398,20 @@ void av1_cyclic_refresh_update_parameters(AV1_COMP *const cpi) {
int qp_thresh = AOMMIN(20, rc->best_quality << 1);
int qp_max_thresh = 118 * MAXQ >> 7;
cr->apply_cyclic_refresh = 1;
+ int avg_frame_qindex_inter_frame;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ avg_frame_qindex_inter_frame =
+ (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
+ ? cpi->ppi->temp_avg_frame_qindex[INTER_FRAME]
+ : rc->avg_frame_qindex[INTER_FRAME];
+#else
+ avg_frame_qindex_inter_frame = rc->avg_frame_qindex[INTER_FRAME];
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
if (frame_is_intra_only(cm) || is_lossless_requested(&cpi->oxcf.rc_cfg) ||
cpi->svc.temporal_layer_id > 0 ||
- rc->avg_frame_qindex[INTER_FRAME] < qp_thresh ||
+ avg_frame_qindex_inter_frame < qp_thresh ||
(rc->frames_since_key > 20 &&
- rc->avg_frame_qindex[INTER_FRAME] > qp_max_thresh) ||
+ avg_frame_qindex_inter_frame > qp_max_thresh) ||
(rc->avg_frame_low_motion < 45 && rc->frames_since_key > 40)) {
cr->apply_cyclic_refresh = 0;
return;
@@ -446,7 +492,7 @@ void av1_cyclic_refresh_setup(AV1_COMP *const cpi) {
return;
} else {
const double q = av1_convert_qindex_to_q(cm->quant_params.base_qindex,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
aom_clear_system_state();
// Set rate threshold to some multiple (set to 2 for now) of the target
// rate (target is given by sb64_target_rate and scaled by 256).
diff --git a/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.h b/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.h
index 97bd6f26b1..1c0d5cb4d7 100644
--- a/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.h
+++ b/third_party/libaom/source/libaom/av1/encoder/aq_cyclicrefresh.h
@@ -161,6 +161,30 @@ int av1_cyclic_refresh_estimate_bits_at_q(const struct AV1_COMP *cpi,
int av1_cyclic_refresh_rc_bits_per_mb(const struct AV1_COMP *cpi, int i,
double correction_factor);
+/*!\brief Update segment_id for blocks that are skipped.
+ *
+ * After encoding a given prediction block, of size bsize at (mi_row, mi_col),
+ * check if we should reset the segment_id based on skip_txfm,
+ * and update the cyclic_refresh map and segmentation counters.
+ *
+ * \ingroup cyclic_refresh
+ * \callgraph
+ * \callergraph
+ *
+ * \param[in] cpi Top level encoder structure
+ * \param[in] x Pointer to MACROBLOCK structure
+ * \param[in] mi_row Row coordinate of the block in a step size of MI_SIZE
+ * \param[in] mi_col Col coordinate of the block in a step size of MI_SIZE
+ * \param[in] bsize Block size
+ *
+ * \return Update the \c mbmi->segment_id, the \c cpi->cyclic_refresh and
+ * the \c cpi->enc_seg.map.
+ */
+
+void av1_cyclic_reset_segment_skip(const struct AV1_COMP *cpi,
+ MACROBLOCK *const x, int mi_row, int mi_col,
+ BLOCK_SIZE bsize);
+
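
A minimal call-site sketch for the helper declared above, assuming the usual libaom encoder headers are on the include path; the wrapper name and the skip_txfm gating shown here are hypothetical and not part of this patch:

#include "av1/encoder/aq_cyclicrefresh.h"
#include "av1/encoder/encoder.h" /* AV1_COMP, MACROBLOCK */

/* Hypothetical wrapper: after the final mode decision for a block, drop it back
   to the base cyclic-refresh segment when it ended up coded as skip. */
static void maybe_reset_cr_segment(const AV1_COMP *cpi, MACROBLOCK *x,
                                   int mi_row, int mi_col, BLOCK_SIZE bsize) {
  const MB_MODE_INFO *mbmi = x->e_mbd.mi[0];
  if (cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ && mbmi->skip_txfm)
    av1_cyclic_reset_segment_skip(cpi, x, mi_row, mi_col, bsize);
}
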
/*!\brief Update segment_id for block based on mode selected.
*
* Prior to coding a given prediction block, of size bsize at (mi_row, mi_col),
diff --git a/third_party/libaom/source/libaom/av1/encoder/aq_variance.c b/third_party/libaom/source/libaom/av1/encoder/aq_variance.c
index 92d7ad172d..79bf9f8419 100644
--- a/third_party/libaom/source/libaom/av1/encoder/aq_variance.c
+++ b/third_party/libaom/source/libaom/av1/encoder/aq_variance.c
@@ -52,7 +52,7 @@ void av1_vaq_frame_setup(AV1_COMP *cpi) {
int resolution_change =
cm->prev_frame && (cm->width != cm->prev_frame->width ||
cm->height != cm->prev_frame->height);
- int avg_energy = (int)(cpi->twopass.mb_av_energy - 2);
+ int avg_energy = (int)(cpi->ppi->twopass.mb_av_energy - 2);
double avg_ratio;
if (avg_energy > 7) avg_energy = 7;
if (avg_energy < 0) avg_energy = 0;
@@ -81,7 +81,7 @@ void av1_vaq_frame_setup(AV1_COMP *cpi) {
int qindex_delta = av1_compute_qdelta_by_rate(
&cpi->rc, cm->current_frame.frame_type, base_qindex,
rate_ratio[i] / avg_ratio, cpi->is_screen_content_type,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
// We don't allow qindex 0 in a segment if the base value is not 0.
// Q index 0 (lossless) implies 4x4 encoding only and in AQ mode a segment
@@ -126,14 +126,14 @@ int av1_log_block_var(const AV1_COMP *cpi, MACROBLOCK *x, BLOCK_SIZE bs) {
for (j = 0; j < bw; j += 4) {
if (is_cur_buf_hbd(xd)) {
var +=
- log(1.0 + cpi->fn_ptr[BLOCK_4X4].vf(
+ log(1.0 + cpi->ppi->fn_ptr[BLOCK_4X4].vf(
x->plane[0].src.buf + i * x->plane[0].src.stride + j,
x->plane[0].src.stride,
CONVERT_TO_BYTEPTR(av1_highbd_all_zeros), 0, &sse) /
16);
} else {
var +=
- log(1.0 + cpi->fn_ptr[BLOCK_4X4].vf(
+ log(1.0 + cpi->ppi->fn_ptr[BLOCK_4X4].vf(
x->plane[0].src.buf + i * x->plane[0].src.stride + j,
x->plane[0].src.stride, av1_all_zeros, 0, &sse) /
16);
@@ -154,15 +154,12 @@ static unsigned int haar_ac_energy(MACROBLOCK *x, BLOCK_SIZE bs) {
MACROBLOCKD *xd = &x->e_mbd;
int stride = x->plane[0].src.stride;
uint8_t *buf = x->plane[0].src.buf;
- const int bw = MI_SIZE * mi_size_wide[bs];
- const int bh = MI_SIZE * mi_size_high[bs];
+ const int num_8x8_cols = block_size_wide[bs] / 8;
+ const int num_8x8_rows = block_size_high[bs] / 8;
const int hbd = is_cur_buf_hbd(xd);
- int var = 0;
- for (int r = 0; r < bh; r += 8)
- for (int c = 0; c < bw; c += 8) {
- var += av1_haar_ac_sad_8x8_uint8_input(buf + c + r * stride, stride, hbd);
- }
+ int64_t var = av1_haar_ac_sad_mxn_uint8_input(buf, stride, hbd, num_8x8_rows,
+ num_8x8_cols);
return (unsigned int)((uint64_t)var * 256) >> num_pels_log2_lookup[bs];
}
@@ -178,7 +175,7 @@ int av1_block_wavelet_energy_level(const AV1_COMP *cpi, MACROBLOCK *x,
double energy, energy_midpoint;
aom_clear_system_state();
energy_midpoint = (is_stat_consumption_stage_twopass(cpi))
- ? cpi->twopass.frame_avg_haar_energy
+ ? cpi->ppi->twopass.frame_avg_haar_energy
: DEFAULT_E_MIDPOINT;
energy = av1_log_block_wavelet_energy(x, bs) - energy_midpoint;
return clamp((int)round(energy), ENERGY_MIN, ENERGY_MAX);
@@ -199,7 +196,7 @@ int av1_compute_q_from_energy_level_deltaq_mode(const AV1_COMP *const cpi,
int qindex_delta = av1_compute_qdelta_by_rate(
&cpi->rc, cm->current_frame.frame_type, base_qindex,
deltaq_rate_ratio[rate_level], cpi->is_screen_content_type,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
if ((base_qindex != 0) && ((base_qindex + qindex_delta) == 0)) {
qindex_delta = -base_qindex + 1;
diff --git a/third_party/libaom/source/libaom/av1/encoder/av1_noise_estimate.c b/third_party/libaom/source/libaom/av1/encoder/av1_noise_estimate.c
index dbc86c5034..8b2fc38923 100644
--- a/third_party/libaom/source/libaom/av1/encoder/av1_noise_estimate.c
+++ b/third_party/libaom/source/libaom/av1/encoder/av1_noise_estimate.c
@@ -27,8 +27,8 @@
#if CONFIG_AV1_TEMPORAL_DENOISING
// For SVC: only do noise estimation on top spatial layer.
static INLINE int noise_est_svc(const struct AV1_COMP *const cpi) {
- return (!cpi->use_svc ||
- (cpi->use_svc &&
+ return (!cpi->ppi->use_svc ||
+ (cpi->ppi->use_svc &&
cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1));
}
#endif
@@ -61,7 +61,7 @@ static int enable_noise_estimation(AV1_COMP *const cpi) {
cpi->common.height != resize_pending_params->height));
#if CONFIG_AV1_HIGHBITDEPTH
- if (cpi->common.seq_params.use_highbitdepth) return 0;
+ if (cpi->common.seq_params->use_highbitdepth) return 0;
#endif
// Enable noise estimation if denoising is on.
#if CONFIG_AV1_TEMPORAL_DENOISING
@@ -75,7 +75,7 @@ static int enable_noise_estimation(AV1_COMP *const cpi) {
// Not enabled for low resolutions.
if (cpi->oxcf.pass == 0 && cpi->oxcf.rc_cfg.mode == AOM_CBR &&
cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ && cpi->oxcf.speed >= 5 &&
- resize_pending == 0 && !cpi->use_svc &&
+ resize_pending == 0 && !cpi->ppi->use_svc &&
cpi->oxcf.tune_cfg.content != AOM_CONTENT_SCREEN &&
cpi->common.width * cpi->common.height >= 640 * 360)
return 1;
@@ -227,7 +227,7 @@ void av1_update_noise_estimate(AV1_COMP *const cpi) {
unsigned int sse;
// Compute variance between co-located blocks from current and
// last input frames.
- unsigned int variance = cpi->fn_ptr[bsize].vf(
+ unsigned int variance = cpi->ppi->fn_ptr[bsize].vf(
src_y, src_ystride, last_src_y, last_src_ystride, &sse);
unsigned int hist_index = variance / bin_size;
if (hist_index < MAX_VAR_HIST_BINS)
diff --git a/third_party/libaom/source/libaom/av1/encoder/av1_quantize.c b/third_party/libaom/source/libaom/av1/encoder/av1_quantize.c
index 9d38e2d77d..2b07e4c71b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/av1_quantize.c
+++ b/third_party/libaom/source/libaom/av1/encoder/av1_quantize.c
@@ -33,6 +33,40 @@ void av1_quantize_skip(intptr_t n_coeffs, tran_low_t *qcoeff_ptr,
*eob_ptr = 0;
}
+int av1_quantize_fp_no_qmatrix(const int16_t quant_ptr[2],
+ const int16_t dequant_ptr[2],
+ const int16_t round_ptr[2], int log_scale,
+ const int16_t *scan, int coeff_count,
+ const tran_low_t *coeff_ptr,
+ tran_low_t *qcoeff_ptr,
+ tran_low_t *dqcoeff_ptr) {
+ memset(qcoeff_ptr, 0, coeff_count * sizeof(*qcoeff_ptr));
+ memset(dqcoeff_ptr, 0, coeff_count * sizeof(*dqcoeff_ptr));
+ const int rounding[2] = { ROUND_POWER_OF_TWO(round_ptr[0], log_scale),
+ ROUND_POWER_OF_TWO(round_ptr[1], log_scale) };
+ int eob = 0;
+ for (int i = 0; i < coeff_count; i++) {
+ const int rc = scan[i];
+ const int32_t thresh = (int32_t)(dequant_ptr[rc != 0]);
+ const int coeff = coeff_ptr[rc];
+ const int coeff_sign = AOMSIGN(coeff);
+ int64_t abs_coeff = (coeff ^ coeff_sign) - coeff_sign;
+ int tmp32 = 0;
+ if ((abs_coeff << (1 + log_scale)) >= thresh) {
+ abs_coeff = clamp64(abs_coeff + rounding[rc != 0], INT16_MIN, INT16_MAX);
+ tmp32 = (int)((abs_coeff * quant_ptr[rc != 0]) >> (16 - log_scale));
+ if (tmp32) {
+ qcoeff_ptr[rc] = (tmp32 ^ coeff_sign) - coeff_sign;
+ const tran_low_t abs_dqcoeff =
+ (tmp32 * dequant_ptr[rc != 0]) >> log_scale;
+ dqcoeff_ptr[rc] = (abs_dqcoeff ^ coeff_sign) - coeff_sign;
+ }
+ }
+ if (tmp32) eob = i + 1;
+ }
+ return eob;
+}
+
static void quantize_fp_helper_c(
const tran_low_t *coeff_ptr, intptr_t n_coeffs, const int16_t *zbin_ptr,
const int16_t *round_ptr, const int16_t *quant_ptr,
@@ -53,26 +87,9 @@ static void quantize_fp_helper_c(
memset(dqcoeff_ptr, 0, n_coeffs * sizeof(*dqcoeff_ptr));
if (qm_ptr == NULL && iqm_ptr == NULL) {
- for (i = 0; i < n_coeffs; i++) {
- const int rc = scan[i];
- const int32_t thresh = (int32_t)(dequant_ptr[rc != 0]);
- const int coeff = coeff_ptr[rc];
- const int coeff_sign = AOMSIGN(coeff);
- int64_t abs_coeff = (coeff ^ coeff_sign) - coeff_sign;
- int tmp32 = 0;
- if ((abs_coeff << (1 + log_scale)) >= thresh) {
- abs_coeff =
- clamp64(abs_coeff + rounding[rc != 0], INT16_MIN, INT16_MAX);
- tmp32 = (int)((abs_coeff * quant_ptr[rc != 0]) >> (16 - log_scale));
- if (tmp32) {
- qcoeff_ptr[rc] = (tmp32 ^ coeff_sign) - coeff_sign;
- const tran_low_t abs_dqcoeff =
- (tmp32 * dequant_ptr[rc != 0]) >> log_scale;
- dqcoeff_ptr[rc] = (abs_dqcoeff ^ coeff_sign) - coeff_sign;
- }
- }
- if (tmp32) eob = i;
- }
+ *eob_ptr = av1_quantize_fp_no_qmatrix(quant_ptr, dequant_ptr, round_ptr,
+ log_scale, scan, (int)n_coeffs,
+ coeff_ptr, qcoeff_ptr, dqcoeff_ptr);
} else {
// Quantization pass: All coefficients with index >= zero_flag are
// skippable. Note: zero_flag can be zero.
@@ -100,8 +117,8 @@ static void quantize_fp_helper_c(
if (tmp32) eob = i;
}
+ *eob_ptr = eob + 1;
}
- *eob_ptr = eob + 1;
}
#if CONFIG_AV1_HIGHBITDEPTH
@@ -767,7 +784,7 @@ void av1_set_quantizer(AV1_COMMON *const cm, int min_qmlevel, int max_qmlevel,
aom_get_qmlevel(quant_params->base_qindex + quant_params->u_ac_delta_q,
min_qmlevel, max_qmlevel);
- if (!cm->seq_params.separate_uv_delta_q)
+ if (!cm->seq_params->separate_uv_delta_q)
quant_params->qmatrix_level_v = quant_params->qmatrix_level_u;
else
quant_params->qmatrix_level_v =
diff --git a/third_party/libaom/source/libaom/av1/encoder/av1_quantize.h b/third_party/libaom/source/libaom/av1/encoder/av1_quantize.h
index ad9619747a..215feb0603 100644
--- a/third_party/libaom/source/libaom/av1/encoder/av1_quantize.h
+++ b/third_party/libaom/source/libaom/av1/encoder/av1_quantize.h
@@ -118,6 +118,32 @@ int av1_qindex_to_quantizer(int qindex);
void av1_quantize_skip(intptr_t n_coeffs, tran_low_t *qcoeff_ptr,
tran_low_t *dqcoeff_ptr, uint16_t *eob_ptr);
+/*!\brief Quantize transform coefficients without using qmatrix
+ *
+ * quant_ptr, dequant_ptr and round_ptr are size 2 arrays,
+ * where index 0 corresponds to dc coeff and index 1 corresponds to ac coeffs.
+ *
+ * \param[in] quant_ptr 16-bit fixed point representation of inverse
+ * quantize step size, i.e. 2^16/dequant
+ * \param[in] dequant_ptr quantize step size
+ * \param[in] round_ptr rounding
+ * \param[in] log_scale the relative log scale of the transform
+ * coefficients
+ * \param[in] scan scan[i] indicates the position of ith to-be-coded
+ * coefficient
+ * \param[in] coeff_count number of coefficients
+ * \param[out] qcoeff_ptr quantized coefficients
+ * \param[out] dqcoeff_ptr dequantized coefficients
+ *
+ * \return The last non-zero coefficient's scan index plus 1
+ */
+int av1_quantize_fp_no_qmatrix(const int16_t quant_ptr[2],
+ const int16_t dequant_ptr[2],
+ const int16_t round_ptr[2], int log_scale,
+ const int16_t *scan, int coeff_count,
+ const tran_low_t *coeff_ptr,
+ tran_low_t *qcoeff_ptr, tran_low_t *dqcoeff_ptr);
+
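
A small sketch of exercising the new entry point declared above (all values are made up; coeff is the input coefficient block, which the \param list above does not call out explicitly). It assumes the libaom encoder headers are available and uses an identity scan over a 4x4 block:

#include "av1/encoder/av1_quantize.h"

static void quantize_4x4_example(void) {
  const int16_t dequant[2] = { 8, 8 };     /* hypothetical DC/AC step sizes */
  const int16_t quant[2] = { 8192, 8192 }; /* 2^16 / dequant, per the docs above */
  const int16_t round[2] = { 4, 4 };       /* roughly half a step of rounding */
  int16_t scan[16];
  for (int i = 0; i < 16; ++i) scan[i] = (int16_t)i; /* identity (row-major) scan */
  tran_low_t coeff[16] = { 40, -9, 3 };    /* remaining coefficients are zero */
  tran_low_t qcoeff[16], dqcoeff[16];
  const int eob = av1_quantize_fp_no_qmatrix(quant, dequant, round,
                                             /*log_scale=*/0, scan, 16, coeff,
                                             qcoeff, dqcoeff);
  /* With these numbers: qcoeff[0] == 5, qcoeff[1] == -1, the coefficient 3 falls
     below the skip threshold, and eob == 2 (last non-zero scan index plus 1). */
  (void)eob;
}
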
void av1_quantize_fp_facade(const tran_low_t *coeff_ptr, intptr_t n_coeffs,
const MACROBLOCK_PLANE *p, tran_low_t *qcoeff_ptr,
tran_low_t *dqcoeff_ptr, uint16_t *eob_ptr,
diff --git a/third_party/libaom/source/libaom/av1/encoder/av1_temporal_denoiser.c b/third_party/libaom/source/libaom/av1/encoder/av1_temporal_denoiser.c
index 6c5bb930e1..96f3d7dcfe 100644
--- a/third_party/libaom/source/libaom/av1/encoder/av1_temporal_denoiser.c
+++ b/third_party/libaom/source/libaom/av1/encoder/av1_temporal_denoiser.c
@@ -349,7 +349,7 @@ void av1_denoiser_denoise(AV1_COMP *cpi, MACROBLOCK *mb, int mi_row, int mi_col,
&cpi->common, denoiser, mb, bs, increase_denoising, mi_row, mi_col, ctx,
motion_magnitude, &zeromv_filter, cpi->svc.number_spatial_layers,
cpi->source->y_width, cpi->svc.ref_idx[0], cpi->svc.ref_idx[3],
- cpi->use_svc, cpi->svc.spatial_layer_id, use_gf_temporal_ref);
+ cpi->ppi->use_svc, cpi->svc.spatial_layer_id, use_gf_temporal_ref);
if (decision == FILTER_BLOCK) {
decision = av1_denoiser_filter(src.buf, src.stride, mc_avg_start,
@@ -415,7 +415,7 @@ void av1_denoiser_update_frame_info(
return;
}
- if (svc->external_ref_frame_config) {
+ if (svc->set_ref_frame_config) {
int i;
for (i = 0; i < REF_FRAMES; i++) {
if (svc->refresh[svc->spatial_layer_id] & (1 << i))
@@ -485,8 +485,8 @@ static int av1_denoiser_realloc_svc_helper(AV1_COMMON *cm,
if (denoiser->running_avg_y[fb_idx].buffer_alloc == NULL) {
fail = aom_alloc_frame_buffer(
&denoiser->running_avg_y[fb_idx], cm->width, cm->height,
- cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
- cm->seq_params.use_highbitdepth, AOM_BORDER_IN_PIXELS,
+ cm->seq_params->subsampling_x, cm->seq_params->subsampling_y,
+ cm->seq_params->use_highbitdepth, AOM_BORDER_IN_PIXELS,
cm->features.byte_alignment);
if (fail) {
av1_denoiser_free(denoiser);
@@ -501,7 +501,7 @@ int av1_denoiser_realloc_svc(AV1_COMMON *cm, AV1_DENOISER *denoiser,
int refresh_alt, int refresh_gld, int refresh_lst,
int alt_fb_idx, int gld_fb_idx, int lst_fb_idx) {
int fail = 0;
- if (svc->external_ref_frame_config) {
+ if (svc->set_ref_frame_config) {
int i;
for (i = 0; i < REF_FRAMES; i++) {
if (cm->current_frame.frame_type == KEY_FRAME ||
@@ -724,7 +724,7 @@ void av1_denoiser_update_ref_frame(AV1_COMP *const cpi) {
(cpi->common.width != cpi->resize_pending_params.width ||
cpi->common.height != cpi->resize_pending_params.height));
- if (cpi->use_svc) {
+ if (cpi->ppi->use_svc) {
// TODO(kyslov) Enable when SVC temporal denosing is implemented
#if 0
const int svc_buf_shift =
@@ -746,7 +746,7 @@ void av1_denoiser_update_ref_frame(AV1_COMP *const cpi) {
cpi->refresh_golden_frame,
cpi->refresh_last_frame, cpi->alt_fb_idx,
cpi->gld_fb_idx, cpi->lst_fb_idx))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to re-allocate denoiser for SVC");
#endif
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/bitstream.c b/third_party/libaom/source/libaom/av1/encoder/bitstream.c
index 2b583790ff..85c0183b17 100644
--- a/third_party/libaom/source/libaom/av1/encoder/bitstream.c
+++ b/third_party/libaom/source/libaom/av1/encoder/bitstream.c
@@ -41,6 +41,7 @@
#include "av1/encoder/cost.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/encodetxb.h"
+#include "av1/encoder/ethread.h"
#include "av1/encoder/mcomp.h"
#include "av1/encoder/palette.h"
#include "av1/encoder/segmentation.h"
@@ -185,12 +186,13 @@ static AOM_INLINE void write_tx_size_vartx(MACROBLOCKD *xd,
}
assert(bsw > 0 && bsh > 0);
- for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh)
+ for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh) {
+ const int offsetr = blk_row + row;
for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
- int offsetr = blk_row + row;
- int offsetc = blk_col + col;
+ const int offsetc = blk_col + col;
write_tx_size_vartx(xd, mbmi, sub_txs, depth + 1, offsetr, offsetc, w);
}
+ }
}
}
@@ -313,14 +315,16 @@ static AOM_INLINE void write_delta_qindex(const MACROBLOCKD *xd,
static AOM_INLINE void write_delta_lflevel(const AV1_COMMON *cm,
const MACROBLOCKD *xd, int lf_id,
- int delta_lflevel, aom_writer *w) {
+ int delta_lflevel,
+ int delta_lf_multi, aom_writer *w) {
int sign = delta_lflevel < 0;
int abs = sign ? -delta_lflevel : delta_lflevel;
int rem_bits, thr;
int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
+ (void)cm;
- if (cm->delta_q_info.delta_lf_multi) {
+ if (delta_lf_multi) {
assert(lf_id >= 0 && lf_id < (av1_num_planes(cm) > 1 ? FRAME_LF_COUNT
: FRAME_LF_COUNT - 2));
aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL),
@@ -380,7 +384,6 @@ static AOM_INLINE void pack_txb_tokens(
#if CONFIG_RD_DEBUG
TOKEN_STATS tmp_token_stats;
init_token_stats(&tmp_token_stats);
- token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
token_stats->cost += tmp_token_stats.cost;
#endif
} else {
@@ -388,14 +391,17 @@ static AOM_INLINE void pack_txb_tokens(
const int bsw = tx_size_wide_unit[sub_txs];
const int bsh = tx_size_high_unit[sub_txs];
const int step = bsh * bsw;
+ const int row_end =
+ AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
+ const int col_end =
+ AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
assert(bsw > 0 && bsh > 0);
- for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
- for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
- const int offsetr = blk_row + r;
+ for (int r = 0; r < row_end; r += bsh) {
+ const int offsetr = blk_row + r;
+ for (int c = 0; c < col_end; c += bsw) {
const int offsetc = blk_col + c;
- if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
pack_txb_tokens(w, cm, x, tp, tok_end, xd, mbmi, plane, plane_bsize,
bit_depth, block, offsetr, offsetc, sub_txs,
token_stats);
@@ -445,7 +451,7 @@ int av1_neg_interleave(int x, int ref, int max) {
}
}
-static AOM_INLINE void write_segment_id(AV1_COMP *cpi,
+static AOM_INLINE void write_segment_id(AV1_COMP *cpi, MACROBLOCKD *const xd,
const MB_MODE_INFO *const mbmi,
aom_writer *w,
const struct segmentation *seg,
@@ -454,7 +460,6 @@ static AOM_INLINE void write_segment_id(AV1_COMP *cpi,
if (!seg->enabled || !seg->update_map) return;
AV1_COMMON *const cm = &cpi->common;
- MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
int cdf_num;
const int pred = av1_get_spatial_seg_pred(cm, xd, &cdf_num);
const int mi_row = xd->mi_row;
@@ -613,8 +618,8 @@ static AOM_INLINE void write_angle_delta(aom_writer *w, int angle_delta,
}
static AOM_INLINE void write_mb_interp_filter(AV1_COMMON *const cm,
- const MACROBLOCKD *xd,
- aom_writer *w) {
+ ThreadData *td, aom_writer *w) {
+ const MACROBLOCKD *xd = &td->mb.e_mbd;
const MB_MODE_INFO *const mbmi = xd->mi[0];
FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
@@ -633,8 +638,8 @@ static AOM_INLINE void write_mb_interp_filter(AV1_COMMON *const cm,
av1_extract_interp_filter(mbmi->interp_filters, dir);
aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
SWITCHABLE_FILTERS);
- ++cm->cur_frame->interp_filter_selected[filter];
- if (cm->seq_params.enable_dual_filter == 0) return;
+ ++td->interp_filter_selected[filter];
+ if (cm->seq_params->enable_dual_filter == 0) return;
}
}
}
@@ -777,7 +782,7 @@ static AOM_INLINE void write_palette_mode_info(const AV1_COMMON *cm,
aom_write_symbol(w, n - PALETTE_MIN_SIZE,
xd->tile_ctx->palette_y_size_cdf[bsize_ctx],
PALETTE_SIZES);
- write_palette_colors_y(xd, pmi, cm->seq_params.bit_depth, w);
+ write_palette_colors_y(xd, pmi, cm->seq_params->bit_depth, w);
}
}
@@ -792,7 +797,7 @@ static AOM_INLINE void write_palette_mode_info(const AV1_COMMON *cm,
aom_write_symbol(w, n - PALETTE_MIN_SIZE,
xd->tile_ctx->palette_uv_size_cdf[bsize_ctx],
PALETTE_SIZES);
- write_palette_colors_uv(xd, pmi, cm->seq_params.bit_depth, w);
+ write_palette_colors_uv(xd, pmi, cm->seq_params->bit_depth, w);
}
}
}
@@ -874,7 +879,7 @@ static AOM_INLINE void write_cdef(AV1_COMMON *cm, MACROBLOCKD *const xd,
// At the start of a superblock, mark that we haven't yet written CDEF
// strengths for any of the CDEF units contained in this superblock.
- const int sb_mask = (cm->seq_params.mib_size - 1);
+ const int sb_mask = (cm->seq_params->mib_size - 1);
const int mi_row_in_sb = (xd->mi_row & sb_mask);
const int mi_col_in_sb = (xd->mi_col & sb_mask);
if (mi_row_in_sb == 0 && mi_col_in_sb == 0) {
@@ -889,7 +894,7 @@ static AOM_INLINE void write_cdef(AV1_COMMON *cm, MACROBLOCKD *const xd,
const int index_mask = cdef_size;
const int cdef_unit_row_in_sb = ((xd->mi_row & index_mask) != 0);
const int cdef_unit_col_in_sb = ((xd->mi_col & index_mask) != 0);
- const int index = (cm->seq_params.sb_size == BLOCK_128X128)
+ const int index = (cm->seq_params->sb_size == BLOCK_128X128)
? cdef_unit_col_in_sb + 2 * cdef_unit_row_in_sb
: 0;
@@ -909,9 +914,9 @@ static AOM_INLINE void write_cdef(AV1_COMMON *cm, MACROBLOCKD *const xd,
}
static AOM_INLINE void write_inter_segment_id(
- AV1_COMP *cpi, aom_writer *w, const struct segmentation *const seg,
- struct segmentation_probs *const segp, int skip, int preskip) {
- MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
+ AV1_COMP *cpi, MACROBLOCKD *const xd, aom_writer *w,
+ const struct segmentation *const seg, struct segmentation_probs *const segp,
+ int skip, int preskip) {
MB_MODE_INFO *const mbmi = xd->mi[0];
AV1_COMMON *const cm = &cpi->common;
const int mi_row = xd->mi_row;
@@ -923,7 +928,7 @@ static AOM_INLINE void write_inter_segment_id(
} else {
if (seg->segid_preskip) return;
if (skip) {
- write_segment_id(cpi, mbmi, w, seg, segp, 1);
+ write_segment_id(cpi, xd, mbmi, w, seg, segp, 1);
if (seg->temporal_update) mbmi->seg_id_predicted = 0;
return;
}
@@ -933,35 +938,33 @@ static AOM_INLINE void write_inter_segment_id(
aom_cdf_prob *pred_cdf = av1_get_pred_cdf_seg_id(segp, xd);
aom_write_symbol(w, pred_flag, pred_cdf, 2);
if (!pred_flag) {
- write_segment_id(cpi, mbmi, w, seg, segp, 0);
+ write_segment_id(cpi, xd, mbmi, w, seg, segp, 0);
}
if (pred_flag) {
set_spatial_segment_id(&cm->mi_params, cm->cur_frame->seg_map,
mbmi->bsize, mi_row, mi_col, mbmi->segment_id);
}
} else {
- write_segment_id(cpi, mbmi, w, seg, segp, 0);
+ write_segment_id(cpi, xd, mbmi, w, seg, segp, 0);
}
}
}
// If delta q is present, writes delta_q index.
// Also writes delta_q loop filter levels, if present.
-static AOM_INLINE void write_delta_q_params(AV1_COMP *cpi, int skip,
+static AOM_INLINE void write_delta_q_params(AV1_COMMON *const cm,
+ MACROBLOCKD *const xd, int skip,
aom_writer *w) {
- AV1_COMMON *const cm = &cpi->common;
const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
if (delta_q_info->delta_q_present_flag) {
- MACROBLOCK *const x = &cpi->td.mb;
- MACROBLOCKD *const xd = &x->e_mbd;
const MB_MODE_INFO *const mbmi = xd->mi[0];
const BLOCK_SIZE bsize = mbmi->bsize;
const int super_block_upper_left =
- ((xd->mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
- ((xd->mi_col & (cm->seq_params.mib_size - 1)) == 0);
+ ((xd->mi_row & (cm->seq_params->mib_size - 1)) == 0) &&
+ ((xd->mi_col & (cm->seq_params->mib_size - 1)) == 0);
- if ((bsize != cm->seq_params.sb_size || skip == 0) &&
+ if ((bsize != cm->seq_params->sb_size || skip == 0) &&
super_block_upper_left) {
assert(mbmi->current_qindex > 0);
const int reduced_delta_qindex =
@@ -977,14 +980,14 @@ static AOM_INLINE void write_delta_q_params(AV1_COMP *cpi, int skip,
int reduced_delta_lflevel =
(mbmi->delta_lf[lf_id] - xd->delta_lf[lf_id]) /
delta_q_info->delta_lf_res;
- write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
+ write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, 1, w);
xd->delta_lf[lf_id] = mbmi->delta_lf[lf_id];
}
} else {
int reduced_delta_lflevel =
(mbmi->delta_lf_from_base - xd->delta_lf_from_base) /
delta_q_info->delta_lf_res;
- write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
+ write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, 0, w);
xd->delta_lf_from_base = mbmi->delta_lf_from_base;
}
}
@@ -992,12 +995,10 @@ static AOM_INLINE void write_delta_q_params(AV1_COMP *cpi, int skip,
}
}
-static AOM_INLINE void write_intra_prediction_modes(AV1_COMP *cpi,
+static AOM_INLINE void write_intra_prediction_modes(const AV1_COMMON *cm,
+ MACROBLOCKD *const xd,
int is_keyframe,
aom_writer *w) {
- const AV1_COMMON *const cm = &cpi->common;
- MACROBLOCK *const x = &cpi->td.mb;
- MACROBLOCKD *const xd = &x->e_mbd;
FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
const MB_MODE_INFO *const mbmi = xd->mi[0];
const PREDICTION_MODE mode = mbmi->mode;
@@ -1020,7 +1021,7 @@ static AOM_INLINE void write_intra_prediction_modes(AV1_COMP *cpi,
}
// UV mode and UV angle delta.
- if (!cm->seq_params.monochrome && xd->is_chroma_ref) {
+ if (!cm->seq_params->monochrome && xd->is_chroma_ref) {
const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(xd), w);
if (uv_mode == UV_CFL_PRED)
@@ -1082,9 +1083,10 @@ static INLINE int_mv get_ref_mv(const MACROBLOCK *x, int ref_idx) {
x->mbmi_ext_frame);
}
-static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, aom_writer *w) {
+static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, ThreadData *const td,
+ aom_writer *w) {
AV1_COMMON *const cm = &cpi->common;
- MACROBLOCK *const x = &cpi->td.mb;
+ MACROBLOCK *const x = &td->mb;
MACROBLOCKD *const xd = &x->e_mbd;
FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
const struct segmentation *const seg = &cm->seg;
@@ -1099,7 +1101,7 @@ static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, aom_writer *w) {
const int is_compound = has_second_ref(mbmi);
int ref;
- write_inter_segment_id(cpi, w, seg, segp, 0, 1);
+ write_inter_segment_id(cpi, xd, w, seg, segp, 0, 1);
write_skip_mode(cm, xd, segment_id, mbmi, w);
@@ -1107,18 +1109,18 @@ static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, aom_writer *w) {
const int skip =
mbmi->skip_mode ? 1 : write_skip(cm, xd, segment_id, mbmi, w);
- write_inter_segment_id(cpi, w, seg, segp, skip, 0);
+ write_inter_segment_id(cpi, xd, w, seg, segp, skip, 0);
write_cdef(cm, xd, w, skip);
- write_delta_q_params(cpi, skip, w);
+ write_delta_q_params(cm, xd, skip, w);
if (!mbmi->skip_mode) write_is_inter(cm, xd, mbmi->segment_id, w, is_inter);
if (mbmi->skip_mode) return;
if (!is_inter) {
- write_intra_prediction_modes(cpi, 0, w);
+ write_intra_prediction_modes(cm, xd, 0, w);
} else {
int16_t mode_ctx;
@@ -1146,21 +1148,23 @@ static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, aom_writer *w) {
for (ref = 0; ref < 1 + is_compound; ++ref) {
nmv_context *nmvc = &ec_ctx->nmvc;
const int_mv ref_mv = get_ref_mv(x, ref);
- av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
+ av1_encode_mv(cpi, w, td, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
allow_hp);
}
} else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
nmv_context *nmvc = &ec_ctx->nmvc;
const int_mv ref_mv = get_ref_mv(x, 1);
- av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv, &ref_mv.as_mv, nmvc, allow_hp);
+ av1_encode_mv(cpi, w, td, &mbmi->mv[1].as_mv, &ref_mv.as_mv, nmvc,
+ allow_hp);
} else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
nmv_context *nmvc = &ec_ctx->nmvc;
const int_mv ref_mv = get_ref_mv(x, 0);
- av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv, &ref_mv.as_mv, nmvc, allow_hp);
+ av1_encode_mv(cpi, w, td, &mbmi->mv[0].as_mv, &ref_mv.as_mv, nmvc,
+ allow_hp);
}
if (cpi->common.current_frame.reference_mode != COMPOUND_REFERENCE &&
- cpi->common.seq_params.enable_interintra_compound &&
+ cpi->common.seq_params->enable_interintra_compound &&
is_interintra_allowed(mbmi)) {
const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
const int bsize_group = size_group_lookup[bsize];
@@ -1187,7 +1191,7 @@ static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, aom_writer *w) {
// Group B (1): interintra, compound_diffwtd, wedge
if (has_second_ref(mbmi)) {
const int masked_compound_used = is_any_masked_compound_used(bsize) &&
- cm->seq_params.enable_masked_compound;
+ cm->seq_params->enable_masked_compound;
if (masked_compound_used) {
const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
@@ -1201,7 +1205,7 @@ static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, aom_writer *w) {
if (mbmi->compound_idx)
assert(mbmi->interinter_comp.type == COMPOUND_AVERAGE);
- if (cm->seq_params.order_hint_info.enable_dist_wtd_comp) {
+ if (cm->seq_params->order_hint_info.enable_dist_wtd_comp) {
const int comp_index_ctx = get_comp_index_context(cm, xd);
aom_write_symbol(w, mbmi->compound_idx,
ec_ctx->compound_index_cdf[comp_index_ctx], 2);
@@ -1234,7 +1238,7 @@ static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, aom_writer *w) {
}
}
}
- write_mb_interp_filter(cm, xd, w);
+ write_mb_interp_filter(cm, td, w);
}
}
@@ -1264,23 +1268,23 @@ static AOM_INLINE void write_mb_modes_kf(
const MB_MODE_INFO *const mbmi = xd->mi[0];
if (seg->segid_preskip && seg->update_map)
- write_segment_id(cpi, mbmi, w, seg, segp, 0);
+ write_segment_id(cpi, xd, mbmi, w, seg, segp, 0);
const int skip = write_skip(cm, xd, mbmi->segment_id, mbmi, w);
if (!seg->segid_preskip && seg->update_map)
- write_segment_id(cpi, mbmi, w, seg, segp, skip);
+ write_segment_id(cpi, xd, mbmi, w, seg, segp, skip);
write_cdef(cm, xd, w, skip);
- write_delta_q_params(cpi, skip, w);
+ write_delta_q_params(cm, xd, skip, w);
if (av1_allow_intrabc(cm)) {
write_intrabc_info(xd, mbmi_ext_frame, w);
if (is_intrabc_block(mbmi)) return;
}
- write_intra_prediction_modes(cpi, 1, w);
+ write_intra_prediction_modes(cm, xd, 1, w);
}
#if CONFIG_RD_DEBUG
@@ -1295,24 +1299,8 @@ static AOM_INLINE void dump_mode_info(MB_MODE_INFO *mi) {
static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
int plane) {
if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
- int r, c;
printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
- printf("rd txb_coeff_cost_map\n");
- for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
- for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
- printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
- }
- printf("\n");
- }
-
- printf("pack txb_coeff_cost_map\n");
- for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
- for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
- printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
- }
- printf("\n");
- }
return 1;
}
return 0;
@@ -1376,13 +1364,14 @@ static AOM_INLINE void enc_dump_logs(
}
#endif // ENC_MISMATCH_DEBUG
-static AOM_INLINE void write_mbmi_b(AV1_COMP *cpi, aom_writer *w) {
+static AOM_INLINE void write_mbmi_b(AV1_COMP *cpi, ThreadData *const td,
+ aom_writer *w) {
AV1_COMMON *const cm = &cpi->common;
- MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
+ MACROBLOCKD *const xd = &td->mb.e_mbd;
MB_MODE_INFO *m = xd->mi[0];
if (frame_is_intra_only(cm)) {
- write_mb_modes_kf(cpi, xd, cpi->td.mb.mbmi_ext_frame, w);
+ write_mb_modes_kf(cpi, xd, td->mb.mbmi_ext_frame, w);
} else {
// has_subpel_mv_component needs the ref frame buffers set up to look
// up if they are scaled. has_subpel_mv_component is in turn needed by
@@ -1393,7 +1382,7 @@ static AOM_INLINE void write_mbmi_b(AV1_COMP *cpi, aom_writer *w) {
enc_dump_logs(cm, &cpi->mbmi_ext_info, xd->mi_row, xd->mi_col);
#endif // ENC_MISMATCH_DEBUG
- pack_inter_mode_mvs(cpi, w);
+ pack_inter_mode_mvs(cpi, td, w);
}
}
@@ -1426,18 +1415,17 @@ static AOM_INLINE void write_inter_txb_coeff(
for (int blk_row = row >> ss_y; blk_row < unit_height; blk_row += bkh) {
for (int blk_col = col >> ss_x; blk_col < unit_width; blk_col += bkw) {
pack_txb_tokens(w, cm, x, tok, tok_end, xd, mbmi, plane, plane_bsize,
- cm->seq_params.bit_depth, *block, blk_row, blk_col,
+ cm->seq_params->bit_depth, *block, blk_row, blk_col,
max_tx_size, token_stats);
*block += step;
}
}
}
-static AOM_INLINE void write_tokens_b(AV1_COMP *cpi, aom_writer *w,
- const TokenExtra **tok,
+static AOM_INLINE void write_tokens_b(AV1_COMP *cpi, MACROBLOCK *const x,
+ aom_writer *w, const TokenExtra **tok,
const TokenExtra *const tok_end) {
AV1_COMMON *const cm = &cpi->common;
- MACROBLOCK *const x = &cpi->td.mb;
MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *const mbmi = xd->mi[0];
const BLOCK_SIZE bsize = mbmi->bsize;
@@ -1487,17 +1475,18 @@ static AOM_INLINE void write_tokens_b(AV1_COMP *cpi, aom_writer *w,
}
}
-static AOM_INLINE void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
- aom_writer *w, const TokenExtra **tok,
+static AOM_INLINE void write_modes_b(AV1_COMP *cpi, ThreadData *const td,
+ const TileInfo *const tile, aom_writer *w,
+ const TokenExtra **tok,
const TokenExtra *const tok_end,
int mi_row, int mi_col) {
const AV1_COMMON *cm = &cpi->common;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
- MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
+ MACROBLOCKD *xd = &td->mb.e_mbd;
FRAME_CONTEXT *tile_ctx = xd->tile_ctx;
const int grid_idx = mi_row * mi_params->mi_stride + mi_col;
xd->mi = mi_params->mi_grid_base + grid_idx;
- cpi->td.mb.mbmi_ext_frame =
+ td->mb.mbmi_ext_frame =
cpi->mbmi_ext_info.frame_base +
get_mi_ext_idx(mi_row, mi_col, cm->mi_params.mi_alloc_bsize,
cpi->mbmi_ext_info.stride);
@@ -1506,7 +1495,7 @@ static AOM_INLINE void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
const MB_MODE_INFO *mbmi = xd->mi[0];
const BLOCK_SIZE bsize = mbmi->bsize;
- assert(bsize <= cm->seq_params.sb_size ||
+ assert(bsize <= cm->seq_params->sb_size ||
(bsize >= BLOCK_SIZES && bsize < BLOCK_SIZES_ALL));
const int bh = mi_size_high[bsize];
@@ -1518,7 +1507,7 @@ static AOM_INLINE void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
xd->left_txfm_context =
xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
- write_mbmi_b(cpi, w);
+ write_mbmi_b(cpi, td, w);
for (int plane = 0; plane < AOMMIN(2, av1_num_planes(cm)); ++plane) {
const uint8_t palette_size_plane =
@@ -1567,10 +1556,10 @@ static AOM_INLINE void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
if (!mbmi->skip_txfm) {
int start = aom_tell_size(w);
- write_tokens_b(cpi, w, tok, tok_end);
+ write_tokens_b(cpi, &td->mb, w, tok, tok_end);
const int end = aom_tell_size(w);
- cpi->rc.coefficient_size += end - start;
+ td->coefficient_size += end - start;
}
}
@@ -1612,12 +1601,12 @@ static AOM_INLINE void write_partition(const AV1_COMMON *const cm,
}
static AOM_INLINE void write_modes_sb(
- AV1_COMP *const cpi, const TileInfo *const tile, aom_writer *const w,
- const TokenExtra **tok, const TokenExtra *const tok_end, int mi_row,
- int mi_col, BLOCK_SIZE bsize) {
+ AV1_COMP *const cpi, ThreadData *const td, const TileInfo *const tile,
+ aom_writer *const w, const TokenExtra **tok,
+ const TokenExtra *const tok_end, int mi_row, int mi_col, BLOCK_SIZE bsize) {
const AV1_COMMON *const cm = &cpi->common;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
- MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
+ MACROBLOCKD *const xd = &td->mb.e_mbd;
assert(bsize < BLOCK_SIZES_ALL);
const int hbs = mi_size_wide[bsize] / 2;
const int quarter_step = mi_size_wide[bsize] / 4;
@@ -1639,8 +1628,7 @@ static AOM_INLINE void write_modes_sb(
const int runit_idx = rcol + rrow * rstride;
const RestorationUnitInfo *rui =
&cm->rst_info[plane].unit_info[runit_idx];
- loop_restoration_write_sb_coeffs(cm, xd, rui, w, plane,
- cpi->td.counts);
+ loop_restoration_write_sb_coeffs(cm, xd, rui, w, plane, td->counts);
}
}
}
@@ -1650,51 +1638,53 @@ static AOM_INLINE void write_modes_sb(
write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
switch (partition) {
case PARTITION_NONE:
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
break;
case PARTITION_HORZ:
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
if (mi_row + hbs < mi_params->mi_rows)
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
break;
case PARTITION_VERT:
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
if (mi_col + hbs < mi_params->mi_cols)
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
break;
case PARTITION_SPLIT:
- write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
- write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs, subsize);
- write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col, subsize);
- write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
+ write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row, mi_col, subsize);
+ write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs,
+ subsize);
+ write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col,
+ subsize);
+ write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
subsize);
break;
case PARTITION_HORZ_A:
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
break;
case PARTITION_HORZ_B:
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
break;
case PARTITION_VERT_A:
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
break;
case PARTITION_VERT_B:
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
break;
case PARTITION_HORZ_4:
for (i = 0; i < 4; ++i) {
int this_mi_row = mi_row + i * quarter_step;
if (i > 0 && this_mi_row >= mi_params->mi_rows) break;
- write_modes_b(cpi, tile, w, tok, tok_end, this_mi_row, mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, this_mi_row, mi_col);
}
break;
case PARTITION_VERT_4:
@@ -1702,7 +1692,7 @@ static AOM_INLINE void write_modes_sb(
int this_mi_col = mi_col + i * quarter_step;
if (i > 0 && this_mi_col >= mi_params->mi_cols) break;
- write_modes_b(cpi, tile, w, tok, tok_end, mi_row, this_mi_col);
+ write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, this_mi_col);
}
break;
default: assert(0);
@@ -1712,12 +1702,12 @@ static AOM_INLINE void write_modes_sb(
update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
}
-static AOM_INLINE void write_modes(AV1_COMP *const cpi,
+static AOM_INLINE void write_modes(AV1_COMP *const cpi, ThreadData *const td,
const TileInfo *const tile,
aom_writer *const w, int tile_row,
int tile_col) {
AV1_COMMON *const cm = &cpi->common;
- MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
+ MACROBLOCKD *const xd = &td->mb.e_mbd;
const int mi_row_start = tile->mi_row_start;
const int mi_row_end = tile->mi_row_end;
const int mi_col_start = tile->mi_col_start;
@@ -1735,9 +1725,9 @@ static AOM_INLINE void write_modes(AV1_COMP *const cpi,
}
for (int mi_row = mi_row_start; mi_row < mi_row_end;
- mi_row += cm->seq_params.mib_size) {
+ mi_row += cm->seq_params->mib_size) {
const int sb_row_in_tile =
- (mi_row - tile->mi_row_start) >> cm->seq_params.mib_size_log2;
+ (mi_row - tile->mi_row_start) >> cm->seq_params->mib_size_log2;
const TokenExtra *tok =
cpi->token_info.tplist[tile_row][tile_col][sb_row_in_tile].start;
const TokenExtra *tok_end =
@@ -1746,10 +1736,10 @@ static AOM_INLINE void write_modes(AV1_COMP *const cpi,
av1_zero_left_context(xd);
for (int mi_col = mi_col_start; mi_col < mi_col_end;
- mi_col += cm->seq_params.mib_size) {
- cpi->td.mb.cb_coef_buff = av1_get_cb_coeff_buffer(cpi, mi_row, mi_col);
- write_modes_sb(cpi, tile, w, &tok, tok_end, mi_row, mi_col,
- cm->seq_params.sb_size);
+ mi_col += cm->seq_params->mib_size) {
+ td->mb.cb_coef_buff = av1_get_cb_coeff_buffer(cpi, mi_row, mi_col);
+ write_modes_sb(cpi, td, tile, w, &tok, tok_end, mi_row, mi_col,
+ cm->seq_params->sb_size);
}
assert(tok == tok_end);
}
@@ -1758,7 +1748,7 @@ static AOM_INLINE void write_modes(AV1_COMP *const cpi,
static AOM_INLINE void encode_restoration_mode(
AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
assert(!cm->features.all_lossless);
- if (!cm->seq_params.enable_restoration) return;
+ if (!cm->seq_params->enable_restoration) return;
if (cm->features.allow_intrabc) return;
const int num_planes = av1_num_planes(cm);
int all_none = 1, chroma_none = 1;
@@ -1789,9 +1779,9 @@ static AOM_INLINE void encode_restoration_mode(
}
}
if (!all_none) {
- assert(cm->seq_params.sb_size == BLOCK_64X64 ||
- cm->seq_params.sb_size == BLOCK_128X128);
- const int sb_size = cm->seq_params.sb_size == BLOCK_128X128 ? 128 : 64;
+ assert(cm->seq_params->sb_size == BLOCK_64X64 ||
+ cm->seq_params->sb_size == BLOCK_128X128);
+ const int sb_size = cm->seq_params->sb_size == BLOCK_128X128 ? 128 : 64;
RestorationInfo *rsi = &cm->rst_info[0];
@@ -1807,7 +1797,8 @@ static AOM_INLINE void encode_restoration_mode(
}
if (num_planes > 1) {
- int s = AOMMIN(cm->seq_params.subsampling_x, cm->seq_params.subsampling_y);
+ int s =
+ AOMMIN(cm->seq_params->subsampling_x, cm->seq_params->subsampling_y);
if (s && !chroma_none) {
aom_wb_write_bit(wb, cm->rst_info[1].restoration_unit_size !=
cm->rst_info[0].restoration_unit_size);
@@ -2040,7 +2031,7 @@ static AOM_INLINE void encode_loopfilter(AV1_COMMON *cm,
static AOM_INLINE void encode_cdef(const AV1_COMMON *cm,
struct aom_write_bit_buffer *wb) {
assert(!cm->features.coded_lossless);
- if (!cm->seq_params.enable_cdef) return;
+ if (!cm->seq_params->enable_cdef) return;
if (cm->features.allow_intrabc) return;
const int num_planes = av1_num_planes(cm);
int i;
@@ -2093,7 +2084,7 @@ static AOM_INLINE void encode_quantization(
}
}
-static AOM_INLINE void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
+static AOM_INLINE void encode_segmentation(AV1_COMMON *cm,
struct aom_write_bit_buffer *wb) {
int i, j;
struct segmentation *seg = &cm->seg;
@@ -2102,17 +2093,9 @@ static AOM_INLINE void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
if (!seg->enabled) return;
// Write update flags
- if (cm->features.primary_ref_frame == PRIMARY_REF_NONE) {
- assert(seg->update_map == 1);
- seg->temporal_update = 0;
- assert(seg->update_data == 1);
- } else {
+ if (cm->features.primary_ref_frame != PRIMARY_REF_NONE) {
aom_wb_write_bit(wb, seg->update_map);
- if (seg->update_map) {
- // Select the coding strategy (temporal or spatial)
- av1_choose_segmap_coding_method(cm, xd);
- aom_wb_write_bit(wb, seg->temporal_update);
- }
+ if (seg->update_map) aom_wb_write_bit(wb, seg->temporal_update);
aom_wb_write_bit(wb, seg->update_data);
}
@@ -2163,11 +2146,11 @@ static AOM_INLINE void wb_write_uniform(struct aom_write_bit_buffer *wb, int n,
static AOM_INLINE void write_tile_info_max_tile(
const AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
int width_mi =
- ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols, cm->seq_params.mib_size_log2);
+ ALIGN_POWER_OF_TWO(cm->mi_params.mi_cols, cm->seq_params->mib_size_log2);
int height_mi =
- ALIGN_POWER_OF_TWO(cm->mi_params.mi_rows, cm->seq_params.mib_size_log2);
- int width_sb = width_mi >> cm->seq_params.mib_size_log2;
- int height_sb = height_mi >> cm->seq_params.mib_size_log2;
+ ALIGN_POWER_OF_TWO(cm->mi_params.mi_rows, cm->seq_params->mib_size_log2);
+ int width_sb = width_mi >> cm->seq_params->mib_size_log2;
+ int height_sb = height_mi >> cm->seq_params->mib_size_log2;
int size_sb, i;
const CommonTileParams *const tiles = &cm->tiles;
@@ -2244,13 +2227,6 @@ static AOM_INLINE void write_ext_tile_info(
}
}
-// Stores the location and size of a tile's data in the bitstream. Used for
-// later identifying identical tiles
-typedef struct TileBufferEnc {
- uint8_t *data;
- size_t size;
-} TileBufferEnc;
-
static INLINE int find_identical_tile(
const int tile_row, const int tile_col,
TileBufferEnc (*const tile_buffers)[MAX_TILE_COLS]) {
@@ -2314,7 +2290,7 @@ static AOM_INLINE void write_render_size(const AV1_COMMON *cm,
static AOM_INLINE void write_superres_scale(const AV1_COMMON *const cm,
struct aom_write_bit_buffer *wb) {
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
if (!seq_params->enable_superres) {
assert(cm->superres_scale_denominator == SCALE_NUMERATOR);
return;
@@ -2341,7 +2317,7 @@ static AOM_INLINE void write_frame_size(const AV1_COMMON *cm,
const int coded_height = cm->superres_upscaled_height - 1;
if (frame_size_override) {
- const SequenceHeader *seq_params = &cm->seq_params;
+ const SequenceHeader *seq_params = cm->seq_params;
int num_bits_width = seq_params->num_bits_width;
int num_bits_height = seq_params->num_bits_height;
aom_wb_write_literal(wb, coded_width, num_bits_width);
@@ -2499,7 +2475,7 @@ static AOM_INLINE void write_tu_pts_info(AV1_COMMON *const cm,
struct aom_write_bit_buffer *wb) {
aom_wb_write_unsigned_literal(
wb, cm->frame_presentation_time,
- cm->seq_params.decoder_model_info.frame_presentation_time_length);
+ cm->seq_params->decoder_model_info.frame_presentation_time_length);
}
static AOM_INLINE void write_film_grain_params(
@@ -2537,15 +2513,15 @@ static AOM_INLINE void write_film_grain_params(
aom_wb_write_literal(wb, pars->scaling_points_y[i][1], 8);
}
- if (!cm->seq_params.monochrome) {
+ if (!cm->seq_params->monochrome) {
aom_wb_write_bit(wb, pars->chroma_scaling_from_luma);
} else {
assert(!pars->chroma_scaling_from_luma);
}
- if (cm->seq_params.monochrome || pars->chroma_scaling_from_luma ||
- ((cm->seq_params.subsampling_x == 1) &&
- (cm->seq_params.subsampling_y == 1) && (pars->num_y_points == 0))) {
+ if (cm->seq_params->monochrome || pars->chroma_scaling_from_luma ||
+ ((cm->seq_params->subsampling_x == 1) &&
+ (cm->seq_params->subsampling_y == 1) && (pars->num_y_points == 0))) {
assert(pars->num_cb_points == 0 && pars->num_cr_points == 0);
} else {
aom_wb_write_literal(wb, pars->num_cb_points, 4); // max 10
@@ -2841,12 +2817,11 @@ static int check_frame_refs_short_signaling(AV1_COMMON *const cm) {
// New function based on HLS R18
static AOM_INLINE void write_uncompressed_header_obu(
- AV1_COMP *cpi, struct aom_write_bit_buffer *saved_wb,
+ AV1_COMP *cpi, MACROBLOCKD *const xd, struct aom_write_bit_buffer *saved_wb,
struct aom_write_bit_buffer *wb) {
AV1_COMMON *const cm = &cpi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const CommonQuantParams *quant_params = &cm->quant_params;
- MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
CurrentFrame *const current_frame = &cm->current_frame;
FeatureFlags *const features = &cm->features;
@@ -2925,7 +2900,7 @@ static AOM_INLINE void write_uncompressed_header_obu(
if (cm->superres_upscaled_width > seq_params->max_frame_width ||
cm->superres_upscaled_height > seq_params->max_frame_height) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Frame dimensions are larger than the maximum values");
}
@@ -2947,24 +2922,24 @@ static AOM_INLINE void write_uncompressed_header_obu(
}
if (seq_params->decoder_model_info_present_flag) {
- aom_wb_write_bit(wb, cm->buffer_removal_time_present);
- if (cm->buffer_removal_time_present) {
+ aom_wb_write_bit(wb, cpi->ppi->buffer_removal_time_present);
+ if (cpi->ppi->buffer_removal_time_present) {
for (int op_num = 0;
op_num < seq_params->operating_points_cnt_minus_1 + 1; op_num++) {
if (seq_params->op_params[op_num].decoder_model_param_present_flag) {
- if (((seq_params->operating_point_idc[op_num] >>
+ if (seq_params->operating_point_idc[op_num] == 0 ||
+ ((seq_params->operating_point_idc[op_num] >>
cm->temporal_layer_id) &
0x1 &&
(seq_params->operating_point_idc[op_num] >>
(cm->spatial_layer_id + 8)) &
- 0x1) ||
- seq_params->operating_point_idc[op_num] == 0) {
+ 0x1)) {
aom_wb_write_unsigned_literal(
wb, cm->buffer_removal_times[op_num],
seq_params->decoder_model_info.buffer_removal_time_length);
cm->buffer_removal_times[op_num]++;
if (cm->buffer_removal_times[op_num] == 0) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"buffer_removal_time overflowed");
}
}
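For reference, the reordered condition above only short-circuits the common operating_point_idc == 0 case ("applies to all layers") before testing the per-layer bits; in the AV1 syntax, bits 0-7 of operating_point_idc select temporal layers and bits 8-11 select spatial layers. A minimal sketch of the same check; the helper name is hypothetical and not part of the patch:

/* Hypothetical helper equivalent to the reordered condition above. */
static int op_applies_to_current_layer(int operating_point_idc,
                                       int temporal_layer_id,
                                       int spatial_layer_id) {
  if (operating_point_idc == 0) return 1; /* applies to all layers */
  return ((operating_point_idc >> temporal_layer_id) & 1) &&
         ((operating_point_idc >> (spatial_layer_id + 8)) & 1);
}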
@@ -3051,7 +3026,7 @@ static AOM_INLINE void write_uncompressed_header_obu(
1;
if (delta_frame_id_minus_1 < 0 ||
delta_frame_id_minus_1 >= (1 << diff_len)) {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Invalid delta_frame_id_minus_1");
}
aom_wb_write_literal(wb, delta_frame_id_minus_1, diff_len);
@@ -3088,8 +3063,8 @@ static AOM_INLINE void write_uncompressed_header_obu(
write_tile_info(cm, saved_wb, wb);
encode_quantization(quant_params, av1_num_planes(cm),
- cm->seq_params.separate_uv_delta_q, wb);
- encode_segmentation(cm, xd, wb);
+ cm->seq_params->separate_uv_delta_q, wb);
+ encode_segmentation(cm, wb);
const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
if (delta_q_info->delta_q_present_flag) assert(quant_params->base_qindex > 0);
@@ -3288,11 +3263,11 @@ static int remux_tiles(const CommonTileParams *const tiles, uint8_t *dst,
}
uint32_t av1_write_obu_header(AV1LevelParams *const level_params,
- OBU_TYPE obu_type, int obu_extension,
- uint8_t *const dst) {
+ int *frame_header_count, OBU_TYPE obu_type,
+ int obu_extension, uint8_t *const dst) {
if (level_params->keep_level_stats &&
(obu_type == OBU_FRAME || obu_type == OBU_FRAME_HEADER))
- ++level_params->frame_header_count;
+ ++(*frame_header_count);
struct aom_write_bit_buffer wb = { dst, 0 };
uint32_t size = 0;
@@ -3326,8 +3301,8 @@ int av1_write_uleb_obu_size(size_t obu_header_size, size_t obu_payload_size,
return AOM_CODEC_OK;
}
-static size_t obu_memmove(size_t obu_header_size, size_t obu_payload_size,
- uint8_t *data) {
+size_t av1_obu_memmove(size_t obu_header_size, size_t obu_payload_size,
+ uint8_t *data) {
const size_t length_field_size = aom_uleb_size_in_bytes(obu_payload_size);
const size_t move_dst_offset = length_field_size + obu_header_size;
const size_t move_src_offset = obu_header_size;
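The renamed av1_obu_memmove() (previously the static obu_memmove()) shifts an already-written OBU payload to make room for the ULEB-encoded size field between the header and the payload; callers then emit the size with av1_write_uleb_obu_size(). A minimal sketch of that pattern, assuming the payload was written directly after the header at dst; the wrapper name is hypothetical:

/* Sketch of the usual OBU finalization pattern, using names from this patch. */
static size_t finalize_obu(uint8_t *dst, size_t obu_header_size,
                           size_t obu_payload_size) {
  /* Shift the payload right to create space for the ULEB length field. */
  const size_t length_field_size =
      av1_obu_memmove(obu_header_size, obu_payload_size, dst);
  /* Write the payload size into the gap just after the OBU header. */
  if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size, dst) !=
      AOM_CODEC_OK) {
    return 0; /* error */
  }
  return obu_header_size + length_field_size + obu_payload_size;
}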
@@ -3426,12 +3401,12 @@ uint32_t av1_write_sequence_header_obu(const SequenceHeader *seq_params,
return size;
}
-static uint32_t write_frame_header_obu(AV1_COMP *cpi,
+static uint32_t write_frame_header_obu(AV1_COMP *cpi, MACROBLOCKD *const xd,
struct aom_write_bit_buffer *saved_wb,
uint8_t *const dst,
int append_trailing_bits) {
struct aom_write_bit_buffer wb = { dst, 0 };
- write_uncompressed_header_obu(cpi, saved_wb, &wb);
+ write_uncompressed_header_obu(cpi, xd, saved_wb, &wb);
if (append_trailing_bits) add_trailing_bits(&wb);
return aom_wb_bytes_written(&wb);
}
@@ -3455,12 +3430,6 @@ static uint32_t write_tile_group_header(uint8_t *const dst, int start_tile,
return size;
}
-typedef struct {
- uint8_t *frame_header;
- size_t obu_header_byte_offset;
- size_t total_length;
-} FrameHeaderInfo;
-
extern void av1_print_uncompressed_frame_header(const uint8_t *data, int size,
const char *filename);
@@ -3473,16 +3442,17 @@ typedef struct {
static uint32_t init_large_scale_tile_obu_header(
AV1_COMP *const cpi, uint8_t **data, struct aom_write_bit_buffer *saved_wb,
LargeTileFrameOBU *lst_obu) {
- AV1LevelParams *const level_params = &cpi->level_params;
+ AV1LevelParams *const level_params = &cpi->ppi->level_params;
CurrentFrame *const current_frame = &cpi->common.current_frame;
// For large_scale_tile case, we always have only one tile group, so it can
// be written as an OBU_FRAME.
const OBU_TYPE obu_type = OBU_FRAME;
- lst_obu->tg_hdr_size = av1_write_obu_header(level_params, obu_type, 0, *data);
+ lst_obu->tg_hdr_size = av1_write_obu_header(
+ level_params, &cpi->frame_header_count, obu_type, 0, *data);
*data += lst_obu->tg_hdr_size;
const uint32_t frame_header_size =
- write_frame_header_obu(cpi, saved_wb, *data, 0);
+ write_frame_header_obu(cpi, &cpi->td.mb.e_mbd, saved_wb, *data, 0);
*data += frame_header_size;
lst_obu->frame_header_size = frame_header_size;
// (yunqing) This test ensures the correctness of large scale tile coding.
@@ -3520,7 +3490,7 @@ static void write_large_scale_tile_obu_size(
*total_size += lst_obu->tg_hdr_size;
const uint32_t obu_payload_size = *total_size - lst_obu->tg_hdr_size;
const size_t length_field_size =
- obu_memmove(lst_obu->tg_hdr_size, obu_payload_size, dst);
+ av1_obu_memmove(lst_obu->tg_hdr_size, obu_payload_size, dst);
if (av1_write_uleb_obu_size(lst_obu->tg_hdr_size, obu_payload_size, dst) !=
AOM_CODEC_OK)
assert(0);
@@ -3551,6 +3521,7 @@ static void write_large_scale_tile_obu(
const int tile_rows = tiles->rows;
unsigned int tile_size = 0;
+ av1_reset_pack_bs_thread_data(&cpi->td);
for (int tile_col = 0; tile_col < tile_cols; tile_col++) {
TileInfo tile_info;
const int is_last_col = (tile_col == tile_cols - 1);
@@ -3579,7 +3550,7 @@ static void write_large_scale_tile_obu(
mode_bc.allow_update_cdf =
mode_bc.allow_update_cdf && !cm->features.disable_cdf_update;
aom_start_encode(&mode_bc, buf->data + data_offset);
- write_modes(cpi, &tile_info, &mode_bc, tile_row, tile_col);
+ write_modes(cpi, &cpi->td, &tile_info, &mode_bc, tile_row, tile_col);
aom_stop_encode(&mode_bc);
tile_size = mode_bc.pos;
buf->size = tile_size;
@@ -3627,6 +3598,7 @@ static void write_large_scale_tile_obu(
*max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
}
}
+ av1_accumulate_pack_bs_thread_data(cpi, &cpi->td);
}
// Packs information in the obu header for large scale tiles.
@@ -3656,147 +3628,236 @@ static INLINE uint32_t pack_large_scale_tiles_in_tg_obus(
return total_size;
}
+// Writes obu, tile group and uncompressed headers to bitstream.
+void av1_write_obu_tg_tile_headers(AV1_COMP *const cpi, MACROBLOCKD *const xd,
+ PackBSParams *const pack_bs_params,
+ const int tile_idx) {
+ AV1_COMMON *const cm = &cpi->common;
+ const CommonTileParams *const tiles = &cm->tiles;
+ int *const curr_tg_hdr_size = &pack_bs_params->curr_tg_hdr_size;
+ const int tg_size =
+ (tiles->rows * tiles->cols + cpi->num_tg - 1) / cpi->num_tg;
+
+ // Write Tile group, frame and OBU header
+ // A new tile group begins at this tile. Write the obu header and
+ // tile group header
+ const OBU_TYPE obu_type = (cpi->num_tg == 1) ? OBU_FRAME : OBU_TILE_GROUP;
+ *curr_tg_hdr_size = av1_write_obu_header(
+ &cpi->ppi->level_params, &cpi->frame_header_count, obu_type,
+ pack_bs_params->obu_extn_header, pack_bs_params->tile_data_curr);
+ pack_bs_params->obu_header_size = *curr_tg_hdr_size;
+
+ if (cpi->num_tg == 1)
+ *curr_tg_hdr_size += write_frame_header_obu(
+ cpi, xd, pack_bs_params->saved_wb,
+ pack_bs_params->tile_data_curr + *curr_tg_hdr_size, 0);
+ *curr_tg_hdr_size += write_tile_group_header(
+ pack_bs_params->tile_data_curr + *curr_tg_hdr_size, tile_idx,
+ AOMMIN(tile_idx + tg_size - 1, tiles->cols * tiles->rows - 1),
+ (tiles->log2_rows + tiles->log2_cols), cpi->num_tg > 1);
+ *pack_bs_params->total_size += *curr_tg_hdr_size;
+}
+
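The tile-group size computed above is a plain ceiling division of the tile count by the number of tile groups; for example, 12 tiles split into cpi->num_tg == 3 groups gives 4 tiles per group. A small illustrative helper, not part of the patch:

/* Illustration only: tiles per tile group as a ceiling division. */
static int tiles_per_tile_group(int num_tiles, int num_tg) {
  return (num_tiles + num_tg - 1) / num_tg; /* e.g. 12 tiles, 3 TGs -> 4 */
}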
+// Pack tile data in the bitstream with tile_group, frame
+// and OBU header.
+void av1_pack_tile_info(AV1_COMP *const cpi, ThreadData *const td,
+ PackBSParams *const pack_bs_params) {
+ aom_writer mode_bc;
+ AV1_COMMON *const cm = &cpi->common;
+ int tile_row = pack_bs_params->tile_row;
+ int tile_col = pack_bs_params->tile_col;
+ uint32_t *const total_size = pack_bs_params->total_size;
+ TileInfo tile_info;
+ av1_tile_set_col(&tile_info, cm, tile_col);
+ av1_tile_set_row(&tile_info, cm, tile_row);
+ mode_bc.allow_update_cdf = 1;
+ mode_bc.allow_update_cdf =
+ mode_bc.allow_update_cdf && !cm->features.disable_cdf_update;
+
+ unsigned int tile_size;
+
+ const int num_planes = av1_num_planes(cm);
+ av1_reset_loop_restoration(&td->mb.e_mbd, num_planes);
+
+ pack_bs_params->buf.data = pack_bs_params->dst + *total_size;
+
+ // The last tile of the tile group does not have a header.
+ if (!pack_bs_params->is_last_tile_in_tg) *total_size += 4;
+
+ // Pack tile data
+ aom_start_encode(&mode_bc, pack_bs_params->dst + *total_size);
+ write_modes(cpi, td, &tile_info, &mode_bc, tile_row, tile_col);
+ aom_stop_encode(&mode_bc);
+ tile_size = mode_bc.pos;
+ assert(tile_size >= AV1_MIN_TILE_SIZE_BYTES);
+
+ pack_bs_params->buf.size = tile_size;
+
+ // Write tile size
+ if (!pack_bs_params->is_last_tile_in_tg) {
+ // size of this tile
+ mem_put_le32(pack_bs_params->buf.data, tile_size - AV1_MIN_TILE_SIZE_BYTES);
+ }
+}
+
+void av1_write_last_tile_info(
+ AV1_COMP *const cpi, const FrameHeaderInfo *fh_info,
+ struct aom_write_bit_buffer *saved_wb, size_t *curr_tg_data_size,
+ uint8_t *curr_tg_start, uint32_t *const total_size,
+ uint8_t **tile_data_start, int *const largest_tile_id,
+ int *const is_first_tg, uint32_t obu_header_size, uint8_t obu_extn_header) {
+ // write current tile group size
+ const uint32_t obu_payload_size =
+ (uint32_t)(*curr_tg_data_size) - obu_header_size;
+ const size_t length_field_size =
+ av1_obu_memmove(obu_header_size, obu_payload_size, curr_tg_start);
+ if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size,
+ curr_tg_start) != AOM_CODEC_OK) {
+ assert(0);
+ }
+ *curr_tg_data_size += (int)length_field_size;
+ *total_size += (uint32_t)length_field_size;
+ *tile_data_start += length_field_size;
+ if (cpi->num_tg == 1) {
+ // if this tg is combined with the frame header then update saved
+ // frame header base offset according to length field size
+ saved_wb->bit_buffer += length_field_size;
+ }
+
+ if (!(*is_first_tg) && cpi->common.features.error_resilient_mode) {
+ // Make room for a duplicate Frame Header OBU.
+ memmove(curr_tg_start + fh_info->total_length, curr_tg_start,
+ *curr_tg_data_size);
+
+ // Insert a copy of the Frame Header OBU.
+ memcpy(curr_tg_start, fh_info->frame_header, fh_info->total_length);
+
+ // Force context update tile to be the first tile in error
+ // resilient mode as the duplicate frame headers will have
+ // context_update_tile_id set to 0
+ *largest_tile_id = 0;
+
+ // Rewrite the OBU header to change the OBU type to Redundant Frame
+ // Header.
+ av1_write_obu_header(&cpi->ppi->level_params, &cpi->frame_header_count,
+ OBU_REDUNDANT_FRAME_HEADER, obu_extn_header,
+ &curr_tg_start[fh_info->obu_header_byte_offset]);
+
+ *curr_tg_data_size += (int)(fh_info->total_length);
+ *total_size += (uint32_t)(fh_info->total_length);
+ }
+ *is_first_tg = 0;
+}
+
+void av1_reset_pack_bs_thread_data(ThreadData *const td) {
+ td->coefficient_size = 0;
+ td->max_mv_magnitude = 0;
+ av1_zero(td->interp_filter_selected);
+}
+
+void av1_accumulate_pack_bs_thread_data(AV1_COMP *const cpi,
+ ThreadData const *td) {
+ int do_max_mv_magnitude_update = 1;
+ cpi->rc.coefficient_size += td->coefficient_size;
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Disable max_mv_magnitude update for parallel frames based on update flag.
+ if (!cpi->do_frame_data_update) do_max_mv_magnitude_update = 0;
+#endif
+
+ if (cpi->sf.mv_sf.auto_mv_step_size && do_max_mv_magnitude_update)
+ cpi->mv_search_params.max_mv_magnitude =
+ AOMMAX(cpi->mv_search_params.max_mv_magnitude, td->max_mv_magnitude);
+
+ for (InterpFilter filter = EIGHTTAP_REGULAR; filter < SWITCHABLE; filter++)
+ cpi->common.cur_frame->interp_filter_selected[filter] +=
+ td->interp_filter_selected[filter];
+}
+
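These two helpers bracket bitstream packing so that per-thread statistics (coefficient size, maximum MV magnitude, interpolation-filter counts) can be gathered independently and folded back into the encoder; the single-threaded write_tile_obu() below follows the same reset/pack/accumulate pattern. A rough sketch with a hypothetical driver, assuming the per-tile PackBSParams entries are filled as in write_tile_obu():

/* Hypothetical single-threaded driver showing the intended call pattern. */
static void pack_all_tiles_st(AV1_COMP *cpi, PackBSParams *params,
                              int num_tiles) {
  av1_reset_pack_bs_thread_data(&cpi->td);
  for (int tile_idx = 0; tile_idx < num_tiles; ++tile_idx) {
    /* params[tile_idx] is assumed to be filled as in write_tile_obu(). */
    av1_pack_tile_info(cpi, &cpi->td, &params[tile_idx]);
  }
  av1_accumulate_pack_bs_thread_data(cpi, &cpi->td);
}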
// Store information related to each default tile in the OBU header.
static void write_tile_obu(
AV1_COMP *const cpi, uint8_t *const dst, uint32_t *total_size,
- struct aom_write_bit_buffer *saved_wb, uint8_t obu_extension_header,
+ struct aom_write_bit_buffer *saved_wb, uint8_t obu_extn_header,
const FrameHeaderInfo *fh_info, int *const largest_tile_id,
unsigned int *max_tile_size, uint32_t *const obu_header_size,
uint8_t **tile_data_start) {
AV1_COMMON *const cm = &cpi->common;
+ MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
const CommonTileParams *const tiles = &cm->tiles;
- AV1LevelParams *const level_params = &cpi->level_params;
- TileBufferEnc tile_buffers[MAX_TILE_ROWS][MAX_TILE_COLS];
const int tile_cols = tiles->cols;
const int tile_rows = tiles->rows;
- unsigned int tile_size = 0;
// Fixed size tile groups for the moment
const int num_tg_hdrs = cpi->num_tg;
const int tg_size = (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
int tile_count = 0;
- int curr_tg_data_size = 0;
- uint8_t *data = dst;
+ size_t curr_tg_data_size = 0;
+ uint8_t *tile_data_curr = dst;
int new_tg = 1;
- int first_tg = 1;
+ int is_first_tg = 1;
+ av1_reset_pack_bs_thread_data(&cpi->td);
for (int tile_row = 0; tile_row < tile_rows; tile_row++) {
for (int tile_col = 0; tile_col < tile_cols; tile_col++) {
- aom_writer mode_bc;
const int tile_idx = tile_row * tile_cols + tile_col;
- TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
- int is_last_tile_in_tg = 0;
+ int is_last_tile_in_tg = 0;
if (new_tg) {
- data = dst + *total_size;
-
- // A new tile group begins at this tile. Write the obu header and
- // tile group header
- const OBU_TYPE obu_type =
- (num_tg_hdrs == 1) ? OBU_FRAME : OBU_TILE_GROUP;
- curr_tg_data_size = av1_write_obu_header(level_params, obu_type,
- obu_extension_header, data);
- *obu_header_size = curr_tg_data_size;
-
- if (num_tg_hdrs == 1)
- curr_tg_data_size += write_frame_header_obu(
- cpi, saved_wb, data + curr_tg_data_size, 0);
- curr_tg_data_size += write_tile_group_header(
- data + curr_tg_data_size, tile_idx,
- AOMMIN(tile_idx + tg_size - 1, tile_cols * tile_rows - 1),
- (tiles->log2_rows + tiles->log2_cols), cpi->num_tg > 1);
- *total_size += curr_tg_data_size;
- *tile_data_start += curr_tg_data_size;
- new_tg = 0;
+ tile_data_curr = dst + *total_size;
tile_count = 0;
}
tile_count++;
- TileInfo tile_info;
- av1_tile_set_col(&tile_info, cm, tile_col);
- av1_tile_set_row(&tile_info, cm, tile_row);
- if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1)) {
+ if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1))
is_last_tile_in_tg = 1;
- new_tg = 1;
- } else {
- is_last_tile_in_tg = 0;
- }
- buf->data = dst + *total_size;
+ xd->tile_ctx = &this_tile->tctx;
- // The last tile of the tile group does not have a header.
- if (!is_last_tile_in_tg) *total_size += 4;
+ // PackBSParams stores all parameters required to pack tile and header
+ // info.
+ PackBSParams pack_bs_params;
+ pack_bs_params.dst = dst;
+ pack_bs_params.curr_tg_hdr_size = 0;
+ pack_bs_params.is_last_tile_in_tg = is_last_tile_in_tg;
+ pack_bs_params.new_tg = new_tg;
+ pack_bs_params.obu_extn_header = obu_extn_header;
+ pack_bs_params.obu_header_size = 0;
+ pack_bs_params.saved_wb = saved_wb;
+ pack_bs_params.tile_col = tile_col;
+ pack_bs_params.tile_row = tile_row;
+ pack_bs_params.tile_data_curr = tile_data_curr;
+ pack_bs_params.total_size = total_size;
- cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
- mode_bc.allow_update_cdf = 1;
- mode_bc.allow_update_cdf =
- mode_bc.allow_update_cdf && !cm->features.disable_cdf_update;
- const int num_planes = av1_num_planes(cm);
- av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);
+ if (new_tg)
+ av1_write_obu_tg_tile_headers(cpi, xd, &pack_bs_params, tile_idx);
- aom_start_encode(&mode_bc, dst + *total_size);
- write_modes(cpi, &tile_info, &mode_bc, tile_row, tile_col);
- aom_stop_encode(&mode_bc);
- tile_size = mode_bc.pos;
- assert(tile_size >= AV1_MIN_TILE_SIZE_BYTES);
+ av1_pack_tile_info(cpi, &cpi->td, &pack_bs_params);
- curr_tg_data_size += (tile_size + (is_last_tile_in_tg ? 0 : 4));
- buf->size = tile_size;
- if (tile_size > *max_tile_size) {
- *largest_tile_id = tile_cols * tile_row + tile_col;
- *max_tile_size = tile_size;
+ if (new_tg) {
+ curr_tg_data_size = pack_bs_params.curr_tg_hdr_size;
+ *tile_data_start += pack_bs_params.curr_tg_hdr_size;
+ *obu_header_size = pack_bs_params.obu_header_size;
+ new_tg = 0;
}
+ if (is_last_tile_in_tg) new_tg = 1;
- if (!is_last_tile_in_tg) {
- // size of this tile
- mem_put_le32(buf->data, tile_size - AV1_MIN_TILE_SIZE_BYTES);
- } else {
- // write current tile group size
- const uint32_t obu_payload_size = curr_tg_data_size - *obu_header_size;
- const size_t length_field_size =
- obu_memmove(*obu_header_size, obu_payload_size, data);
- if (av1_write_uleb_obu_size(*obu_header_size, obu_payload_size, data) !=
- AOM_CODEC_OK) {
- assert(0);
- }
- curr_tg_data_size += (int)length_field_size;
- *total_size += (uint32_t)length_field_size;
- *tile_data_start += length_field_size;
- if (num_tg_hdrs == 1) {
- // if this tg is combined with the frame header then update saved
- // frame header base offset accroding to length field size
- saved_wb->bit_buffer += length_field_size;
- }
+ curr_tg_data_size +=
+ (pack_bs_params.buf.size + (is_last_tile_in_tg ? 0 : 4));
- if (!first_tg && cm->features.error_resilient_mode) {
- // Make room for a duplicate Frame Header OBU.
- memmove(data + fh_info->total_length, data, curr_tg_data_size);
-
- // Insert a copy of the Frame Header OBU.
- memcpy(data, fh_info->frame_header, fh_info->total_length);
-
- // Force context update tile to be the first tile in error
- // resiliant mode as the duplicate frame headers will have
- // context_update_tile_id set to 0
- *largest_tile_id = 0;
-
- // Rewrite the OBU header to change the OBU type to Redundant Frame
- // Header.
- av1_write_obu_header(level_params, OBU_REDUNDANT_FRAME_HEADER,
- obu_extension_header,
- &data[fh_info->obu_header_byte_offset]);
-
- data += fh_info->total_length;
-
- curr_tg_data_size += (int)(fh_info->total_length);
- *total_size += (uint32_t)(fh_info->total_length);
- }
- first_tg = 0;
+ if (pack_bs_params.buf.size > *max_tile_size) {
+ *largest_tile_id = tile_idx;
+ *max_tile_size = (unsigned int)pack_bs_params.buf.size;
}
- *total_size += tile_size;
+ if (is_last_tile_in_tg)
+ av1_write_last_tile_info(cpi, fh_info, saved_wb, &curr_tg_data_size,
+ tile_data_curr, total_size, tile_data_start,
+ largest_tile_id, &is_first_tg,
+ *obu_header_size, obu_extn_header);
+ *total_size += (uint32_t)pack_bs_params.buf.size;
}
}
+ av1_accumulate_pack_bs_thread_data(cpi, &cpi->td);
}
// Write total buffer size and related information into the OBU header for
@@ -3854,6 +3915,24 @@ static void write_tile_obu_size(AV1_COMP *const cpi, uint8_t *const dst,
}
}
+// Experiments show that single-threaded bitstream packing is faster for
+// frames with a small bitstream size: the setup overhead of the
+// multithreaded path exceeds the time needed to pack such a small
+// bitstream. A threshold on the total absolute sum of transform
+// coefficients is used to detect these frames and disable multithreading.
+int enable_pack_bitstream_mt(const TileDataEnc *tile_data, int num_tiles,
+ int num_workers) {
+ if (AOMMIN(num_workers, num_tiles) <= 1) return 0;
+
+ const int num_work_sqr = num_workers * num_workers;
+ const uint64_t thresh = 50;
+ uint64_t frame_abs_sum_level = 0;
+ for (int idx = 0; idx < num_tiles; idx++)
+ frame_abs_sum_level += tile_data[idx].abs_sum_level;
+ return ((frame_abs_sum_level > (num_work_sqr * thresh) / (num_workers - 1)));
+}
+
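To make the cut-off concrete: with the hard-coded thresh of 50 and, say, num_workers == 4, multithreaded packing is used only when the frame's summed absolute coefficient level exceeds (4 * 4 * 50) / (4 - 1) = 266. A hypothetical helper that just evaluates that bound (it assumes num_workers > 1, which the early return above already guarantees):

/* Hypothetical helper: MT packing threshold for a given worker count. */
static uint64_t pack_bs_mt_threshold(int num_workers) {
  const uint64_t thresh = 50; /* same constant as above */
  return ((uint64_t)num_workers * num_workers * thresh) / (num_workers - 1);
}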
static INLINE uint32_t pack_tiles_in_tg_obus(
AV1_COMP *const cpi, uint8_t *const dst,
struct aom_write_bit_buffer *saved_wb, uint8_t obu_extension_header,
@@ -3863,16 +3942,25 @@ static INLINE uint32_t pack_tiles_in_tg_obus(
unsigned int max_tile_size = 0;
uint32_t obu_header_size = 0;
uint8_t *tile_data_start = dst;
-
- write_tile_obu(cpi, dst, &total_size, saved_wb, obu_extension_header, fh_info,
- largest_tile_id, &max_tile_size, &obu_header_size,
- &tile_data_start);
-
+ const int num_workers = cpi->mt_info.num_mod_workers[MOD_PACK_BS];
const int tile_cols = tiles->cols;
const int tile_rows = tiles->rows;
- const int have_tiles = tile_cols * tile_rows > 1;
+ const int num_tiles = tile_rows * tile_cols;
+
+ const int enable_mt =
+ enable_pack_bitstream_mt(cpi->tile_data, num_tiles, num_workers);
- if (have_tiles)
+ if (enable_mt) {
+ av1_write_tile_obu_mt(cpi, dst, &total_size, saved_wb, obu_extension_header,
+ fh_info, largest_tile_id, &max_tile_size,
+ &obu_header_size, &tile_data_start);
+ } else {
+ write_tile_obu(cpi, dst, &total_size, saved_wb, obu_extension_header,
+ fh_info, largest_tile_id, &max_tile_size, &obu_header_size,
+ &tile_data_start);
+ }
+
+ if (num_tiles > 1)
write_tile_obu_size(cpi, dst, saved_wb, *largest_tile_id, &total_size,
max_tile_size, obu_header_size, tile_data_start);
return total_size;
@@ -3887,6 +3975,9 @@ static uint32_t write_tiles_in_tg_obus(AV1_COMP *const cpi, uint8_t *const dst,
const CommonTileParams *const tiles = &cm->tiles;
*largest_tile_id = 0;
+ // Select the coding strategy (temporal or spatial)
+ if (cm->seg.enabled) av1_choose_segmap_coding_method(cm, &cpi->td.mb.e_mbd);
+
if (tiles->large_scale)
return pack_large_scale_tiles_in_tg_obus(cpi, dst, saved_wb,
largest_tile_id);
@@ -3926,18 +4017,20 @@ static size_t av1_write_metadata_array(AV1_COMP *const cpi, uint8_t *dst) {
(cm->current_frame.frame_type != KEY_FRAME &&
current_metadata->insert_flag == AOM_MIF_NON_KEY_FRAME) ||
current_metadata->insert_flag == AOM_MIF_ANY_FRAME) {
- obu_header_size =
- av1_write_obu_header(&cpi->level_params, OBU_METADATA, 0, dst);
+ obu_header_size = av1_write_obu_header(&cpi->ppi->level_params,
+ &cpi->frame_header_count,
+ OBU_METADATA, 0, dst);
obu_payload_size =
av1_write_metadata_obu(current_metadata, dst + obu_header_size);
- length_field_size = obu_memmove(obu_header_size, obu_payload_size, dst);
+ length_field_size =
+ av1_obu_memmove(obu_header_size, obu_payload_size, dst);
if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size, dst) ==
AOM_CODEC_OK) {
const size_t obu_size = obu_header_size + obu_payload_size;
dst += obu_size + length_field_size;
total_bytes_written += obu_size + length_field_size;
} else {
- aom_internal_error(&cpi->common.error, AOM_CODEC_ERROR,
+ aom_internal_error(cpi->common.error, AOM_CODEC_ERROR,
"Error writing metadata OBU size");
}
}
@@ -3951,7 +4044,7 @@ int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size,
uint8_t *data = dst;
uint32_t data_size;
AV1_COMMON *const cm = &cpi->common;
- AV1LevelParams *const level_params = &cpi->level_params;
+ AV1LevelParams *const level_params = &cpi->ppi->level_params;
uint32_t obu_header_size = 0;
uint32_t obu_payload_size = 0;
FrameHeaderInfo fh_info = { NULL, 0, 0 };
@@ -3967,19 +4060,19 @@ int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size,
bitstream_queue_reset_write();
#endif
- level_params->frame_header_count = 0;
+ cpi->frame_header_count = 0;
// The TD is now written outside the frame encode loop
// write sequence header obu if KEY_FRAME, preceded by 4-byte size
if (cm->current_frame.frame_type == KEY_FRAME && !cpi->no_show_fwd_kf) {
- obu_header_size =
- av1_write_obu_header(level_params, OBU_SEQUENCE_HEADER, 0, data);
+ obu_header_size = av1_write_obu_header(
+ level_params, &cpi->frame_header_count, OBU_SEQUENCE_HEADER, 0, data);
obu_payload_size =
- av1_write_sequence_header_obu(&cm->seq_params, data + obu_header_size);
+ av1_write_sequence_header_obu(cm->seq_params, data + obu_header_size);
const size_t length_field_size =
- obu_memmove(obu_header_size, obu_payload_size, data);
+ av1_obu_memmove(obu_header_size, obu_payload_size, data);
if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size, data) !=
AOM_CODEC_OK) {
return AOM_CODEC_ERROR;
@@ -3998,12 +4091,13 @@ int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size,
if (write_frame_header) {
// Write Frame Header OBU.
fh_info.frame_header = data;
- obu_header_size = av1_write_obu_header(level_params, OBU_FRAME_HEADER,
- obu_extension_header, data);
- obu_payload_size =
- write_frame_header_obu(cpi, &saved_wb, data + obu_header_size, 1);
+ obu_header_size =
+ av1_write_obu_header(level_params, &cpi->frame_header_count,
+ OBU_FRAME_HEADER, obu_extension_header, data);
+ obu_payload_size = write_frame_header_obu(cpi, &cpi->td.mb.e_mbd, &saved_wb,
+ data + obu_header_size, 1);
- length_field = obu_memmove(obu_header_size, obu_payload_size, data);
+ length_field = av1_obu_memmove(obu_header_size, obu_payload_size, data);
if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size, data) !=
AOM_CODEC_OK) {
return AOM_CODEC_ERROR;
diff --git a/third_party/libaom/source/libaom/av1/encoder/bitstream.h b/third_party/libaom/source/libaom/av1/encoder/bitstream.h
index df35ecccfa..e32cd3bd19 100644
--- a/third_party/libaom/source/libaom/av1/encoder/bitstream.h
+++ b/third_party/libaom/source/libaom/av1/encoder/bitstream.h
@@ -16,9 +16,67 @@
extern "C" {
#endif
-#include "av1/encoder/encoder.h"
+#include "av1/common/av1_common_int.h"
+#include "av1/common/blockd.h"
+#include "av1/common/enums.h"
+#include "av1/encoder/level.h"
+#include "aom_dsp/bitwriter.h"
struct aom_write_bit_buffer;
+struct AV1_COMP;
+struct ThreadData;
+
+/*!\cond */
+
+// Stores the location and size of a tile's data in the bitstream. Used for
+// later identifying identical tiles
+typedef struct {
+ uint8_t *data;
+ size_t size;
+} TileBufferEnc;
+
+typedef struct {
+ uint8_t *frame_header;
+ size_t obu_header_byte_offset;
+ size_t total_length;
+} FrameHeaderInfo;
+
+typedef struct {
+ struct aom_write_bit_buffer *saved_wb; // Bit stream buffer writer structure
+ TileBufferEnc buf; // Structure to hold bitstream buffer and size
+ uint32_t *total_size; // Running total of bytes written to the bitstream
+ uint8_t *dst; // Base address of tile bitstream buffer
+ uint8_t *tile_data_curr; // Base address of tile-group bitstream buffer
+ size_t tile_buf_size; // Available bitstream buffer for the tile in bytes
+ uint8_t obu_extn_header; // Presence of OBU extension header
+ uint32_t obu_header_size; // Size of the OBU header
+ int curr_tg_hdr_size; // Size of the obu, tg, frame headers
+ int tile_size_mi; // Tile size in mi units
+ int tile_row; // Row index of the tile being packed
+ int tile_col; // Column index of the tile being packed
+ int is_last_tile_in_tg; // Flag to indicate last tile in a tile-group
+ int new_tg; // Flag to indicate starting of a new tile-group
+} PackBSParams;
+
+typedef struct {
+ uint64_t abs_sum_level;
+ uint16_t tile_idx;
+} PackBSTileOrder;
+
+// Pack bitstream data for pack bitstream multi-threading.
+typedef struct {
+#if CONFIG_MULTITHREAD
+ // Mutex lock used while dispatching jobs.
+ pthread_mutex_t *mutex_;
+#endif
+ // Tile order structure of pack bitstream multithreading.
+ PackBSTileOrder pack_bs_tile_order[MAX_TILES];
+
+ // Index of next job to be processed.
+ int next_job_idx;
+} AV1EncPackBSSync;
+
+/*!\endcond */
// Writes only the OBU Sequence Header payload, and returns the size of the
// payload written to 'dst'. This function does not write the OBU header, the
@@ -29,23 +87,44 @@ uint32_t av1_write_sequence_header_obu(const SequenceHeader *seq_params,
// Writes the OBU header byte, and the OBU header extension byte when
// 'obu_extension' is non-zero. Returns number of bytes written to 'dst'.
uint32_t av1_write_obu_header(AV1LevelParams *const level_params,
- OBU_TYPE obu_type, int obu_extension,
- uint8_t *const dst);
+ int *frame_header_count, OBU_TYPE obu_type,
+ int obu_extension, uint8_t *const dst);
int av1_write_uleb_obu_size(size_t obu_header_size, size_t obu_payload_size,
uint8_t *dest);
+// Pack tile data in the bitstream with tile_group, frame
+// and OBU header.
+void av1_pack_tile_info(struct AV1_COMP *const cpi, struct ThreadData *const td,
+ PackBSParams *const pack_bs_params);
+
+void av1_write_last_tile_info(
+ struct AV1_COMP *const cpi, const FrameHeaderInfo *fh_info,
+ struct aom_write_bit_buffer *saved_wb, size_t *curr_tg_data_size,
+ uint8_t *curr_tg_start, uint32_t *const total_size,
+ uint8_t **tile_data_start, int *const largest_tile_id,
+ int *const is_first_tg, uint32_t obu_header_size, uint8_t obu_extn_header);
+
/*!\brief Pack the bitstream for one frame
*
* \ingroup high_level_algo
* \callgraph
*/
-int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size,
+int av1_pack_bitstream(struct AV1_COMP *const cpi, uint8_t *dst, size_t *size,
int *const largest_tile_id);
void av1_write_tx_type(const AV1_COMMON *const cm, const MACROBLOCKD *xd,
TX_TYPE tx_type, TX_SIZE tx_size, aom_writer *w);
+void av1_reset_pack_bs_thread_data(struct ThreadData *const td);
+
+void av1_accumulate_pack_bs_thread_data(struct AV1_COMP *const cpi,
+ struct ThreadData const *td);
+
+void av1_write_obu_tg_tile_headers(struct AV1_COMP *const cpi,
+ MACROBLOCKD *const xd,
+ PackBSParams *const pack_bs_params,
+ const int tile_idx);
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/third_party/libaom/source/libaom/av1/encoder/block.h b/third_party/libaom/source/libaom/av1/encoder/block.h
index 59353cfac3..aaf3654a5f 100644
--- a/third_party/libaom/source/libaom/av1/encoder/block.h
+++ b/third_party/libaom/source/libaom/av1/encoder/block.h
@@ -102,7 +102,7 @@ typedef struct {
*/
typedef struct macroblock_plane {
//! Stores source - pred so the txfm can be computed later
- DECLARE_ALIGNED(32, int16_t, src_diff[MAX_SB_SQUARE]);
+ int16_t *src_diff;
//! Dequantized coefficients
tran_low_t *dqcoeff;
//! Quantized coefficients
@@ -778,6 +778,23 @@ typedef struct {
/**@}*/
} MvCosts;
+/*! \brief Holds mv costs for intrabc.
+ */
+typedef struct {
+ /*! Costs for coding the joint mv. */
+ int joint_mv[MV_JOINTS];
+
+ /*! \brief Cost of transmitting the actual motion vector.
+ * dv_costs_alloc[0][i] is the cost of a motion vector with vertical
+ * component (mv_row) equal to i - MV_MAX. dv_costs_alloc[1][i] is the cost
+ * of a motion vector with horizontal component (mv_col) equal to i - MV_MAX.
+ */
+ int dv_costs_alloc[2][MV_VALS];
+
+ /*! Points to the middle of \ref dv_costs_alloc. */
+ int *dv_costs[2];
+} IntraBCMVCosts;
+
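The dv_costs[] pointers are documented as pointing to the middle of dv_costs_alloc[], which lets them be indexed directly by a signed MV component in [-MV_MAX, MV_MAX]. A sketch of how that setup presumably looks; the initialization code is not part of this hunk and the helper name is hypothetical:

/* Sketch only: make dv_costs[] indexable by a signed MV component. */
static void init_dv_cost_pointers(IntraBCMVCosts *dv_costs) {
  dv_costs->dv_costs[0] = &dv_costs->dv_costs_alloc[0][MV_MAX];
  dv_costs->dv_costs[1] = &dv_costs->dv_costs_alloc[1][MV_MAX];
}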
/*! \brief Holds the costs needed to encode the coefficients
*/
typedef struct {
@@ -817,6 +834,14 @@ typedef struct {
int lighting_change;
int low_sumdiff;
} CONTENT_STATE_SB;
+
+// Structure to hold pixel level gradient info.
+typedef struct {
+ uint16_t abs_dx_abs_dy_sum;
+ int8_t hist_bin_idx;
+ bool is_dx_zero;
+} PixelLevelGradientInfo;
+
/*!\endcond */
/*! \brief Encoder's parameters related to the current coding block.
@@ -945,6 +970,11 @@ typedef struct macroblock {
//! multipliers for motion search.
MvCosts *mv_costs;
+ /*! The rate needed to encode a new motion vector to the bitstream in intrabc
+ * mode.
+ */
+ IntraBCMVCosts *dv_costs;
+
//! The rate needed to signal the txfm coefficients to the bitstream.
CoeffCosts coeff_costs;
/**@}*/
@@ -1014,6 +1044,10 @@ typedef struct macroblock {
int pred_mv_sad[REF_FRAMES];
//! The minimum of \ref pred_mv_sad.
int best_pred_mv_sad;
+ //! The sad of the 1st mv ref (nearest).
+ int pred_mv0_sad[REF_FRAMES];
+ //! The sad of the 2nd mv ref (near).
+ int pred_mv1_sad[REF_FRAMES];
/*! \brief Disables certain ref frame pruning based on tpl.
*
@@ -1092,8 +1126,7 @@ typedef struct macroblock {
* In the second pass, we retry the winner modes with more thorough txfm
* options.
*/
- WinnerModeStats winner_mode_stats[AOMMAX(MAX_WINNER_MODE_COUNT_INTRA,
- MAX_WINNER_MODE_COUNT_INTER)];
+ WinnerModeStats *winner_mode_stats;
//! Tracks how many winner modes there are.
int winner_mode_count;
@@ -1147,10 +1180,20 @@ typedef struct macroblock {
*/
IntraBCHashInfo intrabc_hash_info;
- /*! \brief Whether to reuse the mode stored in intermode_cache. */
- int use_intermode_cache;
- /*! \brief The mode to reuse during \ref av1_rd_pick_inter_mode. */
- const MB_MODE_INFO *intermode_cache;
+ /*! \brief Whether to reuse the mode stored in mb_mode_cache. */
+ int use_mb_mode_cache;
+ /*! \brief The mode to reuse during \ref av1_rd_pick_intra_mode_sb and
+ * \ref av1_rd_pick_inter_mode. */
+ const MB_MODE_INFO *mb_mode_cache;
+ /*! \brief Pointer to the buffer which caches gradient information.
+ *
+ * Pointer to the array of structures to store gradient information of each
+ * pixel in a superblock. The buffer constitutes of MAX_SB_SQUARE pixel level
+ * structures for each of the plane types (PLANE_TYPE_Y and PLANE_TYPE_UV).
+ */
+ PixelLevelGradientInfo *pixel_gradient_info;
+ /*! \brief Flags indicating the availability of cached gradient info. */
+ bool is_sb_gradient_cached[PLANE_TYPES];
/**@}*/
/*****************************************************************************
@@ -1195,6 +1238,8 @@ typedef struct macroblock {
* Used in REALTIME coding mode to enhance the visual quality at the boundary
* of moving color objects.
*/
+ uint8_t color_sensitivity_sb[2];
+ //! Color sensitivity flag for the coding block.
uint8_t color_sensitivity[2];
/**@}*/
diff --git a/third_party/libaom/source/libaom/av1/encoder/compound_type.c b/third_party/libaom/source/libaom/av1/encoder/compound_type.c
index aacb7fc88a..00fa3890bf 100644
--- a/third_party/libaom/source/libaom/av1/encoder/compound_type.c
+++ b/third_party/libaom/source/libaom/av1/encoder/compound_type.c
@@ -48,31 +48,31 @@ static INLINE int is_comp_rd_match(const AV1_COMP *const cpi,
if (is_global_mv_block(mi, wm->wmtype) != st->is_global[i]) return 0;
}
- // Store the stats for COMPOUND_AVERAGE and COMPOUND_DISTWTD
- for (int comp_type = COMPOUND_AVERAGE; comp_type <= COMPOUND_DISTWTD;
- comp_type++) {
- comp_rate[comp_type] = st->rate[comp_type];
- comp_dist[comp_type] = st->dist[comp_type];
- comp_model_rate[comp_type] = st->model_rate[comp_type];
- comp_model_dist[comp_type] = st->model_dist[comp_type];
- comp_rs2[comp_type] = st->comp_rs2[comp_type];
- }
-
- // For compound wedge/segment, reuse data only if NEWMV is not present in
- // either of the directions
+ int reuse_data[COMPOUND_TYPES] = { 1, 1, 0, 0 };
+ // For compound wedge, reuse data if newmv search is disabled when NEWMV is
+ // present or if NEWMV is not present in either of the directions
if ((!have_newmv_in_inter_mode(mi->mode) &&
!have_newmv_in_inter_mode(st->mode)) ||
- (cpi->sf.inter_sf.disable_interinter_wedge_newmv_search)) {
- memcpy(&comp_rate[COMPOUND_WEDGE], &st->rate[COMPOUND_WEDGE],
- sizeof(comp_rate[COMPOUND_WEDGE]) * 2);
- memcpy(&comp_dist[COMPOUND_WEDGE], &st->dist[COMPOUND_WEDGE],
- sizeof(comp_dist[COMPOUND_WEDGE]) * 2);
- memcpy(&comp_model_rate[COMPOUND_WEDGE], &st->model_rate[COMPOUND_WEDGE],
- sizeof(comp_model_rate[COMPOUND_WEDGE]) * 2);
- memcpy(&comp_model_dist[COMPOUND_WEDGE], &st->model_dist[COMPOUND_WEDGE],
- sizeof(comp_model_dist[COMPOUND_WEDGE]) * 2);
- memcpy(&comp_rs2[COMPOUND_WEDGE], &st->comp_rs2[COMPOUND_WEDGE],
- sizeof(comp_rs2[COMPOUND_WEDGE]) * 2);
+ (cpi->sf.inter_sf.disable_interinter_wedge_newmv_search))
+ reuse_data[COMPOUND_WEDGE] = 1;
+ // For compound diffwtd, reuse data if fast search is enabled (no newmv search
+ // when NEWMV is present) or if NEWMV is not present in either of the
+ // directions
+ if (cpi->sf.inter_sf.enable_fast_compound_mode_search ||
+ (!have_newmv_in_inter_mode(mi->mode) &&
+ !have_newmv_in_inter_mode(st->mode)))
+ reuse_data[COMPOUND_DIFFWTD] = 1;
+
+ // Store the stats for the different compound types
+ for (int comp_type = COMPOUND_AVERAGE; comp_type < COMPOUND_TYPES;
+ comp_type++) {
+ if (reuse_data[comp_type]) {
+ comp_rate[comp_type] = st->rate[comp_type];
+ comp_dist[comp_type] = st->dist[comp_type];
+ comp_model_rate[comp_type] = st->model_rate[comp_type];
+ comp_model_dist[comp_type] = st->model_dist[comp_type];
+ comp_rs2[comp_type] = st->comp_rs2[comp_type];
+ }
}
return 1;
}
@@ -166,14 +166,14 @@ static int8_t estimate_wedge_sign(const AV1_COMP *cpi, const MACROBLOCK *x,
// TODO(nithya): Sign estimation assumes 45 degrees (1st and 4th quadrants)
// for all codebooks; experiment with other quadrant combinations for
// 0, 90 and 135 degrees also.
- cpi->fn_ptr[f_index].vf(src, src_stride, pred0, stride0, &esq[0][0]);
- cpi->fn_ptr[f_index].vf(src + bh_by2 * src_stride + bw_by2, src_stride,
- pred0 + bh_by2 * stride0 + bw_by2, stride0,
- &esq[0][1]);
- cpi->fn_ptr[f_index].vf(src, src_stride, pred1, stride1, &esq[1][0]);
- cpi->fn_ptr[f_index].vf(src + bh_by2 * src_stride + bw_by2, src_stride,
- pred1 + bh_by2 * stride1 + bw_by2, stride0,
- &esq[1][1]);
+ cpi->ppi->fn_ptr[f_index].vf(src, src_stride, pred0, stride0, &esq[0][0]);
+ cpi->ppi->fn_ptr[f_index].vf(src + bh_by2 * src_stride + bw_by2, src_stride,
+ pred0 + bh_by2 * stride0 + bw_by2, stride0,
+ &esq[0][1]);
+ cpi->ppi->fn_ptr[f_index].vf(src, src_stride, pred1, stride1, &esq[1][0]);
+ cpi->ppi->fn_ptr[f_index].vf(src + bh_by2 * src_stride + bw_by2, src_stride,
+ pred1 + bh_by2 * stride1 + bw_by2, stride0,
+ &esq[1][1]);
tl = ((int64_t)esq[0][0]) - ((int64_t)esq[1][0]);
br = ((int64_t)esq[1][1]) - ((int64_t)esq[0][1]);
@@ -314,7 +314,7 @@ static int64_t pick_interinter_wedge(
int8_t wedge_sign = 0;
assert(is_interinter_compound_used(COMPOUND_WEDGE, bsize));
- assert(cpi->common.seq_params.enable_masked_compound);
+ assert(cpi->common.seq_params->enable_masked_compound);
if (cpi->sf.inter_sf.fast_wedge_sign_estimate) {
wedge_sign = estimate_wedge_sign(cpi, x, bsize, p0, bw, p1, bw);
@@ -392,7 +392,7 @@ static int64_t pick_interintra_wedge(const AV1_COMP *const cpi,
const MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *const mbmi = xd->mi[0];
assert(av1_is_wedge_used(bsize));
- assert(cpi->common.seq_params.enable_interintra_compound);
+ assert(cpi->common.seq_params->enable_interintra_compound);
const struct buf_2d *const src = &x->plane[0].src;
const int bw = block_size_wide[bsize];
@@ -836,7 +836,7 @@ static INLINE int compute_valid_comp_types(MACROBLOCK *x,
const int try_average_comp = (mode_search_mask & (1 << COMPOUND_AVERAGE));
const int try_distwtd_comp =
((mode_search_mask & (1 << COMPOUND_DISTWTD)) &&
- cm->seq_params.order_hint_info.enable_dist_wtd_comp == 1 &&
+ cm->seq_params->order_hint_info.enable_dist_wtd_comp == 1 &&
cpi->sf.inter_sf.use_dist_wtd_comp_flag != DIST_WTD_COMP_DISABLED);
// Check if COMPOUND_AVERAGE and COMPOUND_DISTWTD are valid cases
@@ -1058,10 +1058,12 @@ static int64_t masked_compound_type_rd(
if (compound_type == COMPOUND_WEDGE) {
unsigned int sse;
if (is_cur_buf_hbd(xd))
- (void)cpi->fn_ptr[bsize].vf(CONVERT_TO_BYTEPTR(*preds0), *strides,
- CONVERT_TO_BYTEPTR(*preds1), *strides, &sse);
+ (void)cpi->ppi->fn_ptr[bsize].vf(CONVERT_TO_BYTEPTR(*preds0), *strides,
+ CONVERT_TO_BYTEPTR(*preds1), *strides,
+ &sse);
else
- (void)cpi->fn_ptr[bsize].vf(*preds0, *strides, *preds1, *strides, &sse);
+ (void)cpi->ppi->fn_ptr[bsize].vf(*preds0, *strides, *preds1, *strides,
+ &sse);
const unsigned int mse =
ROUND_POWER_OF_TWO(sse, num_pels_log2_lookup[bsize]);
// If two predictors are very similar, skip wedge compound mode search
@@ -1164,7 +1166,8 @@ static int64_t masked_compound_type_rd(
assert(comp_dist[compound_type] != INT64_MAX);
// When disable_interinter_wedge_newmv_search is set, motion refinement is
// disabled. Hence rate and distortion can be reused in this case as well
- assert(IMPLIES(have_newmv_in_inter_mode(this_mode),
+ assert(IMPLIES((have_newmv_in_inter_mode(this_mode) &&
+ (compound_type == COMPOUND_WEDGE)),
cpi->sf.inter_sf.disable_interinter_wedge_newmv_search));
assert(mbmi->mv[0].as_int == cur_mv[0].as_int);
assert(mbmi->mv[1].as_int == cur_mv[1].as_int);
@@ -1338,11 +1341,12 @@ int av1_compound_type_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
if (have_newmv_in_inter_mode(this_mode)) {
InterPredParams inter_pred_params;
av1_dist_wtd_comp_weight_assign(
- &cpi->common, mbmi, 0, &inter_pred_params.conv_params.fwd_offset,
+ &cpi->common, mbmi, &inter_pred_params.conv_params.fwd_offset,
&inter_pred_params.conv_params.bck_offset,
&inter_pred_params.conv_params.use_dist_wtd_comp_avg, 1);
int mask_value = inter_pred_params.conv_params.fwd_offset * 4;
- memset(xd->seg_mask, mask_value, sizeof(xd->seg_mask));
+ memset(xd->seg_mask, mask_value,
+ sizeof(xd->seg_mask[0]) * 2 * MAX_SB_SQUARE);
tmp_rate_mv = av1_interinter_compound_motion_search(cpi, x, cur_mv,
bsize, this_mode);
}
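The memset size is now spelled out as sizeof(xd->seg_mask[0]) * 2 * MAX_SB_SQUARE, presumably because seg_mask is now a pointer to a separately allocated buffer, so sizeof(xd->seg_mask) would only yield the pointer size. A tiny standalone illustration of the pitfall; the function is hypothetical:

/* Illustration only: sizeof() on a pointer vs. an explicit buffer size. */
#include <string.h>
static void fill_mask(unsigned char *mask, size_t mask_bytes, int value) {
  memset(mask, value, mask_bytes);    /* correct: explicit buffer size     */
  /* memset(mask, value, sizeof(mask));  wrong: only the pointer's size    */
}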
@@ -1369,7 +1373,7 @@ int av1_compound_type_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
int_mv tmp_mv[2] = { mbmi->mv[0], mbmi->mv[1] };
int best_rs2 = 0;
int best_rate_mv = *rate_mv;
- const int wedge_mask_size = get_wedge_types_lookup(bsize);
+ int wedge_mask_size = get_wedge_types_lookup(bsize);
int need_mask_search = args->wedge_index == -1;
if (need_mask_search && !have_newmv_in_inter_mode(this_mode)) {
@@ -1392,7 +1396,8 @@ int av1_compound_type_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
mode_rd = RDCOST(x->rdmult, rs2 + rd_stats->rate, 0);
if (mode_rd >= ref_best_rd / 2) continue;
- if (have_newmv_in_inter_mode(this_mode)) {
+ if (have_newmv_in_inter_mode(this_mode) &&
+ !cpi->sf.inter_sf.disable_interinter_wedge_newmv_search) {
tmp_rate_mv = av1_interinter_compound_motion_search(
cpi, x, cur_mv, bsize, this_mode);
av1_enc_build_inter_predictor(cm, xd, mi_row, mi_col, orig_dst,
@@ -1425,6 +1430,33 @@ int av1_compound_type_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
best_rs2 = rs2;
}
}
+ // Consider the asymmetric partitions for oblique angle only if the
+ // corresponding symmetric partition is the best so far.
+ // Note: For horizontal and vertical types, both symmetric and
+ // asymmetric partitions are always considered.
+ if (cpi->sf.inter_sf.enable_fast_wedge_mask_search) {
+ // The first 4 entries in wedge_codebook_16_heqw/hltw/hgtw[16]
+ // correspond to symmetric partitions of the 4 oblique angles, the
+ // next 4 entries correspond to the vertical/horizontal
+ // symmetric/asymmetric partitions and the last 8 entries correspond
+ // to the asymmetric partitions of oblique types.
+ const int idx_before_asym_oblique = 7;
+ const int last_oblique_sym_idx = 3;
+ if (wedge_mask == idx_before_asym_oblique) {
+ if (best_mask_index > last_oblique_sym_idx) {
+ break;
+ } else {
+ // Asymmetric (Index-1) map for the corresponding oblique masks.
+ // WEDGE_OBLIQUE27: sym - 0, asym - 8, 9
+ // WEDGE_OBLIQUE63: sym - 1, asym - 12, 13
+ // WEDGE_OBLIQUE117: sym - 2, asym - 14, 15
+ // WEDGE_OBLIQUE153: sym - 3, asym - 10, 11
+ const int asym_mask_idx[4] = { 7, 11, 13, 9 };
+ wedge_mask = asym_mask_idx[best_mask_index];
+ wedge_mask_size = wedge_mask + 3;
+ }
+ }
+ }
}
if (need_mask_search) {
@@ -1439,7 +1471,8 @@ int av1_compound_type_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
rs2 = masked_type_cost[cur_type];
rs2 += get_interinter_compound_mask_rate(&x->mode_costs, mbmi);
- if (have_newmv_in_inter_mode(this_mode)) {
+ if (have_newmv_in_inter_mode(this_mode) &&
+ !cpi->sf.inter_sf.disable_interinter_wedge_newmv_search) {
tmp_rate_mv = av1_interinter_compound_motion_search(cpi, x, cur_mv,
bsize, this_mode);
}
@@ -1485,7 +1518,8 @@ int av1_compound_type_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
if (have_newmv_in_inter_mode(this_mode)) {
// hard coded number for diff wtd
int mask_value = mask_index == 0 ? 38 : 26;
- memset(xd->seg_mask, mask_value, sizeof(xd->seg_mask));
+ memset(xd->seg_mask, mask_value,
+ sizeof(xd->seg_mask[0]) * 2 * MAX_SB_SQUARE);
tmp_rate_mv = av1_interinter_compound_motion_search(cpi, x, cur_mv,
bsize, this_mode);
}
@@ -1522,7 +1556,8 @@ int av1_compound_type_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
rs2 += get_interinter_compound_mask_rate(&x->mode_costs, mbmi);
int mask_value = mbmi->interinter_comp.mask_type == 0 ? 38 : 26;
- memset(xd->seg_mask, mask_value, sizeof(xd->seg_mask));
+ memset(xd->seg_mask, mask_value,
+ sizeof(xd->seg_mask[0]) * 2 * MAX_SB_SQUARE);
if (have_newmv_in_inter_mode(this_mode)) {
tmp_rate_mv = av1_interinter_compound_motion_search(cpi, x, cur_mv,
diff --git a/third_party/libaom/source/libaom/av1/encoder/context_tree.c b/third_party/libaom/source/libaom/av1/encoder/context_tree.c
index 566576e4f5..9fd9d1b1e8 100644
--- a/third_party/libaom/source/libaom/av1/encoder/context_tree.c
+++ b/third_party/libaom/source/libaom/av1/encoder/context_tree.c
@@ -230,7 +230,7 @@ static AOM_INLINE int get_pc_tree_nodes(const int is_sb_size_128,
void av1_setup_sms_tree(AV1_COMP *const cpi, ThreadData *td) {
AV1_COMMON *const cm = &cpi->common;
const int stat_generation_stage = is_stat_generation_stage(cpi);
- const int is_sb_size_128 = cm->seq_params.sb_size == BLOCK_128X128;
+ const int is_sb_size_128 = cm->seq_params->sb_size == BLOCK_128X128;
const int tree_nodes =
get_pc_tree_nodes(is_sb_size_128, stat_generation_stage);
int sms_tree_index = 0;
diff --git a/third_party/libaom/source/libaom/av1/encoder/dwt.c b/third_party/libaom/source/libaom/av1/encoder/dwt.c
index b5ed4a3446..5dfbcb677b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/dwt.c
+++ b/third_party/libaom/source/libaom/av1/encoder/dwt.c
@@ -147,9 +147,23 @@ uint32_t av1_variance(uint8_t *input, int bw, int bh, int stride) {
return sse - (uint32_t)(((int64_t)sum * sum) / (bw * bh));
}
-int av1_haar_ac_sad_8x8_uint8_input(const uint8_t *input, int stride, int hbd) {
+static int haar_ac_sad_8x8_uint8_input(const uint8_t *input, int stride,
+ int hbd) {
tran_low_t output[64];
av1_fdwt8x8_uint8_input_c(input, output, stride, hbd);
return av1_haar_ac_sad(output, 8, 8, 8);
}
+
+int64_t av1_haar_ac_sad_mxn_uint8_input(const uint8_t *input, int stride,
+ int hbd, int num_8x8_rows,
+ int num_8x8_cols) {
+ int64_t wavelet_energy = 0;
+ for (int r8 = 0; r8 < num_8x8_rows; ++r8) {
+ for (int c8 = 0; c8 < num_8x8_cols; ++c8) {
+ wavelet_energy += haar_ac_sad_8x8_uint8_input(
+ input + c8 * 8 + r8 * 8 * stride, stride, hbd);
+ }
+ }
+ return wavelet_energy;
+}
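The new av1_haar_ac_sad_mxn_uint8_input() tiles the per-8x8 Haar AC SAD over an arbitrary grid of 8x8 blocks and accumulates it as a 64-bit energy value. Usage sketch for one 64x64 superblock of 8-bit source; src and stride are assumed to come from the caller and the wrapper name is hypothetical:

/* Sketch: wavelet energy of one 64x64 superblock = 8x8 grid of 8x8 DWTs. */
static int64_t superblock_wavelet_energy(const uint8_t *src, int stride) {
  return av1_haar_ac_sad_mxn_uint8_input(src, stride, /*hbd=*/0,
                                         /*num_8x8_rows=*/8,
                                         /*num_8x8_cols=*/8);
}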
diff --git a/third_party/libaom/source/libaom/av1/encoder/dwt.h b/third_party/libaom/source/libaom/av1/encoder/dwt.h
index 1bd32edb3b..443b6bc12c 100644
--- a/third_party/libaom/source/libaom/av1/encoder/dwt.h
+++ b/third_party/libaom/source/libaom/av1/encoder/dwt.h
@@ -19,6 +19,9 @@
void av1_fdwt8x8_uint8_input_c(const uint8_t *input, tran_low_t *output,
int stride, int hbd);
-int av1_haar_ac_sad_8x8_uint8_input(const uint8_t *input, int stride, int hbd);
+
+int64_t av1_haar_ac_sad_mxn_uint8_input(const uint8_t *input, int stride,
+ int hbd, int num_8x8_rows,
+ int num_8x8_cols);
#endif // AOM_AV1_ENCODER_DWT_H_
diff --git a/third_party/libaom/source/libaom/av1/encoder/enc_enums.h b/third_party/libaom/source/libaom/av1/encoder/enc_enums.h
index 319e5d02c9..20cefa16a5 100644
--- a/third_party/libaom/source/libaom/av1/encoder/enc_enums.h
+++ b/third_party/libaom/source/libaom/av1/encoder/enc_enums.h
@@ -216,6 +216,8 @@ enum {
NUM_SINGLE_REF_MODES = SINGLE_REF_MODE_END - SINGLE_REF_MODE_START,
THR_MODE_START = THR_NEARESTMV,
THR_MODE_END = MAX_MODES,
+ THR_INTER_MODE_START = THR_MODE_START,
+ THR_INTER_MODE_END = THR_DC,
THR_INVALID = 255
} UENUM1BYTE(THR_MODES);
diff --git a/third_party/libaom/source/libaom/av1/encoder/encode_strategy.c b/third_party/libaom/source/libaom/av1/encoder/encode_strategy.c
index da7ec4487d..01f2959d85 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encode_strategy.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encode_strategy.c
@@ -106,11 +106,19 @@ void av1_configure_buffer_updates(
}
if (ext_refresh_frame_flags->update_pending &&
- (!is_stat_generation_stage(cpi)))
+ (!is_stat_generation_stage(cpi))) {
set_refresh_frame_flags(refresh_frame_flags,
ext_refresh_frame_flags->golden_frame,
ext_refresh_frame_flags->bwd_ref_frame,
ext_refresh_frame_flags->alt_ref_frame);
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
+ if (ext_refresh_frame_flags->golden_frame)
+ gf_group->update_type[cpi->gf_frame_index] = GF_UPDATE;
+ if (ext_refresh_frame_flags->alt_ref_frame)
+ gf_group->update_type[cpi->gf_frame_index] = ARF_UPDATE;
+ if (ext_refresh_frame_flags->bwd_ref_frame)
+ gf_group->update_type[cpi->gf_frame_index] = INTNL_ARF_UPDATE;
+ }
if (force_refresh_all)
set_refresh_frame_flags(refresh_frame_flags, true, true, true);
@@ -141,7 +149,7 @@ static INLINE int is_frame_droppable(
const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags) {
// Droppable frame is only used by external refresh flags. VoD setting won't
// trigger its use case.
- if (svc->external_ref_frame_config)
+ if (svc->set_ref_frame_config)
return svc->non_reference_frame;
else if (ext_refresh_frame_flags->update_pending)
return !(ext_refresh_frame_flags->alt_ref_frame ||
@@ -168,7 +176,7 @@ static INLINE void update_frames_till_gf_update(AV1_COMP *cpi) {
static INLINE void update_gf_group_index(AV1_COMP *cpi) {
// Increment the gf group index ready for the next frame.
- ++cpi->gf_group.index;
+ ++cpi->gf_frame_index;
}
static void update_rc_counts(AV1_COMP *cpi) {
@@ -216,7 +224,7 @@ static int get_current_frame_ref_type(
// TODO(jingning): This table should be a lot simpler with the new
// ARF system in place. Keep frame_params for the time being as we are
// still evaluating a few design options.
- switch (cpi->gf_group.layer_depth[cpi->gf_group.index]) {
+ switch (cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index]) {
case 0: return 0;
case 1: return 1;
case MAX_ARF_LAYERS:
@@ -238,16 +246,16 @@ static int choose_primary_ref_frame(
// In large scale case, always use Last frame's frame contexts.
// Note(yunqing): In other cases, primary_ref_frame is chosen based on
- // cpi->gf_group.layer_depth[cpi->gf_group.index], which also controls
+ // cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index], which also controls
// frame bit allocation.
if (cm->tiles.large_scale) return (LAST_FRAME - LAST_FRAME);
- if (cpi->use_svc) return av1_svc_primary_ref_frame(cpi);
+ if (cpi->ppi->use_svc) return av1_svc_primary_ref_frame(cpi);
// Find the most recent reference frame with the same reference type as the
// current frame
const int current_ref_type = get_current_frame_ref_type(cpi, frame_params);
- int wanted_fb = cpi->fb_of_context_type[current_ref_type];
+ int wanted_fb = cpi->ppi->fb_of_context_type[current_ref_type];
int primary_ref_frame = PRIMARY_REF_NONE;
for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
@@ -303,7 +311,7 @@ static void adjust_frame_rate(AV1_COMP *cpi, int64_t ts_start, int64_t ts_end) {
// Clear down mmx registers
aom_clear_system_state();
- if (cpi->use_svc && cpi->svc.spatial_layer_id > 0) {
+ if (cpi->ppi->use_svc && cpi->svc.spatial_layer_id > 0) {
cpi->framerate = cpi->svc.base_framerate;
av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height);
return;
@@ -372,17 +380,17 @@ static struct lookahead_entry *choose_frame_source(
struct lookahead_entry **last_source,
EncodeFrameParams *const frame_params) {
AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
struct lookahead_entry *source = NULL;
// Source index in lookahead buffer.
- int src_index = gf_group->arf_src_offset[gf_group->index];
+ int src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
// TODO(Aasaipriya): Forced key frames need to be fixed when rc_mode != AOM_Q
if (src_index &&
(is_forced_keyframe_pending(cpi->ppi->lookahead, src_index,
cpi->compressor_stage) != -1) &&
- cpi->oxcf.rc_cfg.mode != AOM_Q) {
+ cpi->oxcf.rc_cfg.mode != AOM_Q && !is_stat_generation_stage(cpi)) {
src_index = 0;
*flush = 1;
}
@@ -395,7 +403,7 @@ static struct lookahead_entry *choose_frame_source(
// If this is a key frame and keyframe filtering is enabled with overlay,
// then do not pop.
if (*pop_lookahead && cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1 &&
- gf_group->update_type[gf_group->index] == ARF_UPDATE &&
+ gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE &&
!is_stat_generation_stage(cpi) && cpi->ppi->lookahead) {
if (cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz &&
(*flush ||
@@ -404,16 +412,37 @@ static struct lookahead_entry *choose_frame_source(
*pop_lookahead = 0;
}
}
+
+ // LAP stage does not have ARFs or forward key-frames,
+ // hence, always pop_lookahead here.
+ if (is_stat_generation_stage(cpi)) {
+ *pop_lookahead = 1;
+ src_index = 0;
+ }
+
frame_params->show_frame = *pop_lookahead;
- if (*pop_lookahead) {
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Future frame in parallel encode set
+ if (gf_group->src_offset[cpi->gf_frame_index] != 0 &&
+ !is_stat_generation_stage(cpi) &&
+ 0 /*will be turned on along with frame parallel encode*/) {
+ src_index = gf_group->src_offset[cpi->gf_frame_index];
+ // Don't remove future frames from lookahead_ctx. They will be
+ // removed in their actual encode call.
+ *pop_lookahead = 0;
+ }
+#endif
+ if (frame_params->show_frame) {
// show frame, pop from buffer
// Get last frame source.
if (cm->current_frame.frame_number > 0) {
- *last_source =
- av1_lookahead_peek(cpi->ppi->lookahead, -1, cpi->compressor_stage);
+ *last_source = av1_lookahead_peek(cpi->ppi->lookahead, src_index - 1,
+ cpi->compressor_stage);
}
// Read in the source frame.
- source = av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
+ source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
+ cpi->compressor_stage);
} else {
// no show frames are arf frames
source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
@@ -677,7 +706,17 @@ void av1_update_ref_frame_map(AV1_COMP *cpi,
return;
}
-static int get_free_ref_map_index(const RefBufferStack *ref_buffer_stack) {
+static int get_free_ref_map_index(
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ RefFrameMapPair ref_map_pairs[REF_FRAMES],
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ const RefBufferStack *ref_buffer_stack) {
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ (void)ref_buffer_stack;
+ for (int idx = 0; idx < REF_FRAMES; ++idx)
+ if (ref_map_pairs[idx].disp_order == -1) return idx;
+ return INVALID_IDX;
+#else
for (int idx = 0; idx < REF_FRAMES; ++idx) {
int is_free = 1;
for (int i = 0; i < ref_buffer_stack->arf_stack_size; ++i) {
@@ -704,11 +743,61 @@ static int get_free_ref_map_index(const RefBufferStack *ref_buffer_stack) {
if (is_free) return idx;
}
return INVALID_IDX;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
+#if CONFIG_FRAME_PARALLEL_ENCODE
+static int get_refresh_idx(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
+ int update_arf, int cur_frame_disp) {
+ int arf_count = 0;
+ int oldest_arf_order = INT32_MAX;
+ int oldest_arf_idx = -1;
+
+ int oldest_frame_order = INT32_MAX;
+ int oldest_idx = -1;
+
+ for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
+ RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
+ if (ref_pair.disp_order == -1) continue;
+ const int frame_order = ref_pair.disp_order;
+ const int reference_frame_level = ref_pair.pyr_level;
+ // Do not refresh a future frame.
+ if (frame_order > cur_frame_disp) continue;
+
+ // Keep track of the oldest level 1 frame if the current frame is also level
+ // 1.
+ if (reference_frame_level == 1) {
+ // If there are more than 2 level 1 frames in the reference list,
+ // discard the oldest.
+ if (frame_order < oldest_arf_order) {
+ oldest_arf_order = frame_order;
+ oldest_arf_idx = map_idx;
+ }
+ arf_count++;
+ continue;
+ }
+
+ // Update the overall oldest reference frame.
+ if (frame_order < oldest_frame_order) {
+ oldest_frame_order = frame_order;
+ oldest_idx = map_idx;
+ }
+ }
+ if (update_arf && arf_count > 2) return oldest_arf_idx;
+ if (oldest_idx >= 0) return oldest_idx;
+ if (oldest_arf_idx >= 0) return oldest_arf_idx;
+ assert(0 && "No valid refresh index found");
+ return -1;
+}
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
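The eviction policy in get_refresh_idx() above can be tried out in isolation. Below is a minimal, self-contained C sketch (illustrative only, not part of this patch) of the same idea: never evict a frame that is still in the future, keep at most two level-1 (ARF-like) references before dropping the oldest of them, and otherwise drop the oldest remaining past reference. The RefPair struct, N_REFS, and the sample data are invented for the example.

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#define N_REFS 8

/* Illustrative stand-in for RefFrameMapPair: display order and pyramid level;
 * disp_order == -1 marks an empty slot. */
typedef struct { int disp_order; int pyr_level; } RefPair;

static int pick_refresh_slot(const RefPair refs[N_REFS], int update_arf,
                             int cur_disp) {
  int arf_count = 0, oldest_arf = INT_MAX, oldest_arf_idx = -1;
  int oldest = INT_MAX, oldest_idx = -1;
  for (int i = 0; i < N_REFS; ++i) {
    if (refs[i].disp_order == -1) continue;       /* empty slot */
    if (refs[i].disp_order > cur_disp) continue;  /* never evict a future frame */
    if (refs[i].pyr_level == 1) {                 /* level-1 (ARF-like) reference */
      if (refs[i].disp_order < oldest_arf) {
        oldest_arf = refs[i].disp_order;
        oldest_arf_idx = i;
      }
      ++arf_count;
      continue;
    }
    if (refs[i].disp_order < oldest) {            /* oldest non-ARF past frame */
      oldest = refs[i].disp_order;
      oldest_idx = i;
    }
  }
  if (update_arf && arf_count > 2) return oldest_arf_idx;
  if (oldest_idx >= 0) return oldest_idx;
  if (oldest_arf_idx >= 0) return oldest_arf_idx;
  assert(0 && "no eviction candidate");
  return -1;
}

int main(void) {
  const RefPair refs[N_REFS] = {
    { 0, 1 }, { 4, 2 }, { 8, 1 }, { 2, 3 },
    { 6, 2 }, { -1, 0 }, { 16, 1 }, { 10, 2 },
  };
  /* Encoding display order 12: slot 3 (display order 2) is the oldest
   * non-ARF past frame, so it is the one picked for refresh. */
  printf("evict slot %d\n", pick_refresh_slot(refs, /*update_arf=*/0, 12));
  return 0;
}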
+
int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
const EncodeFrameParams *const frame_params,
FRAME_UPDATE_TYPE frame_update_type,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ int cur_disp_order,
+ RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
const RefBufferStack *const ref_buffer_stack) {
const AV1_COMMON *const cm = &cpi->common;
const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
@@ -733,7 +822,7 @@ int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
int refresh_mask = 0;
if (ext_refresh_frame_flags->update_pending) {
- if (svc->external_ref_frame_config) {
+ if (svc->set_ref_frame_config) {
for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
int ref_frame_map_idx = svc->ref_idx[i];
refresh_mask |= svc->refresh[ref_frame_map_idx] << ref_frame_map_idx;
@@ -777,7 +866,30 @@ int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
}
// Search for the open slot to store the current frame.
- int free_fb_index = get_free_ref_map_index(ref_buffer_stack);
+ int free_fb_index = get_free_ref_map_index(
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ ref_frame_map_pairs,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ ref_buffer_stack);
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // No refresh necessary for these frame types.
+ if (frame_update_type == OVERLAY_UPDATE ||
+ frame_update_type == INTNL_OVERLAY_UPDATE)
+ return refresh_mask;
+
+ // If there is an open slot, refresh that one instead of replacing a
+ // reference.
+ if (free_fb_index != INVALID_IDX) {
+ refresh_mask = 1 << free_fb_index;
+ return refresh_mask;
+ }
+
+ const int update_arf = frame_update_type == ARF_UPDATE;
+ const int refresh_idx =
+ get_refresh_idx(ref_frame_map_pairs, update_arf, cur_disp_order);
+ return 1 << refresh_idx;
+#else
switch (frame_update_type) {
case KF_UPDATE:
case GF_UPDATE:
@@ -843,6 +955,7 @@ int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
}
return refresh_mask;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
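For context, refresh_frame_flags is an 8-bit mask over the physical reference buffers, so the frame-parallel branch above boils down to 1 << slot once a slot has been chosen. A short hedged sketch of building and decoding such a mask follows; the helper functions and the slot numbers are placeholders, not libaom APIs.

#include <stdio.h>

#define REF_FRAMES 8
#define INVALID_IDX (-1)

/* Hypothetical stand-ins for get_free_ref_map_index() / get_refresh_idx();
 * they return fixed values purely for illustration. */
static int find_free_slot(void) { return INVALID_IDX; } /* pretend all buffers are full */
static int pick_eviction_slot(void) { return 3; }       /* pretend slot 3 was chosen */

int main(void) {
  int refresh_mask;
  const int free_slot = find_free_slot();
  if (free_slot != INVALID_IDX)
    refresh_mask = 1 << free_slot;             /* reuse the empty buffer */
  else
    refresh_mask = 1 << pick_eviction_slot();  /* overwrite the chosen victim */

  /* Decode the mask: bit i set means physical buffer i gets the new frame. */
  for (int i = 0; i < REF_FRAMES; ++i)
    if (refresh_mask & (1 << i)) printf("refresh buffer %d\n", i);
  return 0;
}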
#if !CONFIG_REALTIME_ONLY
@@ -852,10 +965,10 @@ void setup_mi(AV1_COMP *const cpi, YV12_BUFFER_CONFIG *src) {
MACROBLOCK *const x = &cpi->td.mb;
MACROBLOCKD *const xd = &x->e_mbd;
- av1_setup_src_planes(x, src, 0, 0, num_planes, cm->seq_params.sb_size);
+ av1_setup_src_planes(x, src, 0, 0, num_planes, cm->seq_params->sb_size);
- av1_setup_block_planes(xd, cm->seq_params.subsampling_x,
- cm->seq_params.subsampling_y, num_planes);
+ av1_setup_block_planes(xd, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y, num_planes);
set_mi_offsets(&cm->mi_params, xd, 0, 0);
}
@@ -872,8 +985,9 @@ static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
#endif
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
- FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
// Decide whether to apply temporal filtering to the source frame.
int apply_filtering = 0;
@@ -887,7 +1001,7 @@ static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
oxcf->algo_cfg.arnr_max_frames > 0 && oxcf->gf_cfg.lag_in_frames > 1;
if (allow_kf_filtering) {
const double y_noise_level = av1_estimate_noise_from_single_plane(
- frame_input->source, 0, cm->seq_params.bit_depth);
+ frame_input->source, 0, cm->seq_params->bit_depth);
apply_filtering = y_noise_level > 0;
} else {
apply_filtering = 0;
@@ -900,6 +1014,9 @@ static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
// ARF
apply_filtering = oxcf->algo_cfg.arnr_max_frames > 0;
}
+ if (is_stat_generation_stage(cpi)) {
+ apply_filtering = 0;
+ }
#if CONFIG_COLLECT_COMPONENT_TIMING
if (cpi->oxcf.pass == 2) start_timing(cpi, apply_filtering_time);
@@ -911,7 +1028,7 @@ static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
int show_existing_alt_ref = 0;
// TODO(bohanli): figure out why we need frame_type in cm here.
cm->current_frame.frame_type = frame_params->frame_type;
- int arf_src_index = gf_group->arf_src_offset[gf_group->index];
+ int arf_src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
int is_forward_keyframe = 0;
if (!frame_params->show_frame && cpi->no_show_fwd_kf) {
// TODO(angiebird): Figure out why this condition yields forward keyframe.
@@ -922,8 +1039,8 @@ static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
av1_temporal_filter(cpi, arf_src_index, update_type,
is_forward_keyframe, &show_existing_alt_ref);
if (code_arf) {
- aom_extend_frame_borders(&cpi->alt_ref_buffer, av1_num_planes(cm));
- frame_input->source = &cpi->alt_ref_buffer;
+ aom_extend_frame_borders(&cpi->ppi->alt_ref_buffer, av1_num_planes(cm));
+ frame_input->source = &cpi->ppi->alt_ref_buffer;
aom_copy_metadata_to_frame_buffer(frame_input->source,
source_buffer->metadata);
}
@@ -944,12 +1061,12 @@ static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
// Don't do tpl for fwd key frames or fwd key frame overlays
allow_tpl = allow_tpl && !cpi->sf.tpl_sf.disable_filtered_key_tpl &&
!cpi->no_show_fwd_kf &&
- gf_group->update_type[gf_group->index] != OVERLAY_UPDATE;
+ gf_group->update_type[cpi->gf_frame_index] != OVERLAY_UPDATE;
} else {
// Do tpl after ARF is filtered, or if no ARF, at the second frame of GF
// group.
// TODO(bohanli): if no ARF, just do it at the first frame.
- int gf_index = gf_group->index;
+ int gf_index = cpi->gf_frame_index;
allow_tpl = allow_tpl && (gf_group->update_type[gf_index] == ARF_UPDATE ||
gf_group->update_type[gf_index] == GF_UPDATE);
if (allow_tpl) {
@@ -962,10 +1079,13 @@ static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
if (allow_tpl == 0) {
// Avoid the use of unintended TPL stats from previous GOP's results.
- if (gf_group->index == 0) av1_init_tpl_stats(&cpi->tpl_data);
+ if (cpi->gf_frame_index == 0 && !is_stat_generation_stage(cpi))
+ av1_init_tpl_stats(&cpi->ppi->tpl_data);
} else {
- if (!cpi->tpl_data.skip_tpl_setup_stats)
+ if (!cpi->skip_tpl_setup_stats) {
+ av1_tpl_preload_rc_estimate(cpi, frame_params);
av1_tpl_setup_stats(cpi, 0, frame_params, frame_input);
+ }
}
if (av1_encode(cpi, dest, frame_input, frame_params, frame_results) !=
@@ -1003,12 +1123,262 @@ static INLINE int find_unused_ref_frame(const int *used_ref_frames,
return INVALID_IDX;
}
-void av1_get_ref_frames(AV1_COMP *const cpi, RefBufferStack *ref_buffer_stack) {
+#if CONFIG_FRAME_PARALLEL_ENCODE
+/*!\cond */
+// Struct to keep track of relevant reference frame data.
+typedef struct {
+ int map_idx;
+ int disp_order;
+ int pyr_level;
+ int used;
+} RefBufMapData;
+/*!\endcond */
+
+// Comparison function to sort reference frames in ascending display order.
+static int compare_map_idx_pair_asc(const void *a, const void *b) {
+ if (((RefBufMapData *)a)->disp_order == ((RefBufMapData *)b)->disp_order) {
+ return 0;
+ } else if (((const RefBufMapData *)a)->disp_order >
+ ((const RefBufMapData *)b)->disp_order) {
+ return 1;
+ } else {
+ return -1;
+ }
+}
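The comparator above only exists to be handed to qsort(); a standalone usage sketch with a simplified struct (not the real RefBufMapData) looks like this:

#include <stdio.h>
#include <stdlib.h>

/* Simplified entry: which physical buffer it is and when it is displayed. */
typedef struct { int map_idx; int disp_order; } Entry;

static int cmp_disp_asc(const void *a, const void *b) {
  const int da = ((const Entry *)a)->disp_order;
  const int db = ((const Entry *)b)->disp_order;
  return (da > db) - (da < db); /* -1, 0 or 1: ascending display order */
}

int main(void) {
  Entry bufs[] = { { 0, 8 }, { 1, 2 }, { 2, 16 }, { 3, 4 } };
  const size_t n = sizeof(bufs) / sizeof(bufs[0]);
  qsort(bufs, n, sizeof(bufs[0]), cmp_disp_asc);
  for (size_t i = 0; i < n; ++i)
    printf("map_idx %d shown at display order %d\n", bufs[i].map_idx,
           bufs[i].disp_order);
  return 0;
}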
+
+// Checks to see if a particular reference frame is already in the reference
+// frame map.
+static int is_in_ref_map(RefBufMapData *map, int disp_order, int n_frames) {
+ for (int i = 0; i < n_frames; i++) {
+ if (disp_order == map[i].disp_order) return 1;
+ }
+ return 0;
+}
+
+// Add a reference buffer index to a named reference slot.
+static void add_ref_to_slot(RefBufMapData *ref, int *const remapped_ref_idx,
+ int frame) {
+ remapped_ref_idx[frame - LAST_FRAME] = ref->map_idx;
+ ref->used = 1;
+}
+
+// Threshold dictating when we are allowed to start considering
+// leaving lowest level frames unmapped.
+#define LOW_LEVEL_FRAMES_TR 5
+
+// Find which reference buffer should be left out of the named mapping.
+// This is because there are 8 reference buffers and only 7 named slots.
+static void set_unmapped_ref(RefBufMapData *buffer_map, int n_bufs,
+ int n_min_level_refs, int min_level,
+ int cur_frame_disp) {
+ int max_dist = 0;
+ int unmapped_idx = -1;
+ if (n_bufs <= ALTREF_FRAME) return;
+ for (int i = 0; i < n_bufs; i++) {
+ if (buffer_map[i].used) continue;
+ if (buffer_map[i].pyr_level != min_level ||
+ n_min_level_refs >= LOW_LEVEL_FRAMES_TR) {
+ int dist = abs(cur_frame_disp - buffer_map[i].disp_order);
+ if (dist > max_dist) {
+ max_dist = dist;
+ unmapped_idx = i;
+ }
+ }
+ }
+ assert(unmapped_idx >= 0 && "Unmapped reference not found");
+ buffer_map[unmapped_idx].used = 1;
+}
+
+static void get_ref_frames(AV1_COMP *const cpi,
+ RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
+ int cur_frame_disp) {
AV1_COMMON *cm = &cpi->common;
int *const remapped_ref_idx = cm->remapped_ref_idx;
- int *const arf_stack = ref_buffer_stack->arf_stack;
- int *const lst_stack = ref_buffer_stack->lst_stack;
- int *const gld_stack = ref_buffer_stack->gld_stack;
+
+ int buf_map_idx = 0;
+
+ // Initialize reference frame mappings.
+ for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;
+
+ RefBufMapData buffer_map[REF_FRAMES];
+ int n_bufs = 0;
+ memset(buffer_map, 0, REF_FRAMES * sizeof(buffer_map[0]));
+ int min_level = MAX_ARF_LAYERS;
+ int max_level = 0;
+
+ // Go through current reference buffers and store display order, pyr level,
+ // and map index.
+ for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
+ // Get reference frame buffer.
+ RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
+ if (ref_pair.disp_order == -1) continue;
+ const int frame_order = ref_pair.disp_order;
+ // Avoid duplicates.
+ if (is_in_ref_map(buffer_map, frame_order, n_bufs)) continue;
+ const int reference_frame_level = ref_pair.pyr_level;
+
+ // Keep track of the lowest and highest levels that currently exist.
+ if (reference_frame_level < min_level) min_level = reference_frame_level;
+ if (reference_frame_level > max_level) max_level = reference_frame_level;
+
+ buffer_map[n_bufs].map_idx = map_idx;
+ buffer_map[n_bufs].disp_order = frame_order;
+ buffer_map[n_bufs].pyr_level = reference_frame_level;
+ buffer_map[n_bufs].used = 0;
+ n_bufs++;
+ }
+
+ // Sort frames in ascending display order.
+ qsort(buffer_map, n_bufs, sizeof(buffer_map[0]), compare_map_idx_pair_asc);
+
+ int n_min_level_refs = 0;
+ int n_past_high_level = 0;
+ int closest_past_ref = -1;
+ int golden_idx = -1;
+ int altref_idx = -1;
+
+ // Find the GOLDEN_FRAME and BWDREF_FRAME.
+ // Also collect various stats about the reference frames for the remaining
+ // mappings.
+ for (int i = n_bufs - 1; i >= 0; i--) {
+ if (buffer_map[i].pyr_level == min_level) {
+ // Keep track of the number of lowest level frames.
+ n_min_level_refs++;
+ if (buffer_map[i].disp_order < cur_frame_disp && golden_idx == -1 &&
+ remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] == INVALID_IDX) {
+ // Save index for GOLDEN.
+ golden_idx = i;
+ } else if (buffer_map[i].disp_order > cur_frame_disp &&
+ altref_idx == -1 &&
+ remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] == INVALID_IDX) {
+ // Save index for ALTREF.
+ altref_idx = i;
+ }
+ } else if (buffer_map[i].disp_order == cur_frame_disp) {
+ // Map the BWDREF_FRAME if this is the show_existing_frame.
+ add_ref_to_slot(&buffer_map[i], remapped_ref_idx, BWDREF_FRAME);
+ }
+
+ // Keep track of the number of past frames that are not at the lowest level.
+ if (buffer_map[i].disp_order < cur_frame_disp &&
+ buffer_map[i].pyr_level != min_level)
+ n_past_high_level++;
+
+ // Keep track of where the frames change from being past frames to future
+ // frames.
+ if (buffer_map[i].disp_order < cur_frame_disp && closest_past_ref < 0)
+ closest_past_ref = i;
+ }
+
+ // Do not map GOLDEN and ALTREF based on their pyramid level if all reference
+ // frames have the same level.
+ if (n_min_level_refs <= n_bufs) {
+ // Map the GOLDEN_FRAME.
+ if (golden_idx > -1)
+ add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
+ // Map the ALTREF_FRAME.
+ if (altref_idx > -1)
+ add_ref_to_slot(&buffer_map[altref_idx], remapped_ref_idx, ALTREF_FRAME);
+ }
+
+ // Find the buffer to be excluded from the mapping.
+ set_unmapped_ref(buffer_map, n_bufs, n_min_level_refs, min_level,
+ cur_frame_disp);
+
+ // Place past frames in LAST_FRAME, LAST2_FRAME, and LAST3_FRAME.
+ for (int frame = LAST_FRAME; frame < GOLDEN_FRAME; frame++) {
+ // Continue if the current ref slot is already full.
+ if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
+ // Find the next unmapped reference buffer
+ // in decreasing ouptut order relative to current picture.
+ int next_buf_max = 0;
+ int next_disp_order = INT_MIN;
+ for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
+ if (!buffer_map[buf_map_idx].used &&
+ buffer_map[buf_map_idx].disp_order < cur_frame_disp &&
+ buffer_map[buf_map_idx].disp_order > next_disp_order) {
+ next_disp_order = buffer_map[buf_map_idx].disp_order;
+ next_buf_max = buf_map_idx;
+ }
+ }
+ buf_map_idx = next_buf_max;
+ if (buf_map_idx < 0) break;
+ if (buffer_map[buf_map_idx].used) break;
+ add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
+ }
+
+ // Place future frames (if there are any) in BWDREF_FRAME and ALTREF2_FRAME.
+ for (int frame = BWDREF_FRAME; frame < REF_FRAMES; frame++) {
+ // Continue if the current ref slot is already full.
+ if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
+ // Find the next unmapped reference buffer
+ // in increasing output order relative to current picture.
+ int next_buf_max = 0;
+ int next_disp_order = INT_MAX;
+ for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
+ if (!buffer_map[buf_map_idx].used &&
+ buffer_map[buf_map_idx].disp_order > cur_frame_disp &&
+ buffer_map[buf_map_idx].disp_order < next_disp_order) {
+ next_disp_order = buffer_map[buf_map_idx].disp_order;
+ next_buf_max = buf_map_idx;
+ }
+ }
+ buf_map_idx = next_buf_max;
+ if (buf_map_idx < 0) break;
+ if (buffer_map[buf_map_idx].used) break;
+ add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
+ }
+
+ // Place remaining past frames.
+ buf_map_idx = closest_past_ref;
+ for (int frame = LAST_FRAME; frame < REF_FRAMES; frame++) {
+ // Continue if the current ref slot is already full.
+ if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
+ // Find the next unmapped reference buffer.
+ for (; buf_map_idx >= 0; buf_map_idx--) {
+ if (!buffer_map[buf_map_idx].used) break;
+ }
+ if (buf_map_idx < 0) break;
+ if (buffer_map[buf_map_idx].used) break;
+ add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
+ }
+
+ // Place remaining future frames.
+ buf_map_idx = n_bufs - 1;
+ for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; frame--) {
+ // Continue if the current ref slot is already full.
+ if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
+ // Find the next unmapped reference buffer.
+ for (; buf_map_idx > closest_past_ref; buf_map_idx--) {
+ if (!buffer_map[buf_map_idx].used) break;
+ }
+ if (buf_map_idx < 0) break;
+ if (buffer_map[buf_map_idx].used) break;
+ add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
+ }
+
+ // Fill any slots that are empty (should only happen for the first 7 frames).
+ for (int i = 0; i < REF_FRAMES; ++i)
+ if (remapped_ref_idx[i] == INVALID_IDX) remapped_ref_idx[i] = 0;
+}
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
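As a rough mental model of get_ref_frames() above: the occupied physical buffers are sorted by display order, GOLDEN/ALTREF are drawn from the lowest pyramid level on either side of the current frame, and the remaining past and future buffers fill the LAST1-3 and BWDREF/ALTREF2 slots. The toy sketch below only shows the past/future split into generic slots; the slot names and counts are illustrative, not the full libaom mapping.

#include <stdio.h>

int main(void) {
  const int disp[] = { 2, 4, 6, 8, 12, 14 }; /* occupied buffers, sorted by display order */
  const int n = sizeof(disp) / sizeof(disp[0]);
  const int cur_disp = 10;                   /* display order of the frame being coded */

  int last_slots[3], fwd_slots[2];
  int n_last = 0, n_fwd = 0;

  /* Past frames, nearest first, go into LAST-like slots. */
  for (int i = n - 1; i >= 0 && n_last < 3; --i)
    if (disp[i] < cur_disp) last_slots[n_last++] = disp[i];
  /* Future frames, nearest first, go into forward (BWDREF-like) slots. */
  for (int i = 0; i < n && n_fwd < 2; ++i)
    if (disp[i] > cur_disp) fwd_slots[n_fwd++] = disp[i];

  for (int i = 0; i < n_last; ++i) printf("LAST%d -> frame %d\n", i + 1, last_slots[i]);
  for (int i = 0; i < n_fwd; ++i) printf("FWD%d  -> frame %d\n", i + 1, fwd_slots[i]);
  return 0;
}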
+
+void av1_get_ref_frames(const RefBufferStack *ref_buffer_stack,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ AV1_COMP *cpi,
+ RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
+ int cur_frame_disp,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ int remapped_ref_idx[REF_FRAMES]) {
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ (void)ref_buffer_stack;
+ (void)remapped_ref_idx;
+ get_ref_frames(cpi, ref_frame_map_pairs, cur_frame_disp);
+ return;
+#else
+ const int *const arf_stack = ref_buffer_stack->arf_stack;
+ const int *const lst_stack = ref_buffer_stack->lst_stack;
+ const int *const gld_stack = ref_buffer_stack->gld_stack;
const int arf_stack_size = ref_buffer_stack->arf_stack_size;
const int lst_stack_size = ref_buffer_stack->lst_stack_size;
const int gld_stack_size = ref_buffer_stack->gld_stack_size;
@@ -1079,6 +1449,7 @@ void av1_get_ref_frames(AV1_COMP *const cpi, RefBufferStack *ref_buffer_stack) {
remapped_ref_idx[idx] = ref_buffer_stack->gld_stack[0];
}
}
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
@@ -1088,7 +1459,7 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
int flush) {
AV1EncoderConfig *const oxcf = &cpi->oxcf;
AV1_COMMON *const cm = &cpi->common;
- GF_GROUP *gf_group = &cpi->gf_group;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
ExternalFlags *const ext_flags = &cpi->ext_flags;
GFConfig *const gf_cfg = &oxcf->gf_cfg;
@@ -1112,9 +1483,9 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
if (!av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage)) {
#if !CONFIG_REALTIME_ONLY
- if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
+ if (flush && oxcf->pass == 1 && !cpi->ppi->twopass.first_pass_done) {
av1_end_first_pass(cpi); /* get last stats packet */
- cpi->twopass.first_pass_done = 1;
+ cpi->ppi->twopass.first_pass_done = 1;
}
#endif
return -1;
@@ -1128,11 +1499,9 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
AOMMIN(gf_cfg->gf_min_pyr_height, gf_cfg->gf_max_pyr_height);
}
- cpi->tpl_data.skip_tpl_setup_stats = 0;
+ cpi->skip_tpl_setup_stats = 0;
#if !CONFIG_REALTIME_ONLY
- const int use_one_pass_rt_params = has_no_stats_stage(cpi) &&
- oxcf->mode == REALTIME &&
- gf_cfg->lag_in_frames == 0;
+ const int use_one_pass_rt_params = is_one_pass_rt_params(cpi);
if (!use_one_pass_rt_params && !is_stat_generation_stage(cpi)) {
#if CONFIG_COLLECT_COMPONENT_TIMING
start_timing(cpi, av1_get_second_pass_params_time);
@@ -1148,19 +1517,19 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
// If this is a forward keyframe, mark as a show_existing_frame
// TODO(bohanli): find a consistent condition for fwd keyframes
if (oxcf->kf_cfg.fwd_kf_enabled &&
- gf_group->update_type[gf_group->index] == OVERLAY_UPDATE &&
+ gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE &&
cpi->rc.frames_to_key == 0) {
frame_params.show_existing_frame = 1;
} else {
frame_params.show_existing_frame =
(cpi->show_existing_alt_ref &&
- gf_group->update_type[gf_group->index] == OVERLAY_UPDATE) ||
- gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE;
+ gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) ||
+ gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE;
}
frame_params.show_existing_frame &= allow_show_existing(cpi, *frame_flags);
// Reset show_existing_alt_ref decision to 0 after it is used.
- if (gf_group->update_type[gf_group->index] == OVERLAY_UPDATE) {
+ if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) {
cpi->show_existing_alt_ref = 0;
}
} else {
@@ -1181,13 +1550,20 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
if (source == NULL) { // If no source was found, we can't encode a frame.
#if !CONFIG_REALTIME_ONLY
- if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
+ if (flush && oxcf->pass == 1 && !cpi->ppi->twopass.first_pass_done) {
av1_end_first_pass(cpi); /* get last stats packet */
- cpi->twopass.first_pass_done = 1;
+ cpi->ppi->twopass.first_pass_done = 1;
}
#endif
return -1;
}
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // reset src_offset to allow actual encode call for this frame to get its
+ // source.
+ gf_group->src_offset[cpi->gf_frame_index] = 0;
+#endif
+
// Source may be changed if temporal filtered later.
frame_input.source = &source->img;
frame_input.last_source = last_source != NULL ? &last_source->img : NULL;
@@ -1216,7 +1592,7 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
&cm->film_grain_params);
} else {
cm->cur_frame->film_grain_params_present =
- cm->seq_params.film_grain_params_present;
+ cm->seq_params->film_grain_params_present;
}
// only one operating point supported now
const int64_t pts64 = ticks_to_timebase_units(timestamp_ratio, *time_stamp);
@@ -1226,19 +1602,20 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
#if CONFIG_REALTIME_ONLY
av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
- if (cpi->oxcf.speed >= 5 && cm->number_spatial_layers == 1 &&
- cm->number_temporal_layers == 1)
- av1_set_reference_structure_one_pass_rt(cpi, gf_group->index == 0);
+ if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
+ cpi->ppi->number_temporal_layers == 1)
+ av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
#else
if (use_one_pass_rt_params) {
av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
- if (cpi->oxcf.speed >= 5 && cm->number_spatial_layers == 1 &&
- cm->number_temporal_layers == 1)
- av1_set_reference_structure_one_pass_rt(cpi, gf_group->index == 0);
+ if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
+ cpi->ppi->number_temporal_layers == 1)
+ av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
}
#endif
- FRAME_UPDATE_TYPE frame_update_type = get_frame_update_type(gf_group);
+ FRAME_UPDATE_TYPE frame_update_type =
+ get_frame_update_type(gf_group, cpi->gf_frame_index);
if (frame_params.show_existing_frame &&
frame_params.frame_type != KEY_FRAME) {
@@ -1302,9 +1679,21 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
const RefCntBuffer *ref_frames[INTER_REFS_PER_FRAME];
const YV12_BUFFER_CONFIG *ref_frame_buf[INTER_REFS_PER_FRAME];
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ RefFrameMapPair ref_frame_map_pairs[REF_FRAMES];
+ init_ref_map_pair(cpi, ref_frame_map_pairs);
+ const int order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
+ const int cur_frame_disp =
+ cpi->common.current_frame.frame_number + order_offset;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
if (!ext_flags->refresh_frame.update_pending) {
- av1_get_ref_frames(cpi, &cpi->ref_buffer_stack);
- } else if (cpi->svc.external_ref_frame_config) {
+ av1_get_ref_frames(&cpi->ref_buffer_stack,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cpi, ref_frame_map_pairs, cur_frame_disp,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ cm->remapped_ref_idx);
+ } else if (cpi->svc.set_ref_frame_config) {
for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++)
cm->remapped_ref_idx[i] = cpi->svc.ref_idx[i];
}
@@ -1319,19 +1708,54 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
frame_params.ref_frame_flags = get_ref_frame_flags(
&cpi->sf, ref_frame_buf, ext_flags->ref_frame_flags);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Set primary_ref_frame of non-reference frames to PRIMARY_REF_NONE.
+ if (cpi->ppi->gf_group.is_frame_non_ref[cpi->gf_frame_index]) {
+ frame_params.primary_ref_frame = PRIMARY_REF_NONE;
+ } else {
+ frame_params.primary_ref_frame =
+ choose_primary_ref_frame(cpi, &frame_params);
+ }
+#else
frame_params.primary_ref_frame =
choose_primary_ref_frame(cpi, &frame_params);
- frame_params.order_offset = gf_group->arf_src_offset[gf_group->index];
-
- frame_params.refresh_frame_flags = av1_get_refresh_frame_flags(
- cpi, &frame_params, frame_update_type, &cpi->ref_buffer_stack);
-
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
+ frame_params.order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
+
+ frame_params.refresh_frame_flags =
+ av1_get_refresh_frame_flags(cpi, &frame_params, frame_update_type,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cur_frame_disp, ref_frame_map_pairs,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ &cpi->ref_buffer_stack);
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Turn frames marked as is_frame_non_ref into non-reference frames.
+ if (gf_group->is_frame_non_ref[cpi->gf_frame_index])
+ frame_params.refresh_frame_flags = 0;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ frame_params.existing_fb_idx_to_show = INVALID_IDX;
+ // Find the frame buffer to show based on display order.
+ if (frame_params.show_existing_frame) {
+ for (int frame = 0; frame < REF_FRAMES; frame++) {
+ const RefCntBuffer *const buf = cm->ref_frame_map[frame];
+ if (buf == NULL) continue;
+ const int frame_order = (int)buf->display_order_hint;
+ if (frame_order == cur_frame_disp)
+ frame_params.existing_fb_idx_to_show = frame;
+ }
+ }
+#else
frame_params.existing_fb_idx_to_show =
frame_params.show_existing_frame
? (frame_update_type == INTNL_OVERLAY_UPDATE
? get_ref_frame_map_idx(cm, BWDREF_FRAME)
: get_ref_frame_map_idx(cm, ALTREF_FRAME))
: INVALID_IDX;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
// The way frame_params->remapped_ref_idx is setup is a placeholder.
@@ -1351,6 +1775,12 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
cm->quant_params.using_qmatrix = oxcf->q_cfg.using_qm;
}
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Copy previous frame's largest MV component from ppi to cpi.
+ if (!is_stat_generation_stage(cpi) && cpi->do_frame_data_update)
+ cpi->mv_search_params.max_mv_magnitude = cpi->ppi->max_mv_magnitude;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
#if CONFIG_REALTIME_ONLY
if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
AOM_CODEC_OK) {
@@ -1369,10 +1799,17 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
}
#endif // CONFIG_REALTIME_ONLY
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Store current frame's largest MV component in ppi.
+ if (!is_stat_generation_stage(cpi) && cpi->do_frame_data_update)
+ cpi->ppi->max_mv_magnitude = cpi->mv_search_params.max_mv_magnitude;
+#endif
+
if (!is_stat_generation_stage(cpi)) {
// First pass doesn't modify reference buffer assignment or produce frame
// flags
update_frame_flags(&cpi->common, &cpi->refresh_frame, frame_flags);
+#if !CONFIG_FRAME_PARALLEL_ENCODE
if (!ext_flags->refresh_frame.update_pending) {
int ref_map_index =
av1_get_refresh_ref_frame_map(cm->current_frame.refresh_frame_flags);
@@ -1380,6 +1817,7 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
cm->show_existing_frame, ref_map_index,
&cpi->ref_buffer_stack);
}
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
#if !CONFIG_REALTIME_ONLY
@@ -1408,7 +1846,7 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
}
if (!is_stat_generation_stage(cpi)) {
- update_fb_of_context_type(cpi, &frame_params, cpi->fb_of_context_type);
+ update_fb_of_context_type(cpi, &frame_params, cpi->ppi->fb_of_context_type);
set_additional_frame_flags(cm, frame_flags);
update_rc_counts(cpi);
}
@@ -1421,7 +1859,7 @@ int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
cpi->droppable = is_frame_droppable(&cpi->svc, &ext_flags->refresh_frame);
}
- if (cpi->use_svc) av1_save_layer_context(cpi);
+ if (cpi->ppi->use_svc) av1_save_layer_context(cpi);
return AOM_CODEC_OK;
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/encode_strategy.h b/third_party/libaom/source/libaom/av1/encoder/encode_strategy.h
index 351e8a1328..c7b75c8430 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encode_strategy.h
+++ b/third_party/libaom/source/libaom/av1/encoder/encode_strategy.h
@@ -69,6 +69,10 @@ void av1_configure_buffer_updates(
int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
const EncodeFrameParams *const frame_params,
FRAME_UPDATE_TYPE frame_update_type,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ int cur_disp_order,
+ RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
const RefBufferStack *const ref_buffer_stack);
int av1_get_refresh_ref_frame_map(int refresh_frame_flags);
@@ -79,7 +83,25 @@ void av1_update_ref_frame_map(AV1_COMP *cpi,
int ref_map_index,
RefBufferStack *ref_buffer_stack);
-void av1_get_ref_frames(AV1_COMP *const cpi, RefBufferStack *ref_buffer_stack);
+/*!\brief Obtain indices of reference frames from reference frame buffer stacks
+ *
+ * \callgraph
+ * \callergraph
+ *
+ * \param[in] ref_buffer_stack Data structure for reference frame buffer
+ * stacks.
+ * \param[out] remapped_ref_idx An array for storing indices of reference
+ * frames. The index is used to retrieve a
+ * reference frame buffer from ref_frame_map
+ * in AV1Common.
+ */
+void av1_get_ref_frames(const RefBufferStack *ref_buffer_stack,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ AV1_COMP *cpi,
+ RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
+ int cur_frame_disp,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ int remapped_ref_idx[REF_FRAMES]);
int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
const int up_to_index,
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodeframe.c b/third_party/libaom/source/libaom/av1/encoder/encodeframe.c
index 24d3488245..b3f836b481 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodeframe.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encodeframe.c
@@ -55,6 +55,7 @@
#include "av1/encoder/encodetxb.h"
#include "av1/encoder/ethread.h"
#include "av1/encoder/extend.h"
+#include "av1/encoder/intra_mode_search_utils.h"
#include "av1/encoder/ml.h"
#include "av1/encoder/motion_search_facade.h"
#include "av1/encoder/partition_strategy.h"
@@ -150,7 +151,7 @@ unsigned int av1_get_sby_perpixel_variance(const AV1_COMP *cpi,
BLOCK_SIZE bs) {
unsigned int sse;
const unsigned int var =
- cpi->fn_ptr[bs].vf(ref->buf, ref->stride, AV1_VAR_OFFS, 0, &sse);
+ cpi->ppi->fn_ptr[bs].vf(ref->buf, ref->stride, AV1_VAR_OFFS, 0, &sse);
return ROUND_POWER_OF_TWO(var, num_pels_log2_lookup[bs]);
}
@@ -163,9 +164,9 @@ unsigned int av1_high_get_sby_perpixel_variance(const AV1_COMP *cpi,
const uint16_t *high_var_offs[3] = { AV1_HIGH_VAR_OFFS_8,
AV1_HIGH_VAR_OFFS_10,
AV1_HIGH_VAR_OFFS_12 };
- var =
- cpi->fn_ptr[bs].vf(ref->buf, ref->stride,
- CONVERT_TO_BYTEPTR(high_var_offs[off_index]), 0, &sse);
+ var = cpi->ppi->fn_ptr[bs].vf(ref->buf, ref->stride,
+ CONVERT_TO_BYTEPTR(high_var_offs[off_index]), 0,
+ &sse);
return ROUND_POWER_OF_TWO(var, num_pels_log2_lookup[bs]);
}
@@ -181,7 +182,8 @@ static unsigned int get_sby_perpixel_diff_variance(const AV1_COMP *const cpi,
assert(last != NULL);
last_y =
&last->y_buffer[mi_row * MI_SIZE * last->y_stride + mi_col * MI_SIZE];
- var = cpi->fn_ptr[bs].vf(ref->buf, ref->stride, last_y, last->y_stride, &sse);
+ var = cpi->ppi->fn_ptr[bs].vf(ref->buf, ref->stride, last_y, last->y_stride,
+ &sse);
return ROUND_POWER_OF_TWO(var, num_pels_log2_lookup[bs]);
}
@@ -242,7 +244,7 @@ static AOM_INLINE void setup_delta_q(AV1_COMP *const cpi, ThreadData *td,
const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
assert(delta_q_info->delta_q_present_flag);
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
// Delta-q modulation based on variance
av1_setup_src_planes(x, cpi->source, mi_row, mi_col, num_planes, sb_size);
@@ -307,7 +309,7 @@ static AOM_INLINE void setup_delta_q(AV1_COMP *const cpi, ThreadData *td,
(int8_t)clamp(delta_lf_from_base, -MAX_LOOP_FILTER, MAX_LOOP_FILTER);
const int frame_lf_count =
av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
- const int mib_size = cm->seq_params.mib_size;
+ const int mib_size = cm->seq_params->mib_size;
// pre-set the delta lf for loop filter. Note that this value is set
// before mi is assigned for each block in current superblock
@@ -326,22 +328,23 @@ static AOM_INLINE void setup_delta_q(AV1_COMP *const cpi, ThreadData *td,
static void init_ref_frame_space(AV1_COMP *cpi, ThreadData *td, int mi_row,
int mi_col) {
const AV1_COMMON *cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
MACROBLOCK *x = &td->mb;
- const int frame_idx = cpi->gf_group.index;
- TplParams *const tpl_data = &cpi->tpl_data;
- TplDepFrame *tpl_frame = &tpl_data->tpl_frame[frame_idx];
+ const int frame_idx = cpi->gf_frame_index;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
av1_zero(x->tpl_keep_ref_frame);
- if (tpl_frame->is_valid == 0) return;
- if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return;
if (frame_idx >= MAX_TPL_FRAME_IDX) return;
+ TplDepFrame *tpl_frame = &tpl_data->tpl_frame[frame_idx];
+ if (!tpl_frame->is_valid) return;
+ if (!is_frame_tpl_eligible(gf_group, cpi->gf_frame_index)) return;
if (cpi->oxcf.q_cfg.aq_mode != NO_AQ) return;
- const int is_overlay = cpi->gf_group.update_type[frame_idx] == OVERLAY_UPDATE;
+ const int is_overlay =
+ cpi->ppi->gf_group.update_type[frame_idx] == OVERLAY_UPDATE;
if (is_overlay) {
memset(x->tpl_keep_ref_frame, 1, sizeof(x->tpl_keep_ref_frame));
return;
@@ -351,7 +354,7 @@ static void init_ref_frame_space(AV1_COMP *cpi, ThreadData *td, int mi_row,
const int tpl_stride = tpl_frame->stride;
int64_t inter_cost[INTER_REFS_PER_FRAME] = { 0 };
const int step = 1 << block_mis_log2;
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
const int mi_row_end =
AOMMIN(mi_size_high[sb_size] + mi_row, mi_params->mi_rows);
@@ -426,15 +429,15 @@ static void init_ref_frame_space(AV1_COMP *cpi, ThreadData *td, int mi_row,
static AOM_INLINE void adjust_rdmult_tpl_model(AV1_COMP *cpi, MACROBLOCK *x,
int mi_row, int mi_col) {
- const BLOCK_SIZE sb_size = cpi->common.seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cpi->common.seq_params->sb_size;
const int orig_rdmult = cpi->rd.RDMULT;
- assert(IMPLIES(cpi->gf_group.size > 0,
- cpi->gf_group.index < cpi->gf_group.size));
- const int gf_group_index = cpi->gf_group.index;
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
+ const int gf_group_index = cpi->gf_frame_index;
if (cpi->oxcf.algo_cfg.enable_tpl_model && cpi->oxcf.q_cfg.aq_mode == NO_AQ &&
cpi->oxcf.q_cfg.deltaq_mode == NO_DELTA_Q && gf_group_index > 0 &&
- cpi->gf_group.update_type[gf_group_index] == ARF_UPDATE) {
+ cpi->ppi->gf_group.update_type[gf_group_index] == ARF_UPDATE) {
const int dr =
av1_get_rdmult_delta(cpi, sb_size, mi_row, mi_col, orig_rdmult);
x->rdmult = dr;
@@ -451,7 +454,7 @@ static void get_estimated_pred(AV1_COMP *cpi, const TileInfo *const tile,
MACROBLOCKD *xd = &x->e_mbd;
// TODO(kyslov) Extend to 128x128
- assert(cm->seq_params.sb_size == BLOCK_64X64);
+ assert(cm->seq_params->sb_size == BLOCK_64X64);
av1_set_offsets(cpi, tile, x, mi_row, mi_col, BLOCK_64X64);
@@ -512,7 +515,7 @@ static AOM_INLINE void encode_nonrd_sb(AV1_COMP *cpi, ThreadData *td,
const TileInfo *const tile_info = &tile_data->tile_info;
MB_MODE_INFO **mi = cm->mi_params.mi_grid_base +
get_mi_grid_idx(&cm->mi_params, mi_row, mi_col);
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
// Grade the temporal variation of the sb, the grade will be used to decide
// fast mode search strategy for coding blocks
@@ -557,6 +560,20 @@ static AOM_INLINE void encode_nonrd_sb(AV1_COMP *cpi, ThreadData *td,
sf->part_sf.partition_search_type == VAR_BASED_PARTITION);
set_cb_offsets(td->mb.cb_offset, 0, 0);
+ // Initialize the flag to skip cdef for 64x64 blocks: if color sensitivity is
+ // on, set to 0 (don't skip).
+ if (sf->rt_sf.skip_cdef_sb) {
+ const int block64_in_sb = (sb_size == BLOCK_128X128) ? 2 : 1;
+ for (int r = 0; r < block64_in_sb; ++r) {
+ for (int c = 0; c < block64_in_sb; ++c) {
+ const int idx_in_sb =
+ r * MI_SIZE_64X64 * cm->mi_params.mi_stride + c * MI_SIZE_64X64;
+ if (mi[idx_in_sb])
+ mi[idx_in_sb]->skip_cdef_curr_sb =
+ !(x->color_sensitivity_sb[0] || x->color_sensitivity_sb[1]);
+ }
+ }
+ }
// Adjust and encode the superblock
PC_TREE *const pc_root = av1_alloc_pc_tree_node(sb_size);
av1_nonrd_use_partition(cpi, td, tile_data, mi, tp, mi_row, mi_col, sb_size,
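The skip_cdef initialization added in the hunk above visits every 64x64 unit of the superblock through offsets into the mi grid. A self-contained sketch of that index arithmetic follows; MI_SIZE_64X64, MI_STRIDE and the flags are invented for the example (in libaom the offsets index MB_MODE_INFO pointers, not a flat array).

#include <stdio.h>

#define MI_SIZE_64X64 16 /* 4x4 luma units per 64 pixels */
#define MI_STRIDE 256    /* mi units per picture row, made up for the example */

int main(void) {
  const int block64_in_sb = 2;       /* 2x2 units for a 128x128 superblock, 1x1 for 64x64 */
  const int sb_has_color_change = 0; /* pretend color sensitivity is off for this SB */

  for (int r = 0; r < block64_in_sb; ++r) {
    for (int c = 0; c < block64_in_sb; ++c) {
      /* Offset of this 64x64 unit's top-left mi, relative to the SB's top-left mi. */
      const int idx_in_sb = r * MI_SIZE_64X64 * MI_STRIDE + c * MI_SIZE_64X64;
      const int skip_cdef = !sb_has_color_change;
      printf("64x64 unit (%d,%d): mi offset %d, skip_cdef=%d\n", r, c,
             idx_in_sb, skip_cdef);
    }
  }
  return 0;
}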
@@ -599,7 +616,7 @@ static INLINE void init_encode_rd_sb(AV1_COMP *cpi, ThreadData *td,
if (gather_tpl_data) {
if (cm->delta_q_info.delta_q_present_flag) {
const int num_planes = av1_num_planes(cm);
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
setup_delta_q(cpi, td, x, tile_info, mi_row, mi_col, num_planes);
av1_tpl_rdmult_setup_sb(cpi, x, sb_size, mi_row, mi_col);
}
@@ -637,7 +654,7 @@ static AOM_INLINE void encode_rd_sb(AV1_COMP *cpi, ThreadData *td,
const TileInfo *const tile_info = &tile_data->tile_info;
MB_MODE_INFO **mi = cm->mi_params.mi_grid_base +
get_mi_grid_idx(&cm->mi_params, mi_row, mi_col);
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
const int num_planes = av1_num_planes(cm);
int dummy_rate;
int64_t dummy_dist;
@@ -708,10 +725,17 @@ static AOM_INLINE void encode_rd_sb(AV1_COMP *cpi, ThreadData *td,
cpi->oxcf.unit_test_cfg.sb_multipass_unit_test ? 2 : 1;
if (num_passes == 1) {
+#if CONFIG_PARTITION_SEARCH_ORDER
+ av1_reset_part_sf(&cpi->sf.part_sf);
+ RD_STATS this_rdc;
+ av1_rd_partition_search(cpi, td, tile_data, tp, sms_root, mi_row, mi_col,
+ sb_size, &this_rdc);
+#else
PC_TREE *const pc_root = av1_alloc_pc_tree_node(sb_size);
av1_rd_pick_partition(cpi, td, tile_data, tp, mi_row, mi_col, sb_size,
&dummy_rdc, dummy_rdc, pc_root, sms_root, NULL,
SB_SINGLE_PASS, NULL);
+#endif // CONFIG_PARTITION_SEARCH_ORDER
} else {
// First pass
SB_FIRST_PASS_STATS sb_fp_stats;
@@ -753,7 +777,8 @@ static AOM_INLINE void encode_rd_sb(AV1_COMP *cpi, ThreadData *td,
static AOM_INLINE int is_rtc_mode(const CostUpdateFreq *cost_upd_freq,
int use_non_rd_mode) {
return (use_non_rd_mode && cost_upd_freq->coeff >= 2 &&
- cost_upd_freq->mode >= 2 && cost_upd_freq->mv >= 2);
+ cost_upd_freq->mode >= 2 && cost_upd_freq->mv >= 2 &&
+ cost_upd_freq->dv >= 2);
}
/*!\brief Encode a superblock row by breaking it into superblocks
@@ -776,9 +801,9 @@ static AOM_INLINE void encode_sb_row(AV1_COMP *cpi, ThreadData *td,
MACROBLOCK *const x = &td->mb;
MACROBLOCKD *const xd = &x->e_mbd;
const int sb_cols_in_tile = av1_get_sb_cols_in_tile(cm, tile_data->tile_info);
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
- const int mib_size = cm->seq_params.mib_size;
- const int mib_size_log2 = cm->seq_params.mib_size_log2;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
+ const int mib_size = cm->seq_params->mib_size;
+ const int mib_size_log2 = cm->seq_params->mib_size_log2;
const int sb_row = (mi_row - tile_info->mi_row_start) >> mib_size_log2;
const int use_nonrd_mode = cpi->sf.rt_sf.use_nonrd_pick_mode;
const CostUpdateFreq *const cost_upd_freq = &cpi->oxcf.cost_upd_freq;
@@ -833,6 +858,8 @@ static AOM_INLINE void encode_sb_row(AV1_COMP *cpi, ThreadData *td,
av1_set_cost_upd_freq(cpi, td, tile_info, mi_row, mi_col);
// Reset color coding related parameters
+ x->color_sensitivity_sb[0] = 0;
+ x->color_sensitivity_sb[1] = 0;
x->color_sensitivity[0] = 0;
x->color_sensitivity[1] = 0;
x->content_state_sb.source_sad = kMedSad;
@@ -855,6 +882,12 @@ static AOM_INLINE void encode_sb_row(AV1_COMP *cpi, ThreadData *td,
seg_skip = segfeature_active(seg, segment_id, SEG_LVL_SKIP);
}
+ // Produce the gradient data at superblock level, when intra mode pruning
+ // based on hog is enabled.
+ if (cpi->sf.intra_sf.intra_pruning_with_hog ||
+ cpi->sf.intra_sf.chroma_intra_pruning_with_hog)
+ produce_gradients_for_sb(cpi, x, sb_size, mi_row, mi_col);
+
// encode the superblock
if (use_nonrd_mode) {
encode_nonrd_sb(cpi, td, tile_data, tp, mi_row, mi_col, seg_skip);
@@ -886,10 +919,10 @@ static AOM_INLINE void init_encode_frame_mb_context(AV1_COMP *cpi) {
// Copy data over into macro block data structures.
av1_setup_src_planes(x, cpi->source, 0, 0, num_planes,
- cm->seq_params.sb_size);
+ cm->seq_params->sb_size);
- av1_setup_block_planes(xd, cm->seq_params.subsampling_x,
- cm->seq_params.subsampling_y, num_planes);
+ av1_setup_block_planes(xd, cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y, num_planes);
}
void av1_alloc_tile_data(AV1_COMP *cpi) {
@@ -927,13 +960,14 @@ void av1_init_tile_data(AV1_COMP *cpi) {
TileInfo *const tile_info = &tile_data->tile_info;
av1_tile_init(tile_info, cm, tile_row, tile_col);
tile_data->firstpass_top_mv = kZeroMv;
+ tile_data->abs_sum_level = 0;
if (pre_tok != NULL && tplist != NULL) {
token_info->tile_tok[tile_row][tile_col] = pre_tok + tile_tok;
pre_tok = token_info->tile_tok[tile_row][tile_col];
- tile_tok = allocated_tokens(*tile_info,
- cm->seq_params.mib_size_log2 + MI_SIZE_LOG2,
- num_planes);
+ tile_tok = allocated_tokens(
+ *tile_info, cm->seq_params->mib_size_log2 + MI_SIZE_LOG2,
+ num_planes);
token_info->tplist[tile_row][tile_col] = tplist + tplist_count;
tplist = token_info->tplist[tile_row][tile_col];
tplist_count = av1_get_sb_rows_in_tile(cm, tile_data->tile_info);
@@ -961,14 +995,14 @@ void av1_encode_sb_row(AV1_COMP *cpi, ThreadData *td, int tile_row,
TokenExtra *tok = NULL;
TokenList *const tplist = cpi->token_info.tplist[tile_row][tile_col];
const int sb_row_in_tile =
- (mi_row - tile_info->mi_row_start) >> cm->seq_params.mib_size_log2;
+ (mi_row - tile_info->mi_row_start) >> cm->seq_params->mib_size_log2;
const int tile_mb_cols =
(tile_info->mi_col_end - tile_info->mi_col_start + 2) >> 2;
const int num_mb_rows_in_sb =
- ((1 << (cm->seq_params.mib_size_log2 + MI_SIZE_LOG2)) + 8) >> 4;
+ ((1 << (cm->seq_params->mib_size_log2 + MI_SIZE_LOG2)) + 8) >> 4;
get_start_tok(cpi, tile_row, tile_col, mi_row, &tok,
- cm->seq_params.mib_size_log2 + MI_SIZE_LOG2, num_planes);
+ cm->seq_params->mib_size_log2 + MI_SIZE_LOG2, num_planes);
assert(tplist != NULL);
tplist[sb_row_in_tile].start = tok;
@@ -979,7 +1013,7 @@ void av1_encode_sb_row(AV1_COMP *cpi, ThreadData *td, int tile_row,
assert((unsigned int)(tok - tplist[sb_row_in_tile].start) <=
get_token_alloc(num_mb_rows_in_sb, tile_mb_cols,
- cm->seq_params.mib_size_log2 + MI_SIZE_LOG2,
+ cm->seq_params->mib_size_log2 + MI_SIZE_LOG2,
num_planes));
(void)tile_mb_cols;
@@ -1005,7 +1039,7 @@ void av1_encode_tile(AV1_COMP *cpi, ThreadData *td, int tile_row,
&td->mb.e_mbd);
if (cpi->oxcf.intra_mode_cfg.enable_cfl_intra)
- cfl_init(&td->mb.e_mbd.cfl, &cm->seq_params);
+ cfl_init(&td->mb.e_mbd.cfl, cm->seq_params);
if (td->mb.txfm_search_info.txb_rd_records != NULL) {
av1_crc32c_calculator_init(
@@ -1013,9 +1047,10 @@ void av1_encode_tile(AV1_COMP *cpi, ThreadData *td, int tile_row,
}
for (int mi_row = tile_info->mi_row_start; mi_row < tile_info->mi_row_end;
- mi_row += cm->seq_params.mib_size) {
+ mi_row += cm->seq_params->mib_size) {
av1_encode_sb_row(cpi, td, tile_row, tile_col, mi_row);
}
+ this_tile->abs_sum_level = td->abs_sum_level;
}
/*!\brief Break one frame into tiles and encode the tiles
@@ -1030,15 +1065,13 @@ static AOM_INLINE void encode_tiles(AV1_COMP *cpi) {
const int tile_rows = cm->tiles.rows;
int tile_col, tile_row;
+ MACROBLOCK *const mb = &cpi->td.mb;
assert(IMPLIES(cpi->tile_data == NULL,
cpi->allocated_tiles < tile_cols * tile_rows));
if (cpi->allocated_tiles < tile_cols * tile_rows) av1_alloc_tile_data(cpi);
av1_init_tile_data(cpi);
- if (!cpi->sf.rt_sf.use_nonrd_pick_mode) {
- cpi->td.mb.txfm_search_info.txb_rd_records =
- (TxbRdRecords *)aom_malloc(sizeof(TxbRdRecords));
- }
+ av1_alloc_mb_data(cm, mb, cpi->sf.rt_sf.use_nonrd_pick_mode);
for (tile_row = 0; tile_row < tile_rows; ++tile_row) {
for (tile_col = 0; tile_col < tile_cols; ++tile_col) {
@@ -1046,6 +1079,7 @@ static AOM_INLINE void encode_tiles(AV1_COMP *cpi) {
&cpi->tile_data[tile_row * cm->tiles.cols + tile_col];
cpi->td.intrabc_used = 0;
cpi->td.deltaq_used = 0;
+ cpi->td.abs_sum_level = 0;
cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
cpi->td.mb.tile_pb_ctx = &this_tile->tctx;
// Reset cyclic refresh counters.
@@ -1062,10 +1096,7 @@ static AOM_INLINE void encode_tiles(AV1_COMP *cpi) {
}
}
- if (cpi->td.mb.txfm_search_info.txb_rd_records) {
- aom_free(cpi->td.mb.txfm_search_info.txb_rd_records);
- cpi->td.mb.txfm_search_info.txb_rd_records = NULL;
- }
+ av1_dealloc_mb_data(cm, mb);
}
// Set the relative distance of a reference frame w.r.t. current frame
@@ -1141,10 +1172,10 @@ static int check_skip_mode_enabled(AV1_COMP *const cpi) {
const int cur_offset = (int)cm->current_frame.order_hint;
int ref_offset[2];
get_skip_mode_ref_offsets(cm, ref_offset);
- const int cur_to_ref0 = get_relative_dist(&cm->seq_params.order_hint_info,
+ const int cur_to_ref0 = get_relative_dist(&cm->seq_params->order_hint_info,
cur_offset, ref_offset[0]);
- const int cur_to_ref1 = abs(get_relative_dist(&cm->seq_params.order_hint_info,
- cur_offset, ref_offset[1]));
+ const int cur_to_ref1 = abs(get_relative_dist(
+ &cm->seq_params->order_hint_info, cur_offset, ref_offset[1]));
if (abs(cur_to_ref0 - cur_to_ref1) > 1) return 0;
// High Latency: Turn off skip mode if all refs are fwd.
@@ -1248,6 +1279,9 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
MACROBLOCKD *const xd = &x->e_mbd;
RD_COUNTS *const rdc = &cpi->td.rd_counts;
FrameProbInfo *const frame_probs = &cpi->frame_probs;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ FrameProbInfo *const temp_frame_probs = &cpi->ppi->temp_frame_probs;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
IntraBCHashInfo *const intrabc_hash_info = &x->intrabc_hash_info;
MultiThreadInfo *const mt_info = &cpi->mt_info;
AV1EncRowMultiThreadInfo *const enc_row_mt = &mt_info->enc_row_mt;
@@ -1278,9 +1312,15 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
if (features->allow_warped_motion &&
cpi->sf.inter_sf.prune_warped_prob_thresh > 0) {
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
- if (frame_probs->warped_probs[update_type] <
- cpi->sf.inter_sf.prune_warped_prob_thresh)
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
+ int warped_probability;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ warped_probability = temp_frame_probs->warped_probs[update_type];
+#else
+ warped_probability = frame_probs->warped_probs[update_type];
+#endif
+ if (warped_probability < cpi->sf.inter_sf.prune_warped_prob_thresh)
features->allow_warped_motion = 0;
}
@@ -1316,7 +1356,7 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
// Hash data generated for screen contents is used for intraBC ME
const int min_alloc_size = block_size_wide[mi_params->mi_alloc_bsize];
const int max_sb_size =
- (1 << (cm->seq_params.mib_size_log2 + MI_SIZE_LOG2));
+ (1 << (cm->seq_params->mib_size_log2 + MI_SIZE_LOG2));
int src_idx = 0;
for (int size = 4; size <= max_sb_size; size *= 2, src_idx = !src_idx) {
const int dst_idx = !src_idx;
@@ -1377,10 +1417,10 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
// is used for ineligible frames. That effectively will turn off row_mt
// usage. Note objective delta_q and tpl eligible frames are only altref
// frames currently.
- const GF_GROUP *gf_group = &cpi->gf_group;
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
if (cm->delta_q_info.delta_q_present_flag) {
if (deltaq_mode == DELTA_Q_OBJECTIVE &&
- !is_frame_tpl_eligible(gf_group, gf_group->index))
+ !is_frame_tpl_eligible(gf_group, cpi->gf_frame_index))
cm->delta_q_info.delta_q_present_flag = 0;
}
@@ -1500,8 +1540,8 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
features->tx_mode = select_tx_mode(cm, tx_search_type);
if (cpi->sf.tx_sf.tx_type_search.prune_tx_type_using_stats) {
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
-
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
for (i = 0; i < TX_SIZES_ALL; i++) {
int sum = 0;
int j;
@@ -1519,13 +1559,33 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
left -= prob;
if (j == 0) prob += left;
frame_probs->tx_type_probs[update_type][i][j] = prob;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ /* TODO(FPMT): The current update is happening in cpi->frame_probs;
+ * this needs to be handled appropriately in the final FPMT implementation
+ * to carry these values to subsequent frames. The frame_probs update is
+ * accumulated across frames, so the values from all individual parallel
+ * frames need to be taken into account after all the parallel frames
+ * are encoded.
+ *
+ * Only for quality simulation purpose - Update the accumulated frame
+ * probabilities in ppi->temp_variable based on the update flag.
+ */
+ if (cpi->do_frame_data_update) {
+ for (int update_type_idx = 0; update_type_idx < FRAME_UPDATE_TYPES;
+ update_type_idx++) {
+ temp_frame_probs->tx_type_probs[update_type_idx][i][j] =
+ frame_probs->tx_type_probs[update_type_idx][i][j];
+ }
+ }
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
}
}
if (cpi->sf.inter_sf.prune_obmc_prob_thresh > 0 &&
cpi->sf.inter_sf.prune_obmc_prob_thresh < INT_MAX) {
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
for (i = 0; i < BLOCK_SIZES_ALL; i++) {
int sum = 0;
@@ -1535,23 +1595,63 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
sum ? 128 * cpi->td.rd_counts.obmc_used[i][1] / sum : 0;
frame_probs->obmc_probs[update_type][i] =
(frame_probs->obmc_probs[update_type][i] + new_prob) >> 1;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ /* TODO(FPMT): The current update is happening in cpi->frame_probs;
+ * this needs to be handled appropriately in the final FPMT
+ * implementation to carry these values to subsequent frames.
+ * The frame_probs update is accumulated across frames, so the
+ * values from all individual parallel frames need to be taken
+ * into account after all the parallel frames are encoded.
+ *
+ * Only for quality simulation purpose - Update the accumulated frame
+ * probabilities in ppi->temp_variable based on the update flag.
+ */
+ if (cpi->do_frame_data_update) {
+ for (int update_type_idx = 0; update_type_idx < FRAME_UPDATE_TYPES;
+ update_type_idx++) {
+ temp_frame_probs->obmc_probs[update_type_idx][i] =
+ frame_probs->obmc_probs[update_type_idx][i];
+ }
+ }
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
}
if (features->allow_warped_motion &&
cpi->sf.inter_sf.prune_warped_prob_thresh > 0) {
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
int sum = 0;
for (i = 0; i < 2; i++) sum += cpi->td.rd_counts.warped_used[i];
const int new_prob = sum ? 128 * cpi->td.rd_counts.warped_used[1] / sum : 0;
frame_probs->warped_probs[update_type] =
(frame_probs->warped_probs[update_type] + new_prob) >> 1;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ /* TODO(FPMT): The current update is happening in cpi->frame_probs;
+ * this needs to be handled appropriately in the final FPMT
+ * implementation to carry these values to subsequent frames.
+ * The frame_probs update is accumulated across frames, so the
+ * values from all individual parallel frames need to be taken
+ * into account after all the parallel frames are encoded.
+ *
+ * Only for quality simulation purpose - Update the accumulated frame
+ * probabilities in ppi->temp_variable based on the update flag.
+ */
+ if (cpi->do_frame_data_update) {
+ for (int update_type_idx = 0; update_type_idx < FRAME_UPDATE_TYPES;
+ update_type_idx++) {
+ temp_frame_probs->warped_probs[update_type_idx] =
+ frame_probs->warped_probs[update_type_idx];
+ }
+ }
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
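The warped-motion and OBMC probability updates above are plain running averages of per-frame usage counts on a 0-128 scale, later compared against a speed-feature threshold to prune the tool. A standalone sketch of that update rule; the counts and the threshold are made up.

#include <stdio.h>

int main(void) {
  int warped_prob = 64;              /* stored probability in [0, 128] */
  const int prune_thresh = 16;       /* hypothetical speed-feature threshold */

  /* Per-frame usage counts: [0] = blocks that did not use warped motion,
   * [1] = blocks that did. */
  const int warped_used[2] = { 90, 10 };
  const int sum = warped_used[0] + warped_used[1];
  const int new_prob = sum ? 128 * warped_used[1] / sum : 0;

  /* Average the stored probability with this frame's observation. */
  warped_prob = (warped_prob + new_prob) >> 1;
  printf("updated prob = %d, prune warped motion: %s\n", warped_prob,
         warped_prob < prune_thresh ? "yes" : "no");
  return 0;
}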
if (cm->current_frame.frame_type != KEY_FRAME &&
cpi->sf.interp_sf.adaptive_interp_filter_search == 2 &&
features->interp_filter == SWITCHABLE) {
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; i++) {
int sum = 0;
@@ -1572,6 +1672,25 @@ static AOM_INLINE void encode_frame_internal(AV1_COMP *cpi) {
left -= prob;
if (j == 0) prob += left;
frame_probs->switchable_interp_probs[update_type][i][j] = prob;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ /* TODO(FPMT): The current update is happening in cpi->frame_probs;
+ * this needs to be handled appropriately in the final FPMT
+ * implementation to carry these values to subsequent frames.
+ * The frame_probs update is accumulated across frames, so the
+ * values from all individual parallel frames need to be taken
+ * into account after all the parallel frames are encoded.
+ *
+ * Only for quality simulation purpose - Update the accumulated frame
+ * probabilities in ppi->temp_variable based on the update flag.
+ */
+ if (cpi->do_frame_data_update) {
+ for (int update_type_idx = 0; update_type_idx < FRAME_UPDATE_TYPES;
+ update_type_idx++) {
+ temp_frame_probs->switchable_interp_probs[update_type_idx][i][j] =
+ frame_probs->switchable_interp_probs[update_type_idx][i][j];
+ }
+ }
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
}
}
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.c b/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.c
index c10b2ffe6c..d3fa50292b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.c
@@ -44,7 +44,6 @@ void av1_set_ssim_rdmult(const AV1_COMP *const cpi, int *errorperbit,
assert(cpi->oxcf.tune_cfg.tuning == AOM_TUNE_SSIM);
- aom_clear_system_state();
for (row = mi_row / num_mi_w;
row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) {
for (col = mi_col / num_mi_h;
@@ -59,20 +58,19 @@ void av1_set_ssim_rdmult(const AV1_COMP *const cpi, int *errorperbit,
*rdmult = (int)((double)(*rdmult) * geom_mean_of_scale + 0.5);
*rdmult = AOMMAX(*rdmult, 0);
av1_set_error_per_bit(errorperbit, *rdmult);
- aom_clear_system_state();
}
// Return the end column for the current superblock, in unit of TPL blocks.
static int get_superblock_tpl_column_end(const AV1_COMMON *const cm, int mi_col,
int num_mi_w) {
// Find the start column of this superblock.
- const int sb_mi_col_start = (mi_col >> cm->seq_params.mib_size_log2)
- << cm->seq_params.mib_size_log2;
+ const int sb_mi_col_start = (mi_col >> cm->seq_params->mib_size_log2)
+ << cm->seq_params->mib_size_log2;
// Same but in superres upscaled dimension.
const int sb_mi_col_start_sr =
coded_to_superres_mi(sb_mi_col_start, cm->superres_scale_denominator);
// Width of this superblock in mi units.
- const int sb_mi_width = mi_size_wide[cm->seq_params.sb_size];
+ const int sb_mi_width = mi_size_wide[cm->seq_params->sb_size];
// Same but in superres upscaled dimension.
const int sb_mi_width_sr =
coded_to_superres_mi(sb_mi_width, cm->superres_scale_denominator);
@@ -86,15 +84,16 @@ int av1_get_hier_tpl_rdmult(const AV1_COMP *const cpi, MACROBLOCK *const x,
const BLOCK_SIZE bsize, const int mi_row,
const int mi_col, int orig_rdmult) {
const AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
- assert(IMPLIES(cpi->gf_group.size > 0,
- cpi->gf_group.index < cpi->gf_group.size));
- const int tpl_idx = cpi->gf_group.index;
- const TplDepFrame *tpl_frame = &cpi->tpl_data.tpl_frame[tpl_idx];
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
+ const int tpl_idx = cpi->gf_frame_index;
const int deltaq_rdmult = set_deltaq_rdmult(cpi, x);
- if (tpl_frame->is_valid == 0) return deltaq_rdmult;
- if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return deltaq_rdmult;
if (tpl_idx >= MAX_TPL_FRAME_IDX) return deltaq_rdmult;
+ const TplDepFrame *tpl_frame = &cpi->ppi->tpl_data.tpl_frame[tpl_idx];
+ if (!tpl_frame->is_valid) return deltaq_rdmult;
+ if (!is_frame_tpl_eligible(gf_group, cpi->gf_frame_index))
+ return deltaq_rdmult;
if (cpi->oxcf.q_cfg.aq_mode != NO_AQ) return deltaq_rdmult;
const int mi_col_sr =
@@ -116,7 +115,6 @@ int av1_get_hier_tpl_rdmult(const AV1_COMP *const cpi, MACROBLOCK *const x,
int row, col;
double base_block_count = 0.0;
double geom_mean_of_scale = 0.0;
- aom_clear_system_state();
for (row = mi_row / num_mi_w;
row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) {
for (col = mi_col_sr / num_mi_h;
@@ -124,7 +122,7 @@ int av1_get_hier_tpl_rdmult(const AV1_COMP *const cpi, MACROBLOCK *const x,
col < sb_bcol_end;
++col) {
const int index = row * num_cols + col;
- geom_mean_of_scale += log(cpi->tpl_sb_rdmult_scaling_factors[index]);
+ geom_mean_of_scale += log(cpi->ppi->tpl_sb_rdmult_scaling_factors[index]);
base_block_count += 1.0;
}
}
@@ -132,8 +130,7 @@ int av1_get_hier_tpl_rdmult(const AV1_COMP *const cpi, MACROBLOCK *const x,
int rdmult = (int)((double)orig_rdmult * geom_mean_of_scale + 0.5);
rdmult = AOMMAX(rdmult, 0);
av1_set_error_per_bit(&x->errorperbit, rdmult);
- aom_clear_system_state();
- if (bsize == cm->seq_params.sb_size) {
+ if (bsize == cm->seq_params->sb_size) {
const int rdmult_sb = set_deltaq_rdmult(cpi, x);
assert(rdmult_sb == rdmult);
(void)rdmult_sb;
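av1_get_hier_tpl_rdmult scales the base rdmult by the geometric mean of the per-16x16 TPL scaling factors the block covers, i.e. rdmult' = rdmult * exp(mean(log s_i)). A small self-contained version of that computation with made-up factors:

#include <math.h>
#include <stdio.h>

/* Scale rdmult by the geometric mean of the covered scaling factors. */
static int scale_rdmult(int rdmult, const double *factors, int count) {
  double log_sum = 0.0;
  for (int i = 0; i < count; ++i) log_sum += log(factors[i]);
  const double geom_mean = exp(log_sum / count);
  int scaled = (int)((double)rdmult * geom_mean + 0.5);
  return scaled < 0 ? 0 : scaled;
}

int main(void) {
  const double factors[4] = { 0.8, 1.1, 1.3, 0.9 };
  printf("%d\n", scale_rdmult(128, factors, 4));  /* prints 129 */
  return 0;
}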
@@ -341,7 +338,7 @@ void av1_update_state(const AV1_COMP *const cpi, ThreadData *td,
const int x_mis = AOMMIN(bw, mi_params->mi_cols - mi_col);
const int y_mis = AOMMIN(bh, mi_params->mi_rows - mi_row);
- if (cm->seq_params.order_hint_info.enable_ref_frame_mvs)
+ if (cm->seq_params->order_hint_info.enable_ref_frame_mvs)
av1_copy_frame_mvs(cm, mi, mi_row, mi_col, x_mis, y_mis);
}
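Much of the mechanical churn in this file is cm->seq_params. becoming cm->seq_params->: the sequence header now lives in the primary (multi-encoder) context and each AV1_COMMON keeps only a pointer to it. A rough sketch of that ownership change with simplified stand-in structs:

typedef struct {
  int sb_size;
  int mib_size;
  int mib_size_log2;
} SeqHeader;                      /* stand-in for SequenceHeader */

typedef struct {
  SeqHeader seq_params;           /* the single shared copy */
} Primary;                        /* stand-in for AV1_PRIMARY */

typedef struct {
  const SeqHeader *seq_params;    /* was an embedded struct before */
} Common;                         /* stand-in for AV1_COMMON */

static void attach_common(Common *cm, Primary *ppi) {
  /* Every encoder's common state points at the one header owned by the
   * primary context, so sequence-level fields cannot diverge. */
  cm->seq_params = &ppi->seq_params;
}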
@@ -604,9 +601,9 @@ static void set_partial_sb_partition(const AV1_COMMON *const cm,
MB_MODE_INFO **mib) {
int bh = bh_in;
int r, c;
- for (r = 0; r < cm->seq_params.mib_size; r += bh) {
+ for (r = 0; r < cm->seq_params->mib_size; r += bh) {
int bw = bw_in;
- for (c = 0; c < cm->seq_params.mib_size; c += bw) {
+ for (c = 0; c < cm->seq_params->mib_size; c += bw) {
const int grid_index = get_mi_grid_idx(&cm->mi_params, r, c);
const int mi_index = get_alloc_mi_idx(&cm->mi_params, r, c);
mib[grid_index] = mi + mi_index;
@@ -638,11 +635,11 @@ void av1_set_fixed_partitioning(AV1_COMP *cpi, const TileInfo *const tile,
assert((mi_rows_remaining > 0) && (mi_cols_remaining > 0));
// Apply the requested partition size to the SB if it is all "in image"
- if ((mi_cols_remaining >= cm->seq_params.mib_size) &&
- (mi_rows_remaining >= cm->seq_params.mib_size)) {
- for (int block_row = 0; block_row < cm->seq_params.mib_size;
+ if ((mi_cols_remaining >= cm->seq_params->mib_size) &&
+ (mi_rows_remaining >= cm->seq_params->mib_size)) {
+ for (int block_row = 0; block_row < cm->seq_params->mib_size;
block_row += bh) {
- for (int block_col = 0; block_col < cm->seq_params.mib_size;
+ for (int block_col = 0; block_col < cm->seq_params->mib_size;
block_col += bw) {
const int grid_index = get_mi_grid_idx(mi_params, block_row, block_col);
const int mi_index = get_alloc_mi_idx(mi_params, block_row, block_col);
@@ -682,25 +679,25 @@ int av1_is_leaf_split_partition(AV1_COMMON *cm, int mi_row, int mi_col,
int av1_get_rdmult_delta(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
int mi_col, int orig_rdmult) {
AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
- assert(IMPLIES(cpi->gf_group.size > 0,
- cpi->gf_group.index < cpi->gf_group.size));
- const int tpl_idx = cpi->gf_group.index;
- TplParams *const tpl_data = &cpi->tpl_data;
- TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
- TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
+ const int tpl_idx = cpi->gf_frame_index;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
- int tpl_stride = tpl_frame->stride;
int64_t intra_cost = 0;
int64_t mc_dep_cost = 0;
const int mi_wide = mi_size_wide[bsize];
const int mi_high = mi_size_high[bsize];
- if (tpl_frame->is_valid == 0) return orig_rdmult;
+ if (tpl_idx >= MAX_TPL_FRAME_IDX) return orig_rdmult;
- if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return orig_rdmult;
+ TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
+ TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
+ int tpl_stride = tpl_frame->stride;
+ if (!tpl_frame->is_valid) return orig_rdmult;
- if (cpi->gf_group.index >= MAX_TPL_FRAME_IDX) return orig_rdmult;
+ if (!is_frame_tpl_eligible(gf_group, cpi->gf_frame_index)) return orig_rdmult;
int mi_count = 0;
const int mi_col_sr =
@@ -727,8 +724,6 @@ int av1_get_rdmult_delta(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
}
assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);
- aom_clear_system_state();
-
double beta = 1.0;
if (mc_dep_cost > 0 && intra_cost > 0) {
const double r0 = cpi->rd.r0;
@@ -738,8 +733,6 @@ int av1_get_rdmult_delta(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
int rdmult = av1_get_adaptive_rdmult(cpi, beta);
- aom_clear_system_state();
-
rdmult = AOMMIN(rdmult, orig_rdmult * 3 / 2);
rdmult = AOMMAX(rdmult, orig_rdmult * 1 / 2);
@@ -760,7 +753,7 @@ int av1_active_h_edge(const AV1_COMP *cpi, int mi_row, int mi_step) {
if (is_stat_consumption_stage_twopass(cpi)) {
const AV1_COMMON *const cm = &cpi->common;
const FIRSTPASS_STATS *const this_frame_stats = read_one_frame_stats(
- &cpi->twopass, cm->current_frame.display_order_hint);
+ &cpi->ppi->twopass, cm->current_frame.display_order_hint);
if (this_frame_stats == NULL) return AOM_CODEC_ERROR;
// The inactive region is specified in MBs not mi units.
@@ -790,7 +783,7 @@ int av1_active_v_edge(const AV1_COMP *cpi, int mi_col, int mi_step) {
if (is_stat_consumption_stage_twopass(cpi)) {
const AV1_COMMON *const cm = &cpi->common;
const FIRSTPASS_STATS *const this_frame_stats = read_one_frame_stats(
- &cpi->twopass, cm->current_frame.display_order_hint);
+ &cpi->ppi->twopass, cm->current_frame.display_order_hint);
if (this_frame_stats == NULL) return AOM_CODEC_ERROR;
// The inactive region is specified in MBs not mi units.
@@ -814,24 +807,26 @@ void av1_get_tpl_stats_sb(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
if (!cpi->oxcf.algo_cfg.enable_tpl_model) return;
if (cpi->common.current_frame.frame_type == KEY_FRAME) return;
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
if (update_type == INTNL_OVERLAY_UPDATE || update_type == OVERLAY_UPDATE)
return;
- assert(IMPLIES(cpi->gf_group.size > 0,
- cpi->gf_group.index < cpi->gf_group.size));
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
AV1_COMMON *const cm = &cpi->common;
- const int gf_group_index = cpi->gf_group.index;
- TplParams *const tpl_data = &cpi->tpl_data;
- TplDepFrame *tpl_frame = &tpl_data->tpl_frame[gf_group_index];
- TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
- int tpl_stride = tpl_frame->stride;
+ const int gf_group_index = cpi->gf_frame_index;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
const int mi_wide = mi_size_wide[bsize];
const int mi_high = mi_size_high[bsize];
- if (tpl_frame->is_valid == 0) return;
if (gf_group_index >= MAX_TPL_FRAME_IDX) return;
+ TplDepFrame *tpl_frame = &tpl_data->tpl_frame[gf_group_index];
+ TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
+ int tpl_stride = tpl_frame->stride;
+ if (!tpl_frame->is_valid) return;
+
int mi_count = 0;
int count = 0;
const int mi_col_sr =
@@ -889,26 +884,26 @@ void av1_get_tpl_stats_sb(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
int av1_get_q_for_deltaq_objective(AV1_COMP *const cpi, BLOCK_SIZE bsize,
int mi_row, int mi_col) {
AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
- assert(IMPLIES(cpi->gf_group.size > 0,
- cpi->gf_group.index < cpi->gf_group.size));
- const int tpl_idx = cpi->gf_group.index;
- TplParams *const tpl_data = &cpi->tpl_data;
- TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
- TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
+ const int tpl_idx = cpi->gf_frame_index;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
- int tpl_stride = tpl_frame->stride;
int64_t intra_cost = 0;
int64_t mc_dep_cost = 0;
const int mi_wide = mi_size_wide[bsize];
const int mi_high = mi_size_high[bsize];
const int base_qindex = cm->quant_params.base_qindex;
- if (tpl_frame->is_valid == 0) return base_qindex;
+ if (tpl_idx >= MAX_TPL_FRAME_IDX) return base_qindex;
- if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return base_qindex;
+ TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
+ TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
+ int tpl_stride = tpl_frame->stride;
+ if (!tpl_frame->is_valid) return base_qindex;
- if (cpi->gf_group.index >= MAX_TPL_FRAME_IDX) return base_qindex;
+ if (!is_frame_tpl_eligible(gf_group, cpi->gf_frame_index)) return base_qindex;
int mi_count = 0;
const int mi_col_sr =
@@ -935,8 +930,6 @@ int av1_get_q_for_deltaq_objective(AV1_COMP *const cpi, BLOCK_SIZE bsize,
}
assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);
- aom_clear_system_state();
-
int offset = 0;
double beta = 1.0;
if (mc_dep_cost > 0 && intra_cost > 0) {
@@ -945,8 +938,7 @@ int av1_get_q_for_deltaq_objective(AV1_COMP *const cpi, BLOCK_SIZE bsize,
beta = (r0 / rk);
assert(beta > 0.0);
}
- offset = av1_get_deltaq_offset(cpi, base_qindex, beta);
- aom_clear_system_state();
+ offset = av1_get_deltaq_offset(cm->seq_params->bit_depth, base_qindex, beta);
const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
offset = AOMMIN(offset, delta_q_info->delta_q_res * 9 - 1);
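The delta-q path above turns the TPL ratio beta = r0 / rk into a q-index offset and clamps it so it stays within what delta_q_res can signal per superblock. A hedged sketch of that step; beta_to_offset is a placeholder, not libaom's av1_get_deltaq_offset:

#include <math.h>

#define CLAMP(x, lo, hi) ((x) < (lo) ? (lo) : ((x) > (hi) ? (hi) : (x)))

/* Placeholder mapping from beta to a q-index offset; the real encoder
 * derives this from its quantizer step tables for the active bit depth. */
static int beta_to_offset(int base_qindex, double beta) {
  return (int)lround(-log2(beta) * base_qindex * 0.1);
}

static int deltaq_offset(int base_qindex, double r0, double rk,
                         int delta_q_res) {
  const double beta = (rk > 0.0 && r0 > 0.0) ? r0 / rk : 1.0;
  int offset = beta_to_offset(base_qindex, beta);
  /* Keep the offset inside the range that delta_q_res can signal. */
  const int max_offset = delta_q_res * 9 - 1;
  return CLAMP(offset, -max_offset, max_offset);
}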
@@ -1164,7 +1156,7 @@ void av1_avg_cdf_symbols(FRAME_CONTEXT *ctx_left, FRAME_CONTEXT *ctx_tr,
void av1_source_content_sb(AV1_COMP *cpi, MACROBLOCK *x, int offset) {
unsigned int tmp_sse;
unsigned int tmp_variance;
- const BLOCK_SIZE bsize = cpi->common.seq_params.sb_size;
+ const BLOCK_SIZE bsize = cpi->common.seq_params->sb_size;
uint8_t *src_y = cpi->source->y_buffer;
int src_ystride = cpi->source->y_stride;
uint8_t *last_src_y = cpi->last_source->y_buffer;
@@ -1178,8 +1170,8 @@ void av1_source_content_sb(AV1_COMP *cpi, MACROBLOCK *x, int offset) {
#endif
src_y += offset;
last_src_y += offset;
- tmp_variance = cpi->fn_ptr[bsize].vf(src_y, src_ystride, last_src_y,
- last_src_ystride, &tmp_sse);
+ tmp_variance = cpi->ppi->fn_ptr[bsize].vf(src_y, src_ystride, last_src_y,
+ last_src_ystride, &tmp_sse);
if (tmp_sse < avg_source_sse_threshold)
x->content_state_sb.source_sad = kLowSad;
else if (tmp_sse > avg_source_sse_threshold_high)
@@ -1233,7 +1225,7 @@ void av1_backup_sb_state(SB_FIRST_PASS_STATS *sb_fp_stats, const AV1_COMP *cpi,
const AV1_COMMON *cm = &cpi->common;
const int num_planes = av1_num_planes(cm);
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
xd->above_txfm_context =
cm->above_contexts.txfm[tile_info->tile_row] + mi_col;
@@ -1269,7 +1261,7 @@ void av1_restore_sb_state(const SB_FIRST_PASS_STATS *sb_fp_stats, AV1_COMP *cpi,
const AV1_COMMON *cm = &cpi->common;
const int num_planes = av1_num_planes(cm);
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
av1_restore_context(x, &sb_fp_stats->x_ctx, mi_row, mi_col, sb_size,
num_planes);
@@ -1294,33 +1286,32 @@ void av1_restore_sb_state(const SB_FIRST_PASS_STATS *sb_fp_stats, AV1_COMP *cpi,
#endif // CONFIG_INTERNAL_STATS
}
-// Checks for skip status of mv cost update.
-static int skip_mv_cost_update(AV1_COMP *cpi, const TileInfo *const tile_info,
- const int mi_row, const int mi_col) {
- // For intra frames, mv cdfs are not updated during the encode. Hence, the mv
- // cost calculation is skipped in this case.
- if (frame_is_intra_only(&cpi->common)) return 1;
- // mv_cost_upd_level=0: update happens at each sb,
- // so return skip status as 0.
- // mv_cost_upd_level=1: update happens once for each sb row,
- // so return skip status as 1 for
- // mi_col != tile_info->mi_col_start.
- // mv_cost_upd_level=2: update happens once for a set of rows,
- // so return skip status as 1 appropriately.
- if (!cpi->sf.inter_sf.mv_cost_upd_level) return 0;
+/*! Checks whether to skip updating the entropy cost based on tile info.
+ *
+ * This function contains the code common to both \ref skip_mv_cost_update and
+ * \ref skip_dv_cost_update.
+ */
+static int skip_cost_update(const SequenceHeader *seq_params,
+ const TileInfo *const tile_info, const int mi_row,
+ const int mi_col,
+ INTERNAL_COST_UPDATE_TYPE upd_level) {
+ if (upd_level == INTERNAL_COST_UPD_SB) return 0;
+ if (upd_level == INTERNAL_COST_UPD_OFF) return 1;
+
+ // The remaining update levels refresh at most once per sb row in a tile.
if (mi_col != tile_info->mi_col_start) return 1;
- if (cpi->sf.inter_sf.mv_cost_upd_level == 2) {
- AV1_COMMON *const cm = &cpi->common;
- const int mib_size_log2 = cm->seq_params.mib_size_log2;
+
+ if (upd_level == INTERNAL_COST_UPD_SBROW_SET) {
+ const int mib_size_log2 = seq_params->mib_size_log2;
const int sb_row = (mi_row - tile_info->mi_row_start) >> mib_size_log2;
- const int sb_size = cm->seq_params.mib_size * MI_SIZE;
+ const int sb_size = seq_params->mib_size * MI_SIZE;
const int tile_height =
(tile_info->mi_row_end - tile_info->mi_row_start) * MI_SIZE;
- // When mv_cost_upd_level = 2, the cost update happens once for 2, 4 sb
- // rows for sb size 128, sb size 64 respectively. However, as the update
- // will not be equally spaced in smaller resolutions making it equally
- // spaced by calculating (mv_num_rows_cost_update) the number of rows
- // after which the cost update should happen.
+ // When upd_level = INTERNAL_COST_UPD_SBROW_SET, the cost update happens
+ // once every 2 sb rows for sb size 128 and once every 4 sb rows for
+ // sb size 64. As that spacing would not be uniform at smaller
+ // resolutions, it is made uniform by computing (mv_num_rows_cost_update)
+ // the number of rows after which the cost update should happen.
const int sb_size_update_freq_map[2] = { 2, 4 };
const int update_freq_sb_rows =
sb_size_update_freq_map[sb_size != MAX_SB_SIZE];
@@ -1337,6 +1328,32 @@ static int skip_mv_cost_update(AV1_COMP *cpi, const TileInfo *const tile_info,
return 0;
}
+// Checks for skip status of mv cost update.
+static int skip_mv_cost_update(AV1_COMP *cpi, const TileInfo *const tile_info,
+ const int mi_row, const int mi_col) {
+ const AV1_COMMON *cm = &cpi->common;
+ // For intra frames, mv cdfs are not updated during the encode. Hence, the mv
+ // cost calculation is skipped in this case.
+ if (frame_is_intra_only(cm)) return 1;
+
+ return skip_cost_update(cm->seq_params, tile_info, mi_row, mi_col,
+ cpi->sf.inter_sf.mv_cost_upd_level);
+}
+
+// Checks for skip status of dv cost update.
+static int skip_dv_cost_update(AV1_COMP *cpi, const TileInfo *const tile_info,
+ const int mi_row, const int mi_col) {
+ const AV1_COMMON *cm = &cpi->common;
+ // Intrabc is only applicable to intra frames. So skip if intrabc is not
+ // allowed.
+ if (!av1_allow_intrabc(cm) || is_stat_generation_stage(cpi)) {
+ return 1;
+ }
+
+ return skip_cost_update(cm->seq_params, tile_info, mi_row, mi_col,
+ cpi->sf.intra_sf.dv_cost_upd_level);
+}
+
// Update the rate costs of some symbols according to the frequency directed
// by speed features
void av1_set_cost_upd_freq(AV1_COMP *cpi, ThreadData *td,
@@ -1355,6 +1372,9 @@ void av1_set_cost_upd_freq(AV1_COMP *cpi, ThreadData *td,
if (mi_col != tile_info->mi_col_start) break;
AOM_FALLTHROUGH_INTENDED;
case COST_UPD_SB: // SB level
+ if (cpi->sf.inter_sf.coeff_cost_upd_level == INTERNAL_COST_UPD_SBROW &&
+ mi_col != tile_info->mi_col_start)
+ break;
av1_fill_coeff_costs(&x->coeff_costs, xd->tile_ctx, num_planes);
break;
default: assert(0);
@@ -1368,6 +1388,9 @@ void av1_set_cost_upd_freq(AV1_COMP *cpi, ThreadData *td,
if (mi_col != tile_info->mi_col_start) break;
AOM_FALLTHROUGH_INTENDED;
case COST_UPD_SB: // SB level
+ if (cpi->sf.inter_sf.mode_cost_upd_level == INTERNAL_COST_UPD_SBROW &&
+ mi_col != tile_info->mi_col_start)
+ break;
av1_fill_mode_rates(cm, &x->mode_costs, xd->tile_ctx);
break;
default: assert(0);
@@ -1388,4 +1411,19 @@ void av1_set_cost_upd_freq(AV1_COMP *cpi, ThreadData *td,
break;
default: assert(0);
}
+
+ switch (cpi->oxcf.cost_upd_freq.dv) {
+ case COST_UPD_OFF:
+ case COST_UPD_TILE: // Tile level
+ break;
+ case COST_UPD_SBROW: // SB row level in tile
+ if (mi_col != tile_info->mi_col_start) break;
+ AOM_FALLTHROUGH_INTENDED;
+ case COST_UPD_SB: // SB level
+ // Checks for skip status of dv cost update.
+ if (skip_dv_cost_update(cpi, tile_info, mi_row, mi_col)) break;
+ av1_fill_dv_costs(&xd->tile_ctx->ndvc, x->dv_costs);
+ break;
+ default: assert(0);
+ }
}
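The refactor above pulls the common skip decision into skip_cost_update so the new dv path can reuse it; INTERNAL_COST_UPD_SBROW_SET refreshes the costs only every few superblock rows. A simplified sketch of that decision, leaving out the tile-height equalization the real code performs (names illustrative):

enum { UPD_SB, UPD_SBROW, UPD_SBROW_SET, UPD_OFF };

/* Decide whether to skip refreshing rate costs at this position.
 * UPD_SB refreshes every superblock, UPD_SBROW once per SB row,
 * UPD_SBROW_SET once every `freq` SB rows, UPD_OFF never refreshes. */
static int skip_cost_refresh(int level, int is_tile_col0, int sb_row,
                             int freq) {
  if (level == UPD_SB) return 0;
  if (level == UPD_OFF) return 1;
  if (!is_tile_col0) return 1;          /* at most once per SB row */
  if (level == UPD_SBROW) return 0;
  return (sb_row % freq) != 0;          /* UPD_SBROW_SET */
}

Per the map in the hunk above, freq would be roughly 2 for 128x128 superblocks and 4 for 64x64 ones.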
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.h b/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.h
index 7bdfad5cba..3096181885 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.h
+++ b/third_party/libaom/source/libaom/av1/encoder/encodeframe_utils.h
@@ -13,17 +13,68 @@
#define AOM_AV1_ENCODER_ENCODEFRAME_UTILS_H_
#include "aom_ports/aom_timer.h"
+#include "aom_ports/system_state.h"
#include "av1/common/reconinter.h"
#include "av1/encoder/encoder.h"
-#include "av1/encoder/partition_strategy.h"
#include "av1/encoder/rdopt.h"
#ifdef __cplusplus
extern "C" {
#endif
+#define WRITE_FEATURE_TO_FILE 0
+
+#define FEATURE_SIZE_SMS_SPLIT_FAST 6
+#define FEATURE_SIZE_SMS_SPLIT 17
+#define FEATURE_SIZE_SMS_PRUNE_PART 25
+#define FEATURE_SIZE_SMS_TERM_NONE 28
+#define FEATURE_SIZE_FP_SMS_TERM_NONE 20
+#define FEATURE_SIZE_MAX_MIN_PART_PRED 13
+#define MAX_NUM_CLASSES_MAX_MIN_PART_PRED 4
+
+#define FEATURE_SMS_NONE_FLAG 1
+#define FEATURE_SMS_SPLIT_FLAG (1 << 1)
+#define FEATURE_SMS_RECT_FLAG (1 << 2)
+
+#define FEATURE_SMS_PRUNE_PART_FLAG \
+ (FEATURE_SMS_NONE_FLAG | FEATURE_SMS_SPLIT_FLAG | FEATURE_SMS_RECT_FLAG)
+#define FEATURE_SMS_SPLIT_MODEL_FLAG \
+ (FEATURE_SMS_NONE_FLAG | FEATURE_SMS_SPLIT_FLAG)
+
+// Number of sub-partitions in rectangular partition types.
+#define SUB_PARTITIONS_RECT 2
+
+// Number of sub-partitions in split partition type.
+#define SUB_PARTITIONS_SPLIT 4
+
+// Number of sub-partitions in AB partition types.
+#define SUB_PARTITIONS_AB 3
+
+// Number of sub-partitions in 4-way partition types.
+#define SUB_PARTITIONS_PART4 4
+
+// 4-way partition types.
+enum { HORZ4 = 0, VERT4, NUM_PART4_TYPES } UENUM1BYTE(PART4_TYPES);
+
+// AB partition types.
+enum {
+ HORZ_A = 0,
+ HORZ_B,
+ VERT_A,
+ VERT_B,
+ NUM_AB_PARTS
+} UENUM1BYTE(AB_PART_TYPE);
+
+// Rectangular partition types.
+enum { HORZ = 0, VERT, NUM_RECT_PARTS } UENUM1BYTE(RECT_PART_TYPE);
+
+// Structure to keep win flags for HORZ and VERT partition evaluations.
+typedef struct {
+ int rect_part_win[NUM_RECT_PARTS];
+} RD_RECT_PART_WIN_INFO;
+
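The feature-set flags added above are plain bitmasks, so a caller can request several simple-motion-search feature groups in one value (FEATURE_SMS_PRUNE_PART_FLAG is just NONE | SPLIT | RECT). A tiny illustrative check, with shortened names and a made-up caller:

#define SMS_NONE_FLAG  1
#define SMS_SPLIT_FLAG (1 << 1)
#define SMS_RECT_FLAG  (1 << 2)
#define SMS_PRUNE_PART_FLAG (SMS_NONE_FLAG | SMS_SPLIT_FLAG | SMS_RECT_FLAG)

/* Gather only the feature groups requested by `flags`. */
static void collect_sms_features(int flags) {
  if (flags & SMS_NONE_FLAG)  { /* features of the NONE partition    */ }
  if (flags & SMS_SPLIT_FLAG) { /* features of the 4-way SPLIT       */ }
  if (flags & SMS_RECT_FLAG)  { /* features of HORZ/VERT partitions  */ }
}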
enum { PICK_MODE_RD = 0, PICK_MODE_NONRD };
enum {
@@ -218,47 +269,6 @@ static AOM_INLINE const FIRSTPASS_STATS *read_one_frame_stats(const TWO_PASS *p,
return &p->stats_buf_ctx->stats_in_start[frm];
}
-static BLOCK_SIZE dim_to_size(int dim) {
- switch (dim) {
- case 4: return BLOCK_4X4;
- case 8: return BLOCK_8X8;
- case 16: return BLOCK_16X16;
- case 32: return BLOCK_32X32;
- case 64: return BLOCK_64X64;
- case 128: return BLOCK_128X128;
- default: assert(0); return 0;
- }
-}
-
-static AOM_INLINE void set_max_min_partition_size(SuperBlockEnc *sb_enc,
- AV1_COMP *cpi, MACROBLOCK *x,
- const SPEED_FEATURES *sf,
- BLOCK_SIZE sb_size,
- int mi_row, int mi_col) {
- const AV1_COMMON *cm = &cpi->common;
-
- sb_enc->max_partition_size =
- AOMMIN(sf->part_sf.default_max_partition_size,
- dim_to_size(cpi->oxcf.part_cfg.max_partition_size));
- sb_enc->min_partition_size =
- AOMMAX(sf->part_sf.default_min_partition_size,
- dim_to_size(cpi->oxcf.part_cfg.min_partition_size));
- sb_enc->max_partition_size =
- AOMMIN(sb_enc->max_partition_size, cm->seq_params.sb_size);
- sb_enc->min_partition_size =
- AOMMIN(sb_enc->min_partition_size, cm->seq_params.sb_size);
-
- if (use_auto_max_partition(cpi, sb_size, mi_row, mi_col)) {
- float features[FEATURE_SIZE_MAX_MIN_PART_PRED] = { 0.0f };
-
- av1_get_max_min_partition_features(cpi, x, mi_row, mi_col, features);
- sb_enc->max_partition_size =
- AOMMAX(AOMMIN(av1_predict_max_partition(cpi, x, features),
- sb_enc->max_partition_size),
- sb_enc->min_partition_size);
- }
-}
-
int av1_get_rdmult_delta(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
int mi_col, int orig_rdmult);
@@ -335,6 +345,57 @@ void av1_set_cost_upd_freq(AV1_COMP *cpi, ThreadData *td,
const TileInfo *const tile_info, const int mi_row,
const int mi_col);
+static AOM_INLINE void av1_dealloc_mb_data(struct AV1Common *cm,
+ struct macroblock *mb) {
+ if (mb->txfm_search_info.txb_rd_records) {
+ aom_free(mb->txfm_search_info.txb_rd_records);
+ mb->txfm_search_info.txb_rd_records = NULL;
+ }
+ const int num_planes = av1_num_planes(cm);
+ for (int plane = 0; plane < num_planes; plane++) {
+ if (mb->plane[plane].src_diff) {
+ aom_free(mb->plane[plane].src_diff);
+ mb->plane[plane].src_diff = NULL;
+ }
+ }
+ if (mb->e_mbd.seg_mask) {
+ aom_free(mb->e_mbd.seg_mask);
+ mb->e_mbd.seg_mask = NULL;
+ }
+ if (mb->winner_mode_stats) {
+ aom_free(mb->winner_mode_stats);
+ mb->winner_mode_stats = NULL;
+ }
+}
+
+static AOM_INLINE void av1_alloc_mb_data(struct AV1Common *cm,
+ struct macroblock *mb,
+ int use_nonrd_pick_mode) {
+ if (!use_nonrd_pick_mode) {
+ mb->txfm_search_info.txb_rd_records =
+ (TxbRdRecords *)aom_malloc(sizeof(TxbRdRecords));
+ }
+ const int num_planes = av1_num_planes(cm);
+ for (int plane = 0; plane < num_planes; plane++) {
+ const int subsampling_xy =
+ plane ? cm->seq_params->subsampling_x + cm->seq_params->subsampling_y
+ : 0;
+ const int sb_size = MAX_SB_SQUARE >> subsampling_xy;
+ CHECK_MEM_ERROR(cm, mb->plane[plane].src_diff,
+ (int16_t *)aom_memalign(
+ 32, sizeof(*mb->plane[plane].src_diff) * sb_size));
+ }
+ CHECK_MEM_ERROR(cm, mb->e_mbd.seg_mask,
+ (uint8_t *)aom_memalign(
+ 16, 2 * MAX_SB_SQUARE * sizeof(mb->e_mbd.seg_mask[0])));
+ const int winner_mode_count = frame_is_intra_only(cm)
+ ? MAX_WINNER_MODE_COUNT_INTRA
+ : MAX_WINNER_MODE_COUNT_INTER;
+ CHECK_MEM_ERROR(cm, mb->winner_mode_stats,
+ (WinnerModeStats *)aom_malloc(
+ winner_mode_count * sizeof(mb->winner_mode_stats[0])));
+}
+
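av1_alloc_mb_data above sizes each plane's residual buffer by the sequence's chroma subsampling: a 4:2:0 chroma plane needs MAX_SB_SQUARE >> 2 samples. The sizing rule on its own, as a minimal sketch (aom_memalign and error checking replaced by plain malloc):

#include <stdint.h>
#include <stdlib.h>

#define MAX_SB_SQUARE (128 * 128)

/* Number of int16_t residual samples needed for one plane of a
 * superblock, given the plane's subsampling factors. Plane 0 (luma)
 * is never subsampled. */
static size_t plane_src_diff_samples(int plane, int subsampling_x,
                                     int subsampling_y) {
  const int shift = plane ? subsampling_x + subsampling_y : 0;
  return (size_t)(MAX_SB_SQUARE >> shift);
}

static int16_t *alloc_src_diff(int plane, int ss_x, int ss_y) {
  return (int16_t *)malloc(plane_src_diff_samples(plane, ss_x, ss_y) *
                           sizeof(int16_t));
}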
// This function will compute the number of reference frames to be disabled
// based on selective_ref_frame speed feature.
static AOM_INLINE unsigned int get_num_refs_to_disable(
@@ -359,7 +420,7 @@ static AOM_INLINE unsigned int get_num_refs_to_disable(
#if !CONFIG_REALTIME_ONLY
else if (is_stat_consumption_stage_twopass(cpi)) {
const FIRSTPASS_STATS *const this_frame_stats =
- read_one_frame_stats(&cpi->twopass, cur_frame_display_index);
+ read_one_frame_stats(&cpi->ppi->twopass, cur_frame_display_index);
aom_clear_system_state();
const double coded_error_per_mb =
this_frame_stats->coded_error / cpi->frame_info.num_mbs;
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodemb.c b/third_party/libaom/source/libaom/av1/encoder/encodemb.c
index c9ee22034b..2a875e1223 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodemb.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encodemb.c
@@ -35,19 +35,19 @@
#include "av1/encoder/rd.h"
#include "av1/encoder/rdopt.h"
-void av1_subtract_block(const MACROBLOCKD *xd, int rows, int cols,
- int16_t *diff, ptrdiff_t diff_stride,
- const uint8_t *src8, ptrdiff_t src_stride,
- const uint8_t *pred8, ptrdiff_t pred_stride) {
+void av1_subtract_block(BitDepthInfo bd_info, int rows, int cols, int16_t *diff,
+ ptrdiff_t diff_stride, const uint8_t *src8,
+ ptrdiff_t src_stride, const uint8_t *pred8,
+ ptrdiff_t pred_stride) {
assert(rows >= 4 && cols >= 4);
#if CONFIG_AV1_HIGHBITDEPTH
- if (is_cur_buf_hbd(xd)) {
+ if (bd_info.use_highbitdepth_buf) {
aom_highbd_subtract_block(rows, cols, diff, diff_stride, src8, src_stride,
- pred8, pred_stride, xd->bd);
+ pred8, pred_stride, bd_info.bit_depth);
return;
}
#endif
- (void)xd;
+ (void)bd_info;
aom_subtract_block(rows, cols, diff, diff_stride, src8, src_stride, pred8,
pred_stride);
}
@@ -55,6 +55,7 @@ void av1_subtract_block(const MACROBLOCKD *xd, int rows, int cols,
void av1_subtract_txb(MACROBLOCK *x, int plane, BLOCK_SIZE plane_bsize,
int blk_col, int blk_row, TX_SIZE tx_size) {
MACROBLOCKD *const xd = &x->e_mbd;
+ const BitDepthInfo bd_info = get_bit_depth_info(xd);
struct macroblock_plane *const p = &x->plane[plane];
const struct macroblockd_plane *const pd = &x->e_mbd.plane[plane];
const int diff_stride = block_size_wide[plane_bsize];
@@ -66,8 +67,8 @@ void av1_subtract_txb(MACROBLOCK *x, int plane, BLOCK_SIZE plane_bsize,
uint8_t *src = &p->src.buf[(blk_row * src_stride + blk_col) << MI_SIZE_LOG2];
int16_t *src_diff =
&p->src_diff[(blk_row * diff_stride + blk_col) << MI_SIZE_LOG2];
- av1_subtract_block(xd, tx1d_height, tx1d_width, src_diff, diff_stride, src,
- src_stride, dst, dst_stride);
+ av1_subtract_block(bd_info, tx1d_height, tx1d_width, src_diff, diff_stride,
+ src, src_stride, dst, dst_stride);
}
void av1_subtract_plane(MACROBLOCK *x, BLOCK_SIZE plane_bsize, int plane) {
@@ -77,9 +78,10 @@ void av1_subtract_plane(MACROBLOCK *x, BLOCK_SIZE plane_bsize, int plane) {
const int bw = block_size_wide[plane_bsize];
const int bh = block_size_high[plane_bsize];
const MACROBLOCKD *xd = &x->e_mbd;
+ const BitDepthInfo bd_info = get_bit_depth_info(xd);
- av1_subtract_block(xd, bh, bw, p->src_diff, bw, p->src.buf, p->src.stride,
- pd->dst.buf, pd->dst.stride);
+ av1_subtract_block(bd_info, bh, bw, p->src_diff, bw, p->src.buf,
+ p->src.stride, pd->dst.buf, pd->dst.stride);
}
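av1_subtract_block now takes a small BitDepthInfo value instead of the whole MACROBLOCKD, so callers without an xd can still select the right 8-bit or high-bit-depth path. A hedged sketch of that dispatch shape with simplified types; the high-bit-depth branch is stubbed out:

#include <stdint.h>

typedef struct {
  int bit_depth;
  int use_highbitdepth_buf;
} BdInfo;  /* stand-in for BitDepthInfo */

/* Residual = source - prediction, choosing the buffer layout from the
 * bit-depth info rather than from the macroblock descriptor. */
static void subtract_block(BdInfo bd, int rows, int cols, int16_t *diff,
                           int diff_stride, const uint8_t *src,
                           int src_stride, const uint8_t *pred,
                           int pred_stride) {
  if (bd.use_highbitdepth_buf) {
    /* In libaom the 16-bit samples sit behind a CONVERT_TO_BYTEPTR-style
     * uint8_t pointer; a real implementation would widen here. Omitted
     * to keep the sketch short. */
    return;
  }
  for (int r = 0; r < rows; ++r)
    for (int c = 0; c < cols; ++c)
      diff[r * diff_stride + c] =
          (int16_t)(src[r * src_stride + c] - pred[r * pred_stride + c]);
}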
int av1_optimize_b(const struct AV1_COMP *cpi, MACROBLOCK *x, int plane,
@@ -132,13 +134,8 @@ const int DROPOUT_MULTIPLIER_Q_BASE = 32; // Base Q to compute multiplier.
void av1_dropout_qcoeff(MACROBLOCK *mb, int plane, int block, TX_SIZE tx_size,
TX_TYPE tx_type, int qindex) {
- const struct macroblock_plane *const p = &mb->plane[plane];
- tran_low_t *const qcoeff = p->qcoeff + BLOCK_OFFSET(block);
- tran_low_t *const dqcoeff = p->dqcoeff + BLOCK_OFFSET(block);
const int tx_width = tx_size_wide[tx_size];
const int tx_height = tx_size_high[tx_size];
- const int max_eob = av1_get_max_eob(tx_size);
- const SCAN_ORDER *const scan_order = get_scan(tx_size, tx_type);
// Early return if `qindex` is out of range.
if (qindex > DROPOUT_Q_MAX || qindex < DROPOUT_Q_MIN) {
@@ -156,6 +153,19 @@ void av1_dropout_qcoeff(MACROBLOCK *mb, int plane, int block, TX_SIZE tx_size,
multiplier *
CLIP(base_size, DROPOUT_AFTER_BASE_MIN, DROPOUT_AFTER_BASE_MAX);
+ av1_dropout_qcoeff_num(mb, plane, block, tx_size, tx_type, dropout_num_before,
+ dropout_num_after);
+}
+
+void av1_dropout_qcoeff_num(MACROBLOCK *mb, int plane, int block,
+ TX_SIZE tx_size, TX_TYPE tx_type,
+ int dropout_num_before, int dropout_num_after) {
+ const struct macroblock_plane *const p = &mb->plane[plane];
+ tran_low_t *const qcoeff = p->qcoeff + BLOCK_OFFSET(block);
+ tran_low_t *const dqcoeff = p->dqcoeff + BLOCK_OFFSET(block);
+ const int max_eob = av1_get_max_eob(tx_size);
+ const SCAN_ORDER *const scan_order = get_scan(tx_size, tx_type);
+
// Early return if there are not enough non-zero coefficients.
if (p->eobs[block] == 0 || p->eobs[block] <= dropout_num_before) {
return;
@@ -172,7 +182,8 @@ void av1_dropout_qcoeff(MACROBLOCK *mb, int plane, int block, TX_SIZE tx_size,
for (int i = 0; i < p->eobs[block]; ++i) {
const int scan_idx = scan_order->scan[i];
- if (qcoeff[scan_idx] > DROPOUT_COEFF_MAX) { // Keep large coefficients.
+ if (abs(qcoeff[scan_idx]) > DROPOUT_COEFF_MAX) {
+ // Keep large coefficients.
count_zeros_before = 0;
count_zeros_after = 0;
idx = -1;
@@ -197,6 +208,7 @@ void av1_dropout_qcoeff(MACROBLOCK *mb, int plane, int block, TX_SIZE tx_size,
if (count_nonzeros > DROPOUT_CONTINUITY_MAX) {
count_zeros_before = 0;
count_zeros_after = 0;
+ count_nonzeros = 0;
idx = -1;
eob = i + 1;
}
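av1_dropout_qcoeff is split so the zero-run thresholds can be passed in directly via av1_dropout_qcoeff_num. A simplified, quadratic sketch of the underlying idea: zero out small quantized coefficients that sit between sufficiently long runs of zeros (the real code works in scan order, tracks the eob, and does this in a single pass):

#include <stdlib.h>

/* Drop "isolated" small quantized coefficients: a coefficient with
 * magnitude <= max_small is zeroed when it is preceded by at least
 * num_before zeros and followed by at least num_after zeros. */
static void dropout_small_coeffs(int *q, int n, int max_small,
                                 int num_before, int num_after) {
  for (int i = 0; i < n; ++i) {
    if (q[i] == 0 || abs(q[i]) > max_small) continue;
    int before = 0, after = 0;
    for (int j = i - 1; j >= 0 && q[j] == 0; --j) ++before;
    for (int j = i + 1; j < n && q[j] == 0; ++j) ++after;
    if (before >= num_before && after >= num_after) q[i] = 0;
  }
}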
@@ -513,15 +525,17 @@ static void encode_block_inter(int plane, int block, int blk_row, int blk_col,
const int bsw = tx_size_wide_unit[sub_txs];
const int bsh = tx_size_high_unit[sub_txs];
const int step = bsh * bsw;
+ const int row_end =
+ AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
+ const int col_end =
+ AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
assert(bsw > 0 && bsh > 0);
- for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh) {
- for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
- const int offsetr = blk_row + row;
+ for (int row = 0; row < row_end; row += bsh) {
+ const int offsetr = blk_row + row;
+ for (int col = 0; col < col_end; col += bsw) {
const int offsetc = blk_col + col;
- if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
-
encode_block_inter(plane, block, offsetr, offsetc, plane_bsize, sub_txs,
arg, dry_run);
block += step;
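encode_block_inter above clamps row_end and col_end once with AOMMIN instead of testing every sub-block against max_blocks_high/wide inside the loop. The same transformation in isolation, as a minimal sketch:

#define MIN(a, b) ((a) < (b) ? (a) : (b))

/* Before: visit every (row, col) and skip the ones past the image edge.
 * After: clamp the loop bounds once, so the body never needs the test. */
static void visit_subblocks(int blk_row, int blk_col, int tx_h, int tx_w,
                            int max_h, int max_w, int bsh, int bsw,
                            void (*visit)(int r, int c)) {
  const int row_end = MIN(tx_h, max_h - blk_row);
  const int col_end = MIN(tx_w, max_w - blk_col);
  for (int row = 0; row < row_end; row += bsh)
    for (int col = 0; col < col_end; col += bsw)
      visit(blk_row + row, blk_col + col);
}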
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodemb.h b/third_party/libaom/source/libaom/av1/encoder/encodemb.h
index fcd34a3908..f2dc956a65 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodemb.h
+++ b/third_party/libaom/source/libaom/av1/encoder/encodemb.h
@@ -123,11 +123,16 @@ int av1_optimize_b(const struct AV1_COMP *cpi, MACROBLOCK *mb, int plane,
// `txb_entropy_ctx`, which `mb` points to, may be modified by this function.
void av1_dropout_qcoeff(MACROBLOCK *mb, int plane, int block, TX_SIZE tx_size,
TX_TYPE tx_type, int qindex);
-
-void av1_subtract_block(const MACROBLOCKD *xd, int rows, int cols,
- int16_t *diff, ptrdiff_t diff_stride,
- const uint8_t *src8, ptrdiff_t src_stride,
- const uint8_t *pred8, ptrdiff_t pred_stride);
+// Same as above, but the number of zeros required before/after a coeff to
+// drop it is passed in explicitly instead of being derived from qindex.
+void av1_dropout_qcoeff_num(MACROBLOCK *mb, int plane, int block,
+ TX_SIZE tx_size, TX_TYPE tx_type,
+ int dropout_num_before, int dropout_num_after);
+
+void av1_subtract_block(BitDepthInfo bd_info, int rows, int cols, int16_t *diff,
+ ptrdiff_t diff_stride, const uint8_t *src8,
+ ptrdiff_t src_stride, const uint8_t *pred8,
+ ptrdiff_t pred_stride);
void av1_subtract_txb(MACROBLOCK *x, int plane, BLOCK_SIZE plane_bsize,
int blk_col, int blk_row, TX_SIZE tx_size);
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodemv.c b/third_party/libaom/source/libaom/av1/encoder/encodemv.c
index 86c6156d8f..4a7d87408c 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodemv.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encodemv.c
@@ -173,8 +173,8 @@ static void build_nmv_component_cost_table(int *mvcost,
}
}
-void av1_encode_mv(AV1_COMP *cpi, aom_writer *w, const MV *mv, const MV *ref,
- nmv_context *mvctx, int usehp) {
+void av1_encode_mv(AV1_COMP *cpi, aom_writer *w, ThreadData *td, const MV *mv,
+ const MV *ref, nmv_context *mvctx, int usehp) {
const MV diff = { mv->row - ref->row, mv->col - ref->col };
const MV_JOINT_TYPE j = av1_get_mv_joint(&diff);
// If the mv_diff is zero, then we should have used near or nearest instead.
@@ -193,8 +193,7 @@ void av1_encode_mv(AV1_COMP *cpi, aom_writer *w, const MV *mv, const MV *ref,
// motion vector component used.
if (cpi->sf.mv_sf.auto_mv_step_size) {
int maxv = AOMMAX(abs(mv->row), abs(mv->col)) >> 3;
- cpi->mv_search_params.max_mv_magnitude =
- AOMMAX(maxv, cpi->mv_search_params.max_mv_magnitude);
+ td->max_mv_magnitude = AOMMAX(maxv, td->max_mv_magnitude);
}
}
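av1_encode_mv now records the running maximum MV magnitude in the per-thread ThreadData rather than in the shared mv_search_params, presumably so tile threads do not all write one field; the per-thread maxima can then be folded together once the threads join. A small sketch of that accumulate-then-reduce pattern (thread plumbing omitted, names illustrative):

#include <stdlib.h>

/* Per-thread accumulator updated while encoding MVs on that thread. */
typedef struct {
  int max_mv_magnitude;
} ThreadAccum;

static int int_max(int a, int b) { return a > b ? a : b; }

static void record_mv(ThreadAccum *td, int mv_row, int mv_col) {
  const int maxv = int_max(abs(mv_row), abs(mv_col)) >> 3;
  td->max_mv_magnitude = int_max(td->max_mv_magnitude, maxv);
}

/* After all threads finish, fold the per-thread maxima into one value. */
static int reduce_max_mv(const ThreadAccum *tds, int num_threads) {
  int m = 0;
  for (int i = 0; i < num_threads; ++i)
    m = int_max(m, tds[i].max_mv_magnitude);
  return m;
}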
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodemv.h b/third_party/libaom/source/libaom/av1/encoder/encodemv.h
index 9f0d607295..962844bc79 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodemv.h
+++ b/third_party/libaom/source/libaom/av1/encoder/encodemv.h
@@ -18,8 +18,8 @@
extern "C" {
#endif
-void av1_encode_mv(AV1_COMP *cpi, aom_writer *w, const MV *mv, const MV *ref,
- nmv_context *mvctx, int usehp);
+void av1_encode_mv(AV1_COMP *cpi, aom_writer *w, ThreadData *td, const MV *mv,
+ const MV *ref, nmv_context *mvctx, int usehp);
void av1_update_mv_stats(const MV *mv, const MV *ref, nmv_context *mvctx,
MvSubpelPrecision precision);
diff --git a/third_party/libaom/source/libaom/av1/encoder/encoder.c b/third_party/libaom/source/libaom/av1/encoder/encoder.c
index 955d15631c..41122ef45b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encoder.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encoder.c
@@ -51,6 +51,7 @@
#include "av1/encoder/aq_variance.h"
#include "av1/encoder/bitstream.h"
#include "av1/encoder/context_tree.h"
+#include "av1/encoder/dwt.h"
#include "av1/encoder/encodeframe.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/encode_strategy.h"
@@ -81,10 +82,6 @@
#define DEFAULT_EXPLICIT_ORDER_HINT_BITS 7
-#if CONFIG_ENTROPY_STATS
-FRAME_COUNTS aggregate_fc;
-#endif // CONFIG_ENTROPY_STATS
-
// #define OUTPUT_YUV_REC
#ifdef OUTPUT_YUV_REC
FILE *yuv_rec_file;
@@ -228,7 +225,7 @@ double av1_get_compression_ratio(const AV1_COMMON *const cm,
const int upscaled_width = cm->superres_upscaled_width;
const int height = cm->height;
const int luma_pic_size = upscaled_width * height;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const BITSTREAM_PROFILE profile = seq_params->profile;
const int pic_size_profile_factor =
profile == PROFILE_0 ? 15 : (profile == PROFILE_1 ? 30 : 36);
@@ -242,7 +239,7 @@ double av1_get_compression_ratio(const AV1_COMMON *const cm,
static void set_tile_info(AV1_COMMON *const cm,
const TileConfig *const tile_cfg) {
const CommonModeInfoParams *const mi_params = &cm->mi_params;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
CommonTileParams *const tiles = &cm->tiles;
int i, start_sb;
@@ -298,7 +295,7 @@ void av1_update_frame_size(AV1_COMP *cpi) {
// We need to reallocate the context buffers here in case we need more mis.
if (av1_alloc_context_buffers(cm, cm->width, cm->height)) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate context buffers");
}
av1_init_mi_buffers(&cm->mi_params);
@@ -308,8 +305,10 @@ void av1_update_frame_size(AV1_COMP *cpi) {
if (!is_stat_generation_stage(cpi))
alloc_context_buffers_ext(cm, &cpi->mbmi_ext_info);
- if (!cpi->seq_params_locked)
- set_sb_size(&cm->seq_params, av1_select_sb_size(cpi));
+ if (!cpi->ppi->seq_params_locked)
+ set_sb_size(cm->seq_params,
+ av1_select_sb_size(&cpi->oxcf, cm->width, cm->height,
+ cpi->svc.number_spatial_layers));
set_tile_info(cm, &cpi->oxcf.tile_cfg);
}
@@ -327,9 +326,9 @@ static INLINE int does_level_match(int width, int height, double fps,
height <= lvl_height * lvl_dim_mult;
}
-static void set_bitstream_level_tier(SequenceHeader *seq, AV1_COMMON *cm,
- int width, int height,
- double init_framerate) {
+static void set_bitstream_level_tier(AV1_PRIMARY *const ppi, int width,
+ int height, double init_framerate) {
+ SequenceHeader *const seq_params = &ppi->seq_params;
// TODO(any): This is a placeholder function that only addresses dimensions
// and max display sample rates.
// Need to add checks for max bit rate, max decoded luma sample rate, header
@@ -372,26 +371,26 @@ static void set_bitstream_level_tier(SequenceHeader *seq, AV1_COMMON *cm,
level = SEQ_LEVEL_6_2;
}
- SequenceHeader *const seq_params = &cm->seq_params;
for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i) {
- seq->seq_level_idx[i] = level;
+ seq_params->seq_level_idx[i] = level;
// Set the maximum parameters for bitrate and buffer size for this profile,
// level, and tier
seq_params->op_params[i].bitrate = av1_max_level_bitrate(
- cm->seq_params.profile, seq->seq_level_idx[i], seq->tier[i]);
+ seq_params->profile, seq_params->seq_level_idx[i], seq_params->tier[i]);
// Level with seq_level_idx = 31 returns a high "dummy" bitrate to pass the
// check
if (seq_params->op_params[i].bitrate == 0)
aom_internal_error(
- &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ &ppi->error, AOM_CODEC_UNSUP_BITSTREAM,
"AV1 does not support this combination of profile, level, and tier.");
// Buffer size in bits/s is bitrate in bits/s * 1 s
seq_params->op_params[i].buffer_size = seq_params->op_params[i].bitrate;
}
}
-void av1_init_seq_coding_tools(SequenceHeader *seq, AV1_COMMON *cm,
+void av1_init_seq_coding_tools(AV1_PRIMARY *const ppi,
const AV1EncoderConfig *oxcf, int use_svc) {
+ SequenceHeader *const seq = &ppi->seq_params;
const FrameDimensionCfg *const frm_dim_cfg = &oxcf->frm_dim_cfg;
const ToolCfg *const tool_cfg = &oxcf->tool_cfg;
@@ -449,7 +448,7 @@ void av1_init_seq_coding_tools(SequenceHeader *seq, AV1_COMMON *cm,
seq->enable_intra_edge_filter = oxcf->intra_mode_cfg.enable_intra_edge_filter;
seq->enable_filter_intra = oxcf->intra_mode_cfg.enable_filter_intra;
- set_bitstream_level_tier(seq, cm, frm_dim_cfg->width, frm_dim_cfg->height,
+ set_bitstream_level_tier(ppi, frm_dim_cfg->width, frm_dim_cfg->height,
oxcf->input_cfg.init_framerate);
if (seq->operating_points_cnt_minus_1 == 0) {
@@ -461,26 +460,27 @@ void av1_init_seq_coding_tools(SequenceHeader *seq, AV1_COMMON *cm,
// skip decoding enhancement layers (temporal first).
int i = 0;
assert(seq->operating_points_cnt_minus_1 ==
- (int)(cm->number_spatial_layers * cm->number_temporal_layers - 1));
- for (unsigned int sl = 0; sl < cm->number_spatial_layers; sl++) {
- for (unsigned int tl = 0; tl < cm->number_temporal_layers; tl++) {
+ (int)(ppi->number_spatial_layers * ppi->number_temporal_layers - 1));
+ for (unsigned int sl = 0; sl < ppi->number_spatial_layers; sl++) {
+ for (unsigned int tl = 0; tl < ppi->number_temporal_layers; tl++) {
seq->operating_point_idc[i] =
- (~(~0u << (cm->number_spatial_layers - sl)) << 8) |
- ~(~0u << (cm->number_temporal_layers - tl));
+ (~(~0u << (ppi->number_spatial_layers - sl)) << 8) |
+ ~(~0u << (ppi->number_temporal_layers - tl));
i++;
}
}
}
}
-static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
- AV1_COMMON *const cm = &cpi->common;
- SequenceHeader *const seq_params = &cm->seq_params;
- ResizePendingParams *resize_pending_params = &cpi->resize_pending_params;
+static void init_config_sequence(struct AV1_PRIMARY *ppi,
+ AV1EncoderConfig *oxcf) {
+ SequenceHeader *const seq_params = &ppi->seq_params;
const DecoderModelCfg *const dec_model_cfg = &oxcf->dec_model_cfg;
const ColorCfg *const color_cfg = &oxcf->color_cfg;
- cpi->oxcf = *oxcf;
- cpi->framerate = oxcf->input_cfg.init_framerate;
+
+ ppi->use_svc = 0;
+ ppi->number_spatial_layers = 1;
+ ppi->number_temporal_layers = 1;
seq_params->profile = oxcf->profile;
seq_params->bit_depth = oxcf->tool_cfg.bit_depth;
@@ -508,7 +508,7 @@ static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
// set the decoder model parameters in schedule mode
seq_params->decoder_model_info.num_units_in_decoding_tick =
dec_model_cfg->num_units_in_decoding_tick;
- cm->buffer_removal_time_present = 1;
+ ppi->buffer_removal_time_present = 1;
av1_set_aom_dec_model_info(&seq_params->decoder_model_info);
av1_set_dec_model_op_parameters(&seq_params->op_params[0]);
} else if (seq_params->timing_info_present &&
@@ -546,11 +546,19 @@ static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
}
}
}
+ av1_change_config_seq(ppi, oxcf, NULL);
+}
+
+static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
+ AV1_COMMON *const cm = &cpi->common;
+ ResizePendingParams *resize_pending_params = &cpi->resize_pending_params;
+
+ cpi->oxcf = *oxcf;
+ cpi->framerate = oxcf->input_cfg.init_framerate;
cm->width = oxcf->frm_dim_cfg.width;
cm->height = oxcf->frm_dim_cfg.height;
- set_sb_size(seq_params,
- av1_select_sb_size(cpi)); // set sb size before allocations
+
alloc_compressor_data(cpi);
av1_update_film_grain_parameters(cpi, oxcf);
@@ -559,18 +567,15 @@ static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
cpi->td.counts = &cpi->counts;
// Set init SVC parameters.
- cpi->use_svc = 0;
- cpi->svc.external_ref_frame_config = 0;
+ cpi->svc.set_ref_frame_config = 0;
cpi->svc.non_reference_frame = 0;
cpi->svc.number_spatial_layers = 1;
cpi->svc.number_temporal_layers = 1;
- cm->number_spatial_layers = 1;
- cm->number_temporal_layers = 1;
cm->spatial_layer_id = 0;
cm->temporal_layer_id = 0;
// change includes all joint functionality
- av1_change_config(cpi, oxcf);
+ av1_change_config(cpi, oxcf, true);
cpi->ref_frame_flags = 0;
@@ -583,25 +588,13 @@ static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
av1_noise_estimate_init(&cpi->noise_estimate, cm->width, cm->height);
}
-void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
- AV1_COMMON *const cm = &cpi->common;
- SequenceHeader *const seq_params = &cm->seq_params;
- RATE_CONTROL *const rc = &cpi->rc;
- MACROBLOCK *const x = &cpi->td.mb;
- AV1LevelParams *const level_params = &cpi->level_params;
- InitialDimensions *const initial_dimensions = &cpi->initial_dimensions;
- RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
- const FrameDimensionCfg *const frm_dim_cfg = &cpi->oxcf.frm_dim_cfg;
+void av1_change_config_seq(struct AV1_PRIMARY *ppi,
+ const AV1EncoderConfig *oxcf,
+ bool *is_sb_size_changed) {
+ SequenceHeader *const seq_params = &ppi->seq_params;
+ const FrameDimensionCfg *const frm_dim_cfg = &oxcf->frm_dim_cfg;
const DecoderModelCfg *const dec_model_cfg = &oxcf->dec_model_cfg;
const ColorCfg *const color_cfg = &oxcf->color_cfg;
- const RateControlCfg *const rc_cfg = &oxcf->rc_cfg;
- // in case of LAP, lag in frames is set according to number of lap buffers
- // calculated at init time. This stores and restores LAP's lag in frames to
- // prevent override by new cfg.
- int lap_lag_in_frames = -1;
- if (cpi->lap_enabled && cpi->compressor_stage == LAP_STAGE) {
- lap_lag_in_frames = cpi->oxcf.gf_cfg.lag_in_frames;
- }
if (seq_params->profile != oxcf->profile) seq_params->profile = oxcf->profile;
seq_params->bit_depth = oxcf->tool_cfg.bit_depth;
@@ -632,7 +625,7 @@ void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
// set the decoder model parameters in schedule mode
seq_params->decoder_model_info.num_units_in_decoding_tick =
dec_model_cfg->num_units_in_decoding_tick;
- cm->buffer_removal_time_present = 1;
+ ppi->buffer_removal_time_present = 1;
av1_set_aom_dec_model_info(&seq_params->decoder_model_info);
av1_set_dec_model_op_parameters(&seq_params->op_params[0]);
} else if (seq_params->timing_info_present &&
@@ -645,6 +638,56 @@ void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
10; // Default value (not signaled)
}
+ av1_update_film_grain_parameters_seq(ppi, oxcf);
+
+ int sb_size = seq_params->sb_size;
+ // Superblock size should not be updated after the first key frame.
+ if (!ppi->seq_params_locked) {
+ set_sb_size(seq_params, av1_select_sb_size(oxcf, frm_dim_cfg->width,
+ frm_dim_cfg->height,
+ ppi->number_spatial_layers));
+ for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i)
+ seq_params->tier[i] = (oxcf->tier_mask >> i) & 1;
+ }
+ if (is_sb_size_changed != NULL && sb_size != seq_params->sb_size)
+ *is_sb_size_changed = true;
+
+ // Init sequence level coding tools
+ // This should not be called after the first key frame.
+ if (!ppi->seq_params_locked) {
+ seq_params->operating_points_cnt_minus_1 =
+ (ppi->number_spatial_layers > 1 || ppi->number_temporal_layers > 1)
+ ? ppi->number_spatial_layers * ppi->number_temporal_layers - 1
+ : 0;
+ av1_init_seq_coding_tools(ppi, oxcf, ppi->use_svc);
+ }
+
+#if CONFIG_AV1_HIGHBITDEPTH
+ highbd_set_var_fns(ppi);
+#endif
+}
+
+void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf,
+ bool is_sb_size_changed) {
+ AV1_COMMON *const cm = &cpi->common;
+ SequenceHeader *const seq_params = cm->seq_params;
+ RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ MACROBLOCK *const x = &cpi->td.mb;
+ AV1LevelParams *const level_params = &cpi->ppi->level_params;
+ InitialDimensions *const initial_dimensions = &cpi->initial_dimensions;
+ RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
+ const FrameDimensionCfg *const frm_dim_cfg = &cpi->oxcf.frm_dim_cfg;
+ const RateControlCfg *const rc_cfg = &oxcf->rc_cfg;
+
+ // In the case of LAP, lag in frames is set according to the number of lap
+ // buffers calculated at init time. This stores and restores LAP's lag in
+ // frames to prevent it from being overridden by the new cfg.
+ int lap_lag_in_frames = -1;
+ if (cpi->ppi->lap_enabled && cpi->compressor_stage == LAP_STAGE) {
+ lap_lag_in_frames = cpi->oxcf.gf_cfg.lag_in_frames;
+ }
+
av1_update_film_grain_parameters(cpi, oxcf);
cpi->oxcf = *oxcf;
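av1_change_config is split here into a sequence-level stage (av1_change_config_seq, operating on the primary context) and a per-encoder stage that is told whether the superblock size changed. A sketch of how the two stages might be chained for a primary context with one attached encoder; this is not the actual libaom call site and assumes the encoder headers are included:

#include <stdbool.h>

/* Hypothetical call site; the real one lives in the aom codec layer. */
static void reconfigure(struct AV1_PRIMARY *ppi, struct AV1_COMP *cpi,
                        const AV1EncoderConfig *oxcf) {
  bool sb_size_changed = false;
  /* Sequence-wide state first: sequence header, SB size, level/tier. */
  av1_change_config_seq(ppi, oxcf, &sb_size_changed);
  /* Then per-encoder state, which may need to reallocate buffers when
   * the superblock size changed. */
  av1_change_config(cpi, oxcf, sb_size_changed);
}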
@@ -680,10 +723,10 @@ void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
seq_params->tier[0]);
}
- if ((has_no_stats_stage(cpi)) && (rc_cfg->mode == AOM_Q)) {
- rc->baseline_gf_interval = FIXED_GF_INTERVAL;
+ if (has_no_stats_stage(cpi) && (rc_cfg->mode == AOM_Q)) {
+ p_rc->baseline_gf_interval = FIXED_GF_INTERVAL;
} else {
- rc->baseline_gf_interval = (MIN_GF_INTERVAL + MAX_GF_INTERVAL) / 2;
+ p_rc->baseline_gf_interval = (MIN_GF_INTERVAL + MAX_GF_INTERVAL) / 2;
}
refresh_frame_flags->golden_frame = false;
@@ -720,16 +763,23 @@ void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
}
}
+ if (x->pixel_gradient_info == NULL) {
+ const int plane_types = PLANE_TYPES >> cm->seq_params->monochrome;
+ CHECK_MEM_ERROR(cm, x->pixel_gradient_info,
+ aom_malloc(sizeof(*x->pixel_gradient_info) * plane_types *
+ MAX_SB_SQUARE));
+ }
+
av1_reset_segment_features(cm);
av1_set_high_precision_mv(cpi, 1, 0);
- set_rc_buffer_sizes(rc, rc_cfg);
+ set_rc_buffer_sizes(cpi);
// Under a configuration change, where maximum_buffer_size may change,
// keep buffer level clipped to the maximum allowed buffer size.
- rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
- rc->buffer_level = AOMMIN(rc->buffer_level, rc->maximum_buffer_size);
+ rc->bits_off_target = AOMMIN(rc->bits_off_target, p_rc->maximum_buffer_size);
+ rc->buffer_level = AOMMIN(rc->buffer_level, p_rc->maximum_buffer_size);
// Set up frame rate and related parameters rate control values.
av1_new_framerate(cpi, cpi->framerate);
@@ -752,18 +802,9 @@ void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
cm->width = frm_dim_cfg->width;
cm->height = frm_dim_cfg->height;
- int sb_size = seq_params->sb_size;
- // Superblock size should not be updated after the first key frame.
- if (!cpi->seq_params_locked) {
- set_sb_size(&cm->seq_params, av1_select_sb_size(cpi));
- for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i)
- seq_params->tier[i] = (oxcf->tier_mask >> i) & 1;
- }
-
- if (initial_dimensions->width || sb_size != seq_params->sb_size) {
+ if (initial_dimensions->width || is_sb_size_changed) {
if (cm->width > initial_dimensions->width ||
- cm->height > initial_dimensions->height ||
- seq_params->sb_size != sb_size) {
+ cm->height > initial_dimensions->height || is_sb_size_changed) {
av1_free_context_buffers(cm);
av1_free_shared_coeff_buffer(&cpi->td.shared_coeff_buf);
av1_free_sms_tree(&cpi->td);
@@ -780,27 +821,15 @@ void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
set_tile_info(cm, &cpi->oxcf.tile_cfg);
- if (!cpi->svc.external_ref_frame_config)
+ if (!cpi->svc.set_ref_frame_config)
cpi->ext_flags.refresh_frame.update_pending = 0;
cpi->ext_flags.refresh_frame_context_pending = 0;
-#if CONFIG_AV1_HIGHBITDEPTH
- highbd_set_var_fns(cpi);
-#endif
-
- // Init sequence level coding tools
- // This should not be called after the first key frame.
- if (!cpi->seq_params_locked) {
- seq_params->operating_points_cnt_minus_1 =
- (cm->number_spatial_layers > 1 || cm->number_temporal_layers > 1)
- ? cm->number_spatial_layers * cm->number_temporal_layers - 1
- : 0;
- av1_init_seq_coding_tools(&cm->seq_params, cm, oxcf, cpi->use_svc);
- }
-
- if (cpi->use_svc)
+ if (cpi->ppi->use_svc)
av1_update_layer_context_change_config(cpi, rc_cfg->target_bandwidth);
+ check_reset_rc_flag(cpi);
+
// restore the value of lag_in_frame for LAP stage.
if (lap_lag_in_frames != -1) {
cpi->oxcf.gf_cfg.lag_in_frames = lap_lag_in_frames;
@@ -810,7 +839,7 @@ void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
static INLINE void init_frame_info(FRAME_INFO *frame_info,
const AV1_COMMON *const cm) {
const CommonModeInfoParams *const mi_params = &cm->mi_params;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
frame_info->frame_width = cm->width;
frame_info->frame_height = cm->height;
frame_info->mi_cols = mi_params->mi_cols;
@@ -834,73 +863,44 @@ static INLINE void update_frame_index_set(FRAME_INDEX_SET *frame_index_set,
}
}
-AV1_PRIMARY *av1_create_primary_compressor() {
+AV1_PRIMARY *av1_create_primary_compressor(
+ struct aom_codec_pkt_list *pkt_list_head, int num_lap_buffers,
+ AV1EncoderConfig *oxcf) {
AV1_PRIMARY *volatile const ppi = aom_memalign(32, sizeof(AV1_PRIMARY));
if (!ppi) return NULL;
av1_zero(*ppi);
- return ppi;
-}
-
-AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
- BufferPool *const pool,
- FIRSTPASS_STATS *frame_stats_buf,
- COMPRESSOR_STAGE stage, int num_lap_buffers,
- int lap_lag_in_frames,
- STATS_BUFFER_CTX *stats_buf_context) {
- AV1_COMP *volatile const cpi = aom_memalign(32, sizeof(AV1_COMP));
- AV1_COMMON *volatile const cm = cpi != NULL ? &cpi->common : NULL;
-
- if (!cm) return NULL;
-
- av1_zero(*cpi);
-
- cpi->ppi = ppi;
-
// The jmp_buf is valid only for the duration of the function that calls
// setjmp(). Therefore, this function must reset the 'setjmp' field to 0
// before it returns.
- if (setjmp(cm->error.jmp)) {
- cm->error.setjmp = 0;
- av1_remove_compressor(cpi);
+ if (setjmp(ppi->error.jmp)) {
+ ppi->error.setjmp = 0;
+ av1_remove_primary_compressor(ppi);
return 0;
}
+ ppi->error.setjmp = 1;
- cm->error.setjmp = 1;
- cpi->lap_enabled = num_lap_buffers > 0;
- cpi->compressor_stage = stage;
-
- CommonModeInfoParams *const mi_params = &cm->mi_params;
- mi_params->free_mi = enc_free_mi;
- mi_params->setup_mi = enc_setup_mi;
- mi_params->set_mb_mi = (oxcf->pass == 1 || cpi->compressor_stage == LAP_STAGE)
- ? stat_stage_set_mb_mi
- : enc_set_mb_mi;
-
- mi_params->mi_alloc_bsize = BLOCK_4X4;
-
- CHECK_MEM_ERROR(cm, cm->fc,
- (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
- CHECK_MEM_ERROR(
- cm, cm->default_frame_context,
- (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->default_frame_context)));
- memset(cm->fc, 0, sizeof(*cm->fc));
- memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));
-
- cpi->common.buffer_pool = pool;
+ ppi->seq_params_locked = 0;
+ ppi->lap_enabled = num_lap_buffers > 0;
+ ppi->output_pkt_list = pkt_list_head;
+ ppi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
+ ppi->frames_left = oxcf->input_cfg.limit;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ ppi->max_mv_magnitude = 0;
+ ppi->num_fp_contexts = 1;
+#endif
- init_config(cpi, oxcf);
- if (cpi->compressor_stage == LAP_STAGE) {
- cpi->oxcf.gf_cfg.lag_in_frames = lap_lag_in_frames;
- }
+ init_config_sequence(ppi, oxcf);
- cpi->frames_left = cpi->oxcf.input_cfg.limit;
+#if CONFIG_ENTROPY_STATS
+ av1_zero(ppi->aggregate_fc);
+#endif // CONFIG_ENTROPY_STATS
- av1_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc);
+ av1_primary_rc_init(oxcf, &ppi->p_rc);
// For two pass and lag_in_frames > 33 in LAP.
- cpi->rc.enable_scenecut_detection = ENABLE_SCENECUT_MODE_2;
- if (cpi->lap_enabled) {
+ ppi->p_rc.enable_scenecut_detection = ENABLE_SCENECUT_MODE_2;
+ if (ppi->lap_enabled) {
if ((num_lap_buffers <
(MAX_GF_LENGTH_LAP + SCENE_CUT_KEY_TEST_INTERVAL + 1)) &&
num_lap_buffers >= (MAX_GF_LENGTH_LAP + 3)) {
@@ -908,219 +908,22 @@ AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
* For lag in frames >= 19 and <33, enable scenecut
* with limited future frame prediction.
*/
- cpi->rc.enable_scenecut_detection = ENABLE_SCENECUT_MODE_1;
+ ppi->p_rc.enable_scenecut_detection = ENABLE_SCENECUT_MODE_1;
} else if (num_lap_buffers < (MAX_GF_LENGTH_LAP + 3)) {
// Disable scenecut when lag_in_frames < 19.
- cpi->rc.enable_scenecut_detection = DISABLE_SCENECUT;
+ ppi->p_rc.enable_scenecut_detection = DISABLE_SCENECUT;
}
}
- init_frame_info(&cpi->frame_info, cm);
- init_frame_index_set(&cpi->frame_index_set);
-
- cm->current_frame.frame_number = 0;
- cm->current_frame_id = -1;
- cpi->seq_params_locked = 0;
- cpi->partition_search_skippable_frame = 0;
- cpi->tile_data = NULL;
- cpi->last_show_frame_buf = NULL;
- realloc_segmentation_maps(cpi);
-
- cpi->refresh_frame.alt_ref_frame = false;
-
- cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
-#if CONFIG_INTERNAL_STATS
- cpi->b_calculate_blockiness = 1;
- cpi->b_calculate_consistency = 1;
- cpi->total_inconsistency = 0;
- cpi->psnr[0].worst = 100.0;
- cpi->psnr[1].worst = 100.0;
- cpi->worst_ssim = 100.0;
- cpi->worst_ssim_hbd = 100.0;
-
- cpi->count[0] = 0;
- cpi->count[1] = 0;
- cpi->bytes = 0;
-#if CONFIG_SPEED_STATS
- cpi->tx_search_count = 0;
-#endif // CONFIG_SPEED_STATS
-
- if (cpi->b_calculate_psnr) {
- cpi->total_sq_error[0] = 0;
- cpi->total_samples[0] = 0;
- cpi->total_sq_error[1] = 0;
- cpi->total_samples[1] = 0;
- cpi->tot_recode_hits = 0;
- cpi->summed_quality = 0;
- cpi->summed_weights = 0;
- cpi->summed_quality_hbd = 0;
- cpi->summed_weights_hbd = 0;
- }
-
- cpi->fastssim.worst = 100.0;
- cpi->psnrhvs.worst = 100.0;
-
- if (cpi->b_calculate_blockiness) {
- cpi->total_blockiness = 0;
- cpi->worst_blockiness = 0.0;
- }
-
- if (cpi->b_calculate_consistency) {
- CHECK_MEM_ERROR(
- cm, cpi->ssim_vars,
- aom_malloc(sizeof(*cpi->ssim_vars) * 4 * cpi->common.mi_params.mi_rows *
- cpi->common.mi_params.mi_cols));
- cpi->worst_consistency = 100.0;
- }
-#endif
-#if CONFIG_ENTROPY_STATS
- av1_zero(aggregate_fc);
-#endif // CONFIG_ENTROPY_STATS
-
- cpi->time_stamps.first_ts_start = INT64_MAX;
-
-#ifdef OUTPUT_YUV_REC
- yuv_rec_file = fopen("rec.yuv", "wb");
-#endif
-#ifdef OUTPUT_YUV_DENOISED
- yuv_denoised_file = fopen("denoised.yuv", "wb");
-#endif
-
- assert(MAX_LAP_BUFFERS >= MAX_LAG_BUFFERS);
- int size = get_stats_buf_size(num_lap_buffers, MAX_LAG_BUFFERS);
- for (int i = 0; i < size; i++)
- cpi->twopass.frame_stats_arr[i] = &frame_stats_buf[i];
-
- cpi->twopass.stats_buf_ctx = stats_buf_context;
- cpi->twopass.stats_in = cpi->twopass.stats_buf_ctx->stats_in_start;
-
-#if !CONFIG_REALTIME_ONLY
- if (is_stat_consumption_stage(cpi)) {
- const size_t packet_sz = sizeof(FIRSTPASS_STATS);
- const int packets = (int)(oxcf->twopass_stats_in.sz / packet_sz);
-
- if (!cpi->lap_enabled) {
- /*Re-initialize to stats buffer, populated by application in the case of
- * two pass*/
- cpi->twopass.stats_buf_ctx->stats_in_start = oxcf->twopass_stats_in.buf;
- cpi->twopass.stats_in = cpi->twopass.stats_buf_ctx->stats_in_start;
- cpi->twopass.stats_buf_ctx->stats_in_end =
- &cpi->twopass.stats_buf_ctx->stats_in_start[packets - 1];
-
- av1_init_second_pass(cpi);
- } else {
- av1_init_single_pass_lap(cpi);
- }
- }
-#endif
-
- alloc_obmc_buffers(&cpi->td.mb.obmc_buffer, cm);
-
- CHECK_MEM_ERROR(
- cm, cpi->td.mb.inter_modes_info,
- (InterModesInfo *)aom_malloc(sizeof(*cpi->td.mb.inter_modes_info)));
-
- for (int x = 0; x < 2; x++)
- for (int y = 0; y < 2; y++)
- CHECK_MEM_ERROR(
- cm, cpi->td.mb.intrabc_hash_info.hash_value_buffer[x][y],
- (uint32_t *)aom_malloc(
- AOM_BUFFER_SIZE_FOR_BLOCK_HASH *
- sizeof(*cpi->td.mb.intrabc_hash_info.hash_value_buffer[0][0])));
-
- cpi->td.mb.intrabc_hash_info.g_crc_initialized = 0;
-
- av1_set_speed_features_framesize_independent(cpi, oxcf->speed);
- av1_set_speed_features_framesize_dependent(cpi, oxcf->speed);
-
- CHECK_MEM_ERROR(cm, cpi->consec_zero_mv,
- aom_calloc((mi_params->mi_rows * mi_params->mi_cols) >> 2,
- sizeof(*cpi->consec_zero_mv)));
-
- {
- const int bsize = BLOCK_16X16;
- const int w = mi_size_wide[bsize];
- const int h = mi_size_high[bsize];
- const int num_cols = (mi_params->mi_cols + w - 1) / w;
- const int num_rows = (mi_params->mi_rows + h - 1) / h;
- CHECK_MEM_ERROR(cm, cpi->tpl_rdmult_scaling_factors,
- aom_calloc(num_rows * num_cols,
- sizeof(*cpi->tpl_rdmult_scaling_factors)));
- CHECK_MEM_ERROR(cm, cpi->tpl_sb_rdmult_scaling_factors,
- aom_calloc(num_rows * num_cols,
- sizeof(*cpi->tpl_sb_rdmult_scaling_factors)));
- }
-
- {
- const int bsize = BLOCK_16X16;
- const int w = mi_size_wide[bsize];
- const int h = mi_size_high[bsize];
- const int num_cols = (mi_params->mi_cols + w - 1) / w;
- const int num_rows = (mi_params->mi_rows + h - 1) / h;
- CHECK_MEM_ERROR(cm, cpi->ssim_rdmult_scaling_factors,
- aom_calloc(num_rows * num_cols,
- sizeof(*cpi->ssim_rdmult_scaling_factors)));
- }
-
-#if CONFIG_TUNE_VMAF
- {
- const int bsize = BLOCK_64X64;
- const int w = mi_size_wide[bsize];
- const int h = mi_size_high[bsize];
- const int num_cols = (mi_params->mi_cols + w - 1) / w;
- const int num_rows = (mi_params->mi_rows + h - 1) / h;
- CHECK_MEM_ERROR(cm, cpi->vmaf_info.rdmult_scaling_factors,
- aom_calloc(num_rows * num_cols,
- sizeof(*cpi->vmaf_info.rdmult_scaling_factors)));
- for (int i = 0; i < MAX_ARF_LAYERS; i++) {
- cpi->vmaf_info.last_frame_unsharp_amount[i] = -1.0;
- cpi->vmaf_info.last_frame_ysse[i] = -1.0;
- cpi->vmaf_info.last_frame_vmaf[i] = -1.0;
- }
- cpi->vmaf_info.original_qindex = -1;
-
-#if CONFIG_USE_VMAF_RC
- cpi->vmaf_info.vmaf_model = NULL;
-#endif
- }
-#endif
-
-#if CONFIG_TUNE_BUTTERAUGLI
- {
- const int w = mi_size_wide[butteraugli_rdo_bsize];
- const int h = mi_size_high[butteraugli_rdo_bsize];
- const int num_cols = (mi_params->mi_cols + w - 1) / w;
- const int num_rows = (mi_params->mi_rows + h - 1) / h;
- CHECK_MEM_ERROR(
- cm, cpi->butteraugli_info.rdmult_scaling_factors,
- aom_malloc(num_rows * num_cols *
- sizeof(*cpi->butteraugli_info.rdmult_scaling_factors)));
- memset(&cpi->butteraugli_info.source, 0,
- sizeof(cpi->butteraugli_info.source));
- memset(&cpi->butteraugli_info.resized_source, 0,
- sizeof(cpi->butteraugli_info.resized_source));
- cpi->butteraugli_info.recon_set = false;
- }
-#endif
-
-#if !CONFIG_REALTIME_ONLY
- if (!is_stat_generation_stage(cpi)) {
- av1_setup_tpl_buffers(cm, &cpi->tpl_data, cpi->oxcf.gf_cfg.lag_in_frames);
- }
-#endif
-
-#if CONFIG_COLLECT_PARTITION_STATS
- av1_zero(cpi->partition_stats);
-#endif // CONFIG_COLLECT_PARTITION_STATS
#define BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
- cpi->fn_ptr[BT].sdf = SDF; \
- cpi->fn_ptr[BT].sdaf = SDAF; \
- cpi->fn_ptr[BT].vf = VF; \
- cpi->fn_ptr[BT].svf = SVF; \
- cpi->fn_ptr[BT].svaf = SVAF; \
- cpi->fn_ptr[BT].sdx4df = SDX4DF; \
- cpi->fn_ptr[BT].jsdaf = JSDAF; \
- cpi->fn_ptr[BT].jsvaf = JSVAF;
+ ppi->fn_ptr[BT].sdf = SDF; \
+ ppi->fn_ptr[BT].sdaf = SDAF; \
+ ppi->fn_ptr[BT].vf = VF; \
+ ppi->fn_ptr[BT].svf = SVF; \
+ ppi->fn_ptr[BT].svaf = SVAF; \
+ ppi->fn_ptr[BT].sdx4df = SDX4DF; \
+ ppi->fn_ptr[BT].jsdaf = JSDAF; \
+ ppi->fn_ptr[BT].jsvaf = JSVAF;
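+  // For illustration only: each supported block size is expected to be
+  // registered with an invocation of the form (kernel names hypothetical)
+  //   BFP(BLOCK_16X16, aom_sad16x16, aom_sad16x16_avg, aom_variance16x16,
+  //       aom_sub_pixel_variance16x16, aom_sub_pixel_avg_variance16x16,
+  //       aom_sad16x16x4d, aom_dist_wtd_sad16x16_avg,
+  //       aom_dist_wtd_sub_pixel_avg_variance16x16)
+  // mirroring the OBFP/MBFP/SDSFP registrations shown further down.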
// Realtime mode doesn't use 4x rectangular blocks.
#if !CONFIG_REALTIME_ONLY
@@ -1233,9 +1036,9 @@ AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
#if !CONFIG_REALTIME_ONLY
#define OBFP(BT, OSDF, OVF, OSVF) \
- cpi->fn_ptr[BT].osdf = OSDF; \
- cpi->fn_ptr[BT].ovf = OVF; \
- cpi->fn_ptr[BT].osvf = OSVF;
+ ppi->fn_ptr[BT].osdf = OSDF; \
+ ppi->fn_ptr[BT].ovf = OVF; \
+ ppi->fn_ptr[BT].osvf = OSVF;
OBFP(BLOCK_128X128, aom_obmc_sad128x128, aom_obmc_variance128x128,
aom_obmc_sub_pixel_variance128x128)
@@ -1284,8 +1087,8 @@ AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
#endif // !CONFIG_REALTIME_ONLY
#define MBFP(BT, MCSDF, MCSVF) \
- cpi->fn_ptr[BT].msdf = MCSDF; \
- cpi->fn_ptr[BT].msvf = MCSVF;
+ ppi->fn_ptr[BT].msdf = MCSDF; \
+ ppi->fn_ptr[BT].msvf = MCSVF;
MBFP(BLOCK_128X128, aom_masked_sad128x128,
aom_masked_sub_pixel_variance128x128)
@@ -1315,8 +1118,8 @@ AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
#endif
#define SDSFP(BT, SDSF, SDSX4DF) \
- cpi->fn_ptr[BT].sdsf = SDSF; \
- cpi->fn_ptr[BT].sdsx4df = SDSX4DF;
+ ppi->fn_ptr[BT].sdsf = SDSF; \
+ ppi->fn_ptr[BT].sdsx4df = SDSX4DF;
SDSFP(BLOCK_128X128, aom_sad_skip_128x128, aom_sad_skip_128x128x4d);
SDSFP(BLOCK_128X64, aom_sad_skip_128x64, aom_sad_skip_128x64x4d);
@@ -1346,16 +1149,281 @@ AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
#undef SDSFP
#if CONFIG_AV1_HIGHBITDEPTH
- highbd_set_var_fns(cpi);
+ highbd_set_var_fns(ppi);
+#endif
+
+ {
+    // Because cm->mi_params is part of the frame-level context (cpi), it is
+    // unavailable at this point. mi_params is therefore created as a local
+    // temporary variable and passed into the functions that allocate the tpl
+    // buffers. Its values are populated from the initial width and height of
+    // the frame.
+ CommonModeInfoParams mi_params;
+ enc_set_mb_mi(&mi_params, oxcf->frm_dim_cfg.width,
+ oxcf->frm_dim_cfg.height);
+
+ const int bsize = BLOCK_16X16;
+ const int w = mi_size_wide[bsize];
+ const int h = mi_size_high[bsize];
+ const int num_cols = (mi_params.mi_cols + w - 1) / w;
+ const int num_rows = (mi_params.mi_rows + h - 1) / h;
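+    // For example, assuming a 1920x1080 input: the 4x4 mode-info grid is
+    // 480x270 units and BLOCK_16X16 spans 4x4 of those units, so the tables
+    // allocated below hold num_cols = (480 + 3) / 4 = 120 by
+    // num_rows = (270 + 3) / 4 = 68 scaling factors.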
+ AOM_CHECK_MEM_ERROR(&ppi->error, ppi->tpl_rdmult_scaling_factors,
+ aom_calloc(num_rows * num_cols,
+ sizeof(*ppi->tpl_rdmult_scaling_factors)));
+ AOM_CHECK_MEM_ERROR(
+ &ppi->error, ppi->tpl_sb_rdmult_scaling_factors,
+ aom_calloc(num_rows * num_cols,
+ sizeof(*ppi->tpl_sb_rdmult_scaling_factors)));
+
+#if !CONFIG_REALTIME_ONLY
+ if (oxcf->pass != 1) {
+ av1_setup_tpl_buffers(ppi, &mi_params, oxcf->frm_dim_cfg.width,
+ oxcf->frm_dim_cfg.height, 0,
+ oxcf->gf_cfg.lag_in_frames);
+ }
+#endif
+
+#if CONFIG_INTERNAL_STATS
+ ppi->b_calculate_blockiness = 1;
+ ppi->b_calculate_consistency = 1;
+
+ for (int i = 0; i <= STAT_ALL; i++) {
+ ppi->psnr[0].stat[i] = 0;
+ ppi->psnr[1].stat[i] = 0;
+
+ ppi->fastssim.stat[i] = 0;
+ ppi->psnrhvs.stat[i] = 0;
+ }
+
+ ppi->psnr[0].worst = 100.0;
+ ppi->psnr[1].worst = 100.0;
+ ppi->worst_ssim = 100.0;
+ ppi->worst_ssim_hbd = 100.0;
+
+ ppi->count[0] = 0;
+ ppi->count[1] = 0;
+ ppi->total_bytes = 0;
+
+ if (ppi->b_calculate_psnr) {
+ ppi->total_sq_error[0] = 0;
+ ppi->total_samples[0] = 0;
+ ppi->total_sq_error[1] = 0;
+ ppi->total_samples[1] = 0;
+ ppi->total_recode_hits = 0;
+ ppi->summed_quality = 0;
+ ppi->summed_weights = 0;
+ ppi->summed_quality_hbd = 0;
+ ppi->summed_weights_hbd = 0;
+ }
+
+ ppi->fastssim.worst = 100.0;
+ ppi->psnrhvs.worst = 100.0;
+
+ if (ppi->b_calculate_blockiness) {
+ ppi->total_blockiness = 0;
+ ppi->worst_blockiness = 0.0;
+ }
+
+ ppi->total_inconsistency = 0;
+ ppi->worst_consistency = 100.0;
+ if (ppi->b_calculate_consistency) {
+ AOM_CHECK_MEM_ERROR(&ppi->error, ppi->ssim_vars,
+ aom_malloc(sizeof(*ppi->ssim_vars) * 4 *
+ mi_params.mi_rows * mi_params.mi_cols));
+ }
+#endif
+ }
+
+ ppi->error.setjmp = 0;
+ return ppi;
+}
+
+AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
+ BufferPool *const pool, COMPRESSOR_STAGE stage,
+ int lap_lag_in_frames) {
+ AV1_COMP *volatile const cpi = aom_memalign(32, sizeof(AV1_COMP));
+ AV1_COMMON *volatile const cm = cpi != NULL ? &cpi->common : NULL;
+
+ if (!cm) return NULL;
+
+ av1_zero(*cpi);
+
+ cpi->ppi = ppi;
+ cm->seq_params = &ppi->seq_params;
+ cm->error = &ppi->error;
+
+ // The jmp_buf is valid only for the duration of the function that calls
+ // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
+ // before it returns.
+ if (setjmp(cm->error->jmp)) {
+ cm->error->setjmp = 0;
+ av1_remove_compressor(cpi);
+ return 0;
+ }
+
+ cm->error->setjmp = 1;
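+  // With cm->error->setjmp set, a later aom_internal_error(cm->error, ...) or
+  // failing CHECK_MEM_ERROR() longjmp()s back to the handler above, which
+  // frees the partially built compressor and returns NULL to the caller.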
+ cpi->compressor_stage = stage;
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cpi->do_frame_data_update = true;
+#endif
+
+ CommonModeInfoParams *const mi_params = &cm->mi_params;
+ mi_params->free_mi = enc_free_mi;
+ mi_params->setup_mi = enc_setup_mi;
+ mi_params->set_mb_mi = (oxcf->pass == 1 || cpi->compressor_stage == LAP_STAGE)
+ ? stat_stage_set_mb_mi
+ : enc_set_mb_mi;
+
+ mi_params->mi_alloc_bsize = BLOCK_4X4;
+
+ CHECK_MEM_ERROR(cm, cm->fc,
+ (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
+ CHECK_MEM_ERROR(
+ cm, cm->default_frame_context,
+ (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->default_frame_context)));
+ memset(cm->fc, 0, sizeof(*cm->fc));
+ memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));
+
+ cpi->common.buffer_pool = pool;
+
+ init_config(cpi, oxcf);
+ if (cpi->compressor_stage == LAP_STAGE) {
+ cpi->oxcf.gf_cfg.lag_in_frames = lap_lag_in_frames;
+ }
+
+ av1_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc, &cpi->ppi->p_rc);
+
+ init_frame_info(&cpi->frame_info, cm);
+ init_frame_index_set(&cpi->frame_index_set);
+
+ cm->current_frame.frame_number = 0;
+ cm->current_frame_id = -1;
+ cpi->partition_search_skippable_frame = 0;
+ cpi->tile_data = NULL;
+ cpi->last_show_frame_buf = NULL;
+ realloc_segmentation_maps(cpi);
+
+ cpi->refresh_frame.alt_ref_frame = false;
+
+#if CONFIG_SPEED_STATS
+ cpi->tx_search_count = 0;
+#endif // CONFIG_SPEED_STATS
+
+ cpi->time_stamps.first_ts_start = INT64_MAX;
+
+#ifdef OUTPUT_YUV_REC
+ yuv_rec_file = fopen("rec.yuv", "wb");
+#endif
+#ifdef OUTPUT_YUV_DENOISED
+ yuv_denoised_file = fopen("denoised.yuv", "wb");
+#endif
+
+#if !CONFIG_REALTIME_ONLY
+ if (is_stat_consumption_stage(cpi)) {
+ const size_t packet_sz = sizeof(FIRSTPASS_STATS);
+ const int packets = (int)(oxcf->twopass_stats_in.sz / packet_sz);
+
+ if (!cpi->ppi->lap_enabled) {
+      /* Re-initialize the stats buffer, populated by the application in the
+       * two-pass case. */
+ cpi->ppi->twopass.stats_buf_ctx->stats_in_start =
+ oxcf->twopass_stats_in.buf;
+ cpi->ppi->twopass.stats_in =
+ cpi->ppi->twopass.stats_buf_ctx->stats_in_start;
+ cpi->ppi->twopass.stats_buf_ctx->stats_in_end =
+ &cpi->ppi->twopass.stats_buf_ctx->stats_in_start[packets - 1];
+
+ av1_init_second_pass(cpi);
+ } else {
+ av1_init_single_pass_lap(cpi);
+ }
+ }
+#endif
+
+ alloc_obmc_buffers(&cpi->td.mb.obmc_buffer, cm);
+
+ CHECK_MEM_ERROR(
+ cm, cpi->td.mb.inter_modes_info,
+ (InterModesInfo *)aom_malloc(sizeof(*cpi->td.mb.inter_modes_info)));
+
+ for (int x = 0; x < 2; x++)
+ for (int y = 0; y < 2; y++)
+ CHECK_MEM_ERROR(
+ cm, cpi->td.mb.intrabc_hash_info.hash_value_buffer[x][y],
+ (uint32_t *)aom_malloc(
+ AOM_BUFFER_SIZE_FOR_BLOCK_HASH *
+ sizeof(*cpi->td.mb.intrabc_hash_info.hash_value_buffer[0][0])));
+
+ cpi->td.mb.intrabc_hash_info.g_crc_initialized = 0;
+
+ av1_set_speed_features_framesize_independent(cpi, oxcf->speed);
+ av1_set_speed_features_framesize_dependent(cpi, oxcf->speed);
+
+ CHECK_MEM_ERROR(cm, cpi->consec_zero_mv,
+ aom_calloc((mi_params->mi_rows * mi_params->mi_cols) >> 2,
+ sizeof(*cpi->consec_zero_mv)));
+
+ {
+ const int bsize = BLOCK_16X16;
+ const int w = mi_size_wide[bsize];
+ const int h = mi_size_high[bsize];
+ const int num_cols = (mi_params->mi_cols + w - 1) / w;
+ const int num_rows = (mi_params->mi_rows + h - 1) / h;
+ CHECK_MEM_ERROR(cm, cpi->ssim_rdmult_scaling_factors,
+ aom_calloc(num_rows * num_cols,
+ sizeof(*cpi->ssim_rdmult_scaling_factors)));
+ }
+
+#if CONFIG_TUNE_VMAF
+ {
+ const int bsize = BLOCK_64X64;
+ const int w = mi_size_wide[bsize];
+ const int h = mi_size_high[bsize];
+ const int num_cols = (mi_params->mi_cols + w - 1) / w;
+ const int num_rows = (mi_params->mi_rows + h - 1) / h;
+ CHECK_MEM_ERROR(cm, cpi->vmaf_info.rdmult_scaling_factors,
+ aom_calloc(num_rows * num_cols,
+ sizeof(*cpi->vmaf_info.rdmult_scaling_factors)));
+ for (int i = 0; i < MAX_ARF_LAYERS; i++) {
+ cpi->vmaf_info.last_frame_unsharp_amount[i] = -1.0;
+ cpi->vmaf_info.last_frame_ysse[i] = -1.0;
+ cpi->vmaf_info.last_frame_vmaf[i] = -1.0;
+ }
+ cpi->vmaf_info.original_qindex = -1;
+ cpi->vmaf_info.vmaf_model = NULL;
+ }
+#endif
+
+#if CONFIG_TUNE_BUTTERAUGLI
+ {
+ const int w = mi_size_wide[butteraugli_rdo_bsize];
+ const int h = mi_size_high[butteraugli_rdo_bsize];
+ const int num_cols = (mi_params->mi_cols + w - 1) / w;
+ const int num_rows = (mi_params->mi_rows + h - 1) / h;
+ CHECK_MEM_ERROR(
+ cm, cpi->butteraugli_info.rdmult_scaling_factors,
+ aom_malloc(num_rows * num_cols *
+ sizeof(*cpi->butteraugli_info.rdmult_scaling_factors)));
+ memset(&cpi->butteraugli_info.source, 0,
+ sizeof(cpi->butteraugli_info.source));
+ memset(&cpi->butteraugli_info.resized_source, 0,
+ sizeof(cpi->butteraugli_info.resized_source));
+ cpi->butteraugli_info.recon_set = false;
+ }
#endif
+#if CONFIG_COLLECT_PARTITION_STATS
+ av1_zero(cpi->partition_stats);
+#endif // CONFIG_COLLECT_PARTITION_STATS
+
/* av1_init_quantizer() is first called here. Add check in
* av1_frame_init_quantizer() so that av1_init_quantizer is only
* called later when needed. This will avoid unnecessary calls of
* av1_init_quantizer() for every frame.
*/
av1_init_quantizer(&cpi->enc_quant_dequant_params, &cm->quant_params,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
av1_qm_init(&cm->quant_params, av1_num_planes(cm));
av1_loop_filter_init(cm);
@@ -1365,7 +1433,7 @@ AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
#if !CONFIG_REALTIME_ONLY
av1_loop_restoration_precal();
#endif
- cm->error.setjmp = 0;
+ cm->error->setjmp = 0;
return cpi;
}
@@ -1402,6 +1470,7 @@ static AOM_INLINE void free_thread_data(AV1_COMP *cpi) {
for (int j = 0; j < 2; ++j) {
aom_free(thread_data->td->tmp_pred_bufs[j]);
}
+ aom_free(thread_data->td->pixel_gradient_info);
release_obmc_buffers(&thread_data->td->obmc_buffer);
aom_free(thread_data->td->vt64x64);
@@ -1423,7 +1492,27 @@ static AOM_INLINE void free_thread_data(AV1_COMP *cpi) {
void av1_remove_primary_compressor(AV1_PRIMARY *ppi) {
if (!ppi) return;
+ aom_free_frame_buffer(&ppi->alt_ref_buffer);
+ for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i) {
+ aom_free(ppi->level_params.level_info[i]);
+ }
av1_lookahead_destroy(ppi->lookahead);
+
+ aom_free(ppi->tpl_rdmult_scaling_factors);
+ ppi->tpl_rdmult_scaling_factors = NULL;
+ aom_free(ppi->tpl_sb_rdmult_scaling_factors);
+ ppi->tpl_sb_rdmult_scaling_factors = NULL;
+
+ TplParams *const tpl_data = &ppi->tpl_data;
+ for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
+ aom_free(tpl_data->tpl_stats_pool[frame]);
+ aom_free_frame_buffer(&tpl_data->tpl_rec_pool[frame]);
+ }
+
+#if !CONFIG_REALTIME_ONLY
+ av1_tpl_dealloc(&tpl_data->tpl_mt_sync);
+#endif
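+  // TPL buffers are now owned by the primary (sequence-level) context and so
+  // are released here instead of in av1_remove_compressor() (note the
+  // matching removal further down in this change).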
+
aom_free(ppi);
}
@@ -1432,127 +1521,6 @@ void av1_remove_compressor(AV1_COMP *cpi) {
AV1_COMMON *cm = &cpi->common;
if (cm->current_frame.frame_number > 0) {
-#if CONFIG_ENTROPY_STATS
- if (!is_stat_generation_stage(cpi)) {
- fprintf(stderr, "Writing counts.stt\n");
- FILE *f = fopen("counts.stt", "wb");
- fwrite(&aggregate_fc, sizeof(aggregate_fc), 1, f);
- fclose(f);
- }
-#endif // CONFIG_ENTROPY_STATS
-#if CONFIG_INTERNAL_STATS
- aom_clear_system_state();
-
- if (!is_stat_generation_stage(cpi)) {
- char headings[512] = { 0 };
- char results[512] = { 0 };
- FILE *f = fopen("opsnr.stt", "a");
- double time_encoded =
- (cpi->time_stamps.prev_ts_end - cpi->time_stamps.first_ts_start) /
- 10000000.000;
- double total_encode_time =
- (cpi->time_receive_data + cpi->time_compress_data) / 1000.000;
- const double dr =
- (double)cpi->bytes * (double)8 / (double)1000 / time_encoded;
- const double peak =
- (double)((1 << cpi->oxcf.input_cfg.input_bit_depth) - 1);
- const double target_rate =
- (double)cpi->oxcf.rc_cfg.target_bandwidth / 1000;
- const double rate_err = ((100.0 * (dr - target_rate)) / target_rate);
-
- if (cpi->b_calculate_psnr) {
- const double total_psnr =
- aom_sse_to_psnr((double)cpi->total_samples[0], peak,
- (double)cpi->total_sq_error[0]);
- const double total_ssim =
- 100 * pow(cpi->summed_quality / cpi->summed_weights, 8.0);
- snprintf(headings, sizeof(headings),
- "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
- "AOMSSIM\tVPSSIMP\tFASTSIM\tPSNRHVS\t"
- "WstPsnr\tWstSsim\tWstFast\tWstHVS\t"
- "AVPsrnY\tAPsnrCb\tAPsnrCr");
- snprintf(results, sizeof(results),
- "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
- "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
- "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
- "%7.3f\t%7.3f\t%7.3f",
- dr, cpi->psnr[0].stat[STAT_ALL] / cpi->count[0], total_psnr,
- cpi->psnr[0].stat[STAT_ALL] / cpi->count[0], total_psnr,
- total_ssim, total_ssim,
- cpi->fastssim.stat[STAT_ALL] / cpi->count[0],
- cpi->psnrhvs.stat[STAT_ALL] / cpi->count[0],
- cpi->psnr[0].worst, cpi->worst_ssim, cpi->fastssim.worst,
- cpi->psnrhvs.worst, cpi->psnr[0].stat[STAT_Y] / cpi->count[0],
- cpi->psnr[0].stat[STAT_U] / cpi->count[0],
- cpi->psnr[0].stat[STAT_V] / cpi->count[0]);
-
- if (cpi->b_calculate_blockiness) {
- SNPRINT(headings, "\t Block\tWstBlck");
- SNPRINT2(results, "\t%7.3f", cpi->total_blockiness / cpi->count[0]);
- SNPRINT2(results, "\t%7.3f", cpi->worst_blockiness);
- }
-
- if (cpi->b_calculate_consistency) {
- double consistency =
- aom_sse_to_psnr((double)cpi->total_samples[0], peak,
- (double)cpi->total_inconsistency);
-
- SNPRINT(headings, "\tConsist\tWstCons");
- SNPRINT2(results, "\t%7.3f", consistency);
- SNPRINT2(results, "\t%7.3f", cpi->worst_consistency);
- }
-
- SNPRINT(headings, "\t Time\tRcErr\tAbsErr");
- SNPRINT2(results, "\t%8.0f", total_encode_time);
- SNPRINT2(results, " %7.2f", rate_err);
- SNPRINT2(results, " %7.2f", fabs(rate_err));
-
- SNPRINT(headings, "\tAPsnr611");
- SNPRINT2(results, " %7.3f",
- (6 * cpi->psnr[0].stat[STAT_Y] + cpi->psnr[0].stat[STAT_U] +
- cpi->psnr[0].stat[STAT_V]) /
- (cpi->count[0] * 8));
-
-#if CONFIG_AV1_HIGHBITDEPTH
- const uint32_t in_bit_depth = cpi->oxcf.input_cfg.input_bit_depth;
- const uint32_t bit_depth = cpi->td.mb.e_mbd.bd;
- if ((cpi->source->flags & YV12_FLAG_HIGHBITDEPTH) &&
- (in_bit_depth < bit_depth)) {
- const double peak_hbd = (double)((1 << bit_depth) - 1);
- const double total_psnr_hbd =
- aom_sse_to_psnr((double)cpi->total_samples[1], peak_hbd,
- (double)cpi->total_sq_error[1]);
- const double total_ssim_hbd =
- 100 * pow(cpi->summed_quality_hbd / cpi->summed_weights_hbd, 8.0);
- SNPRINT(headings,
- "\t AVGPsnrH GLBPsnrH AVPsnrPH GLPsnrPH"
- " AVPsnrYH APsnrCbH APsnrCrH WstPsnrH"
- " AOMSSIMH VPSSIMPH WstSsimH");
- SNPRINT2(results, "\t%7.3f",
- cpi->psnr[1].stat[STAT_ALL] / cpi->count[1]);
- SNPRINT2(results, " %7.3f", total_psnr_hbd);
- SNPRINT2(results, " %7.3f",
- cpi->psnr[1].stat[STAT_ALL] / cpi->count[1]);
- SNPRINT2(results, " %7.3f", total_psnr_hbd);
- SNPRINT2(results, " %7.3f",
- cpi->psnr[1].stat[STAT_Y] / cpi->count[1]);
- SNPRINT2(results, " %7.3f",
- cpi->psnr[1].stat[STAT_U] / cpi->count[1]);
- SNPRINT2(results, " %7.3f",
- cpi->psnr[1].stat[STAT_V] / cpi->count[1]);
- SNPRINT2(results, " %7.3f", cpi->psnr[1].worst);
- SNPRINT2(results, " %7.3f", total_ssim_hbd);
- SNPRINT2(results, " %7.3f", total_ssim_hbd);
- SNPRINT2(results, " %7.3f", cpi->worst_ssim_hbd);
- }
-#endif
- fprintf(f, "%s\n", headings);
- fprintf(f, "%s\n", results);
- }
-
- fclose(f);
- }
-#endif // CONFIG_INTERNAL_STATS
#if CONFIG_SPEED_STATS
if (!is_stat_generation_stage(cpi)) {
fprintf(stdout, "tx_search_count = %d\n", cpi->tx_search_count);
@@ -1571,12 +1539,6 @@ void av1_remove_compressor(AV1_COMP *cpi) {
av1_denoiser_free(&(cpi->denoiser));
#endif
- TplParams *const tpl_data = &cpi->tpl_data;
- for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
- aom_free(tpl_data->tpl_stats_pool[frame]);
- aom_free_frame_buffer(&tpl_data->tpl_rec_pool[frame]);
- }
-
if (cpi->compressor_stage != LAP_STAGE) {
terminate_worker_data(cpi);
free_thread_data(cpi);
@@ -1586,6 +1548,7 @@ void av1_remove_compressor(AV1_COMP *cpi) {
#if CONFIG_MULTITHREAD
pthread_mutex_t *const enc_row_mt_mutex_ = mt_info->enc_row_mt.mutex_;
pthread_mutex_t *const gm_mt_mutex_ = mt_info->gm_sync.mutex_;
+ pthread_mutex_t *const pack_bs_mt_mutex_ = mt_info->pack_bs_sync.mutex_;
if (enc_row_mt_mutex_ != NULL) {
pthread_mutex_destroy(enc_row_mt_mutex_);
aom_free(enc_row_mt_mutex_);
@@ -1594,6 +1557,10 @@ void av1_remove_compressor(AV1_COMP *cpi) {
pthread_mutex_destroy(gm_mt_mutex_);
aom_free(gm_mt_mutex_);
}
+ if (pack_bs_mt_mutex_ != NULL) {
+ pthread_mutex_destroy(pack_bs_mt_mutex_);
+ aom_free(pack_bs_mt_mutex_);
+ }
#endif
av1_row_mt_mem_dealloc(cpi);
if (cpi->compressor_stage != LAP_STAGE) {
@@ -1601,9 +1568,6 @@ void av1_remove_compressor(AV1_COMP *cpi) {
aom_free(mt_info->workers);
}
-#if !CONFIG_REALTIME_ONLY
- av1_tpl_dealloc(&tpl_data->tpl_mt_sync);
-#endif
if (mt_info->num_workers > 1) {
av1_loop_filter_dealloc(&mt_info->lf_row_sync);
av1_cdef_mt_dealloc(&mt_info->cdef_sync);
@@ -1617,13 +1581,9 @@ void av1_remove_compressor(AV1_COMP *cpi) {
dealloc_compressor_data(cpi);
-#if CONFIG_INTERNAL_STATS
- aom_free(cpi->ssim_vars);
- cpi->ssim_vars = NULL;
-#endif // CONFIG_INTERNAL_STATS
+ av1_ext_part_delete(&cpi->ext_part_controller);
av1_remove_common(cm);
- av1_free_ref_frame_buffers(cm->buffer_pool);
aom_free(cpi);
@@ -1667,7 +1627,7 @@ static void generate_psnr_packet(AV1_COMP *cpi) {
#endif
pkt.kind = AOM_CODEC_PSNR_PKT;
- aom_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
+ aom_codec_pkt_list_add(cpi->ppi->output_pkt_list, &pkt);
}
int av1_use_as_reference(int *ext_ref_frame_flags, int ref_frame_flags) {
@@ -1781,7 +1741,12 @@ static void set_mv_search_params(AV1_COMP *cpi) {
mv_search_params->mv_step_param = av1_init_search_range(
AOMMIN(max_mv_def, 2 * mv_search_params->max_mv_magnitude));
}
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Reset max_mv_magnitude for parallel frames based on update flag.
+ if (cpi->do_frame_data_update) mv_search_params->max_mv_magnitude = -1;
+#else
mv_search_params->max_mv_magnitude = -1;
+#endif
}
}
}
@@ -1789,14 +1754,14 @@ static void set_mv_search_params(AV1_COMP *cpi) {
void av1_set_screen_content_options(AV1_COMP *cpi, FeatureFlags *features) {
const AV1_COMMON *const cm = &cpi->common;
- if (cm->seq_params.force_screen_content_tools != 2) {
+ if (cm->seq_params->force_screen_content_tools != 2) {
features->allow_screen_content_tools = features->allow_intrabc =
- cm->seq_params.force_screen_content_tools;
+ cm->seq_params->force_screen_content_tools;
return;
}
if (cpi->oxcf.mode == REALTIME) {
- assert(cm->seq_params.reduced_still_picture_hdr);
+ assert(cm->seq_params->reduced_still_picture_hdr);
features->allow_screen_content_tools = features->allow_intrabc = 0;
return;
}
@@ -1814,7 +1779,7 @@ void av1_set_screen_content_options(AV1_COMP *cpi, FeatureFlags *features) {
const int stride = cpi->unfiltered_source->y_stride;
const int width = cpi->unfiltered_source->y_width;
const int height = cpi->unfiltered_source->y_height;
- const int bd = cm->seq_params.bit_depth;
+ const int bd = cm->seq_params->bit_depth;
const int blk_w = 16;
const int blk_h = 16;
// These threshold values are selected experimentally.
@@ -1960,7 +1925,7 @@ static void init_ref_frame_bufs(AV1_COMP *cpi) {
void av1_check_initial_width(AV1_COMP *cpi, int use_highbitdepth,
int subsampling_x, int subsampling_y) {
AV1_COMMON *const cm = &cpi->common;
- SequenceHeader *const seq_params = &cm->seq_params;
+ SequenceHeader *const seq_params = cm->seq_params;
InitialDimensions *const initial_dimensions = &cpi->initial_dimensions;
if (!initial_dimensions->width ||
@@ -1994,11 +1959,11 @@ static void setup_denoiser_buffer(AV1_COMP *cpi) {
if (cpi->oxcf.noise_sensitivity > 0 &&
!cpi->denoiser.frame_buffer_initialized) {
if (av1_denoiser_alloc(
- cm, &cpi->svc, &cpi->denoiser, cpi->use_svc,
+ cm, &cpi->svc, &cpi->denoiser, cpi->ppi->use_svc,
cpi->oxcf.noise_sensitivity, cm->width, cm->height,
- cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
- cm->seq_params.use_highbitdepth, AOM_BORDER_IN_PIXELS))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ cm->seq_params->subsampling_x, cm->seq_params->subsampling_y,
+ cm->seq_params->use_highbitdepth, AOM_BORDER_IN_PIXELS))
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate denoiser");
}
}
@@ -2008,9 +1973,9 @@ static void setup_denoiser_buffer(AV1_COMP *cpi) {
int av1_set_size_literal(AV1_COMP *cpi, int width, int height) {
AV1_COMMON *cm = &cpi->common;
InitialDimensions *const initial_dimensions = &cpi->initial_dimensions;
- av1_check_initial_width(cpi, cm->seq_params.use_highbitdepth,
- cm->seq_params.subsampling_x,
- cm->seq_params.subsampling_y);
+ av1_check_initial_width(cpi, cm->seq_params->use_highbitdepth,
+ cm->seq_params->subsampling_x,
+ cm->seq_params->subsampling_y);
if (width <= 0 || height <= 0) return 1;
@@ -2040,7 +2005,7 @@ int av1_set_size_literal(AV1_COMP *cpi, int width, int height) {
void av1_set_frame_size(AV1_COMP *cpi, int width, int height) {
AV1_COMMON *const cm = &cpi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int num_planes = av1_num_planes(cm);
MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
int ref_frame;
@@ -2078,7 +2043,7 @@ void av1_set_frame_size(AV1_COMP *cpi, int width, int height) {
if (av1_alloc_above_context_buffers(above_contexts, cm->tiles.rows,
cm->mi_params.mi_cols,
av1_num_planes(cm)))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate context buffers");
}
@@ -2088,11 +2053,16 @@ void av1_set_frame_size(AV1_COMP *cpi, int width, int height) {
seq_params->subsampling_y, seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, cm->features.byte_alignment, NULL, NULL,
NULL, cpi->oxcf.tool_cfg.enable_global_motion))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate frame buffer");
+ if (!is_stat_generation_stage(cpi))
+ av1_alloc_cdef_buffers(cm, &cpi->mt_info.cdef_worker,
+ &cpi->mt_info.cdef_sync,
+ cpi->mt_info.num_mod_workers[MOD_CDEF]);
+
#if !CONFIG_REALTIME_ONLY
- const int use_restoration = cm->seq_params.enable_restoration &&
+ const int use_restoration = cm->seq_params->enable_restoration &&
!cm->features.all_lossless &&
!cm->tiles.large_scale;
if (use_restoration) {
@@ -2107,6 +2077,7 @@ void av1_set_frame_size(AV1_COMP *cpi, int width, int height) {
av1_alloc_restoration_buffers(cm);
}
#endif
+
if (!is_stat_generation_stage(cpi)) alloc_util_frame_buffers(cpi);
init_motion_estimation(cpi);
@@ -2145,13 +2116,22 @@ static void cdef_restoration_frame(AV1_COMP *cpi, AV1_COMMON *cm,
#if CONFIG_COLLECT_COMPONENT_TIMING
start_timing(cpi, cdef_time);
#endif
+ const int num_workers = cpi->mt_info.num_mod_workers[MOD_CDEF];
// Find CDEF parameters
av1_cdef_search(&cpi->mt_info, &cm->cur_frame->buf, cpi->source, cm, xd,
- cpi->sf.lpf_sf.cdef_pick_method, cpi->td.mb.rdmult);
+ cpi->sf.lpf_sf.cdef_pick_method, cpi->td.mb.rdmult,
+ cpi->sf.rt_sf.skip_cdef_sb, cpi->rc.frames_since_key);
// Apply the filter
- if (!cpi->sf.rt_sf.skip_loopfilter_non_reference)
- av1_cdef_frame(&cm->cur_frame->buf, cm, xd);
+ if (!cpi->sf.rt_sf.skip_loopfilter_non_reference) {
+ if (num_workers > 1) {
+ av1_cdef_frame_mt(cm, xd, cpi->mt_info.cdef_worker,
+ cpi->mt_info.workers, &cpi->mt_info.cdef_sync,
+ num_workers, av1_cdef_init_fb_row_mt);
+ } else {
+ av1_cdef_frame(&cm->cur_frame->buf, cm, xd, av1_cdef_init_fb_row);
+ }
+ }
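+  // When more than one CDEF worker is configured the frame is filtered
+  // across threads via av1_cdef_frame_mt(); otherwise the single-threaded
+  // av1_cdef_frame() path is taken, each with its own fb-row init callback.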
#if CONFIG_COLLECT_COMPONENT_TIMING
end_timing(cpi, cdef_time);
#endif
@@ -2211,11 +2191,19 @@ static void loopfilter_frame(AV1_COMP *cpi, AV1_COMMON *cm) {
const int use_loopfilter =
!cm->features.coded_lossless && !cm->tiles.large_scale;
- const int use_cdef = cm->seq_params.enable_cdef &&
+ const int use_cdef = cm->seq_params->enable_cdef &&
!cm->features.coded_lossless && !cm->tiles.large_scale;
- const int use_restoration = cm->seq_params.enable_restoration &&
+ const int use_restoration = cm->seq_params->enable_restoration &&
!cm->features.all_lossless &&
!cm->tiles.large_scale;
+ const int cur_width = cm->cur_frame->width;
+ const int cur_height = cm->cur_frame->height;
+ const int cur_width_mib = cm->mi_params.mi_cols * MI_SIZE;
+ const int cur_height_mib = cm->mi_params.mi_rows * MI_SIZE;
+ const int is_realtime =
+ cpi->sf.rt_sf.use_nonrd_pick_mode && !(cm->mi_params.mi_cols % 2) &&
+ !(cm->mi_params.mi_rows % 2) && (cur_width_mib - cur_width < MI_SIZE) &&
+ (cur_height_mib - cur_height < MI_SIZE);
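+  // cur_width_mib/cur_height_mib are the dimensions implied by the mode-info
+  // grid in pixels (one mi unit is MI_SIZE = 4 luma samples), so the realtime
+  // loop-filter path is only taken when the grid is even-sized and pads the
+  // frame by less than one mi unit in each direction.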
struct loopfilter *lf = &cm->lf;
@@ -2238,13 +2226,13 @@ static void loopfilter_frame(AV1_COMP *cpi, AV1_COMMON *cm) {
0,
#endif
mt_info->workers, num_workers,
- &mt_info->lf_row_sync);
+ &mt_info->lf_row_sync, is_realtime);
else
av1_loop_filter_frame(&cm->cur_frame->buf, cm, xd,
#if CONFIG_LPF_MASK
0,
#endif
- 0, num_planes, 0);
+ 0, num_planes, 0, is_realtime);
}
#if CONFIG_COLLECT_COMPONENT_TIMING
end_timing(cpi, loop_filter_time);
@@ -2278,16 +2266,17 @@ static int encode_without_recode(AV1_COMP *cpi) {
int top_index = 0, bottom_index = 0, q = 0;
YV12_BUFFER_CONFIG *unscaled = cpi->unscaled_source;
InterpFilter filter_scaler =
- cpi->use_svc ? svc->downsample_filter_type[svc->spatial_layer_id]
- : EIGHTTAP_SMOOTH;
- int phase_scaler =
- cpi->use_svc ? svc->downsample_filter_phase[svc->spatial_layer_id] : 0;
+ cpi->ppi->use_svc ? svc->downsample_filter_type[svc->spatial_layer_id]
+ : EIGHTTAP_SMOOTH;
+ int phase_scaler = cpi->ppi->use_svc
+ ? svc->downsample_filter_phase[svc->spatial_layer_id]
+ : 0;
set_size_independent_vars(cpi);
av1_setup_frame_size(cpi);
av1_set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
- if (!cpi->use_svc) {
+ if (!cpi->ppi->use_svc) {
phase_scaler = 8;
// 2:1 scaling.
if ((cm->width << 1) == unscaled->y_crop_width &&
@@ -2315,6 +2304,12 @@ static int encode_without_recode(AV1_COMP *cpi) {
printf("\n Encoding a frame:");
#endif
+#if CONFIG_TUNE_BUTTERAUGLI
+ if (cpi->oxcf.tune_cfg.tuning == AOM_TUNE_BUTTERAUGLI) {
+ av1_setup_butteraugli_rdmult(cpi);
+ }
+#endif
+
aom_clear_system_state();
cpi->source = av1_scale_if_required(cm, unscaled, &cpi->scaled_source,
@@ -2336,7 +2331,7 @@ static int encode_without_recode(AV1_COMP *cpi) {
}
#if CONFIG_AV1_TEMPORAL_DENOISING
- if (cpi->oxcf.noise_sensitivity > 0 && cpi->use_svc)
+ if (cpi->oxcf.noise_sensitivity > 0 && cpi->ppi->use_svc)
av1_denoiser_reset_on_first_frame(cpi);
#endif
@@ -2365,7 +2360,7 @@ static int encode_without_recode(AV1_COMP *cpi) {
// (zero_mode is forced), and since the scaled references are only
  // used for newmv search, we can avoid scaling here.
if (!frame_is_intra_only(cm) &&
- !(cpi->use_svc && cpi->svc.force_zero_mode_spatial_ref))
+ !(cpi->ppi->use_svc && cpi->svc.force_zero_mode_spatial_ref))
av1_scale_references(cpi, filter_scaler, phase_scaler, 1);
av1_set_quantizer(cm, q_cfg->qm_minlevel, q_cfg->qm_maxlevel, q,
@@ -2373,7 +2368,7 @@ static int encode_without_recode(AV1_COMP *cpi) {
av1_set_speed_features_qindex_dependent(cpi, cpi->oxcf.speed);
if ((q_cfg->deltaq_mode != NO_DELTA_Q) || q_cfg->enable_chroma_deltaq)
av1_init_quantizer(&cpi->enc_quant_dequant_params, &cm->quant_params,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
av1_set_variance_partition_thresholds(cpi, q, 0);
av1_setup_frame(cpi);
@@ -2388,7 +2383,7 @@ static int encode_without_recode(AV1_COMP *cpi) {
av1_set_speed_features_qindex_dependent(cpi, cpi->oxcf.speed);
if (q_cfg->deltaq_mode != NO_DELTA_Q || q_cfg->enable_chroma_deltaq)
av1_init_quantizer(&cpi->enc_quant_dequant_params, &cm->quant_params,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
av1_set_variance_partition_thresholds(cpi, q, 0);
if (frame_is_intra_only(cm) || cm->features.error_resilient_mode)
av1_setup_frame(cpi);
@@ -2432,7 +2427,7 @@ static int encode_without_recode(AV1_COMP *cpi) {
end_timing(cpi, av1_encode_frame_time);
#endif
#if CONFIG_INTERNAL_STATS
- ++cpi->tot_recode_hits;
+ ++cpi->frame_recode_hits;
#endif
aom_clear_system_state();
@@ -2504,7 +2499,7 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
if (!cpi->sf.hl_sf.disable_extra_sc_testing)
av1_determine_sc_tools_with_encoding(cpi, q);
-#if CONFIG_USE_VMAF_RC
+#if CONFIG_TUNE_VMAF
if (oxcf->tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN) {
av1_vmaf_neg_preprocessing(cpi, cpi->unscaled_source);
}
@@ -2525,6 +2520,7 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
do {
loop = 0;
+ int do_mv_stats_collection = 1;
aom_clear_system_state();
// if frame was scaled calculate global_motion_search again if already
@@ -2580,7 +2576,7 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
if (q_cfg->deltaq_mode != NO_DELTA_Q || q_cfg->enable_chroma_deltaq)
av1_init_quantizer(&cpi->enc_quant_dequant_params, &cm->quant_params,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
av1_set_variance_partition_thresholds(cpi, q, 0);
@@ -2636,14 +2632,19 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
// transform / motion compensation build reconstruction frame
av1_encode_frame(cpi);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Disable mv_stats collection for parallel frames based on update flag.
+ if (!cpi->do_frame_data_update) do_mv_stats_collection = 0;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
// Reset the mv_stats in case we are interrupted by an intraframe or an
// overlay frame.
- if (cpi->mv_stats.valid) {
- av1_zero(cpi->mv_stats);
+ if (cpi->ppi->mv_stats.valid && do_mv_stats_collection) {
+ av1_zero(cpi->ppi->mv_stats);
}
// Gather the mv_stats for the next frame
if (cpi->sf.hl_sf.high_precision_mv_usage == LAST_MV_DATA &&
- av1_frame_allows_smart_mv(cpi)) {
+ av1_frame_allows_smart_mv(cpi) && do_mv_stats_collection) {
av1_collect_mv_stats(cpi, q);
}
@@ -2653,6 +2654,9 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
aom_clear_system_state();
+#if CONFIG_BITRATE_ACCURACY
+ const int do_dummy_pack = 1;
+#else // CONFIG_BITRATE_ACCURACY
// Dummy pack of the bitstream using up to date stats to get an
// accurate estimate of output frame size to determine if we need
// to recode.
@@ -2660,6 +2664,7 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
(cpi->sf.hl_sf.recode_loop >= ALLOW_RECODE_KFARFGF &&
oxcf->rc_cfg.mode != AOM_Q) ||
oxcf->rc_cfg.min_cr > 0;
+#endif // CONFIG_BITRATE_ACCURACY
if (do_dummy_pack) {
av1_finalize_encoded_frame(cpi);
int largest_tile_id = 0; // Output from bitstream: unused here
@@ -2669,7 +2674,15 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
return AOM_CODEC_ERROR;
}
+ // bits used for this frame
rc->projected_frame_size = (int)(*size) << 3;
+
+#if CONFIG_BITRATE_ACCURACY
+ cpi->ppi->tpl_data.actual_gop_bitrate += rc->projected_frame_size;
+ printf("\nframe: %d, projected frame size: %d, total: %f\n",
+ cpi->gf_frame_index, rc->projected_frame_size,
+ cpi->ppi->tpl_data.actual_gop_bitrate);
+#endif
}
#if CONFIG_TUNE_VMAF
@@ -2688,15 +2701,19 @@ static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
#if CONFIG_TUNE_BUTTERAUGLI
if (loop_count == 0 && oxcf->tune_cfg.tuning == AOM_TUNE_BUTTERAUGLI) {
loop = 1;
- av1_restore_butteraugli_source(cpi);
+ av1_setup_butteraugli_rdmult_and_restore_source(cpi, 0.4);
}
#endif
+#if CONFIG_BITRATE_ACCURACY
+ loop = 0; // turn off recode loop when CONFIG_BITRATE_ACCURACY is on
+#endif // CONFIG_BITRATE_ACCURACY
+
if (loop) {
++loop_count;
#if CONFIG_INTERNAL_STATS
- ++cpi->tot_recode_hits;
+ ++cpi->frame_recode_hits;
#endif
}
#if CONFIG_COLLECT_COMPONENT_TIMING
@@ -2796,12 +2813,12 @@ static int encode_with_recode_loop_and_filter(AV1_COMP *cpi, size_t *size,
#endif
AV1_COMMON *const cm = &cpi->common;
- SequenceHeader *const seq_params = &cm->seq_params;
+ SequenceHeader *const seq_params = cm->seq_params;
// Special case code to reduce pulsing when key frames are forced at a
// fixed interval. Note the reconstruction error if it is the frame before
  // the forced key frame.
- if (cpi->rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
+ if (cpi->ppi->p_rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
#if CONFIG_AV1_HIGHBITDEPTH
if (seq_params->use_highbitdepth) {
cpi->ambient_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
@@ -2884,7 +2901,7 @@ static int encode_with_and_without_superres(AV1_COMP *cpi, size_t *size,
uint8_t *dest,
int *largest_tile_id) {
const AV1_COMMON *const cm = &cpi->common;
- assert(cm->seq_params.enable_superres);
+ assert(cm->seq_params->enable_superres);
assert(av1_superres_in_recode_allowed(cpi));
aom_codec_err_t err = AOM_CODEC_OK;
av1_save_all_coding_context(cpi);
@@ -2904,9 +2921,9 @@ static int encode_with_and_without_superres(AV1_COMP *cpi, size_t *size,
int64_t superres_rates[SCALE_NUMERATOR];
int superres_largest_tile_ids[SCALE_NUMERATOR];
// Use superres for Key-frames and Alt-ref frames only.
- const GF_GROUP *const gf_group = &cpi->gf_group;
- if (gf_group->update_type[gf_group->index] != OVERLAY_UPDATE &&
- gf_group->update_type[gf_group->index] != INTNL_OVERLAY_UPDATE) {
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ if (gf_group->update_type[cpi->gf_frame_index] != OVERLAY_UPDATE &&
+ gf_group->update_type[cpi->gf_frame_index] != INTNL_OVERLAY_UPDATE) {
for (int denom = SCALE_NUMERATOR + 1; denom <= 2 * SCALE_NUMERATOR;
++denom) {
superres_cfg->superres_scale_denominator = denom;
@@ -2952,7 +2969,7 @@ static int encode_with_and_without_superres(AV1_COMP *cpi, size_t *size,
const int64_t this_rate = superres_rates[this_index];
const int this_largest_tile_id = superres_largest_tile_ids[this_index];
const double this_rdcost = RDCOST_DBL_WITH_NATIVE_BD_DIST(
- rdmult, this_rate, this_sse, cm->seq_params.bit_depth);
+ rdmult, this_rate, this_sse, cm->seq_params->bit_depth);
if (this_rdcost < proj_rdcost1) {
sse1 = this_sse;
rate1 = this_rate;
@@ -2962,7 +2979,7 @@ static int encode_with_and_without_superres(AV1_COMP *cpi, size_t *size,
}
}
const double proj_rdcost2 = RDCOST_DBL_WITH_NATIVE_BD_DIST(
- rdmult, rate2, sse2, cm->seq_params.bit_depth);
+ rdmult, rate2, sse2, cm->seq_params->bit_depth);
// Re-encode with superres if it's better.
if (proj_rdcost1 < proj_rdcost2) {
restore_all_coding_context(cpi);
@@ -3007,9 +3024,9 @@ static int encode_with_and_without_superres(AV1_COMP *cpi, size_t *size,
const int64_t rdmult =
av1_compute_rd_mult_based_on_qindex(cpi, cm->quant_params.base_qindex);
proj_rdcost1 = RDCOST_DBL_WITH_NATIVE_BD_DIST(rdmult, rate1, sse1,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
const double proj_rdcost2 = RDCOST_DBL_WITH_NATIVE_BD_DIST(
- rdmult, rate2, sse2, cm->seq_params.bit_depth);
+ rdmult, rate2, sse2, cm->seq_params->bit_depth);
// Re-encode with superres if it's better.
if (proj_rdcost1 < proj_rdcost2) {
restore_all_coding_context(cpi);
@@ -3034,6 +3051,42 @@ static int encode_with_and_without_superres(AV1_COMP *cpi, size_t *size,
return err;
}
+#if !CONFIG_REALTIME_ONLY
+static void calculate_frame_avg_haar_energy(AV1_COMP *cpi) {
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
+ const FIRSTPASS_STATS *const total_stats =
+ twopass->stats_buf_ctx->total_stats;
+
+ if (is_one_pass_rt_params(cpi) ||
+ (cpi->oxcf.q_cfg.deltaq_mode != DELTA_Q_PERCEPTUAL) ||
+ (is_fp_wavelet_energy_invalid(total_stats) == 0))
+ return;
+
+ const int num_mbs = (cpi->oxcf.resize_cfg.resize_mode != RESIZE_NONE)
+ ? cpi->initial_mbs
+ : cpi->common.mi_params.MBs;
+ const YV12_BUFFER_CONFIG *const unfiltered_source = cpi->unfiltered_source;
+ const uint8_t *const src = unfiltered_source->y_buffer;
+ const int hbd = unfiltered_source->flags & YV12_FLAG_HIGHBITDEPTH;
+ const int stride = unfiltered_source->y_stride;
+ const BLOCK_SIZE fp_block_size =
+ get_fp_block_size(cpi->is_screen_content_type);
+ const int fp_block_size_width = block_size_wide[fp_block_size];
+ const int fp_block_size_height = block_size_high[fp_block_size];
+ const int num_unit_cols =
+ get_num_blocks(unfiltered_source->y_crop_width, fp_block_size_width);
+ const int num_unit_rows =
+ get_num_blocks(unfiltered_source->y_crop_height, fp_block_size_height);
+ const int num_8x8_cols = num_unit_cols * (fp_block_size_width / 8);
+ const int num_8x8_rows = num_unit_rows * (fp_block_size_height / 8);
+ int64_t frame_avg_wavelet_energy = av1_haar_ac_sad_mxn_uint8_input(
+ src, stride, hbd, num_8x8_rows, num_8x8_cols);
+
+ twopass->frame_avg_haar_energy =
+ log(((double)frame_avg_wavelet_energy / num_mbs) + 1.0);
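+  // i.e. frame_avg_haar_energy = log(1 + E / num_mbs), a log-compressed
+  // per-macroblock average of the Haar AC energy accumulated over the
+  // first-pass block grid above.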
+}
+#endif
+
extern void av1_print_frame_contexts(const FRAME_CONTEXT *fc,
const char *filename);
@@ -3055,7 +3108,7 @@ extern void av1_print_frame_contexts(const FRAME_CONTEXT *fc,
static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
uint8_t *dest) {
AV1_COMMON *const cm = &cpi->common;
- SequenceHeader *const seq_params = &cm->seq_params;
+ SequenceHeader *const seq_params = cm->seq_params;
CurrentFrame *const current_frame = &cm->current_frame;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
struct segmentation *const seg = &cm->seg;
@@ -3070,6 +3123,10 @@ static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
av1_set_screen_content_options(cpi, features);
}
+#if !CONFIG_REALTIME_ONLY
+ calculate_frame_avg_haar_energy(cpi);
+#endif
+
// frame type has been decided outside of this function call
cm->cur_frame->frame_type = current_frame->frame_type;
@@ -3088,7 +3145,7 @@ static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
cpi->last_frame_type = current_frame->frame_type;
if (frame_is_sframe(cm)) {
- GF_GROUP *gf_group = &cpi->gf_group;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
// S frame will wipe out any previously encoded altref so we cannot place
// an overlay frame
gf_group->update_type[gf_group->size] = GF_UPDATE;
@@ -3110,7 +3167,7 @@ static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
cm->ref_frame_id[i] = display_frame_id;
}
- cpi->seq_params_locked = 1;
+ cpi->ppi->seq_params_locked = 1;
#if DUMP_RECON_FRAMES == 1
// NOTE(zoeliu): For debug - Output the filtered reconstructed video.
@@ -3147,7 +3204,7 @@ static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
if (!is_stat_generation_stage(cpi) &&
cpi->common.features.allow_screen_content_tools &&
!frame_is_intra_only(cm)) {
- if (cpi->common.seq_params.force_integer_mv == 2) {
+ if (cpi->common.seq_params->force_integer_mv == 2) {
// Adaptive mode: see what previous frame encoded did
if (cpi->unscaled_last_source != NULL) {
features->cur_frame_force_integer_mv = av1_is_integer_mv(
@@ -3157,7 +3214,7 @@ static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
}
} else {
cpi->common.features.cur_frame_force_integer_mv =
- cpi->common.seq_params.force_integer_mv;
+ cpi->common.seq_params->force_integer_mv;
}
} else {
cpi->common.features.cur_frame_force_integer_mv = 0;
@@ -3290,7 +3347,7 @@ static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
cpi->superres_mode = orig_superres_mode; // restore
}
- cpi->seq_params_locked = 1;
+ cpi->ppi->seq_params_locked = 1;
// Update reference frame ids for reference frames this frame will overwrite
if (seq_params->frame_id_numbers_present_flag) {
@@ -3332,10 +3389,6 @@ static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size,
refresh_reference_frames(cpi);
-#if CONFIG_ENTROPY_STATS
- av1_accumulate_frame_counts(&aggregate_fc, &cpi->counts);
-#endif // CONFIG_ENTROPY_STATS
-
if (features->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
*cm->fc = cpi->tile_data[largest_tile_id].tctx;
av1_reset_cdf_symbol_counters(cm->fc);
@@ -3417,7 +3470,13 @@ int av1_encode(AV1_COMP *const cpi, uint8_t *const dest,
current_frame->display_order_hint = current_frame->order_hint;
current_frame->order_hint %=
- (1 << (cm->seq_params.order_hint_info.order_hint_bits_minus_1 + 1));
+ (1 << (cm->seq_params->order_hint_info.order_hint_bits_minus_1 + 1));
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ current_frame->pyramid_level = get_true_pyr_level(
+ cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index],
+ current_frame->display_order_hint, cpi->ppi->gf_group.max_layer_depth);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
if (is_stat_generation_stage(cpi)) {
#if !CONFIG_REALTIME_ONLY
@@ -3442,9 +3501,9 @@ static int apply_denoise_2d(AV1_COMP *cpi, YV12_BUFFER_CONFIG *sd,
AV1_COMMON *const cm = &cpi->common;
if (!cpi->denoise_and_model) {
cpi->denoise_and_model = aom_denoise_and_model_alloc(
- cm->seq_params.bit_depth, block_size, noise_level);
+ cm->seq_params->bit_depth, block_size, noise_level);
if (!cpi->denoise_and_model) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Error allocating denoise and model");
return -1;
}
@@ -3452,7 +3511,7 @@ static int apply_denoise_2d(AV1_COMP *cpi, YV12_BUFFER_CONFIG *sd,
if (!cpi->film_grain_table) {
cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
if (!cpi->film_grain_table) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Error allocating grain table");
return -1;
}
@@ -3474,7 +3533,7 @@ int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
int64_t end_time) {
AV1_COMMON *const cm = &cpi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
int res = 0;
const int subsampling_x = sd->subsampling_x;
const int subsampling_y = sd->subsampling_y;
@@ -3516,7 +3575,7 @@ int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
res = -1;
#if CONFIG_INTERNAL_STATS
aom_usec_timer_mark(&timer);
- cpi->time_receive_data += aom_usec_timer_elapsed(&timer);
+ cpi->ppi->total_time_receive_data += aom_usec_timer_elapsed(&timer);
#endif
// Note: Regarding profile setting, the following checks are added to help
@@ -3528,20 +3587,20 @@ int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
// header.
if ((seq_params->profile == PROFILE_0) && !seq_params->monochrome &&
(subsampling_x != 1 || subsampling_y != 1)) {
- aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
+ aom_internal_error(cm->error, AOM_CODEC_INVALID_PARAM,
"Non-4:2:0 color format requires profile 1 or 2");
res = -1;
}
if ((seq_params->profile == PROFILE_1) &&
!(subsampling_x == 0 && subsampling_y == 0)) {
- aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
+ aom_internal_error(cm->error, AOM_CODEC_INVALID_PARAM,
"Profile 1 requires 4:4:4 color format");
res = -1;
}
if ((seq_params->profile == PROFILE_2) &&
(seq_params->bit_depth <= AOM_BITS_10) &&
!(subsampling_x == 1 && subsampling_y == 0)) {
- aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
+ aom_internal_error(cm->error, AOM_CODEC_INVALID_PARAM,
"Profile 2 bit-depth <= 10 requires 4:2:2 color format");
res = -1;
}
@@ -3549,6 +3608,20 @@ int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
return res;
}
+#if CONFIG_ENTROPY_STATS
+void print_entropy_stats(AV1_PRIMARY *const ppi) {
+ if (!ppi->cpi) return;
+
+ if (ppi->cpi->oxcf.pass != 1 &&
+ ppi->cpi->common.current_frame.frame_number > 0) {
+ fprintf(stderr, "Writing counts.stt\n");
+ FILE *f = fopen("counts.stt", "wb");
+ fwrite(&ppi->aggregate_fc, sizeof(ppi->aggregate_fc), 1, f);
+ fclose(f);
+ }
+}
+#endif // CONFIG_ENTROPY_STATS
+
#if CONFIG_INTERNAL_STATS
extern double av1_get_blockiness(const unsigned char *img1, int img1_pitch,
const unsigned char *img2, int img2_pitch,
@@ -3564,11 +3637,16 @@ static void adjust_image_stat(double y, double u, double v, double all,
}
static void compute_internal_stats(AV1_COMP *cpi, int frame_bytes) {
+ AV1_PRIMARY *const ppi = cpi->ppi;
AV1_COMMON *const cm = &cpi->common;
double samples = 0.0;
const uint32_t in_bit_depth = cpi->oxcf.input_cfg.input_bit_depth;
const uint32_t bit_depth = cpi->td.mb.e_mbd.bd;
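+  // In SVC mode, accumulate internal stats only once per superframe, at the
+  // top spatial layer.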
+ if (cpi->ppi->use_svc &&
+ cpi->svc.spatial_layer_id < cpi->svc.number_spatial_layers - 1)
+ return;
+
#if CONFIG_INTER_STATS_ONLY
if (cm->current_frame.frame_type == KEY_FRAME) return; // skip key frame
#endif
@@ -3578,9 +3656,9 @@ static void compute_internal_stats(AV1_COMP *cpi, int frame_bytes) {
const YV12_BUFFER_CONFIG *recon = &cpi->common.cur_frame->buf;
double y, u, v, frame_all;
- cpi->count[0]++;
- cpi->count[1]++;
- if (cpi->b_calculate_psnr) {
+ ppi->count[0]++;
+ ppi->count[1]++;
+ if (cpi->ppi->b_calculate_psnr) {
PSNR_STATS psnr;
double weight[2] = { 0.0, 0.0 };
double frame_ssim2[2] = { 0.0, 0.0 };
@@ -3591,34 +3669,30 @@ static void compute_internal_stats(AV1_COMP *cpi, int frame_bytes) {
aom_calc_psnr(orig, recon, &psnr);
#endif
adjust_image_stat(psnr.psnr[1], psnr.psnr[2], psnr.psnr[3], psnr.psnr[0],
- &(cpi->psnr[0]));
- cpi->total_sq_error[0] += psnr.sse[0];
- cpi->total_samples[0] += psnr.samples[0];
+ &(ppi->psnr[0]));
+ ppi->total_sq_error[0] += psnr.sse[0];
+ ppi->total_samples[0] += psnr.samples[0];
samples = psnr.samples[0];
- // TODO(yaowu): unify these two versions into one.
- if (cm->seq_params.use_highbitdepth)
- aom_highbd_calc_ssim(orig, recon, weight, bit_depth, in_bit_depth,
- frame_ssim2);
- else
- aom_calc_ssim(orig, recon, &weight[0], &frame_ssim2[0]);
+ aom_calc_ssim(orig, recon, bit_depth, in_bit_depth,
+ cm->seq_params->use_highbitdepth, weight, frame_ssim2);
- cpi->worst_ssim = AOMMIN(cpi->worst_ssim, frame_ssim2[0]);
- cpi->summed_quality += frame_ssim2[0] * weight[0];
- cpi->summed_weights += weight[0];
+ ppi->worst_ssim = AOMMIN(ppi->worst_ssim, frame_ssim2[0]);
+ ppi->summed_quality += frame_ssim2[0] * weight[0];
+ ppi->summed_weights += weight[0];
#if CONFIG_AV1_HIGHBITDEPTH
// Compute PSNR based on stream bit depth
if ((cpi->source->flags & YV12_FLAG_HIGHBITDEPTH) &&
(in_bit_depth < bit_depth)) {
adjust_image_stat(psnr.psnr_hbd[1], psnr.psnr_hbd[2], psnr.psnr_hbd[3],
- psnr.psnr_hbd[0], &cpi->psnr[1]);
- cpi->total_sq_error[1] += psnr.sse_hbd[0];
- cpi->total_samples[1] += psnr.samples_hbd[0];
+ psnr.psnr_hbd[0], &ppi->psnr[1]);
+ ppi->total_sq_error[1] += psnr.sse_hbd[0];
+ ppi->total_samples[1] += psnr.samples_hbd[0];
- cpi->worst_ssim_hbd = AOMMIN(cpi->worst_ssim_hbd, frame_ssim2[1]);
- cpi->summed_quality_hbd += frame_ssim2[1] * weight[1];
- cpi->summed_weights_hbd += weight[1];
+ ppi->worst_ssim_hbd = AOMMIN(ppi->worst_ssim_hbd, frame_ssim2[1]);
+ ppi->summed_quality_hbd += frame_ssim2[1] * weight[1];
+ ppi->summed_weights_hbd += weight[1];
}
#endif
@@ -3636,48 +3710,207 @@ static void compute_internal_stats(AV1_COMP *cpi, int frame_bytes) {
}
#endif
}
- if (cpi->b_calculate_blockiness) {
- if (!cm->seq_params.use_highbitdepth) {
+ if (ppi->b_calculate_blockiness) {
+ if (!cm->seq_params->use_highbitdepth) {
const double frame_blockiness =
av1_get_blockiness(orig->y_buffer, orig->y_stride, recon->y_buffer,
recon->y_stride, orig->y_width, orig->y_height);
- cpi->worst_blockiness = AOMMAX(cpi->worst_blockiness, frame_blockiness);
- cpi->total_blockiness += frame_blockiness;
+ ppi->worst_blockiness = AOMMAX(ppi->worst_blockiness, frame_blockiness);
+ ppi->total_blockiness += frame_blockiness;
}
- if (cpi->b_calculate_consistency) {
- if (!cm->seq_params.use_highbitdepth) {
+ if (ppi->b_calculate_consistency) {
+ if (!cm->seq_params->use_highbitdepth) {
const double this_inconsistency = aom_get_ssim_metrics(
orig->y_buffer, orig->y_stride, recon->y_buffer, recon->y_stride,
- orig->y_width, orig->y_height, cpi->ssim_vars, &cpi->metrics, 1);
+ orig->y_width, orig->y_height, ppi->ssim_vars, &ppi->metrics, 1);
const double peak = (double)((1 << in_bit_depth) - 1);
const double consistency =
- aom_sse_to_psnr(samples, peak, cpi->total_inconsistency);
+ aom_sse_to_psnr(samples, peak, ppi->total_inconsistency);
if (consistency > 0.0)
- cpi->worst_consistency =
- AOMMIN(cpi->worst_consistency, consistency);
- cpi->total_inconsistency += this_inconsistency;
+ ppi->worst_consistency =
+ AOMMIN(ppi->worst_consistency, consistency);
+ ppi->total_inconsistency += this_inconsistency;
}
}
}
frame_all =
aom_calc_fastssim(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
- adjust_image_stat(y, u, v, frame_all, &cpi->fastssim);
+ adjust_image_stat(y, u, v, frame_all, &ppi->fastssim);
frame_all = aom_psnrhvs(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
- adjust_image_stat(y, u, v, frame_all, &cpi->psnrhvs);
+ adjust_image_stat(y, u, v, frame_all, &ppi->psnrhvs);
+ }
+}
+
+void print_internal_stats(AV1_PRIMARY *const ppi) {
+ if (!ppi->cpi) return;
+ AV1_COMP *const cpi = ppi->cpi;
+
+ if (ppi->cpi->oxcf.pass != 1 &&
+ ppi->cpi->common.current_frame.frame_number > 0) {
+ aom_clear_system_state();
+ char headings[512] = { 0 };
+ char results[512] = { 0 };
+ FILE *f = fopen("opsnr.stt", "a");
+ double time_encoded =
+ (cpi->time_stamps.prev_ts_end - cpi->time_stamps.first_ts_start) /
+ 10000000.000;
+ double total_encode_time =
+ (ppi->total_time_receive_data + ppi->total_time_compress_data) /
+ 1000.000;
+ const double dr =
+ (double)ppi->total_bytes * (double)8 / (double)1000 / time_encoded;
+ const double peak =
+ (double)((1 << ppi->cpi->oxcf.input_cfg.input_bit_depth) - 1);
+ const double target_rate =
+ (double)ppi->cpi->oxcf.rc_cfg.target_bandwidth / 1000;
+ const double rate_err = ((100.0 * (dr - target_rate)) / target_rate);
+
+ if (ppi->b_calculate_psnr) {
+ const double total_psnr = aom_sse_to_psnr(
+ (double)ppi->total_samples[0], peak, (double)ppi->total_sq_error[0]);
+ const double total_ssim =
+ 100 * pow(ppi->summed_quality / ppi->summed_weights, 8.0);
+ snprintf(headings, sizeof(headings),
+ "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
+ "AOMSSIM\tVPSSIMP\tFASTSIM\tPSNRHVS\t"
+ "WstPsnr\tWstSsim\tWstFast\tWstHVS\t"
+ "AVPsrnY\tAPsnrCb\tAPsnrCr");
+ snprintf(results, sizeof(results),
+ "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
+ "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
+ "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
+ "%7.3f\t%7.3f\t%7.3f",
+ dr, ppi->psnr[0].stat[STAT_ALL] / ppi->count[0], total_psnr,
+ ppi->psnr[0].stat[STAT_ALL] / ppi->count[0], total_psnr,
+ total_ssim, total_ssim,
+ ppi->fastssim.stat[STAT_ALL] / ppi->count[0],
+ ppi->psnrhvs.stat[STAT_ALL] / ppi->count[0], ppi->psnr[0].worst,
+ ppi->worst_ssim, ppi->fastssim.worst, ppi->psnrhvs.worst,
+ ppi->psnr[0].stat[STAT_Y] / ppi->count[0],
+ ppi->psnr[0].stat[STAT_U] / ppi->count[0],
+ ppi->psnr[0].stat[STAT_V] / ppi->count[0]);
+
+ if (ppi->b_calculate_blockiness) {
+ SNPRINT(headings, "\t Block\tWstBlck");
+ SNPRINT2(results, "\t%7.3f", ppi->total_blockiness / ppi->count[0]);
+ SNPRINT2(results, "\t%7.3f", ppi->worst_blockiness);
+ }
+
+ if (ppi->b_calculate_consistency) {
+ double consistency =
+ aom_sse_to_psnr((double)ppi->total_samples[0], peak,
+ (double)ppi->total_inconsistency);
+
+ SNPRINT(headings, "\tConsist\tWstCons");
+ SNPRINT2(results, "\t%7.3f", consistency);
+ SNPRINT2(results, "\t%7.3f", ppi->worst_consistency);
+ }
+
+ SNPRINT(headings, "\t Time\tRcErr\tAbsErr");
+ SNPRINT2(results, "\t%8.0f", total_encode_time);
+ SNPRINT2(results, " %7.2f", rate_err);
+ SNPRINT2(results, " %7.2f", fabs(rate_err));
+
+ SNPRINT(headings, "\tAPsnr611");
+ SNPRINT2(results, " %7.3f",
+ (6 * ppi->psnr[0].stat[STAT_Y] + ppi->psnr[0].stat[STAT_U] +
+ ppi->psnr[0].stat[STAT_V]) /
+ (ppi->count[0] * 8));
+
+#if CONFIG_AV1_HIGHBITDEPTH
+ const uint32_t in_bit_depth = ppi->cpi->oxcf.input_cfg.input_bit_depth;
+ const uint32_t bit_depth = ppi->seq_params.bit_depth;
+    // cpi->source->flags is not available here, but total_samples[1] will be
+    // non-zero only if cpi->source->flags & YV12_FLAG_HIGHBITDEPTH was true
+    // in compute_internal_stats().
+ if ((ppi->total_samples[1] > 0) && (in_bit_depth < bit_depth)) {
+ const double peak_hbd = (double)((1 << bit_depth) - 1);
+ const double total_psnr_hbd =
+ aom_sse_to_psnr((double)ppi->total_samples[1], peak_hbd,
+ (double)ppi->total_sq_error[1]);
+ const double total_ssim_hbd =
+ 100 * pow(ppi->summed_quality_hbd / ppi->summed_weights_hbd, 8.0);
+ SNPRINT(headings,
+ "\t AVGPsnrH GLBPsnrH AVPsnrPH GLPsnrPH"
+ " AVPsnrYH APsnrCbH APsnrCrH WstPsnrH"
+ " AOMSSIMH VPSSIMPH WstSsimH");
+ SNPRINT2(results, "\t%7.3f",
+ ppi->psnr[1].stat[STAT_ALL] / ppi->count[1]);
+ SNPRINT2(results, " %7.3f", total_psnr_hbd);
+ SNPRINT2(results, " %7.3f",
+ ppi->psnr[1].stat[STAT_ALL] / ppi->count[1]);
+ SNPRINT2(results, " %7.3f", total_psnr_hbd);
+ SNPRINT2(results, " %7.3f", ppi->psnr[1].stat[STAT_Y] / ppi->count[1]);
+ SNPRINT2(results, " %7.3f", ppi->psnr[1].stat[STAT_U] / ppi->count[1]);
+ SNPRINT2(results, " %7.3f", ppi->psnr[1].stat[STAT_V] / ppi->count[1]);
+ SNPRINT2(results, " %7.3f", ppi->psnr[1].worst);
+ SNPRINT2(results, " %7.3f", total_ssim_hbd);
+ SNPRINT2(results, " %7.3f", total_ssim_hbd);
+ SNPRINT2(results, " %7.3f", ppi->worst_ssim_hbd);
+ }
+#endif
+ fprintf(f, "%s\n", headings);
+ fprintf(f, "%s\n", results);
+ }
+
+ fclose(f);
+
+ if (ppi->ssim_vars != NULL) {
+ aom_free(ppi->ssim_vars);
+ ppi->ssim_vars = NULL;
+ }
}
}
#endif // CONFIG_INTERNAL_STATS
+void av1_post_encode_updates(AV1_COMP *const cpi, size_t size,
+ int64_t time_stamp, int64_t time_end) {
+ AV1_PRIMARY *const ppi = cpi->ppi;
+ AV1_COMMON *const cm = &cpi->common;
+  // Note: *size == 0 indicates a dropped frame, for which PSNR is not
+  // calculated.
+ if (ppi->b_calculate_psnr && size > 0) {
+ if (cm->show_existing_frame ||
+ (!is_stat_generation_stage(cpi) && cm->show_frame)) {
+ generate_psnr_packet(cpi);
+ }
+ }
+
+ if (ppi->level_params.keep_level_stats && !is_stat_generation_stage(cpi)) {
+ // Initialize level info at the beginning of each sequence.
+ if (cm->current_frame.frame_type == KEY_FRAME && !cpi->no_show_fwd_kf) {
+ av1_init_level_info(cpi);
+ }
+ av1_update_level_info(cpi, size, time_stamp, time_end);
+ }
+
+#if CONFIG_INTERNAL_STATS
+ if (!is_stat_generation_stage(cpi)) {
+ compute_internal_stats(cpi, (int)size);
+ }
+#endif // CONFIG_INTERNAL_STATS
+}
+
int av1_get_compressed_data(AV1_COMP *cpi, unsigned int *frame_flags,
- size_t *size, uint8_t *dest, int64_t *time_stamp,
- int64_t *time_end, int flush,
+ size_t *size, size_t avail_size, uint8_t *dest,
+ int64_t *time_stamp, int64_t *time_end, int flush,
const aom_rational64_t *timestamp_ratio) {
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
AV1_COMMON *const cm = &cpi->common;
+#if CONFIG_INTERNAL_STATS
+ cpi->frame_recode_hits = 0;
+ cpi->time_compress_data = 0;
+ cpi->bytes = 0;
+#endif
+#if CONFIG_ENTROPY_STATS
+ if (cpi->compressor_stage == ENCODE_STAGE) {
+ av1_zero(cpi->counts);
+ }
+#endif
+
#if CONFIG_BITSTREAM_DEBUG
assert(cpi->oxcf.max_threads <= 1 &&
"bitstream debug tool does not support multithreading");
@@ -3685,12 +3918,13 @@ int av1_get_compressed_data(AV1_COMP *cpi, unsigned int *frame_flags,
aom_bitstream_queue_set_frame_write(cm->current_frame.order_hint * 2 +
cm->show_frame);
#endif
- if (cpi->use_svc && cm->number_spatial_layers > 1) {
+ if (cpi->ppi->use_svc && cpi->ppi->number_spatial_layers > 1) {
av1_one_pass_cbr_svc_start_layer(cpi);
}
cm->showable_frame = 0;
*size = 0;
+ cpi->available_bs_size = avail_size;
#if CONFIG_INTERNAL_STATS
struct aom_usec_timer cmptimer;
aom_usec_timer_start(&cmptimer);
@@ -3763,27 +3997,9 @@ int av1_get_compressed_data(AV1_COMP *cpi, unsigned int *frame_flags,
aom_usec_timer_mark(&cmptimer);
cpi->time_compress_data += aom_usec_timer_elapsed(&cmptimer);
#endif // CONFIG_INTERNAL_STATS
- // Note *size = 0 indicates a dropped frame for which psnr is not calculated
- if (cpi->b_calculate_psnr && *size > 0) {
- if (cm->show_existing_frame ||
- (!is_stat_generation_stage(cpi) && cm->show_frame)) {
- generate_psnr_packet(cpi);
- }
- }
- if (cpi->level_params.keep_level_stats && !is_stat_generation_stage(cpi)) {
- // Initialize level info. at the beginning of each sequence.
- if (cm->current_frame.frame_type == KEY_FRAME && !cpi->no_show_fwd_kf) {
- av1_init_level_info(cpi);
- }
- av1_update_level_info(cpi, *size, *time_stamp, *time_end);
- }
+ av1_post_encode_updates(cpi, *size, *time_stamp, *time_end);
-#if CONFIG_INTERNAL_STATS
- if (!is_stat_generation_stage(cpi)) {
- compute_internal_stats(cpi, (int)(*size));
- }
-#endif // CONFIG_INTERNAL_STATS
#if CONFIG_SPEED_STATS
if (!is_stat_generation_stage(cpi) && !cm->show_existing_frame) {
cpi->tx_search_count += cpi->td.mb.txfm_search_info.tx_search_count;
@@ -3806,8 +4022,8 @@ int av1_get_preview_raw_frame(AV1_COMP *cpi, YV12_BUFFER_CONFIG *dest) {
*dest = cm->cur_frame->buf;
dest->y_width = cm->width;
dest->y_height = cm->height;
- dest->uv_width = cm->width >> cm->seq_params.subsampling_x;
- dest->uv_height = cm->height >> cm->seq_params.subsampling_y;
+ dest->uv_width = cm->width >> cm->seq_params->subsampling_x;
+ dest->uv_height = cm->height >> cm->seq_params->subsampling_y;
ret = 0;
} else {
ret = -1;
@@ -3829,12 +4045,12 @@ aom_codec_err_t av1_copy_new_frame_enc(AV1_COMMON *cm,
YV12_BUFFER_CONFIG *sd) {
const int num_planes = av1_num_planes(cm);
if (!equal_dimensions_and_border(new_frame, sd))
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Incorrect buffer dimensions");
else
aom_yv12_copy_frame(new_frame, sd, num_planes);
- return cm->error.error_code;
+ return cm->error->error_code;
}
int av1_set_internal_size(AV1EncoderConfig *const oxcf,
@@ -3919,7 +4135,7 @@ int av1_convert_sect5obus_to_annexb(uint8_t *buffer, size_t *frame_size) {
return AOM_CODEC_OK;
}
-static void svc_set_updates_external_ref_frame_config(
+static void svc_set_updates_ref_frame_config(
ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags, SVC *const svc) {
ext_refresh_frame_flags->update_pending = 1;
ext_refresh_frame_flags->last_frame = svc->refresh[svc->ref_idx[0]];
@@ -3980,7 +4196,7 @@ void av1_apply_encoding_flags(AV1_COMP *cpi, aom_enc_frame_flags_t flags) {
av1_use_as_reference(&ext_flags->ref_frame_flags, ref);
} else {
- if (cpi->svc.external_ref_frame_config) {
+ if (cpi->svc.set_ref_frame_config) {
int ref = svc_set_references_external_ref_frame_config(cpi);
av1_use_as_reference(&ext_flags->ref_frame_flags, ref);
}
@@ -4008,9 +4224,8 @@ void av1_apply_encoding_flags(AV1_COMP *cpi, aom_enc_frame_flags_t flags) {
ext_refresh_frame_flags->alt2_ref_frame = (upd & AOM_ALT2_FLAG) != 0;
ext_refresh_frame_flags->update_pending = 1;
} else {
- if (cpi->svc.external_ref_frame_config)
- svc_set_updates_external_ref_frame_config(ext_refresh_frame_flags,
- &cpi->svc);
+ if (cpi->svc.set_ref_frame_config)
+ svc_set_updates_ref_frame_config(ext_refresh_frame_flags, &cpi->svc);
else
ext_refresh_frame_flags->update_pending = 0;
}
@@ -4030,12 +4245,12 @@ void av1_apply_encoding_flags(AV1_COMP *cpi, aom_enc_frame_flags_t flags) {
}
}
-aom_fixed_buf_t *av1_get_global_headers(AV1_COMP *cpi) {
- if (!cpi) return NULL;
+aom_fixed_buf_t *av1_get_global_headers(AV1_PRIMARY *ppi) {
+ if (!ppi) return NULL;
uint8_t header_buf[512] = { 0 };
const uint32_t sequence_header_size =
- av1_write_sequence_header_obu(&cpi->common.seq_params, &header_buf[0]);
+ av1_write_sequence_header_obu(&ppi->seq_params, &header_buf[0]);
assert(sequence_header_size <= sizeof(header_buf));
if (sequence_header_size == 0) return NULL;
@@ -4046,7 +4261,8 @@ aom_fixed_buf_t *av1_get_global_headers(AV1_COMP *cpi) {
if (payload_offset + sequence_header_size > sizeof(header_buf)) return NULL;
memmove(&header_buf[payload_offset], &header_buf[0], sequence_header_size);
- if (av1_write_obu_header(&cpi->level_params, OBU_SEQUENCE_HEADER, 0,
+ if (av1_write_obu_header(&ppi->level_params, &ppi->cpi->frame_header_count,
+ OBU_SEQUENCE_HEADER, 0,
&header_buf[0]) != obu_header_size) {
return NULL;
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/encoder.h b/third_party/libaom/source/libaom/av1/encoder/encoder.h
index 905470f437..fe6e76f498 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encoder.h
+++ b/third_party/libaom/source/libaom/av1/encoder/encoder.h
@@ -35,6 +35,7 @@
#include "av1/encoder/block.h"
#include "av1/encoder/context_tree.h"
#include "av1/encoder/encodemb.h"
+#include "av1/encoder/external_partition.h"
#include "av1/encoder/firstpass.h"
#include "av1/encoder/global_motion.h"
#include "av1/encoder/level.h"
@@ -49,6 +50,7 @@
#include "av1/encoder/tokenize.h"
#include "av1/encoder/tpl_model.h"
#include "av1/encoder/av1_noise_estimate.h"
+#include "av1/encoder/bitstream.h"
#if CONFIG_INTERNAL_STATS
#include "aom_dsp/ssim.h"
@@ -119,6 +121,26 @@ enum {
FRAMEFLAGS_ERROR_RESILIENT = 1 << 6,
} UENUM1BYTE(FRAMETYPE_FLAGS);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+// Level-0 frames are sometimes used for rate control purposes, but for
+// reference mapping purposes, the minimum level should be 1.
+#define MIN_PYR_LEVEL 1
+static INLINE int get_true_pyr_level(int frame_level, int frame_order,
+ int max_layer_depth) {
+ if (frame_order == 0) {
+ // Keyframe case
+ return MIN_PYR_LEVEL;
+ } else if (frame_level == MAX_ARF_LAYERS) {
+ // Leaves
+ return max_layer_depth;
+ } else if (frame_level == (MAX_ARF_LAYERS + 1)) {
+ // Altrefs
+ return MIN_PYR_LEVEL;
+ }
+ return AOMMAX(MIN_PYR_LEVEL, frame_level);
+}
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
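For illustration only (not part of the patch), the mapping implemented by get_true_pyr_level() can be sanity-checked as follows; the GOP depth of 4 used here is an assumed example value.

/* Illustrative sketch: exercises get_true_pyr_level() with an assumed
 * max_layer_depth of 4. The helper name is hypothetical. */
#include <assert.h>
static void sketch_check_true_pyr_level(void) {
  assert(get_true_pyr_level(/*frame_level=*/0, /*frame_order=*/0, 4) == 1);  // keyframe
  assert(get_true_pyr_level(MAX_ARF_LAYERS, 8, 4) == 4);                     // leaf frame
  assert(get_true_pyr_level(MAX_ARF_LAYERS + 1, 16, 4) == 1);                // altref
  assert(get_true_pyr_level(2, 4, 4) == 2);                                  // mid-pyramid ARF
}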
+
enum {
NO_AQ = 0,
VARIANCE_AQ = 1,
@@ -159,13 +181,6 @@ enum {
/*!\cond */
typedef enum {
- COST_UPD_SB,
- COST_UPD_SBROW,
- COST_UPD_TILE,
- COST_UPD_OFF,
-} COST_UPDATE_TYPE;
-
-typedef enum {
MOD_FP, // First pass
MOD_TF, // Temporal filtering
MOD_TPL, // TPL
@@ -173,12 +188,24 @@ typedef enum {
MOD_ENC, // Encode stage
MOD_LPF, // Deblocking loop filter
MOD_CDEF_SEARCH, // CDEF search
+ MOD_CDEF, // CDEF frame
MOD_LR, // Loop restoration filtering
+ MOD_PACK_BS, // Pack bitstream
NUM_MT_MODULES
} MULTI_THREADED_MODULES;
/*!\endcond */
+/*!\enum COST_UPDATE_TYPE
+ * \brief This enum controls how often the entropy costs should be updated.
+ */
+typedef enum {
+ COST_UPD_SB, /*!< Update every sb. */
+ COST_UPD_SBROW, /*!< Update once per sb row inside a tile. */
+ COST_UPD_TILE, /*!< Update every tile. */
+ COST_UPD_OFF, /*!< Turn off cost updates. */
+} COST_UPDATE_TYPE;
+
/*!
* \brief Encoder config related to resize.
*/
@@ -623,6 +650,8 @@ typedef struct {
COST_UPDATE_TYPE mode;
// Indicates the update frequency for mv costs.
COST_UPDATE_TYPE mv;
+ // Indicates the update frequency for dv costs.
+ COST_UPDATE_TYPE dv;
} CostUpdateFreq;
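A minimal sketch of how these knobs could be driven from an encoder speed setting; the policy below is an assumption for illustration, not the logic used by the speed-features code.

/* Illustrative sketch: coarser cost updates at higher speeds. Only the
 * mode/mv/dv fields shown above are used; the thresholds are assumed. */
static void sketch_pick_cost_update_freq(int speed, CostUpdateFreq *freq) {
  if (speed >= 8) {
    freq->mode = COST_UPD_TILE;  // refresh mode costs once per tile
    freq->mv = COST_UPD_TILE;
    freq->dv = COST_UPD_OFF;     // never refresh IntraBC dv costs
  } else {
    freq->mode = COST_UPD_SB;    // refresh at every superblock
    freq->mv = COST_UPD_SBROW;   // refresh once per sb row inside a tile
    freq->dv = COST_UPD_SB;
  }
}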
typedef struct {
@@ -711,7 +740,10 @@ typedef struct {
*/
typedef struct {
/*!
- * Indicates the loop filter sharpness.
+ * Controls the level at which rate-distortion optimization of transform
+ * coefficients favours sharpness in the block. Has no impact on RD when set
+ * to zero (default). For values 1-7, eob and skip block optimization are
+ * avoided and rdmult is adjusted in favour of block sharpness.
*/
int sharpness;
@@ -940,6 +972,10 @@ typedef struct AV1EncoderConfig {
// format.
bool save_as_annexb;
+ // The path for partition stats reading and writing, used in the experiment
+ // CONFIG_PARTITION_SEARCH_ORDER.
+ const char *partition_info_path;
+
/*!\endcond */
} AV1EncoderConfig;
@@ -1267,6 +1303,7 @@ typedef struct TileDataEnc {
TileInfo tile_info;
DECLARE_ALIGNED(16, FRAME_CONTEXT, tctx);
FRAME_CONTEXT *row_ctx;
+ uint64_t abs_sum_level;
uint8_t allow_update_cdf;
InterModeRdModel inter_mode_rd_models[BLOCK_SIZES_ALL];
AV1EncRowMultiThreadSync row_mt_sync;
@@ -1295,14 +1332,23 @@ typedef struct ThreadData {
PALETTE_BUFFER *palette_buffer;
CompoundTypeRdBuffers comp_rd_buffer;
CONV_BUF_TYPE *tmp_conv_dst;
+ uint64_t abs_sum_level;
uint8_t *tmp_pred_bufs[2];
int intrabc_used;
int deltaq_used;
+ int coefficient_size;
+ int max_mv_magnitude;
+ int interp_filter_selected[SWITCHABLE];
FRAME_CONTEXT *tctx;
VP64x64 *vt64x64;
int32_t num_64x64_blocks;
PICK_MODE_CONTEXT *firstpass_ctx;
TemporalFilterData tf_data;
+ TplTxfmStats tpl_txfm_stats;
+ // Pointer to the array of structures to store gradient information of each
+ // pixel in a superblock. The buffer consists of MAX_SB_SQUARE pixel-level
+ // structures for each plane type (PLANE_TYPE_Y and PLANE_TYPE_UV).
+ PixelLevelGradientInfo *pixel_gradient_info;
} ThreadData;
struct EncWorkerData;
@@ -1427,6 +1473,11 @@ typedef struct MultiThreadInfo {
AV1LrSync lr_row_sync;
/*!
+ * Pack bitstream multi-threading object.
+ */
+ AV1EncPackBSSync pack_bs_sync;
+
+ /*!
* Global Motion multi-threading object.
*/
AV1GlobalMotionSync gm_sync;
@@ -1440,6 +1491,11 @@ typedef struct MultiThreadInfo {
* CDEF search multi-threading object.
*/
AV1CdefSync cdef_sync;
+
+ /*!
+ * CDEF row multi-threading data.
+ */
+ AV1CdefWorkerData *cdef_worker;
} MultiThreadInfo;
/*!\cond */
@@ -1561,10 +1617,13 @@ enum {
rd_pick_sb_modes_time,
av1_rd_pick_intra_mode_sb_time,
av1_rd_pick_inter_mode_sb_time,
+ set_params_rd_pick_inter_mode_time,
+ skip_inter_mode_time,
handle_inter_mode_time,
evaluate_motion_mode_for_winner_candidates_time,
- handle_intra_mode_time,
do_tx_search_time,
+ handle_intra_mode_time,
+ refine_winner_mode_tx_time,
av1_search_palette_mode_time,
handle_newmv_time,
compound_type_rd_time,
@@ -1609,11 +1668,15 @@ static INLINE char const *get_component_name(int index) {
return "av1_rd_pick_intra_mode_sb_time";
case av1_rd_pick_inter_mode_sb_time:
return "av1_rd_pick_inter_mode_sb_time";
+ case set_params_rd_pick_inter_mode_time:
+ return "set_params_rd_pick_inter_mode_time";
+ case skip_inter_mode_time: return "skip_inter_mode_time";
case handle_inter_mode_time: return "handle_inter_mode_time";
case evaluate_motion_mode_for_winner_candidates_time:
return "evaluate_motion_mode_for_winner_candidates_time";
- case handle_intra_mode_time: return "handle_intra_mode_time";
case do_tx_search_time: return "do_tx_search_time";
+ case handle_intra_mode_time: return "handle_intra_mode_time";
+ case refine_winner_mode_tx_time: return "refine_winner_mode_tx_time";
case av1_search_palette_mode_time: return "av1_search_palette_mode_time";
case handle_newmv_time: return "handle_newmv_time";
case compound_type_rd_time: return "compound_type_rd_time";
@@ -2045,12 +2108,88 @@ typedef struct {
uint8_t *entropy_ctx;
} CoeffBufferPool;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+/*!
+ * \brief Max number of frames that can be encoded in a parallel encode set.
+ */
+#define MAX_PARALLEL_FRAMES 4
+
+/*!
+ * \brief Structure to hold data of frame encoded in a given parallel encode
+ * set.
+ */
+typedef struct AV1_FP_OUT_DATA {
+ /*!
+ * Buffer to store packed bitstream data of a frame.
+ */
+ unsigned char *cx_data_frame;
+
+ /*!
+ * Allocated size of the cx_data_frame buffer.
+ */
+ size_t cx_data_sz;
+
+ /*!
+ * Size of data written in the cx_data_frame buffer.
+ */
+ size_t frame_size;
+
+ /*!
+ * Display order hint of frame whose packed data is in cx_data_frame buffer.
+ */
+ int frame_display_order_hint;
+} AV1_FP_OUT_DATA;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
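As a hedged sketch of how the slots above might be consumed, the finished bitstreams of a parallel encode set could be handed out in display order; the sink callback and the draining helper are assumptions, not the patch's actual plumbing.

/* Sketch only: walk parallel output slots and pass finished bitstreams to a
 * caller-supplied sink in display order. The sink signature is hypothetical. */
typedef void (*sketch_frame_sink)(const unsigned char *data, size_t size,
                                  int display_order_hint);

static void sketch_flush_parallel_set(AV1_FP_OUT_DATA *slots, int num_slots,
                                      sketch_frame_sink sink) {
  for (;;) {
    int best = -1;
    for (int i = 0; i < num_slots; ++i) {
      if (slots[i].frame_size == 0) continue;  // empty or already drained slot
      if (best < 0 || slots[i].frame_display_order_hint <
                          slots[best].frame_display_order_hint)
        best = i;
    }
    if (best < 0) break;
    sink(slots[best].cx_data_frame, slots[best].frame_size,
         slots[best].frame_display_order_hint);
    slots[best].frame_size = 0;  // mark the slot as drained
  }
}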
+
/*!
* \brief Top level primary encoder structure
*/
typedef struct AV1_PRIMARY {
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ /*!
+ * Array of frame level encoder stage top level structures
+ */
+ struct AV1_COMP *parallel_cpi[MAX_PARALLEL_FRAMES];
+
+ /*!
+ * Number of frame-level contexts (cpis).
+ */
+ int num_fp_contexts;
+
+ /*!
+ * Array of structures to hold data of frames encoded in a given parallel
+ * encode set.
+ */
+ struct AV1_FP_OUT_DATA parallel_frames_data[MAX_PARALLEL_FRAMES - 1];
+
+ /*!
+ * Loopfilter levels of the previous encoded frame.
+ */
+ int filter_level[2];
+ int filter_level_u;
+ int filter_level_v;
+
+ /*!
+ * Largest MV component used in previous encoded frame during
+ * stats consumption stage.
+ */
+ int max_mv_magnitude;
+
+ /*!
+ * Temporary variable simulating the delayed frame_probability update.
+ */
+ FrameProbInfo temp_frame_probs;
+
+ /*!
+ * Temporary variable used in simulating the delayed update of
+ * avg_frame_qindex.
+ */
+ int temp_avg_frame_qindex[FRAME_TYPES];
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
/*!
* Encode stage top level structure
+ * When CONFIG_FRAME_PARALLEL_ENCODE is enabled, this is the same as
+ * parallel_cpi[0].
*/
struct AV1_COMP *cpi;
@@ -2063,6 +2202,186 @@ typedef struct AV1_PRIMARY {
* Look-ahead context.
*/
struct lookahead_ctx *lookahead;
+
+ /*!
+ * Sequence parameters have been transmitted already and locked
+ * or not. Once locked av1_change_config cannot change the seq
+ * parameters.
+ */
+ int seq_params_locked;
+
+ /*!
+ * Pointer to internal utility functions that manipulate aom_codec_* data
+ * structures.
+ */
+ struct aom_codec_pkt_list *output_pkt_list;
+
+ /*!
+ * When set, indicates that internal ARFs are enabled.
+ */
+ int internal_altref_allowed;
+
+ /*!
+ * Information related to a gf group.
+ */
+ GF_GROUP gf_group;
+
+ /*!
+ * Track prior gf group state.
+ */
+ GF_STATE gf_state;
+
+ /*!
+ * Flag indicating whether look ahead processing (LAP) is enabled.
+ */
+ int lap_enabled;
+
+ /*!
+ * Parameters for AV1 bitstream levels.
+ */
+ AV1LevelParams level_params;
+
+ /*!
+ * Calculates PSNR on each frame when set to 1.
+ */
+ int b_calculate_psnr;
+
+ /*!
+ * Number of frames left to be encoded; 0 if no limit is set.
+ */
+ int frames_left;
+
+ /*!
+ * Information related to two pass encoding.
+ */
+ TWO_PASS twopass;
+
+ /*!
+ * Rate control related parameters.
+ */
+ PRIMARY_RATE_CONTROL p_rc;
+
+ /*!
+ * Frame buffer holding the temporally filtered source frame. It can be KEY
+ * frame or ARF frame.
+ */
+ YV12_BUFFER_CONFIG alt_ref_buffer;
+
+ /*!
+ * Elements part of the sequence header, that are applicable for all the
+ * frames in the video.
+ */
+ SequenceHeader seq_params;
+
+ /*!
+ * Indicates whether to use SVC.
+ */
+ int use_svc;
+
+ /*!
+ * If true, buffer removal times are present.
+ */
+ bool buffer_removal_time_present;
+
+ /*!
+ * Number of temporal layers: may be > 1 for SVC (scalable video coding).
+ */
+ unsigned int number_temporal_layers;
+
+ /*!
+ * Number of spatial layers: may be > 1 for SVC (scalable vector coding).
+ */
+ unsigned int number_spatial_layers;
+
+ /*!
+ * Code and details about current error status.
+ */
+ struct aom_internal_error_info error;
+
+ /*!
+ * Function pointers to variants of sse/sad/variance computation functions.
+ * fn_ptr[i] indicates the list of function pointers corresponding to block
+ * size i.
+ */
+ aom_variance_fn_ptr_t fn_ptr[BLOCK_SIZES_ALL];
+
+ /*!
+ * Scaling factors used in the RD multiplier modulation.
+ * TODO(sdeng): consider merging the following arrays.
+ * tpl_rdmult_scaling_factors is a temporary buffer used to store the
+ * intermediate scaling factors which are used in the calculation of
+ * tpl_sb_rdmult_scaling_factors. tpl_rdmult_scaling_factors[i] stores the
+ * intermediate scaling factor of the ith 16 x 16 block in raster scan order.
+ */
+ double *tpl_rdmult_scaling_factors;
+
+ /*!
+ * tpl_sb_rdmult_scaling_factors[i] stores the RD multiplier scaling factor of
+ * the ith 16 x 16 block in raster scan order.
+ */
+ double *tpl_sb_rdmult_scaling_factors;
+
+ /*!
+ * Parameters related to tpl.
+ */
+ TplParams tpl_data;
+
+ /*!
+ * Motion vector stats of the previous encoded frame.
+ */
+ MV_STATS mv_stats;
+
+#if CONFIG_INTERNAL_STATS
+ /*!\cond */
+ uint64_t total_time_receive_data;
+ uint64_t total_time_compress_data;
+
+ unsigned int total_mode_chosen_counts[MAX_MODES];
+
+ int count[2];
+ uint64_t total_sq_error[2];
+ uint64_t total_samples[2];
+ ImageStat psnr[2];
+
+ double total_blockiness;
+ double worst_blockiness;
+
+ int total_bytes;
+ double summed_quality;
+ double summed_weights;
+ double summed_quality_hbd;
+ double summed_weights_hbd;
+ unsigned int total_recode_hits;
+ double worst_ssim;
+ double worst_ssim_hbd;
+
+ ImageStat fastssim;
+ ImageStat psnrhvs;
+
+ int b_calculate_blockiness;
+ int b_calculate_consistency;
+
+ double total_inconsistency;
+ double worst_consistency;
+ Ssimv *ssim_vars;
+ Metrics metrics;
+ /*!\endcond */
+#endif
+
+#if CONFIG_ENTROPY_STATS
+ /*!
+ * Aggregates frame counts for the sequence.
+ */
+ FRAME_COUNTS aggregate_fc;
+#endif // CONFIG_ENTROPY_STATS
+
+ /*!
+ * For each type of reference frame, this contains the index of a reference
+ * frame buffer for a reference frame of the same type. We use this to
+ * choose our primary reference frame (which is the most recent reference
+ * frame of the same type as the current frame).
+ */
+ int fb_of_context_type[REF_FRAMES];
} AV1_PRIMARY;
/*!
@@ -2173,9 +2492,9 @@ typedef struct AV1_COMP {
YV12_BUFFER_CONFIG *unfiltered_source;
/*!
- * Parameters related to tpl.
+ * Skip tpl setup when tpl data from gop length decision can be reused.
*/
- TplParams tpl_data;
+ int skip_tpl_setup_stats;
/*!
* Temporal filter context.
@@ -2209,14 +2528,6 @@ typedef struct AV1_COMP {
RefreshFrameFlagsInfo refresh_frame;
/*!
- * For each type of reference frame, this contains the index of a reference
- * frame buffer for a reference frame of the same type. We use this to
- * choose our primary reference frame (which is the most recent reference
- * frame of the same type as the current frame).
- */
- int fb_of_context_type[REF_FRAMES];
-
- /*!
* Flags signalled by the external interface at frame level.
*/
ExternalFlags ext_flags;
@@ -2275,12 +2586,6 @@ typedef struct AV1_COMP {
double framerate;
/*!
- * Pointer to internal utility functions that manipulate aom_codec_* data
- * structures.
- */
- struct aom_codec_pkt_list *output_pkt_list;
-
- /*!
* Bitmask indicating which reference buffers may be referenced by this frame.
*/
int ref_frame_flags;
@@ -2322,26 +2627,9 @@ typedef struct AV1_COMP {
ActiveMap active_map;
/*!
- * Function pointers to variants of sse/sad/variance computation functions.
- * fn_ptr[i] indicates the list of function pointers corresponding to block
- * size i.
- */
- aom_variance_fn_ptr_t fn_ptr[BLOCK_SIZES_ALL];
-
- /*!
- * Information related to two pass encoding.
- */
- TWO_PASS twopass;
-
- /*!
- * Information related to a gf group.
- */
- GF_GROUP gf_group;
-
- /*!
- * Track prior gf group state.
+ * The frame processing order within a GOP.
*/
- GF_STATE gf_state;
+ unsigned char gf_frame_index;
/*!
* To control the reference frame buffer and selection.
@@ -2349,58 +2637,20 @@ typedef struct AV1_COMP {
RefBufferStack ref_buffer_stack;
/*!
- * Frame buffer holding the temporally filtered source frame. It can be KEY
- * frame or ARF frame.
- */
- YV12_BUFFER_CONFIG alt_ref_buffer;
-
- /*!
* Tell if OVERLAY frame shows existing alt_ref frame.
*/
int show_existing_alt_ref;
#if CONFIG_INTERNAL_STATS
/*!\cond */
- uint64_t time_receive_data;
uint64_t time_compress_data;
unsigned int mode_chosen_counts[MAX_MODES];
-
- int count[2];
- uint64_t total_sq_error[2];
- uint64_t total_samples[2];
- ImageStat psnr[2];
-
- double total_blockiness;
- double worst_blockiness;
-
int bytes;
- double summed_quality;
- double summed_weights;
- double summed_quality_hbd;
- double summed_weights_hbd;
- unsigned int tot_recode_hits;
- double worst_ssim;
- double worst_ssim_hbd;
-
- ImageStat fastssim;
- ImageStat psnrhvs;
-
- int b_calculate_blockiness;
- int b_calculate_consistency;
-
- double total_inconsistency;
- double worst_consistency;
- Ssimv *ssim_vars;
- Metrics metrics;
+ unsigned int frame_recode_hits;
/*!\endcond */
#endif
- /*!
- * Calculates PSNR on each frame when set to 1.
- */
- int b_calculate_psnr;
-
#if CONFIG_SPEED_STATS
/*!
* For debugging: number of transform searches we have performed.
@@ -2458,13 +2708,6 @@ typedef struct AV1_COMP {
TokenInfo token_info;
/*!
- * Sequence parameters have been transmitted already and locked
- * or not. Once locked av1_change_config cannot change the seq
- * parameters.
- */
- int seq_params_locked;
-
- /*!
* VARIANCE_AQ segment map refresh.
*/
int vaq_refresh;
@@ -2492,21 +2735,11 @@ typedef struct AV1_COMP {
int existing_fb_idx_to_show;
/*!
- * When set, indicates that internal ARFs are enabled.
- */
- int internal_altref_allowed;
-
- /*!
* A flag to indicate if intrabc is ever used in current frame.
*/
int intrabc_used;
/*!
- * Tables to calculate IntraBC MV cost.
- */
- IntraBCMVCosts dv_costs;
-
- /*!
* Mark which ref frames can be skipped for encoding current frame during RDO.
*/
int prune_ref_frame_mask;
@@ -2571,9 +2804,9 @@ typedef struct AV1_COMP {
#endif
/*!
- * Parameters for AV1 bitstream levels.
+ * Count the number of OBU_FRAME and OBU_FRAME_HEADER for level calculation.
*/
- AV1LevelParams level_params;
+ int frame_header_count;
/*!
* Whether any no-zero delta_q was actually used.
@@ -2586,20 +2819,6 @@ typedef struct AV1_COMP {
RefFrameDistanceInfo ref_frame_dist_info;
/*!
- * Scaling factors used in the RD multiplier modulation.
- * TODO(sdeng): consider merge the following arrays.
- * tpl_rdmult_scaling_factors is a temporary buffer used to store the
- * intermediate scaling factors which are used in the calculation of
- * tpl_sb_rdmult_scaling_factors. tpl_rdmult_scaling_factors[i] stores the
- * intermediate scaling factor of the ith 16 x 16 block in raster scan order.
- */
- double *tpl_rdmult_scaling_factors;
- /*!
- * tpl_sb_rdmult_scaling_factors[i] stores the RD multiplier scaling factor of
- * the ith 16 x 16 block in raster scan order.
- */
- double *tpl_sb_rdmult_scaling_factors;
- /*!
* ssim_rdmult_scaling_factors[i] stores the RD multiplier scaling factor of
* the ith 16 x 16 block in raster scan order. This scaling factor is used for
* RD multiplier modulation when SSIM tuning is enabled.
@@ -2621,30 +2840,16 @@ typedef struct AV1_COMP {
#endif
/*!
- * Indicates whether to use SVC.
- */
- int use_svc;
- /*!
* Parameters for scalable video coding.
*/
SVC svc;
/*!
- * Flag indicating whether look ahead processing (LAP) is enabled.
- */
- int lap_enabled;
- /*!
* Indicates whether current processing stage is encode stage or LAP stage.
*/
COMPRESSOR_STAGE compressor_stage;
/*!
- * Some motion vector stats from the last encoded frame to help us decide what
- * precision to use to encode the current frame.
- */
- MV_STATS mv_stats;
-
- /*!
* Frame type of the last frame. May be used in some heuristics for speeding
* up the encoding.
*/
@@ -2686,14 +2891,35 @@ typedef struct AV1_COMP {
uint8_t *consec_zero_mv;
/*!
- * Number of frames left to be encoded, is 0 if limit is not set.
+ * Block size of first pass encoding
*/
- int frames_left;
+ BLOCK_SIZE fp_block_size;
/*!
- * Block size of first pass encoding
+ * Counter of encoded super blocks, used to differentiate block names.
+ * This number starts from 0 and increments whenever a super block is encoded.
*/
- BLOCK_SIZE fp_block_size;
+ int sb_counter;
+
+ /*!
+ * Available bitstream buffer size in bytes
+ */
+ size_t available_bs_size;
+
+ /*!
+ * The controller of the external partition model.
+ * It is used to do partition type selection based on external models.
+ */
+ ExtPartController ext_part_controller;
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ /*!
+ * A flag to indicate frames that will update their data to the primary
+ * context at the end of the encode. It is set for non-parallel frames and the
+ * last frame in encode order in a given parallel encode set.
+ */
+ bool do_frame_data_update;
+#endif
} AV1_COMP;
/*!
@@ -2773,26 +2999,39 @@ void av1_initialize_enc(void);
struct AV1_COMP *av1_create_compressor(AV1_PRIMARY *ppi, AV1EncoderConfig *oxcf,
BufferPool *const pool,
- FIRSTPASS_STATS *frame_stats_buf,
COMPRESSOR_STAGE stage,
- int num_lap_buffers,
- int lap_lag_in_frames,
- STATS_BUFFER_CTX *stats_buf_context);
+ int lap_lag_in_frames);
-struct AV1_PRIMARY *av1_create_primary_compressor();
+struct AV1_PRIMARY *av1_create_primary_compressor(
+ struct aom_codec_pkt_list *pkt_list_head, int num_lap_buffers,
+ AV1EncoderConfig *oxcf);
void av1_remove_compressor(AV1_COMP *cpi);
void av1_remove_primary_compressor(AV1_PRIMARY *ppi);
-void av1_change_config(AV1_COMP *cpi, const AV1EncoderConfig *oxcf);
+#if CONFIG_ENTROPY_STATS
+void print_entropy_stats(AV1_PRIMARY *const ppi);
+#endif
+#if CONFIG_INTERNAL_STATS
+void print_internal_stats(AV1_PRIMARY *ppi);
+#endif
+
+void av1_change_config_seq(AV1_PRIMARY *ppi, const AV1EncoderConfig *oxcf,
+ bool *sb_size_changed);
+
+void av1_change_config(AV1_COMP *cpi, const AV1EncoderConfig *oxcf,
+ bool sb_size_changed);
void av1_check_initial_width(AV1_COMP *cpi, int use_highbitdepth,
int subsampling_x, int subsampling_y);
-void av1_init_seq_coding_tools(SequenceHeader *seq, AV1_COMMON *cm,
+void av1_init_seq_coding_tools(AV1_PRIMARY *const ppi,
const AV1EncoderConfig *oxcf, int use_svc);
+void av1_post_encode_updates(AV1_COMP *const cpi, size_t size,
+ int64_t time_stamp, int64_t time_end);
+
/*!\endcond */
/*!\brief Obtain the raw frame data
@@ -2827,6 +3066,7 @@ int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
* \param[in] cpi Top-level encoder structure
* \param[in] frame_flags Flags to decide how to encoding the frame
* \param[in] size Bitstream size
+ * \param[in] avail_size Available bitstream buffer size
* \param[in] dest Bitstream output
* \param[out] time_stamp Time stamp of the frame
* \param[out] time_end Time end
@@ -2840,8 +3080,8 @@ int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
* \retval #AOM_CODEC_ERROR
*/
int av1_get_compressed_data(AV1_COMP *cpi, unsigned int *frame_flags,
- size_t *size, uint8_t *dest, int64_t *time_stamp,
- int64_t *time_end, int flush,
+ size_t *size, size_t avail_size, uint8_t *dest,
+ int64_t *time_stamp, int64_t *time_end, int flush,
const aom_rational64_t *timebase);
/*!\brief Run 1-pass/2-pass encoding
@@ -2902,6 +3142,71 @@ void av1_set_screen_content_options(struct AV1_COMP *cpi,
void av1_update_frame_size(AV1_COMP *cpi);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+typedef struct {
+ int pyr_level;
+ int disp_order;
+} RefFrameMapPair;
+
+static INLINE void init_ref_map_pair(
+ AV1_COMP *cpi, RefFrameMapPair ref_frame_map_pairs[REF_FRAMES]) {
+ if (cpi->ppi->gf_group.update_type[cpi->gf_frame_index] == KF_UPDATE) {
+ memset(ref_frame_map_pairs, -1, sizeof(*ref_frame_map_pairs) * REF_FRAMES);
+ return;
+ }
+ memset(ref_frame_map_pairs, 0, sizeof(*ref_frame_map_pairs) * REF_FRAMES);
+ for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
+ // Get reference frame buffer.
+ const RefCntBuffer *const buf = cpi->common.ref_frame_map[map_idx];
+ if (ref_frame_map_pairs[map_idx].disp_order == -1) continue;
+ if (buf == NULL) {
+ ref_frame_map_pairs[map_idx].disp_order = -1;
+ ref_frame_map_pairs[map_idx].pyr_level = -1;
+ continue;
+ } else if (buf->ref_count > 1) {
+ // Once the keyframe is coded, the slots in ref_frame_map will all
+ // point to the same frame. In that case, all subsequent pointers
+ // matching the current one are considered "free" slots. This will find
+ // the next occurrence of the current pointer if ref_count indicates
+ // there are multiple instances of it and mark it as free.
+ for (int idx2 = map_idx + 1; idx2 < REF_FRAMES; ++idx2) {
+ const RefCntBuffer *const buf2 = cpi->common.ref_frame_map[idx2];
+ if (buf2 == buf) {
+ ref_frame_map_pairs[idx2].disp_order = -1;
+ ref_frame_map_pairs[idx2].pyr_level = -1;
+ }
+ }
+ }
+ ref_frame_map_pairs[map_idx].disp_order = (int)buf->display_order_hint;
+ ref_frame_map_pairs[map_idx].pyr_level = buf->pyramid_level;
+ }
+}
+
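A small usage sketch (assumed, not from the patch): once the snapshot is built by init_ref_map_pair(), a caller could scan it for a free slot or, failing that, the deepest-level entry when deciding which reference to evict.

/* Illustrative only: the selection heuristic below is an assumption; the real
 * reference-mapping decisions live elsewhere in the encoder. */
static int sketch_find_evictable_slot(const RefFrameMapPair pairs[REF_FRAMES]) {
  int best_idx = -1;
  for (int i = 0; i < REF_FRAMES; ++i) {
    if (pairs[i].disp_order == -1) return i;  // a free slot wins outright
    if (best_idx == -1 || pairs[i].pyr_level > pairs[best_idx].pyr_level)
      best_idx = i;  // otherwise prefer the deepest pyramid level
  }
  return best_idx;
}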
+static AOM_INLINE void calc_frame_data_update_flag(
+ GF_GROUP *const gf_group, int gf_frame_index,
+ bool *const do_frame_data_update) {
+ *do_frame_data_update = true;
+ // Set the flag to false for all frames in a given parallel encode set except
+ // the last frame in the set with frame_parallel_level = 2.
+ if (gf_group->frame_parallel_level[gf_frame_index] == 1) {
+ *do_frame_data_update = false;
+ } else if (gf_group->frame_parallel_level[gf_frame_index] == 2) {
+ // Check if this is the last frame in the set with frame_parallel_level = 2.
+ for (int i = gf_frame_index + 1; i < gf_group->size; i++) {
+ if ((gf_group->frame_parallel_level[i] == 0 &&
+ (gf_group->update_type[i] == ARF_UPDATE ||
+ gf_group->update_type[i] == INTNL_ARF_UPDATE)) ||
+ gf_group->frame_parallel_level[i] == 1) {
+ break;
+ } else if (gf_group->frame_parallel_level[i] == 2) {
+ *do_frame_data_update = false;
+ break;
+ }
+ }
+ }
+}
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
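A minimal sketch of the intended call pattern, assuming the flag is recomputed once per frame before encoding; the wrapper itself is hypothetical.

/* Illustrative only: compute do_frame_data_update for the frame about to be
 * encoded. do_frame_data_update and gf_frame_index are fields added by this
 * patch; the enclosing helper is not part of it. */
static void sketch_set_frame_data_update_flag(AV1_COMP *cpi) {
#if CONFIG_FRAME_PARALLEL_ENCODE
  calc_frame_data_update_flag(&cpi->ppi->gf_group, cpi->gf_frame_index,
                              &cpi->do_frame_data_update);
#else
  (void)cpi;
#endif
}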
// TODO(jingning): Move these functions as primitive members for the new cpi
// class.
static INLINE void stack_push(int *stack, int *stack_size, int item) {
@@ -2949,8 +3254,9 @@ ticks_to_timebase_units(const aom_rational64_t *timestamp_ratio, int64_t n) {
}
static INLINE int frame_is_kf_gf_arf(const AV1_COMP *cpi) {
- const GF_GROUP *const gf_group = &cpi->gf_group;
- const FRAME_UPDATE_TYPE update_type = gf_group->update_type[gf_group->index];
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ const FRAME_UPDATE_TYPE update_type =
+ gf_group->update_type[cpi->gf_frame_index];
return frame_is_intra_only(&cpi->common) || update_type == ARF_UPDATE ||
update_type == GF_UPDATE;
@@ -3009,10 +3315,25 @@ static INLINE int is_altref_enabled(int lag_in_frames, bool enable_auto_arf) {
return lag_in_frames >= ALT_MIN_LAG && enable_auto_arf;
}
+static AOM_INLINE int can_disable_altref(const GFConfig *gf_cfg) {
+ return is_altref_enabled(gf_cfg->lag_in_frames, gf_cfg->enable_auto_arf) &&
+ (gf_cfg->gf_min_pyr_height == 0);
+}
+
+static AOM_INLINE int use_ml_model_to_decide_flat_gop(
+ const RateControlCfg *rc_cfg) {
+ return (rc_cfg->mode == AOM_Q && rc_cfg->cq_level <= 200);
+}
+
+// Helper function to compute the number of blocks along a frame dimension
+// (rounded up).
+static INLINE int get_num_blocks(const int frame_length, const int mb_length) {
+ return (frame_length + mb_length - 1) / mb_length;
+}
+
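For example, the ceiling division above yields 120 blocks for a 1920-pixel dimension with 16-pixel blocks, and 121 once a single extra pixel is added; a quick illustrative check:

/* Illustrative sanity check of get_num_blocks(); not part of the patch. */
#include <assert.h>
static void sketch_check_get_num_blocks(void) {
  assert(get_num_blocks(1920, 16) == 120);  // exact multiple
  assert(get_num_blocks(1921, 16) == 121);  // one extra pixel -> one more block
}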
// Check if statistics generation stage
static INLINE int is_stat_generation_stage(const AV1_COMP *const cpi) {
assert(IMPLIES(cpi->compressor_stage == LAP_STAGE,
- cpi->oxcf.pass == 0 && cpi->lap_enabled));
+ cpi->oxcf.pass == 0 && cpi->ppi->lap_enabled));
return (cpi->oxcf.pass == 1 || (cpi->compressor_stage == LAP_STAGE));
}
// Check if statistics consumption stage
@@ -3024,7 +3345,7 @@ static INLINE int is_stat_consumption_stage_twopass(const AV1_COMP *const cpi) {
static INLINE int is_stat_consumption_stage(const AV1_COMP *const cpi) {
return (is_stat_consumption_stage_twopass(cpi) ||
(cpi->oxcf.pass == 0 && (cpi->compressor_stage == ENCODE_STAGE) &&
- cpi->lap_enabled));
+ cpi->ppi->lap_enabled));
}
/*!\endcond */
@@ -3037,11 +3358,18 @@ static INLINE int is_stat_consumption_stage(const AV1_COMP *const cpi) {
* \return 0 if no stats for current stage else 1
*/
static INLINE int has_no_stats_stage(const AV1_COMP *const cpi) {
- assert(IMPLIES(!cpi->lap_enabled, cpi->compressor_stage == ENCODE_STAGE));
- return (cpi->oxcf.pass == 0 && !cpi->lap_enabled);
+ assert(
+ IMPLIES(!cpi->ppi->lap_enabled, cpi->compressor_stage == ENCODE_STAGE));
+ return (cpi->oxcf.pass == 0 && !cpi->ppi->lap_enabled);
}
+
/*!\cond */
+static INLINE int is_one_pass_rt_params(const AV1_COMP *cpi) {
+ return has_no_stats_stage(cpi) && cpi->oxcf.mode == REALTIME &&
+ cpi->oxcf.gf_cfg.lag_in_frames == 0;
+}
+
// Function return size of frame stats buffer
static INLINE int get_stats_buf_size(int num_lap_buffer, int num_lag_buffer) {
/* if lookahead is enabled return num_lap_buffers else num_lag_buffers */
@@ -3208,7 +3536,7 @@ static INLINE int get_ref_frame_flags(const SPEED_FEATURES *const sf,
// Note: The OBU returned is in Low Overhead Bitstream Format. Specifically,
// the obu_has_size_field bit is set, and the buffer contains the obu_size
// field.
-aom_fixed_buf_t *av1_get_global_headers(AV1_COMP *cpi);
+aom_fixed_buf_t *av1_get_global_headers(AV1_PRIMARY *ppi);
#define MAX_GFUBOOST_FACTOR 10.0
#define MIN_GFUBOOST_FACTOR 4.0
@@ -3229,9 +3557,9 @@ static INLINE int is_frame_eligible_for_ref_pruning(const GF_GROUP *gf_group,
}
// Get update type of the current frame.
-static INLINE FRAME_UPDATE_TYPE
-get_frame_update_type(const GF_GROUP *gf_group) {
- return gf_group->update_type[gf_group->index];
+static INLINE FRAME_UPDATE_TYPE get_frame_update_type(const GF_GROUP *gf_group,
+ int gf_frame_index) {
+ return gf_group->update_type[gf_frame_index];
}
static INLINE int av1_pixels_to_mi(int pixels) {
@@ -3241,14 +3569,15 @@ static INLINE int av1_pixels_to_mi(int pixels) {
static AOM_INLINE int is_psnr_calc_enabled(const AV1_COMP *cpi) {
const AV1_COMMON *const cm = &cpi->common;
- return cpi->b_calculate_psnr && !is_stat_generation_stage(cpi) &&
+ return cpi->ppi->b_calculate_psnr && !is_stat_generation_stage(cpi) &&
cm->show_frame;
}
#if CONFIG_AV1_TEMPORAL_DENOISING
static INLINE int denoise_svc(const struct AV1_COMP *const cpi) {
- return (!cpi->use_svc || (cpi->use_svc && cpi->svc.spatial_layer_id >=
- cpi->svc.first_layer_denoise));
+ return (!cpi->ppi->use_svc ||
+ (cpi->ppi->use_svc &&
+ cpi->svc.spatial_layer_id >= cpi->svc.first_layer_denoise));
}
#endif
diff --git a/third_party/libaom/source/libaom/av1/encoder/encoder_alloc.h b/third_party/libaom/source/libaom/av1/encoder/encoder_alloc.h
index eae34e0fe6..6eb44e7ee1 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encoder_alloc.h
+++ b/third_party/libaom/source/libaom/av1/encoder/encoder_alloc.h
@@ -56,7 +56,7 @@ static AOM_INLINE void alloc_compressor_data(AV1_COMP *cpi) {
TokenInfo *token_info = &cpi->token_info;
if (av1_alloc_context_buffers(cm, cm->width, cm->height)) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate context buffers");
}
@@ -78,6 +78,13 @@ static AOM_INLINE void alloc_compressor_data(AV1_COMP *cpi) {
CHECK_MEM_ERROR(cm, cpi->td.mb.mv_costs,
(MvCosts *)aom_calloc(1, sizeof(MvCosts)));
+ if (cpi->td.mb.dv_costs) {
+ aom_free(cpi->td.mb.dv_costs);
+ cpi->td.mb.dv_costs = NULL;
+ }
+ CHECK_MEM_ERROR(cm, cpi->td.mb.dv_costs,
+ (IntraBCMVCosts *)aom_malloc(sizeof(*cpi->td.mb.dv_costs)));
+
av1_setup_shared_coeff_buffer(&cpi->common, &cpi->td.shared_coeff_buf);
av1_setup_sms_tree(cpi, &cpi->td);
cpi->td.firstpass_ctx =
@@ -186,19 +193,10 @@ static AOM_INLINE void dealloc_compressor_data(AV1_COMP *cpi) {
aom_free(cpi->ssim_rdmult_scaling_factors);
cpi->ssim_rdmult_scaling_factors = NULL;
- aom_free(cpi->tpl_rdmult_scaling_factors);
- cpi->tpl_rdmult_scaling_factors = NULL;
-
- aom_free(cpi->tpl_sb_rdmult_scaling_factors);
- cpi->tpl_sb_rdmult_scaling_factors = NULL;
-
#if CONFIG_TUNE_VMAF
aom_free(cpi->vmaf_info.rdmult_scaling_factors);
cpi->vmaf_info.rdmult_scaling_factors = NULL;
-
-#if CONFIG_USE_VMAF_RC
- aom_close_vmaf_model_rc(cpi->vmaf_info.vmaf_model);
-#endif
+ aom_close_vmaf_model(cpi->vmaf_info.vmaf_model);
#endif
#if CONFIG_TUNE_BUTTERAUGLI
@@ -215,6 +213,11 @@ static AOM_INLINE void dealloc_compressor_data(AV1_COMP *cpi) {
cpi->td.mb.mv_costs = NULL;
}
+ if (cpi->td.mb.dv_costs) {
+ aom_free(cpi->td.mb.dv_costs);
+ cpi->td.mb.dv_costs = NULL;
+ }
+
aom_free(cpi->td.mb.inter_modes_info);
cpi->td.mb.inter_modes_info = NULL;
@@ -235,7 +238,6 @@ static AOM_INLINE void dealloc_compressor_data(AV1_COMP *cpi) {
av1_free_pmc(cpi->td.firstpass_ctx, av1_num_planes(cm));
cpi->td.firstpass_ctx = NULL;
- av1_free_ref_frame_buffers(cm->buffer_pool);
av1_free_txb_buf(cpi);
av1_free_context_buffers(cm);
@@ -243,10 +245,15 @@ static AOM_INLINE void dealloc_compressor_data(AV1_COMP *cpi) {
#if !CONFIG_REALTIME_ONLY
av1_free_restoration_buffers(cm);
#endif
+
+ if (!is_stat_generation_stage(cpi))
+ av1_free_cdef_buffers(cm, &cpi->mt_info.cdef_worker,
+ &cpi->mt_info.cdef_sync,
+ cpi->mt_info.num_mod_workers[MOD_CDEF]);
+
aom_free_frame_buffer(&cpi->trial_frame_rst);
aom_free_frame_buffer(&cpi->scaled_source);
aom_free_frame_buffer(&cpi->scaled_last_source);
- aom_free_frame_buffer(&cpi->alt_ref_buffer);
free_token_info(token_info);
@@ -259,6 +266,7 @@ static AOM_INLINE void dealloc_compressor_data(AV1_COMP *cpi) {
for (int j = 0; j < 2; ++j) {
aom_free(cpi->td.mb.tmp_pred_bufs[j]);
}
+ aom_free(cpi->td.mb.pixel_gradient_info);
#if CONFIG_DENOISE
if (cpi->denoise_and_model) {
@@ -271,11 +279,7 @@ static AOM_INLINE void dealloc_compressor_data(AV1_COMP *cpi) {
cpi->film_grain_table = NULL;
}
- for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i) {
- aom_free(cpi->level_params.level_info[i]);
- }
-
- if (cpi->use_svc) av1_free_svc_cyclic_refresh(cpi);
+ if (cpi->ppi->use_svc) av1_free_svc_cyclic_refresh(cpi);
if (cpi->consec_zero_mv) {
aom_free(cpi->consec_zero_mv);
@@ -285,7 +289,7 @@ static AOM_INLINE void dealloc_compressor_data(AV1_COMP *cpi) {
static AOM_INLINE void variance_partition_alloc(AV1_COMP *cpi) {
AV1_COMMON *const cm = &cpi->common;
- const int num_64x64_blocks = (cm->seq_params.sb_size == BLOCK_64X64) ? 1 : 4;
+ const int num_64x64_blocks = (cm->seq_params->sb_size == BLOCK_64X64) ? 1 : 4;
if (cpi->td.vt64x64) {
if (num_64x64_blocks != cpi->td.num_64x64_blocks) {
aom_free(cpi->td.vt64x64);
@@ -301,7 +305,7 @@ static AOM_INLINE void variance_partition_alloc(AV1_COMP *cpi) {
static AOM_INLINE void alloc_altref_frame_buffer(AV1_COMP *cpi) {
AV1_COMMON *cm = &cpi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const AV1EncoderConfig *oxcf = &cpi->oxcf;
// When lag_in_frames <= 1, alt-ref frames are not enabled. In this case,
@@ -311,29 +315,29 @@ static AOM_INLINE void alloc_altref_frame_buffer(AV1_COMP *cpi) {
// TODO(agrange) Check if ARF is enabled and skip allocation if not.
if (aom_realloc_frame_buffer(
- &cpi->alt_ref_buffer, oxcf->frm_dim_cfg.width,
+ &cpi->ppi->alt_ref_buffer, oxcf->frm_dim_cfg.width,
oxcf->frm_dim_cfg.height, seq_params->subsampling_x,
seq_params->subsampling_y, seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, cm->features.byte_alignment, NULL, NULL,
NULL, cpi->oxcf.tool_cfg.enable_global_motion))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate altref buffer");
}
static AOM_INLINE void alloc_util_frame_buffers(AV1_COMP *cpi) {
AV1_COMMON *const cm = &cpi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int byte_alignment = cm->features.byte_alignment;
if (aom_realloc_frame_buffer(
&cpi->last_frame_uf, cm->width, cm->height, seq_params->subsampling_x,
seq_params->subsampling_y, seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, byte_alignment, NULL, NULL, NULL, 0))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate last frame buffer");
// The frame buffer trial_frame_rst is used during loop restoration filter
// search. Hence it is allocated only when loop restoration is used.
- const int use_restoration = cm->seq_params.enable_restoration &&
+ const int use_restoration = cm->seq_params->enable_restoration &&
!cm->features.all_lossless &&
!cm->tiles.large_scale;
if (use_restoration) {
@@ -342,7 +346,7 @@ static AOM_INLINE void alloc_util_frame_buffers(AV1_COMP *cpi) {
cm->superres_upscaled_height, seq_params->subsampling_x,
seq_params->subsampling_y, seq_params->use_highbitdepth,
AOM_RESTORATION_FRAME_BORDER, byte_alignment, NULL, NULL, NULL, 0))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate trial restored frame buffer");
}
@@ -351,7 +355,7 @@ static AOM_INLINE void alloc_util_frame_buffers(AV1_COMP *cpi) {
seq_params->subsampling_y, seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, byte_alignment, NULL, NULL, NULL,
cpi->oxcf.tool_cfg.enable_global_motion))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate scaled source buffer");
// The frame buffer cpi->scaled_last_source is used to hold the previous
@@ -367,7 +371,7 @@ static AOM_INLINE void alloc_util_frame_buffers(AV1_COMP *cpi) {
seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
byte_alignment, NULL, NULL, NULL,
cpi->oxcf.tool_cfg.enable_global_motion))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate scaled last source buffer");
}
}
@@ -384,16 +388,16 @@ static AOM_INLINE YV12_BUFFER_CONFIG *realloc_and_scale_source(
if (aom_realloc_frame_buffer(
&cpi->scaled_source, scaled_width, scaled_height,
- cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
- cm->seq_params.use_highbitdepth, AOM_BORDER_IN_PIXELS,
+ cm->seq_params->subsampling_x, cm->seq_params->subsampling_y,
+ cm->seq_params->use_highbitdepth, AOM_BORDER_IN_PIXELS,
cm->features.byte_alignment, NULL, NULL, NULL,
cpi->oxcf.tool_cfg.enable_global_motion))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to reallocate scaled source buffer");
assert(cpi->scaled_source.y_crop_width == scaled_width);
assert(cpi->scaled_source.y_crop_height == scaled_height);
av1_resize_and_extend_frame_nonnormative(
- cpi->unscaled_source, &cpi->scaled_source, (int)cm->seq_params.bit_depth,
+ cpi->unscaled_source, &cpi->scaled_source, (int)cm->seq_params->bit_depth,
num_planes);
return &cpi->scaled_source;
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/encoder_utils.c b/third_party/libaom/source/libaom/av1/encoder/encoder_utils.c
index 7a7e8505b4..557268f9d3 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encoder_utils.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encoder_utils.c
@@ -344,7 +344,7 @@ static void configure_static_seg_features(AV1_COMP *cpi) {
seg->update_data = 1;
qi_delta = av1_compute_qdelta(rc, rc->avg_q, rc->avg_q * 0.875,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
av1_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta - 2);
av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_H, -2);
av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_V, -2);
@@ -459,13 +459,13 @@ void av1_apply_active_map(AV1_COMP *cpi) {
#if !CONFIG_REALTIME_ONLY
static void process_tpl_stats_frame(AV1_COMP *cpi) {
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
AV1_COMMON *const cm = &cpi->common;
- assert(IMPLIES(gf_group->size > 0, gf_group->index < gf_group->size));
+ assert(IMPLIES(gf_group->size > 0, cpi->gf_frame_index < gf_group->size));
- const int tpl_idx = gf_group->index;
- TplParams *const tpl_data = &cpi->tpl_data;
+ const int tpl_idx = cpi->gf_frame_index;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
@@ -497,22 +497,23 @@ static void process_tpl_stats_frame(AV1_COMP *cpi) {
} else {
aom_clear_system_state();
cpi->rd.r0 = (double)intra_cost_base / mc_dep_cost_base;
- if (is_frame_tpl_eligible(gf_group, gf_group->index)) {
- if (cpi->lap_enabled) {
- double min_boost_factor = sqrt(cpi->rc.baseline_gf_interval);
+ if (is_frame_tpl_eligible(gf_group, cpi->gf_frame_index)) {
+ if (cpi->ppi->lap_enabled) {
+ double min_boost_factor = sqrt(cpi->ppi->p_rc.baseline_gf_interval);
const int gfu_boost = get_gfu_boost_from_r0_lap(
min_boost_factor, MAX_GFUBOOST_FACTOR, cpi->rd.r0,
- cpi->rc.num_stats_required_for_gfu_boost);
+ cpi->ppi->p_rc.num_stats_required_for_gfu_boost);
// printf("old boost %d new boost %d\n", cpi->rc.gfu_boost,
// gfu_boost);
- cpi->rc.gfu_boost = combine_prior_with_tpl_boost(
- min_boost_factor, MAX_BOOST_COMBINE_FACTOR, cpi->rc.gfu_boost,
- gfu_boost, cpi->rc.num_stats_used_for_gfu_boost);
+ cpi->ppi->p_rc.gfu_boost = combine_prior_with_tpl_boost(
+ min_boost_factor, MAX_BOOST_COMBINE_FACTOR,
+ cpi->ppi->p_rc.gfu_boost, gfu_boost,
+ cpi->ppi->p_rc.num_stats_used_for_gfu_boost);
} else {
const int gfu_boost = (int)(200.0 / cpi->rd.r0);
- cpi->rc.gfu_boost = combine_prior_with_tpl_boost(
+ cpi->ppi->p_rc.gfu_boost = combine_prior_with_tpl_boost(
MIN_BOOST_COMBINE_FACTOR, MAX_BOOST_COMBINE_FACTOR,
- cpi->rc.gfu_boost, gfu_boost, cpi->rc.frames_to_key);
+ cpi->ppi->p_rc.gfu_boost, gfu_boost, cpi->rc.frames_to_key);
}
}
aom_clear_system_state();
@@ -529,17 +530,17 @@ void av1_set_size_dependent_vars(AV1_COMP *cpi, int *q, int *bottom_index,
av1_set_speed_features_framesize_dependent(cpi, cpi->speed);
#if !CONFIG_REALTIME_ONLY
- GF_GROUP *gf_group = &cpi->gf_group;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
if (cpi->oxcf.algo_cfg.enable_tpl_model &&
- is_frame_tpl_eligible(gf_group, gf_group->index)) {
+ is_frame_tpl_eligible(gf_group, cpi->gf_frame_index)) {
process_tpl_stats_frame(cpi);
av1_tpl_rdmult_setup(cpi);
}
#endif
// Decide q and q bounds.
- *q = av1_rc_pick_q_and_bounds(cpi, &cpi->rc, cm->width, cm->height,
- cpi->gf_group.index, bottom_index, top_index);
+ *q = av1_rc_pick_q_and_bounds(cpi, cm->width, cm->height, cpi->gf_frame_index,
+ bottom_index, top_index);
// Configure experimental use of segmentation for enhanced coding of
// static regions if indicated.
@@ -564,6 +565,23 @@ static void reset_film_grain_chroma_params(aom_film_grain_t *pars) {
memset(pars->ar_coeffs_cb, 0, sizeof(pars->ar_coeffs_cb));
}
+void av1_update_film_grain_parameters_seq(struct AV1_PRIMARY *ppi,
+ const AV1EncoderConfig *oxcf) {
+ SequenceHeader *const seq_params = &ppi->seq_params;
+ const TuneCfg *const tune_cfg = &oxcf->tune_cfg;
+
+ if (tune_cfg->film_grain_test_vector || tune_cfg->film_grain_table_filename ||
+ tune_cfg->content == AOM_CONTENT_FILM) {
+ seq_params->film_grain_params_present = 1;
+ } else {
+#if CONFIG_DENOISE
+ seq_params->film_grain_params_present = (oxcf->noise_level > 0);
+#else
+ seq_params->film_grain_params_present = 0;
+#endif
+ }
+}
+
void av1_update_film_grain_parameters(struct AV1_COMP *cpi,
const AV1EncoderConfig *oxcf) {
AV1_COMMON *const cm = &cpi->common;
@@ -577,39 +595,30 @@ void av1_update_film_grain_parameters(struct AV1_COMP *cpi,
}
if (tune_cfg->film_grain_test_vector) {
- cm->seq_params.film_grain_params_present = 1;
if (cm->current_frame.frame_type == KEY_FRAME) {
memcpy(&cm->film_grain_params,
film_grain_test_vectors + tune_cfg->film_grain_test_vector - 1,
sizeof(cm->film_grain_params));
if (oxcf->tool_cfg.enable_monochrome)
reset_film_grain_chroma_params(&cm->film_grain_params);
- cm->film_grain_params.bit_depth = cm->seq_params.bit_depth;
- if (cm->seq_params.color_range == AOM_CR_FULL_RANGE) {
+ cm->film_grain_params.bit_depth = cm->seq_params->bit_depth;
+ if (cm->seq_params->color_range == AOM_CR_FULL_RANGE) {
cm->film_grain_params.clip_to_restricted_range = 0;
}
}
} else if (tune_cfg->film_grain_table_filename) {
- cm->seq_params.film_grain_params_present = 1;
-
cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
memset(cpi->film_grain_table, 0, sizeof(aom_film_grain_table_t));
aom_film_grain_table_read(cpi->film_grain_table,
- tune_cfg->film_grain_table_filename, &cm->error);
+ tune_cfg->film_grain_table_filename, cm->error);
} else if (tune_cfg->content == AOM_CONTENT_FILM) {
- cm->seq_params.film_grain_params_present = 1;
- cm->film_grain_params.bit_depth = cm->seq_params.bit_depth;
+ cm->film_grain_params.bit_depth = cm->seq_params->bit_depth;
if (oxcf->tool_cfg.enable_monochrome)
reset_film_grain_chroma_params(&cm->film_grain_params);
- if (cm->seq_params.color_range == AOM_CR_FULL_RANGE)
+ if (cm->seq_params->color_range == AOM_CR_FULL_RANGE)
cm->film_grain_params.clip_to_restricted_range = 0;
} else {
-#if CONFIG_DENOISE
- cm->seq_params.film_grain_params_present = (cpi->oxcf.noise_level > 0);
-#else
- cm->seq_params.film_grain_params_present = 0;
-#endif
memset(&cm->film_grain_params, 0, sizeof(cm->film_grain_params));
}
}
@@ -643,7 +652,7 @@ void av1_scale_references(AV1_COMP *cpi, const InterpFilter filter,
if (aom_yv12_realloc_with_new_border(
&ref_fb->buf, AOM_BORDER_IN_PIXELS,
cm->features.byte_alignment, num_planes) != 0) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate frame buffer");
}
}
@@ -652,7 +661,7 @@ void av1_scale_references(AV1_COMP *cpi, const InterpFilter filter,
if (new_fb == NULL) {
const int new_fb_idx = get_free_fb(cm);
if (new_fb_idx == INVALID_IDX) {
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Unable to find free frame buffer");
}
force_scaling = 1;
@@ -663,30 +672,30 @@ void av1_scale_references(AV1_COMP *cpi, const InterpFilter filter,
new_fb->buf.y_crop_height != cm->height) {
if (aom_realloc_frame_buffer(
&new_fb->buf, cm->width, cm->height,
- cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
- cm->seq_params.use_highbitdepth, AOM_BORDER_IN_PIXELS,
+ cm->seq_params->subsampling_x, cm->seq_params->subsampling_y,
+ cm->seq_params->use_highbitdepth, AOM_BORDER_IN_PIXELS,
cm->features.byte_alignment, NULL, NULL, NULL, 0)) {
if (force_scaling) {
// Release the reference acquired in the get_free_fb() call above.
--new_fb->ref_count;
}
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate frame buffer");
}
#if CONFIG_AV1_HIGHBITDEPTH
- if (use_optimized_scaler && cm->seq_params.bit_depth == AOM_BITS_8)
+ if (use_optimized_scaler && cm->seq_params->bit_depth == AOM_BITS_8)
av1_resize_and_extend_frame(ref, &new_fb->buf, filter, phase,
num_planes);
else
av1_resize_and_extend_frame_nonnormative(
- ref, &new_fb->buf, (int)cm->seq_params.bit_depth, num_planes);
+ ref, &new_fb->buf, (int)cm->seq_params->bit_depth, num_planes);
#else
if (use_optimized_scaler)
av1_resize_and_extend_frame(ref, &new_fb->buf, filter, phase,
num_planes);
else
av1_resize_and_extend_frame_nonnormative(
- ref, &new_fb->buf, (int)cm->seq_params.bit_depth, num_planes);
+ ref, &new_fb->buf, (int)cm->seq_params->bit_depth, num_planes);
#endif
cpi->scaled_ref_buf[ref_frame - 1] = new_fb;
alloc_frame_mvs(cm, new_fb);
@@ -704,10 +713,8 @@ void av1_scale_references(AV1_COMP *cpi, const InterpFilter filter,
}
}
-BLOCK_SIZE av1_select_sb_size(const AV1_COMP *const cpi) {
- const AV1_COMMON *const cm = &cpi->common;
- const AV1EncoderConfig *const oxcf = &cpi->oxcf;
-
+BLOCK_SIZE av1_select_sb_size(const AV1EncoderConfig *const oxcf, int width,
+ int height, int number_spatial_layers) {
if (oxcf->tool_cfg.superblock_size == AOM_SUPERBLOCK_SIZE_64X64)
return BLOCK_64X64;
if (oxcf->tool_cfg.superblock_size == AOM_SUPERBLOCK_SIZE_128X128)
@@ -715,7 +722,7 @@ BLOCK_SIZE av1_select_sb_size(const AV1_COMP *const cpi) {
assert(oxcf->tool_cfg.superblock_size == AOM_SUPERBLOCK_SIZE_DYNAMIC);
- if (cpi->svc.number_spatial_layers > 1 ||
+ if (number_spatial_layers > 1 ||
oxcf->resize_cfg.resize_mode != RESIZE_NONE) {
// Use the configured size (top resolution) for spatial layers or
// on resize.
@@ -732,7 +739,7 @@ BLOCK_SIZE av1_select_sb_size(const AV1_COMP *const cpi) {
// speed-feature.
if (oxcf->superres_cfg.superres_mode == AOM_SUPERRES_NONE &&
oxcf->resize_cfg.resize_mode == RESIZE_NONE && oxcf->speed >= 1) {
- return AOMMIN(cm->width, cm->height) > 480 ? BLOCK_128X128 : BLOCK_64X64;
+ return AOMMIN(width, height) > 480 ? BLOCK_128X128 : BLOCK_64X64;
}
return BLOCK_128X128;
@@ -753,8 +760,10 @@ void av1_setup_frame(AV1_COMP *cpi) {
if ((cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) ||
frame_is_sframe(cm)) {
- if (!cpi->seq_params_locked) {
- set_sb_size(&cm->seq_params, av1_select_sb_size(cpi));
+ if (!cpi->ppi->seq_params_locked) {
+ set_sb_size(cm->seq_params,
+ av1_select_sb_size(&cpi->oxcf, cm->width, cm->height,
+ cpi->svc.number_spatial_layers));
}
} else {
const RefCntBuffer *const primary_ref_buf = get_primary_ref_frame_buf(cm);
@@ -959,7 +968,7 @@ void av1_determine_sc_tools_with_encoding(AV1_COMP *cpi, const int q_orig) {
av1_set_speed_features_qindex_dependent(cpi, oxcf->speed);
if (q_cfg->deltaq_mode != NO_DELTA_Q || q_cfg->enable_chroma_deltaq)
av1_init_quantizer(&cpi->enc_quant_dequant_params, &cm->quant_params,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
av1_set_variance_partition_thresholds(cpi, q_for_screen_content_quick_run,
0);
@@ -1005,13 +1014,13 @@ void av1_finalize_encoded_frame(AV1_COMP *const cpi) {
AV1_COMMON *const cm = &cpi->common;
CurrentFrame *const current_frame = &cm->current_frame;
- if (!cm->seq_params.reduced_still_picture_hdr &&
+ if (!cm->seq_params->reduced_still_picture_hdr &&
encode_show_existing_frame(cm)) {
RefCntBuffer *const frame_to_show =
cm->ref_frame_map[cpi->existing_fb_idx_to_show];
if (frame_to_show == NULL) {
- aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
+ aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
"Buffer does not contain a reconstructed frame");
}
assert(frame_to_show->ref_count > 0);
@@ -1019,7 +1028,7 @@ void av1_finalize_encoded_frame(AV1_COMP *const cpi) {
}
if (!encode_show_existing_frame(cm) &&
- cm->seq_params.film_grain_params_present &&
+ cm->seq_params->film_grain_params_present &&
(cm->show_frame || cm->showable_frame)) {
// Copy the current frame's film grain params to the its corresponding
// RefCntBuffer slot.
@@ -1232,7 +1241,7 @@ static void save_extra_coding_context(AV1_COMP *cpi) {
cc->lf = cm->lf;
cc->cdef_info = cm->cdef_info;
cc->rc = cpi->rc;
- cc->mv_stats = cpi->mv_stats;
+ cc->mv_stats = cpi->ppi->mv_stats;
}
void av1_save_all_coding_context(AV1_COMP *cpi) {
@@ -1301,11 +1310,11 @@ void av1_dump_filtered_recon_frames(AV1_COMP *cpi) {
"show_frame=%d, show_existing_frame=%d, source_alt_ref_active=%d, "
"refresh_alt_ref_frame=%d, "
"y_stride=%4d, uv_stride=%4d, cm->width=%4d, cm->height=%4d\n\n",
- current_frame->frame_number, cpi->gf_group.index,
- cpi->gf_group.update_type[cpi->gf_group.index], current_frame->order_hint,
- cm->show_frame, cm->show_existing_frame, cpi->rc.source_alt_ref_active,
- cpi->refresh_frame.alt_ref_frame, recon_buf->y_stride,
- recon_buf->uv_stride, cm->width, cm->height);
+ current_frame->frame_number, cpi->gf_frame_index,
+ cpi->ppi->gf_group.update_type[cpi->gf_frame_index],
+ current_frame->order_hint, cm->show_frame, cm->show_existing_frame,
+ cpi->rc.source_alt_ref_active, cpi->refresh_frame.alt_ref_frame,
+ recon_buf->y_stride, recon_buf->uv_stride, cm->width, cm->height);
#if 0
int ref_frame;
printf("get_ref_frame_map_idx: [");
diff --git a/third_party/libaom/source/libaom/av1/encoder/encoder_utils.h b/third_party/libaom/source/libaom/av1/encoder/encoder_utils.h
index 40652e956c..e75bc79ba6 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encoder_utils.h
+++ b/third_party/libaom/source/libaom/av1/encoder/encoder_utils.h
@@ -125,14 +125,14 @@ static AOM_INLINE void init_buffer_indices(
}
#define HIGHBD_BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
- cpi->fn_ptr[BT].sdf = SDF; \
- cpi->fn_ptr[BT].sdaf = SDAF; \
- cpi->fn_ptr[BT].vf = VF; \
- cpi->fn_ptr[BT].svf = SVF; \
- cpi->fn_ptr[BT].svaf = SVAF; \
- cpi->fn_ptr[BT].sdx4df = SDX4DF; \
- cpi->fn_ptr[BT].jsdaf = JSDAF; \
- cpi->fn_ptr[BT].jsvaf = JSVAF;
+ ppi->fn_ptr[BT].sdf = SDF; \
+ ppi->fn_ptr[BT].sdaf = SDAF; \
+ ppi->fn_ptr[BT].vf = VF; \
+ ppi->fn_ptr[BT].svf = SVF; \
+ ppi->fn_ptr[BT].svaf = SVAF; \
+ ppi->fn_ptr[BT].sdx4df = SDX4DF; \
+ ppi->fn_ptr[BT].jsdaf = JSDAF; \
+ ppi->fn_ptr[BT].jsvaf = JSVAF;
#define HIGHBD_BFP_WRAPPER(WIDTH, HEIGHT, BD) \
HIGHBD_BFP( \
@@ -325,8 +325,8 @@ MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x16_avg)
#endif // CONFIG_AV1_HIGHBITDEPTH
#define HIGHBD_MBFP(BT, MCSDF, MCSVF) \
- cpi->fn_ptr[BT].msdf = MCSDF; \
- cpi->fn_ptr[BT].msvf = MCSVF;
+ ppi->fn_ptr[BT].msdf = MCSDF; \
+ ppi->fn_ptr[BT].msvf = MCSVF;
#define HIGHBD_MBFP_WRAPPER(WIDTH, HEIGHT, BD) \
HIGHBD_MBFP(BLOCK_##WIDTH##X##HEIGHT, \
@@ -386,8 +386,8 @@ MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x16)
#endif
#define HIGHBD_SDSFP(BT, SDSF, SDSX4DF) \
- cpi->fn_ptr[BT].sdsf = SDSF; \
- cpi->fn_ptr[BT].sdsx4df = SDSX4DF;
+ ppi->fn_ptr[BT].sdsf = SDSF; \
+ ppi->fn_ptr[BT].sdsx4df = SDSX4DF;
#define HIGHBD_SDSFP_WRAPPER(WIDTH, HEIGHT, BD) \
HIGHBD_SDSFP(BLOCK_##WIDTH##X##HEIGHT, \
@@ -487,9 +487,9 @@ MAKE_SDSF_SKIP_SAD_4D_WRAPPER(aom_highbd_sad_skip_8x32x4d)
aom_highbd_obmc_sub_pixel_variance##WIDTH##x##HEIGHT)
#define HIGHBD_OBFP(BT, OSDF, OVF, OSVF) \
- cpi->fn_ptr[BT].osdf = OSDF; \
- cpi->fn_ptr[BT].ovf = OVF; \
- cpi->fn_ptr[BT].osvf = OSVF;
+ ppi->fn_ptr[BT].osdf = OSDF; \
+ ppi->fn_ptr[BT].ovf = OVF; \
+ ppi->fn_ptr[BT].osvf = OSVF;
#define HIGHBD_OBFP_WRAPPER(WIDTH, HEIGHT, BD) \
HIGHBD_OBFP(BLOCK_##WIDTH##X##HEIGHT, \
@@ -542,10 +542,10 @@ MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x64)
MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x16)
#endif
-static AOM_INLINE void highbd_set_var_fns(AV1_COMP *const cpi) {
- AV1_COMMON *const cm = &cpi->common;
- if (cm->seq_params.use_highbitdepth) {
- switch (cm->seq_params.bit_depth) {
+static AOM_INLINE void highbd_set_var_fns(AV1_PRIMARY *const ppi) {
+ SequenceHeader *const seq_params = &ppi->seq_params;
+ if (seq_params->use_highbitdepth) {
+ switch (seq_params->bit_depth) {
case AOM_BITS_8:
#if !CONFIG_REALTIME_ONLY
HIGHBD_BFP_WRAPPER(64, 16, 8)
@@ -850,7 +850,7 @@ static AOM_INLINE void highbd_set_var_fns(AV1_COMP *const cpi) {
default:
assert(0 &&
- "cm->seq_params.bit_depth should be AOM_BITS_8, "
+ "cm->seq_params->bit_depth should be AOM_BITS_8, "
"AOM_BITS_10 or AOM_BITS_12");
}
}
@@ -873,6 +873,33 @@ static AOM_INLINE void copy_frame_prob_info(AV1_COMP *cpi) {
av1_copy(frame_probs->switchable_interp_probs,
default_switchable_interp_probs);
}
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ FrameProbInfo *const temp_frame_probs = &cpi->ppi->temp_frame_probs;
+ if (cpi->sf.tx_sf.tx_type_search.prune_tx_type_using_stats) {
+ av1_copy(temp_frame_probs->tx_type_probs, default_tx_type_probs);
+ }
+ if (cpi->sf.inter_sf.prune_obmc_prob_thresh > 0 &&
+ cpi->sf.inter_sf.prune_obmc_prob_thresh < INT_MAX) {
+ av1_copy(temp_frame_probs->obmc_probs, default_obmc_probs);
+ }
+ if (cpi->sf.inter_sf.prune_warped_prob_thresh > 0) {
+ av1_copy(temp_frame_probs->warped_probs, default_warped_probs);
+ }
+ if (cpi->sf.interp_sf.adaptive_interp_filter_search == 2) {
+ av1_copy(temp_frame_probs->switchable_interp_probs,
+ default_switchable_interp_probs);
+ }
+#endif
+}
+
+static AOM_INLINE void restore_cdef_coding_context(CdefInfo *const dst,
+ const CdefInfo *const src) {
+ dst->cdef_bits = src->cdef_bits;
+ dst->cdef_damping = src->cdef_damping;
+ av1_copy(dst->cdef_strengths, src->cdef_strengths);
+ av1_copy(dst->cdef_uv_strengths, src->cdef_uv_strengths);
+ dst->nb_cdef_strengths = src->nb_cdef_strengths;
}
// Coding context that only needs to be restored when recode loop includes
@@ -882,9 +909,9 @@ static AOM_INLINE void restore_extra_coding_context(AV1_COMP *cpi) {
CODING_CONTEXT *const cc = &cpi->coding_context;
AV1_COMMON *cm = &cpi->common;
cm->lf = cc->lf;
- cm->cdef_info = cc->cdef_info;
+ restore_cdef_coding_context(&cm->cdef_info, &cc->cdef_info);
cpi->rc = cc->rc;
- cpi->mv_stats = cc->mv_stats;
+ cpi->ppi->mv_stats = cc->mv_stats;
}
static AOM_INLINE int equal_dimensions_and_border(const YV12_BUFFER_CONFIG *a,
@@ -964,6 +991,8 @@ static AOM_INLINE void refresh_reference_frames(AV1_COMP *cpi) {
}
}
+void av1_update_film_grain_parameters_seq(struct AV1_PRIMARY *ppi,
+ const AV1EncoderConfig *oxcf);
void av1_update_film_grain_parameters(struct AV1_COMP *cpi,
const AV1EncoderConfig *oxcf);
@@ -972,7 +1001,8 @@ void av1_scale_references(AV1_COMP *cpi, const InterpFilter filter,
void av1_setup_frame(AV1_COMP *cpi);
-BLOCK_SIZE av1_select_sb_size(const AV1_COMP *const cpi);
+BLOCK_SIZE av1_select_sb_size(const AV1EncoderConfig *const oxcf, int width,
+ int height, int number_spatial_layers);
void av1_apply_active_map(AV1_COMP *cpi);
diff --git a/third_party/libaom/source/libaom/av1/encoder/encodetxb.c b/third_party/libaom/source/libaom/av1/encoder/encodetxb.c
index 7b0b281c80..0eb134890e 100644
--- a/third_party/libaom/source/libaom/av1/encoder/encodetxb.c
+++ b/third_party/libaom/source/libaom/av1/encoder/encodetxb.c
@@ -26,11 +26,11 @@
void av1_alloc_txb_buf(AV1_COMP *cpi) {
AV1_COMMON *cm = &cpi->common;
CoeffBufferPool *coeff_buf_pool = &cpi->coeff_buffer_pool;
- int size = ((cm->mi_params.mi_rows >> cm->seq_params.mib_size_log2) + 1) *
- ((cm->mi_params.mi_cols >> cm->seq_params.mib_size_log2) + 1);
+ int size = ((cm->mi_params.mi_rows >> cm->seq_params->mib_size_log2) + 1) *
+ ((cm->mi_params.mi_cols >> cm->seq_params->mib_size_log2) + 1);
const int num_planes = av1_num_planes(cm);
- const int subsampling_x = cm->seq_params.subsampling_x;
- const int subsampling_y = cm->seq_params.subsampling_y;
+ const int subsampling_x = cm->seq_params->subsampling_x;
+ const int subsampling_y = cm->seq_params->subsampling_y;
const int chroma_max_sb_square =
MAX_SB_SQUARE >> (subsampling_x + subsampling_y);
const int num_tcoeffs =
@@ -624,6 +624,7 @@ void av1_update_and_record_txb_context(int plane, int block, int blk_row,
const int coeff_ctx = coeff_contexts[pos];
const tran_low_t v = qcoeff[pos];
const tran_low_t level = abs(v);
+ td->abs_sum_level += level;
if (allow_update_cdf) {
if (c == eob - 1) {
@@ -719,7 +720,7 @@ void av1_update_intra_mb_txb_context(const AV1_COMP *cpi, ThreadData *td,
CB_COEFF_BUFFER *av1_get_cb_coeff_buffer(const struct AV1_COMP *cpi, int mi_row,
int mi_col) {
const AV1_COMMON *const cm = &cpi->common;
- const int mib_size_log2 = cm->seq_params.mib_size_log2;
+ const int mib_size_log2 = cm->seq_params->mib_size_log2;
const int stride = (cm->mi_params.mi_cols >> mib_size_log2) + 1;
const int offset =
(mi_row >> mib_size_log2) * stride + (mi_col >> mib_size_log2);
diff --git a/third_party/libaom/source/libaom/av1/encoder/ethread.c b/third_party/libaom/source/libaom/av1/encoder/ethread.c
index 3735ca3c8b..d274b6b84f 100644
--- a/third_party/libaom/source/libaom/av1/encoder/ethread.c
+++ b/third_party/libaom/source/libaom/av1/encoder/ethread.c
@@ -11,9 +11,11 @@
#include "av1/common/warped_motion.h"
+#include "av1/encoder/bitstream.h"
#include "av1/encoder/encodeframe.h"
#include "av1/encoder/encoder.h"
#include "av1/encoder/encoder_alloc.h"
+#include "av1/encoder/encodeframe_utils.h"
#include "av1/encoder/ethread.h"
#if !CONFIG_REALTIME_ONLY
#include "av1/encoder/firstpass.h"
@@ -52,7 +54,7 @@ static AOM_INLINE void accumulate_rd_opt(ThreadData *td, ThreadData *td_t) {
static AOM_INLINE void update_delta_lf_for_row_mt(AV1_COMP *cpi) {
AV1_COMMON *cm = &cpi->common;
MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
- const int mib_size = cm->seq_params.mib_size;
+ const int mib_size = cm->seq_params->mib_size;
const int frame_lf_count =
av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
for (int row = 0; row < cm->tiles.rows; row++) {
@@ -68,7 +70,8 @@ static AOM_INLINE void update_delta_lf_for_row_mt(AV1_COMP *cpi) {
const int idx_str = cm->mi_params.mi_stride * mi_row + mi_col;
MB_MODE_INFO **mi = cm->mi_params.mi_grid_base + idx_str;
MB_MODE_INFO *mbmi = mi[0];
- if (mbmi->skip_txfm == 1 && (mbmi->bsize == cm->seq_params.sb_size)) {
+ if (mbmi->skip_txfm == 1 &&
+ (mbmi->bsize == cm->seq_params->sb_size)) {
for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id)
mbmi->delta_lf[lf_id] = xd->delta_lf[lf_id];
mbmi->delta_lf_from_base = xd->delta_lf_from_base;
@@ -362,7 +365,7 @@ static AOM_INLINE void switch_tile_and_get_next_job(
*cur_tile_id = tile_id;
const int unit_height = mi_size_high[fp_block_size];
get_next_job(&tile_data[tile_id], current_mi_row,
- is_firstpass ? unit_height : cm->seq_params.mib_size);
+ is_firstpass ? unit_height : cm->seq_params->mib_size);
}
}
@@ -441,13 +444,20 @@ static int enc_row_mt_worker_hook(void *arg1, void *unused) {
const BLOCK_SIZE fp_block_size = cpi->fp_block_size;
int end_of_frame = 0;
+
+  // When the master thread does not have a valid job to process, xd->tile_ctx
+  // is not set and contains a NULL pointer. This can result in a NULL pointer
+  // access violation if accessed beyond the encode stage. Hence,
+  // thread_data->td->mb.e_mbd.tile_ctx is initialized with the common frame
+  // context to avoid NULL pointer access in subsequent stages.
+ thread_data->td->mb.e_mbd.tile_ctx = cm->fc;
while (1) {
int current_mi_row = -1;
#if CONFIG_MULTITHREAD
pthread_mutex_lock(enc_row_mt_mutex_);
#endif
if (!get_next_job(&cpi->tile_data[cur_tile_id], &current_mi_row,
- cm->seq_params.mib_size)) {
+ cm->seq_params->mib_size)) {
// No jobs are available for the current tile. Query for the status of
// other tiles and get the next job if available
switch_tile_and_get_next_job(cm, cpi->tile_data, &cur_tile_id,
@@ -470,6 +480,7 @@ static int enc_row_mt_worker_hook(void *arg1, void *unused) {
td->mb.e_mbd.tile_ctx = td->tctx;
td->mb.tile_pb_ctx = &this_tile->tctx;
+ td->abs_sum_level = 0;
if (this_tile->allow_update_cdf) {
td->mb.row_ctx = this_tile->row_ctx;
@@ -482,7 +493,7 @@ static int enc_row_mt_worker_hook(void *arg1, void *unused) {
av1_init_above_context(&cm->above_contexts, av1_num_planes(cm), tile_row,
&td->mb.e_mbd);
- cfl_init(&td->mb.e_mbd.cfl, &cm->seq_params);
+ cfl_init(&td->mb.e_mbd.cfl, cm->seq_params);
if (td->mb.txfm_search_info.txb_rd_records != NULL) {
av1_crc32c_calculator_init(
&td->mb.txfm_search_info.txb_rd_records->mb_rd_record.crc_calculator);
@@ -492,6 +503,7 @@ static int enc_row_mt_worker_hook(void *arg1, void *unused) {
#if CONFIG_MULTITHREAD
pthread_mutex_lock(enc_row_mt_mutex_);
#endif
+ this_tile->abs_sum_level += td->abs_sum_level;
row_mt_sync->num_threads_working--;
#if CONFIG_MULTITHREAD
pthread_mutex_unlock(enc_row_mt_mutex_);
@@ -526,16 +538,12 @@ static int enc_worker_hook(void *arg1, void *unused) {
return 1;
}
-void av1_create_second_pass_workers(AV1_COMP *cpi, int num_workers) {
+#if CONFIG_MULTITHREAD
+void av1_init_mt_sync(AV1_COMP *cpi, int is_first_pass) {
AV1_COMMON *const cm = &cpi->common;
- const AVxWorkerInterface *const winterface = aom_get_worker_interface();
MultiThreadInfo *const mt_info = &cpi->mt_info;
- assert(mt_info->workers != NULL);
- assert(mt_info->tile_thr_data != NULL);
-
-#if CONFIG_MULTITHREAD
- if (cpi->oxcf.row_mt == 1) {
+ if (is_first_pass || cpi->oxcf.row_mt == 1) {
AV1EncRowMultiThreadInfo *enc_row_mt = &mt_info->enc_row_mt;
if (enc_row_mt->mutex_ == NULL) {
CHECK_MEM_ERROR(cm, enc_row_mt->mutex_,
@@ -543,24 +551,39 @@ void av1_create_second_pass_workers(AV1_COMP *cpi, int num_workers) {
if (enc_row_mt->mutex_) pthread_mutex_init(enc_row_mt->mutex_, NULL);
}
}
- AV1GlobalMotionSync *gm_sync = &mt_info->gm_sync;
- if (gm_sync->mutex_ == NULL) {
- CHECK_MEM_ERROR(cm, gm_sync->mutex_,
- aom_malloc(sizeof(*(gm_sync->mutex_))));
- if (gm_sync->mutex_) pthread_mutex_init(gm_sync->mutex_, NULL);
- }
- AV1TemporalFilterSync *tf_sync = &mt_info->tf_sync;
- if (tf_sync->mutex_ == NULL) {
- CHECK_MEM_ERROR(cm, tf_sync->mutex_, aom_malloc(sizeof(*tf_sync->mutex_)));
- if (tf_sync->mutex_) pthread_mutex_init(tf_sync->mutex_, NULL);
- }
- AV1CdefSync *cdef_sync = &mt_info->cdef_sync;
- if (cdef_sync->mutex_ == NULL) {
- CHECK_MEM_ERROR(cm, cdef_sync->mutex_,
- aom_malloc(sizeof(*(cdef_sync->mutex_))));
- if (cdef_sync->mutex_) pthread_mutex_init(cdef_sync->mutex_, NULL);
+
+ if (!is_first_pass) {
+ AV1GlobalMotionSync *gm_sync = &mt_info->gm_sync;
+ if (gm_sync->mutex_ == NULL) {
+ CHECK_MEM_ERROR(cm, gm_sync->mutex_,
+ aom_malloc(sizeof(*(gm_sync->mutex_))));
+ if (gm_sync->mutex_) pthread_mutex_init(gm_sync->mutex_, NULL);
+ }
+#if !CONFIG_REALTIME_ONLY
+ AV1TemporalFilterSync *tf_sync = &mt_info->tf_sync;
+ if (tf_sync->mutex_ == NULL) {
+ CHECK_MEM_ERROR(cm, tf_sync->mutex_,
+ aom_malloc(sizeof(*tf_sync->mutex_)));
+ if (tf_sync->mutex_) pthread_mutex_init(tf_sync->mutex_, NULL);
+ }
+#endif // !CONFIG_REALTIME_ONLY
+ AV1CdefSync *cdef_sync = &mt_info->cdef_sync;
+ if (cdef_sync->mutex_ == NULL) {
+ CHECK_MEM_ERROR(cm, cdef_sync->mutex_,
+ aom_malloc(sizeof(*(cdef_sync->mutex_))));
+ if (cdef_sync->mutex_) pthread_mutex_init(cdef_sync->mutex_, NULL);
+ }
}
-#endif
+}
+#endif // CONFIG_MULTITHREAD
+
+void av1_create_second_pass_workers(AV1_COMP *cpi, int num_workers) {
+ AV1_COMMON *const cm = &cpi->common;
+ const AVxWorkerInterface *const winterface = aom_get_worker_interface();
+ MultiThreadInfo *const mt_info = &cpi->mt_info;
+
+ assert(mt_info->workers != NULL);
+ assert(mt_info->tile_thr_data != NULL);
for (int i = num_workers - 1; i >= 0; i--) {
AVxWorker *const worker = &mt_info->workers[i];
@@ -576,7 +599,7 @@ void av1_create_second_pass_workers(AV1_COMP *cpi, int num_workers) {
// Create threads
if (!winterface->reset(worker))
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Tile encoder thread creation failed");
} else {
// Main thread acts as a worker and uses the thread data in cpi.
@@ -625,10 +648,6 @@ static AOM_INLINE void create_enc_workers(AV1_COMP *cpi, int num_workers) {
alloc_compound_type_rd_buffers(cm, &thread_data->td->comp_rd_buffer);
- CHECK_MEM_ERROR(
- cm, thread_data->td->tmp_conv_dst,
- aom_memalign(32, MAX_SB_SIZE * MAX_SB_SIZE *
- sizeof(*thread_data->td->tmp_conv_dst)));
for (int j = 0; j < 2; ++j) {
CHECK_MEM_ERROR(
cm, thread_data->td->tmp_pred_bufs[j],
@@ -636,9 +655,14 @@ static AOM_INLINE void create_enc_workers(AV1_COMP *cpi, int num_workers) {
sizeof(*thread_data->td->tmp_pred_bufs[j])));
}
+ const int plane_types = PLANE_TYPES >> cm->seq_params->monochrome;
+ CHECK_MEM_ERROR(cm, thread_data->td->pixel_gradient_info,
+ aom_malloc(sizeof(*thread_data->td->pixel_gradient_info) *
+ plane_types * MAX_SB_SQUARE));
+
if (cpi->sf.part_sf.partition_search_type == VAR_BASED_PARTITION) {
const int num_64x64_blocks =
- (cm->seq_params.sb_size == BLOCK_64X64) ? 1 : 4;
+ (cm->seq_params->sb_size == BLOCK_64X64) ? 1 : 4;
CHECK_MEM_ERROR(
cm, thread_data->td->vt64x64,
aom_malloc(sizeof(*thread_data->td->vt64x64) * num_64x64_blocks));
@@ -680,6 +704,10 @@ void av1_create_workers(AV1_COMP *cpi, int num_workers) {
// Set up shared coeff buffers.
av1_setup_shared_coeff_buffer(cm, &thread_data->td->shared_coeff_buf);
+ CHECK_MEM_ERROR(
+ cm, thread_data->td->tmp_conv_dst,
+ aom_memalign(32, MAX_SB_SIZE * MAX_SB_SIZE *
+ sizeof(*thread_data->td->tmp_conv_dst)));
}
++mt_info->num_workers;
}
@@ -724,7 +752,7 @@ static AOM_INLINE void fp_create_enc_workers(AV1_COMP *cpi, int num_workers) {
if (create_workers) {
// Create threads
if (!winterface->reset(worker))
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Tile encoder thread creation failed");
}
} else {
@@ -764,7 +792,7 @@ static AOM_INLINE void sync_enc_workers(MultiThreadInfo *const mt_info,
}
if (had_error)
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Failed to encode tile data");
}
@@ -780,14 +808,15 @@ static AOM_INLINE void accumulate_counters_enc_workers(AV1_COMP *cpi,
!frame_is_intra_only(&cpi->common))
av1_accumulate_cyclic_refresh_counters(cpi->cyclic_refresh,
&thread_data->td->mb);
- if (thread_data->td->mb.txfm_search_info.txb_rd_records) {
- aom_free(thread_data->td->mb.txfm_search_info.txb_rd_records);
- thread_data->td->mb.txfm_search_info.txb_rd_records = NULL;
- }
- if (thread_data->td != &cpi->td &&
- cpi->oxcf.cost_upd_freq.mv < COST_UPD_OFF) {
- aom_free(thread_data->td->mb.mv_costs);
+ if (thread_data->td != &cpi->td) {
+ if (cpi->oxcf.cost_upd_freq.mv < COST_UPD_OFF) {
+ aom_free(thread_data->td->mb.mv_costs);
+ }
+ if (cpi->oxcf.cost_upd_freq.dv < COST_UPD_OFF) {
+ aom_free(thread_data->td->mb.dv_costs);
+ }
}
+ av1_dealloc_mb_data(&cpi->common, &thread_data->td->mb);
// Accumulate counters.
if (i > 0) {
@@ -822,6 +851,7 @@ static AOM_INLINE void prepare_enc_workers(AV1_COMP *cpi, AVxWorkerHook hook,
thread_data->td->intrabc_used = 0;
thread_data->td->deltaq_used = 0;
+ thread_data->td->abs_sum_level = 0;
// Before encoding a frame, copy the thread data from cpi.
if (thread_data->td != &cpi->td) {
@@ -846,15 +876,19 @@ static AOM_INLINE void prepare_enc_workers(AV1_COMP *cpi, AVxWorkerHook hook,
memcpy(thread_data->td->mb.mv_costs, cpi->td.mb.mv_costs,
sizeof(MvCosts));
}
+ if (cpi->oxcf.cost_upd_freq.dv < COST_UPD_OFF) {
+ CHECK_MEM_ERROR(cm, thread_data->td->mb.dv_costs,
+ (IntraBCMVCosts *)aom_malloc(sizeof(IntraBCMVCosts)));
+ memcpy(thread_data->td->mb.dv_costs, cpi->td.mb.dv_costs,
+ sizeof(IntraBCMVCosts));
+ }
}
+ av1_alloc_mb_data(cm, &thread_data->td->mb,
+ cpi->sf.rt_sf.use_nonrd_pick_mode);
+
// Reset cyclic refresh counters.
av1_init_cyclic_refresh_counters(&thread_data->td->mb);
- if (!cpi->sf.rt_sf.use_nonrd_pick_mode) {
- CHECK_MEM_ERROR(cm, thread_data->td->mb.txfm_search_info.txb_rd_records,
- (TxbRdRecords *)aom_malloc(sizeof(TxbRdRecords)));
- }
-
if (thread_data->td->counts != &cpi->counts) {
memcpy(thread_data->td->counts, &cpi->counts, sizeof(cpi->counts));
}
@@ -867,6 +901,8 @@ static AOM_INLINE void prepare_enc_workers(AV1_COMP *cpi, AVxWorkerHook hook,
thread_data->td->mb.tmp_pred_bufs[j] =
thread_data->td->tmp_pred_bufs[j];
}
+ thread_data->td->mb.pixel_gradient_info =
+ thread_data->td->pixel_gradient_info;
thread_data->td->mb.e_mbd.tmp_conv_dst = thread_data->td->mb.tmp_conv_dst;
for (int j = 0; j < 2; ++j) {
@@ -904,11 +940,16 @@ static AOM_INLINE void fp_prepare_enc_workers(AV1_COMP *cpi, AVxWorkerHook hook,
memcpy(thread_data->td->mb.mv_costs, cpi->td.mb.mv_costs,
sizeof(MvCosts));
}
+ if (cpi->oxcf.cost_upd_freq.dv < COST_UPD_OFF) {
+ CHECK_MEM_ERROR(cm, thread_data->td->mb.dv_costs,
+ (IntraBCMVCosts *)aom_malloc(sizeof(IntraBCMVCosts)));
+ memcpy(thread_data->td->mb.dv_costs, cpi->td.mb.dv_costs,
+ sizeof(IntraBCMVCosts));
+ }
}
- if (!cpi->sf.rt_sf.use_nonrd_pick_mode) {
- CHECK_MEM_ERROR(cm, thread_data->td->mb.txfm_search_info.txb_rd_records,
- (TxbRdRecords *)aom_malloc(sizeof(TxbRdRecords)));
- }
+
+ av1_alloc_mb_data(cm, &thread_data->td->mb,
+ cpi->sf.rt_sf.use_nonrd_pick_mode);
}
}
#endif
@@ -1191,13 +1232,15 @@ void av1_fp_encode_tiles_row_mt(AV1_COMP *cpi) {
sync_enc_workers(&cpi->mt_info, cm, num_workers);
for (int i = num_workers - 1; i >= 0; i--) {
EncWorkerData *const thread_data = &cpi->mt_info.tile_thr_data[i];
- if (thread_data->td != &cpi->td &&
- cpi->oxcf.cost_upd_freq.mv < COST_UPD_OFF) {
- aom_free(thread_data->td->mb.mv_costs);
- }
- if (thread_data->td->mb.txfm_search_info.txb_rd_records) {
- aom_free(thread_data->td->mb.txfm_search_info.txb_rd_records);
+ if (thread_data->td != &cpi->td) {
+ if (cpi->oxcf.cost_upd_freq.mv < COST_UPD_OFF) {
+ aom_free(thread_data->td->mb.mv_costs);
+ }
+ if (cpi->oxcf.cost_upd_freq.dv < COST_UPD_OFF) {
+ aom_free(thread_data->td->mb.dv_costs);
+ }
}
+ av1_dealloc_mb_data(cm, &thread_data->td->mb);
}
}
@@ -1277,11 +1320,15 @@ static int tpl_worker_hook(void *arg1, void *unused) {
AV1_COMMON *cm = &cpi->common;
MACROBLOCK *x = &thread_data->td->mb;
MACROBLOCKD *xd = &x->e_mbd;
+ TplTxfmStats *tpl_txfm_stats = &thread_data->td->tpl_txfm_stats;
CommonModeInfoParams *mi_params = &cm->mi_params;
- BLOCK_SIZE bsize = convert_length_to_bsize(cpi->tpl_data.tpl_bsize_1d);
+ BLOCK_SIZE bsize = convert_length_to_bsize(cpi->ppi->tpl_data.tpl_bsize_1d);
TX_SIZE tx_size = max_txsize_lookup[bsize];
int mi_height = mi_size_high[bsize];
- int num_active_workers = cpi->tpl_data.tpl_mt_sync.num_threads_working;
+ int num_active_workers = cpi->ppi->tpl_data.tpl_mt_sync.num_threads_working;
+
+ av1_init_tpl_txfm_stats(tpl_txfm_stats);
+
for (int mi_row = thread_data->start * mi_height; mi_row < mi_params->mi_rows;
mi_row += num_active_workers * mi_height) {
// Motion estimation row boundary
@@ -1290,7 +1337,7 @@ static int tpl_worker_hook(void *arg1, void *unused) {
xd->mb_to_top_edge = -GET_MV_SUBPEL(mi_row * MI_SIZE);
xd->mb_to_bottom_edge =
GET_MV_SUBPEL((mi_params->mi_rows - mi_height - mi_row) * MI_SIZE);
- av1_mc_flow_dispenser_row(cpi, x, mi_row, bsize, tx_size);
+ av1_mc_flow_dispenser_row(cpi, tpl_txfm_stats, x, mi_row, bsize, tx_size);
}
return 1;
}
@@ -1370,6 +1417,24 @@ static AOM_INLINE void prepare_tpl_workers(AV1_COMP *cpi, AVxWorkerHook hook,
// OBMC buffers are used only to init MS params and remain unused when
// called from tpl, hence set the buffers to defaults.
av1_init_obmc_buffer(&thread_data->td->mb.obmc_buffer);
+ thread_data->td->mb.tmp_conv_dst = thread_data->td->tmp_conv_dst;
+ thread_data->td->mb.e_mbd.tmp_conv_dst = thread_data->td->mb.tmp_conv_dst;
+ }
+ }
+}
+
+// Accumulate transform stats after tpl.
+static void tpl_accumulate_txfm_stats(ThreadData *main_td,
+ const MultiThreadInfo *mt_info,
+ int num_workers) {
+ TplTxfmStats *accumulated_stats = &main_td->tpl_txfm_stats;
+ for (int i = num_workers - 1; i >= 0; i--) {
+ AVxWorker *const worker = &mt_info->workers[i];
+ EncWorkerData *const thread_data = (EncWorkerData *)worker->data1;
+ ThreadData *td = thread_data->td;
+ if (td != main_td) {
+ const TplTxfmStats *tpl_txfm_stats = &td->tpl_txfm_stats;
+ av1_accumulate_tpl_txfm_stats(tpl_txfm_stats, accumulated_stats);
}
}
}
@@ -1379,7 +1444,7 @@ void av1_mc_flow_dispenser_mt(AV1_COMP *cpi) {
AV1_COMMON *cm = &cpi->common;
CommonModeInfoParams *mi_params = &cm->mi_params;
MultiThreadInfo *mt_info = &cpi->mt_info;
- TplParams *tpl_data = &cpi->tpl_data;
+ TplParams *tpl_data = &cpi->ppi->tpl_data;
AV1TplRowMultiThreadSync *tpl_sync = &tpl_data->tpl_mt_sync;
int mb_rows = mi_params->mb_rows;
int num_workers =
@@ -1398,6 +1463,7 @@ void av1_mc_flow_dispenser_mt(AV1_COMP *cpi) {
prepare_tpl_workers(cpi, tpl_worker_hook, num_workers);
launch_workers(&cpi->mt_info, num_workers);
sync_enc_workers(&cpi->mt_info, cm, num_workers);
+ tpl_accumulate_txfm_stats(&cpi->td, &cpi->mt_info, num_workers);
}
// Deallocate memory for temporal filter multi-thread synchronization.
@@ -1752,6 +1818,331 @@ void av1_global_motion_estimation_mt(AV1_COMP *cpi) {
}
#endif // !CONFIG_REALTIME_ONLY
+// Compare and order tiles based on absolute sum of tx coeffs.
+static int compare_tile_order(const void *a, const void *b) {
+ const PackBSTileOrder *const tile_a = (const PackBSTileOrder *)a;
+ const PackBSTileOrder *const tile_b = (const PackBSTileOrder *)b;
+
+ if (tile_a->abs_sum_level > tile_b->abs_sum_level)
+ return -1;
+ else if (tile_a->abs_sum_level == tile_b->abs_sum_level)
+ return (tile_a->tile_idx > tile_b->tile_idx ? 1 : -1);
+ else
+ return 1;
+}
+
+// Gets the next tile index to be processed for pack bitstream.
+static AOM_INLINE int get_next_pack_bs_tile_idx(
+ AV1EncPackBSSync *const pack_bs_sync, const int num_tiles) {
+ assert(pack_bs_sync->next_job_idx <= num_tiles);
+ if (pack_bs_sync->next_job_idx == num_tiles) return -1;
+
+ return pack_bs_sync->pack_bs_tile_order[pack_bs_sync->next_job_idx++]
+ .tile_idx;
+}
+
+// Calculates bitstream chunk size based on total buffer size and tile or tile
+// group size.
+static AOM_INLINE size_t get_bs_chunk_size(int tg_or_tile_size,
+ const int frame_or_tg_size,
+ size_t *remain_buf_size,
+ size_t max_buf_size,
+ int is_last_chunk) {
+ size_t this_chunk_size;
+ assert(*remain_buf_size > 0);
+ if (is_last_chunk) {
+ this_chunk_size = *remain_buf_size;
+ *remain_buf_size = 0;
+ } else {
+ const uint64_t size_scale = (uint64_t)max_buf_size * tg_or_tile_size;
+ this_chunk_size = (size_t)(size_scale / frame_or_tg_size);
+ *remain_buf_size -= this_chunk_size;
+ assert(*remain_buf_size > 0);
+ }
+ assert(this_chunk_size > 0);
+ return this_chunk_size;
+}
+
+// Initializes params required for pack bitstream tile.
+static void init_tile_pack_bs_params(AV1_COMP *const cpi, uint8_t *const dst,
+ struct aom_write_bit_buffer *saved_wb,
+ PackBSParams *const pack_bs_params_arr,
+ uint8_t obu_extn_header) {
+ MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
+ AV1_COMMON *const cm = &cpi->common;
+ const CommonTileParams *const tiles = &cm->tiles;
+ const int num_tiles = tiles->cols * tiles->rows;
+ // Fixed size tile groups for the moment
+ const int num_tg_hdrs = cpi->num_tg;
+ // Tile group size in terms of number of tiles.
+ const int tg_size_in_tiles = (num_tiles + num_tg_hdrs - 1) / num_tg_hdrs;
+ uint8_t *tile_dst = dst;
+ uint8_t *tile_data_curr = dst;
+  // Max tile group count cannot be more than MAX_TILES.
+ int tg_size_mi[MAX_TILES] = { 0 }; // Size of tile group in mi units
+ int tile_idx;
+ int tg_idx = 0;
+ int tile_count_in_tg = 0;
+ int new_tg = 1;
+
+ // Populate pack bitstream params of all tiles.
+ for (tile_idx = 0; tile_idx < num_tiles; tile_idx++) {
+ const TileInfo *const tile_info = &cpi->tile_data[tile_idx].tile_info;
+ PackBSParams *const pack_bs_params = &pack_bs_params_arr[tile_idx];
+ // Calculate tile size in mi units.
+ const int tile_size_mi = (tile_info->mi_col_end - tile_info->mi_col_start) *
+ (tile_info->mi_row_end - tile_info->mi_row_start);
+ int is_last_tile_in_tg = 0;
+ tile_count_in_tg++;
+ if (tile_count_in_tg == tg_size_in_tiles || tile_idx == (num_tiles - 1))
+ is_last_tile_in_tg = 1;
+
+ // Populate pack bitstream params of this tile.
+ pack_bs_params->curr_tg_hdr_size = 0;
+ pack_bs_params->obu_extn_header = obu_extn_header;
+ pack_bs_params->saved_wb = saved_wb;
+ pack_bs_params->obu_header_size = 0;
+ pack_bs_params->is_last_tile_in_tg = is_last_tile_in_tg;
+ pack_bs_params->new_tg = new_tg;
+ pack_bs_params->tile_col = tile_info->tile_col;
+ pack_bs_params->tile_row = tile_info->tile_row;
+ pack_bs_params->tile_size_mi = tile_size_mi;
+ tg_size_mi[tg_idx] += tile_size_mi;
+
+ if (new_tg) new_tg = 0;
+ if (is_last_tile_in_tg) {
+ tile_count_in_tg = 0;
+ new_tg = 1;
+ tg_idx++;
+ }
+ }
+
+ assert(cpi->available_bs_size > 0);
+ size_t tg_buf_size[MAX_TILES] = { 0 };
+ size_t max_buf_size = cpi->available_bs_size;
+ size_t remain_buf_size = max_buf_size;
+ const int frame_size_mi = cm->mi_params.mi_rows * cm->mi_params.mi_cols;
+
+ tile_idx = 0;
+ // Prepare obu, tile group and frame header of each tile group.
+ for (tg_idx = 0; tg_idx < cpi->num_tg; tg_idx++) {
+ PackBSParams *const pack_bs_params = &pack_bs_params_arr[tile_idx];
+ int is_last_tg = tg_idx == cpi->num_tg - 1;
+ // Prorate bitstream buffer size based on tile group size and available
+ // buffer size. This buffer will be used to store headers and tile data.
+ tg_buf_size[tg_idx] =
+ get_bs_chunk_size(tg_size_mi[tg_idx], frame_size_mi, &remain_buf_size,
+ max_buf_size, is_last_tg);
+
+ pack_bs_params->dst = tile_dst;
+ pack_bs_params->tile_data_curr = tile_dst;
+
+    // Write the obu, tile group and frame header at the first tile in the
+    // tile group.
+ av1_write_obu_tg_tile_headers(cpi, xd, pack_bs_params, tile_idx);
+ tile_dst += tg_buf_size[tg_idx];
+
+ // Exclude headers from tile group buffer size.
+ tg_buf_size[tg_idx] -= pack_bs_params->curr_tg_hdr_size;
+ tile_idx += tg_size_in_tiles;
+ }
+
+ tg_idx = 0;
+ // Calculate bitstream buffer size of each tile in the tile group.
+ for (tile_idx = 0; tile_idx < num_tiles; tile_idx++) {
+ PackBSParams *const pack_bs_params = &pack_bs_params_arr[tile_idx];
+
+ if (pack_bs_params->new_tg) {
+ max_buf_size = tg_buf_size[tg_idx];
+ remain_buf_size = max_buf_size;
+ }
+
+    // Prorate bitstream buffer size of this tile based on tile size and
+    // available buffer size. Header size is not accounted for in this proration.
+ const size_t tile_buf_size = get_bs_chunk_size(
+ pack_bs_params->tile_size_mi, tg_size_mi[tg_idx], &remain_buf_size,
+ max_buf_size, pack_bs_params->is_last_tile_in_tg);
+ pack_bs_params->tile_buf_size = tile_buf_size;
+
+ // Update base address of bitstream buffer for tile and tile group.
+ if (pack_bs_params->new_tg) {
+ tile_dst = pack_bs_params->dst;
+ tile_data_curr = pack_bs_params->tile_data_curr;
+      // Account for the header size in the first tile of a tile group.
+ pack_bs_params->tile_buf_size += pack_bs_params->curr_tg_hdr_size;
+ } else {
+ pack_bs_params->dst = tile_dst;
+ pack_bs_params->tile_data_curr = tile_data_curr;
+ }
+
+ if (pack_bs_params->is_last_tile_in_tg) tg_idx++;
+ tile_dst += pack_bs_params->tile_buf_size;
+ }
+}
+
+// Worker hook function of pack bitstream multithreading.
+static int pack_bs_worker_hook(void *arg1, void *arg2) {
+ EncWorkerData *const thread_data = (EncWorkerData *)arg1;
+ PackBSParams *const pack_bs_params = (PackBSParams *)arg2;
+ AV1_COMP *const cpi = thread_data->cpi;
+ AV1_COMMON *const cm = &cpi->common;
+ AV1EncPackBSSync *const pack_bs_sync = &cpi->mt_info.pack_bs_sync;
+ const CommonTileParams *const tiles = &cm->tiles;
+ const int num_tiles = tiles->cols * tiles->rows;
+
+ while (1) {
+#if CONFIG_MULTITHREAD
+ pthread_mutex_lock(pack_bs_sync->mutex_);
+#endif
+ const int tile_idx = get_next_pack_bs_tile_idx(pack_bs_sync, num_tiles);
+#if CONFIG_MULTITHREAD
+ pthread_mutex_unlock(pack_bs_sync->mutex_);
+#endif
+ if (tile_idx == -1) break;
+ TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
+ thread_data->td->mb.e_mbd.tile_ctx = &this_tile->tctx;
+
+ av1_pack_tile_info(cpi, thread_data->td, &pack_bs_params[tile_idx]);
+ }
+
+ return 1;
+}
+
+// Prepares thread data and workers for pack bitstream multithreading.
+static void prepare_pack_bs_workers(AV1_COMP *const cpi,
+ PackBSParams *const pack_bs_params,
+ AVxWorkerHook hook, const int num_workers) {
+ MultiThreadInfo *const mt_info = &cpi->mt_info;
+ for (int i = num_workers - 1; i >= 0; i--) {
+ AVxWorker *worker = &mt_info->workers[i];
+ EncWorkerData *const thread_data = &mt_info->tile_thr_data[i];
+ if (i == 0) thread_data->td = &cpi->td;
+
+ if (thread_data->td != &cpi->td) thread_data->td->mb = cpi->td.mb;
+
+ thread_data->cpi = cpi;
+ thread_data->start = i;
+ thread_data->thread_id = i;
+ av1_reset_pack_bs_thread_data(thread_data->td);
+
+ worker->hook = hook;
+ worker->data1 = thread_data;
+ worker->data2 = pack_bs_params;
+ }
+
+ AV1_COMMON *const cm = &cpi->common;
+ AV1EncPackBSSync *const pack_bs_sync = &mt_info->pack_bs_sync;
+ const uint16_t num_tiles = cm->tiles.rows * cm->tiles.cols;
+#if CONFIG_MULTITHREAD
+ if (pack_bs_sync->mutex_ == NULL) {
+ CHECK_MEM_ERROR(cm, pack_bs_sync->mutex_,
+ aom_malloc(sizeof(*pack_bs_sync->mutex_)));
+ if (pack_bs_sync->mutex_) pthread_mutex_init(pack_bs_sync->mutex_, NULL);
+ }
+#endif
+ pack_bs_sync->next_job_idx = 0;
+
+ PackBSTileOrder *const pack_bs_tile_order = pack_bs_sync->pack_bs_tile_order;
+ // Reset tile order data of pack bitstream
+ av1_zero_array(pack_bs_tile_order, num_tiles);
+
+ // Populate pack bitstream tile order structure
+ for (uint16_t tile_idx = 0; tile_idx < num_tiles; tile_idx++) {
+ pack_bs_tile_order[tile_idx].abs_sum_level =
+ cpi->tile_data[tile_idx].abs_sum_level;
+ pack_bs_tile_order[tile_idx].tile_idx = tile_idx;
+ }
+
+  // Sort tiles in descending order of their absolute sum of tx coeff levels.
+ qsort(pack_bs_tile_order, num_tiles, sizeof(*pack_bs_tile_order),
+ compare_tile_order);
+}
+
+// Accumulates data after pack bitstream processing.
+static void accumulate_pack_bs_data(
+ AV1_COMP *const cpi, const PackBSParams *const pack_bs_params_arr,
+ uint8_t *const dst, uint32_t *total_size, const FrameHeaderInfo *fh_info,
+ int *const largest_tile_id, unsigned int *max_tile_size,
+ uint32_t *const obu_header_size, uint8_t **tile_data_start,
+ const int num_workers) {
+ const AV1_COMMON *const cm = &cpi->common;
+ const CommonTileParams *const tiles = &cm->tiles;
+ const int tile_count = tiles->cols * tiles->rows;
+ // Fixed size tile groups for the moment
+ size_t curr_tg_data_size = 0;
+ int is_first_tg = 1;
+ uint8_t *curr_tg_start = dst;
+ size_t src_offset = 0;
+ size_t dst_offset = 0;
+
+ for (int tile_idx = 0; tile_idx < tile_count; tile_idx++) {
+ // PackBSParams stores all parameters required to pack tile and header
+ // info.
+ const PackBSParams *const pack_bs_params = &pack_bs_params_arr[tile_idx];
+ uint32_t tile_size = 0;
+
+ if (pack_bs_params->new_tg) {
+ curr_tg_start = dst + *total_size;
+ curr_tg_data_size = pack_bs_params->curr_tg_hdr_size;
+ *tile_data_start += pack_bs_params->curr_tg_hdr_size;
+ *obu_header_size = pack_bs_params->obu_header_size;
+ }
+ curr_tg_data_size +=
+ pack_bs_params->buf.size + (pack_bs_params->is_last_tile_in_tg ? 0 : 4);
+
+ if (pack_bs_params->buf.size > *max_tile_size) {
+ *largest_tile_id = tile_idx;
+ *max_tile_size = (unsigned int)pack_bs_params->buf.size;
+ }
+ tile_size +=
+ (uint32_t)pack_bs_params->buf.size + *pack_bs_params->total_size;
+
+ // Pack all the chunks of tile bitstreams together
+ if (tile_idx != 0) memmove(dst + dst_offset, dst + src_offset, tile_size);
+
+ if (pack_bs_params->is_last_tile_in_tg)
+ av1_write_last_tile_info(
+ cpi, fh_info, pack_bs_params->saved_wb, &curr_tg_data_size,
+ curr_tg_start, &tile_size, tile_data_start, largest_tile_id,
+ &is_first_tg, *obu_header_size, pack_bs_params->obu_extn_header);
+ src_offset += pack_bs_params->tile_buf_size;
+ dst_offset += tile_size;
+ *total_size += tile_size;
+ }
+
+ // Accumulate thread data
+ MultiThreadInfo *const mt_info = &cpi->mt_info;
+ for (int idx = num_workers - 1; idx >= 0; idx--) {
+ ThreadData const *td = mt_info->tile_thr_data[idx].td;
+ av1_accumulate_pack_bs_thread_data(cpi, td);
+ }
+}
+
+void av1_write_tile_obu_mt(
+ AV1_COMP *const cpi, uint8_t *const dst, uint32_t *total_size,
+ struct aom_write_bit_buffer *saved_wb, uint8_t obu_extn_header,
+ const FrameHeaderInfo *fh_info, int *const largest_tile_id,
+ unsigned int *max_tile_size, uint32_t *const obu_header_size,
+ uint8_t **tile_data_start) {
+ MultiThreadInfo *const mt_info = &cpi->mt_info;
+ const int num_workers = mt_info->num_mod_workers[MOD_PACK_BS];
+
+ PackBSParams pack_bs_params[MAX_TILES];
+ uint32_t tile_size[MAX_TILES] = { 0 };
+
+ for (int tile_idx = 0; tile_idx < MAX_TILES; tile_idx++)
+ pack_bs_params[tile_idx].total_size = &tile_size[tile_idx];
+
+ init_tile_pack_bs_params(cpi, dst, saved_wb, pack_bs_params, obu_extn_header);
+ prepare_pack_bs_workers(cpi, pack_bs_params, pack_bs_worker_hook,
+ num_workers);
+ launch_workers(mt_info, num_workers);
+ sync_enc_workers(mt_info, &cpi->common, num_workers);
+ accumulate_pack_bs_data(cpi, pack_bs_params, dst, total_size, fh_info,
+ largest_tile_id, max_tile_size, obu_header_size,
+ tile_data_start, num_workers);
+}
+
// Deallocate memory for CDEF search multi-thread synchronization.
void av1_cdef_mt_dealloc(AV1CdefSync *cdef_sync) {
(void)cdef_sync;
@@ -1780,6 +2171,9 @@ static void update_next_job_info(AV1CdefSync *cdef_sync, int nvfb, int nhfb) {
// Initializes cdef_sync parameters.
static AOM_INLINE void cdef_reset_job_info(AV1CdefSync *cdef_sync) {
+#if CONFIG_MULTITHREAD
+ if (cdef_sync->mutex_) pthread_mutex_init(cdef_sync->mutex_, NULL);
+#endif // CONFIG_MULTITHREAD
cdef_sync->end_of_frame = 0;
cdef_sync->fbr = 0;
cdef_sync->fbc = 0;
@@ -1896,6 +2290,12 @@ static AOM_INLINE int compute_num_lr_workers(AV1_COMP *cpi) {
return compute_num_enc_workers(cpi, cpi->oxcf.max_threads);
}
+// Computes num_workers for pack bitstream multi-threading.
+static AOM_INLINE int compute_num_pack_bs_workers(AV1_COMP *cpi) {
+ if (cpi->oxcf.max_threads <= 1) return 1;
+ return compute_num_enc_tile_mt_workers(&cpi->common, cpi->oxcf.max_threads);
+}
+
int compute_num_mod_workers(AV1_COMP *cpi, MULTI_THREADED_MODULES mod_name) {
int num_mod_workers = 0;
switch (mod_name) {
@@ -1915,7 +2315,9 @@ int compute_num_mod_workers(AV1_COMP *cpi, MULTI_THREADED_MODULES mod_name) {
case MOD_CDEF_SEARCH:
num_mod_workers = compute_num_cdef_workers(cpi);
break;
+ case MOD_CDEF: num_mod_workers = compute_num_cdef_workers(cpi); break;
case MOD_LR: num_mod_workers = compute_num_lr_workers(cpi); break;
+ case MOD_PACK_BS: num_mod_workers = compute_num_pack_bs_workers(cpi); break;
default: assert(0); break;
}
return (num_mod_workers);
diff --git a/third_party/libaom/source/libaom/av1/encoder/ethread.h b/third_party/libaom/source/libaom/av1/encoder/ethread.h
index 55e7f7be39..c2ab864690 100644
--- a/third_party/libaom/source/libaom/av1/encoder/ethread.h
+++ b/third_party/libaom/source/libaom/av1/encoder/ethread.h
@@ -80,6 +80,10 @@ int av1_get_max_num_workers(AV1_COMP *cpi);
void av1_create_workers(AV1_COMP *cpi, int num_workers);
+#if CONFIG_MULTITHREAD
+void av1_init_mt_sync(AV1_COMP *cpi, int is_first_pass);
+#endif // CONFIG_MULTITHREAD
+
void av1_create_second_pass_workers(AV1_COMP *cpi, int num_workers);
void av1_cdef_mse_calc_frame_mt(AV1_COMMON *cm, MultiThreadInfo *mt_info,
@@ -87,6 +91,13 @@ void av1_cdef_mse_calc_frame_mt(AV1_COMMON *cm, MultiThreadInfo *mt_info,
void av1_cdef_mt_dealloc(AV1CdefSync *cdef_sync);
+void av1_write_tile_obu_mt(
+ AV1_COMP *const cpi, uint8_t *const dst, uint32_t *total_size,
+ struct aom_write_bit_buffer *saved_wb, uint8_t obu_extn_header,
+ const FrameHeaderInfo *fh_info, int *const largest_tile_id,
+ unsigned int *max_tile_size, uint32_t *const obu_header_size,
+ uint8_t **tile_data_start);
+
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/third_party/libaom/source/libaom/av1/encoder/external_partition.c b/third_party/libaom/source/libaom/av1/encoder/external_partition.c
new file mode 100644
index 0000000000..542b2bb878
--- /dev/null
+++ b/third_party/libaom/source/libaom/av1/encoder/external_partition.c
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include "av1/common/common.h"
+#include "av1/encoder/external_partition.h"
+
+aom_codec_err_t av1_ext_part_create(aom_ext_part_funcs_t funcs,
+ aom_ext_part_config_t config,
+ ExtPartController *ext_part_controller) {
+ if (ext_part_controller == NULL) {
+ return AOM_CODEC_INVALID_PARAM;
+ }
+ ext_part_controller->funcs = funcs;
+ ext_part_controller->config = config;
+ const aom_ext_part_status_t status = ext_part_controller->funcs.create_model(
+ ext_part_controller->funcs.priv, &ext_part_controller->config,
+ &ext_part_controller->model);
+ if (status == AOM_EXT_PART_ERROR) {
+ return AOM_CODEC_ERROR;
+ } else if (status == AOM_EXT_PART_TEST) {
+ ext_part_controller->test_mode = 1;
+ ext_part_controller->ready = 0;
+ return AOM_CODEC_OK;
+ }
+ assert(status == AOM_EXT_PART_OK);
+ ext_part_controller->ready = 1;
+ return AOM_CODEC_OK;
+}
+
+aom_codec_err_t av1_ext_part_init(ExtPartController *ext_part_controller) {
+ if (ext_part_controller == NULL) {
+ return AOM_CODEC_INVALID_PARAM;
+ }
+ av1_zero(ext_part_controller);
+ return AOM_CODEC_OK;
+}
+
+aom_codec_err_t av1_ext_part_delete(ExtPartController *ext_part_controller) {
+ if (ext_part_controller == NULL) {
+ return AOM_CODEC_INVALID_PARAM;
+ }
+ if (ext_part_controller->ready) {
+ const aom_ext_part_status_t status =
+ ext_part_controller->funcs.delete_model(ext_part_controller->model);
+ if (status != AOM_EXT_PART_OK) {
+ return AOM_CODEC_ERROR;
+ }
+ }
+ return av1_ext_part_init(ext_part_controller);
+}
+
+bool av1_ext_part_get_partition_decision(ExtPartController *ext_part_controller,
+ aom_partition_decision_t *decision) {
+ assert(ext_part_controller != NULL);
+ assert(ext_part_controller->ready);
+ assert(decision != NULL);
+ const aom_ext_part_status_t status =
+ ext_part_controller->funcs.get_partition_decision(
+ ext_part_controller->model, decision);
+ if (status != AOM_EXT_PART_OK) return false;
+ return true;
+}
+
+bool av1_ext_part_send_partition_stats(ExtPartController *ext_part_controller,
+ const aom_partition_stats_t *stats) {
+ assert(ext_part_controller != NULL);
+ assert(ext_part_controller->ready);
+ assert(stats != NULL);
+ const aom_ext_part_status_t status =
+ ext_part_controller->funcs.send_partition_stats(
+ ext_part_controller->model, stats);
+ if (status != AOM_EXT_PART_OK) return false;
+ return true;
+}
+
+bool av1_ext_part_send_features(ExtPartController *ext_part_controller,
+ const aom_partition_features_t *features) {
+ assert(ext_part_controller != NULL);
+ assert(ext_part_controller->ready);
+ assert(features != NULL);
+ const aom_ext_part_status_t status = ext_part_controller->funcs.send_features(
+ ext_part_controller->model, features);
+ if (status != AOM_EXT_PART_OK) return false;
+ return true;
+}
diff --git a/third_party/libaom/source/libaom/av1/encoder/external_partition.h b/third_party/libaom/source/libaom/av1/encoder/external_partition.h
new file mode 100644
index 0000000000..20f03ed752
--- /dev/null
+++ b/third_party/libaom/source/libaom/av1/encoder/external_partition.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#ifndef AOM_AV1_ENCODER_EXTERNAL_PARTITION_H_
+#define AOM_AV1_ENCODER_EXTERNAL_PARTITION_H_
+
+#include <stdbool.h>
+
+#include "aom/aom_codec.h"
+#include "aom/aom_external_partition.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*!\cond */
+
+typedef struct ExtPartController {
+ int ready;
+ int test_mode;
+ aom_ext_part_config_t config;
+ aom_ext_part_model_t model;
+ aom_ext_part_funcs_t funcs;
+} ExtPartController;
+
+aom_codec_err_t av1_ext_part_create(aom_ext_part_funcs_t funcs,
+ aom_ext_part_config_t config,
+ ExtPartController *ext_part_controller);
+
+aom_codec_err_t av1_ext_part_init(ExtPartController *ext_part_controller);
+
+aom_codec_err_t av1_ext_part_delete(ExtPartController *ext_part_controller);
+
+bool av1_ext_part_get_partition_decision(ExtPartController *ext_part_controller,
+ aom_partition_decision_t *decision);
+
+bool av1_ext_part_send_partition_stats(ExtPartController *ext_part_controller,
+ const aom_partition_stats_t *stats);
+
+bool av1_ext_part_send_features(ExtPartController *ext_part_controller,
+ const aom_partition_features_t *features);
+
+/*!\endcond */
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif // AOM_AV1_ENCODER_EXTERNAL_PARTITION_H_
diff --git a/third_party/libaom/source/libaom/av1/encoder/firstpass.c b/third_party/libaom/source/libaom/av1/encoder/firstpass.c
index ff6814d04c..662b42c822 100644
--- a/third_party/libaom/source/libaom/av1/encoder/firstpass.c
+++ b/third_party/libaom/source/libaom/av1/encoder/firstpass.c
@@ -27,6 +27,7 @@
#include "av1/common/entropymv.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h" // av1_setup_dst_planes()
+#include "av1/common/reconintra.h"
#include "av1/common/txb_common.h"
#include "av1/encoder/aq_variance.h"
#include "av1/encoder/av1_quantize.h"
@@ -54,6 +55,8 @@
#define NCOUNT_INTRA_THRESH 8192
#define NCOUNT_INTRA_FACTOR 3
+#define INVALID_FP_STATS_TO_PREDICT_FLAT_GOP -1
+
static AOM_INLINE void output_stats(FIRSTPASS_STATS *stats,
struct aom_codec_pkt_list *pktlist) {
struct aom_codec_cx_pkt pkt;
@@ -108,6 +111,9 @@ void av1_twopass_zero_stats(FIRSTPASS_STATS *section) {
section->new_mv_count = 0.0;
section->count = 0.0;
section->duration = 1.0;
+ section->is_flash = 0;
+ section->noise_var = 0;
+ section->cor_coeff = 1.0;
}
void av1_accumulate_stats(FIRSTPASS_STATS *section,
@@ -118,9 +124,11 @@ void av1_accumulate_stats(FIRSTPASS_STATS *section,
section->frame_avg_wavelet_energy += frame->frame_avg_wavelet_energy;
section->coded_error += frame->coded_error;
section->sr_coded_error += frame->sr_coded_error;
+ section->tr_coded_error += frame->tr_coded_error;
section->pcnt_inter += frame->pcnt_inter;
section->pcnt_motion += frame->pcnt_motion;
section->pcnt_second_ref += frame->pcnt_second_ref;
+ section->pcnt_third_ref += frame->pcnt_third_ref;
section->pcnt_neutral += frame->pcnt_neutral;
section->intra_skip_pct += frame->intra_skip_pct;
section->inactive_zone_rows += frame->inactive_zone_rows;
@@ -177,8 +185,9 @@ static int get_num_mbs(const BLOCK_SIZE fp_block_size,
}
void av1_end_first_pass(AV1_COMP *cpi) {
- if (cpi->twopass.stats_buf_ctx->total_stats)
- output_stats(cpi->twopass.stats_buf_ctx->total_stats, cpi->output_pkt_list);
+ if (cpi->ppi->twopass.stats_buf_ctx->total_stats && !cpi->ppi->lap_enabled)
+ output_stats(cpi->ppi->twopass.stats_buf_ctx->total_stats,
+ cpi->ppi->output_pkt_list);
}
static aom_variance_fn_t get_block_variance_fn(BLOCK_SIZE bsize) {
@@ -261,15 +270,12 @@ static AOM_INLINE void first_pass_motion_search(AV1_COMP *cpi, MACROBLOCK *x,
const BLOCK_SIZE bsize = xd->mi[0]->bsize;
const int new_mv_mode_penalty = NEW_MV_MODE_PENALTY;
const int sr = get_search_range(&cpi->initial_dimensions);
- const int step_param = 3 + sr;
+ const int step_param = cpi->sf.fp_sf.reduce_mv_step_param + sr;
const search_site_config *first_pass_search_sites =
cpi->mv_search_params.search_site_cfg[SS_CFG_FPF];
const int fine_search_interval =
cpi->is_screen_content_type && cpi->common.features.allow_intrabc;
- if (fine_search_interval) {
- av1_set_speed_features_framesize_independent(cpi, cpi->oxcf.speed);
- }
FULLPEL_MOTION_SEARCH_PARAMS ms_params;
av1_make_default_fullpel_ms_params(&ms_params, cpi, x, bsize, ref_mv,
first_pass_search_sites,
@@ -281,7 +287,7 @@ static AOM_INLINE void first_pass_motion_search(AV1_COMP *cpi, MACROBLOCK *x,
&this_best_mv, NULL);
if (tmp_err < INT_MAX) {
- aom_variance_fn_ptr_t v_fn_ptr = cpi->fn_ptr[bsize];
+ aom_variance_fn_ptr_t v_fn_ptr = cpi->ppi->fn_ptr[bsize];
const MSBuffers *ms_buffers = &ms_params.ms_buffers;
tmp_err = av1_get_mvpred_sse(&ms_params.mv_cost_params, this_best_mv,
&v_fn_ptr, ms_buffers->src, ms_buffers->ref) +
@@ -355,6 +361,86 @@ static double raw_motion_error_stdev(int *raw_motion_err_list,
return raw_err_stdev;
}
+static AOM_INLINE int do_third_ref_motion_search(const RateControlCfg *rc_cfg,
+ const GFConfig *gf_cfg) {
+ return use_ml_model_to_decide_flat_gop(rc_cfg) && can_disable_altref(gf_cfg);
+}
+
+static AOM_INLINE int calc_wavelet_energy(const AV1EncoderConfig *oxcf) {
+ return (use_ml_model_to_decide_flat_gop(&oxcf->rc_cfg) &&
+ can_disable_altref(&oxcf->gf_cfg)) ||
+ (oxcf->q_cfg.deltaq_mode == DELTA_Q_PERCEPTUAL);
+}
+typedef struct intra_pred_block_pass1_args {
+ const SequenceHeader *seq_params;
+ MACROBLOCK *x;
+} intra_pred_block_pass1_args;
+
+static INLINE void copy_rect(uint8_t *dst, int dstride, const uint8_t *src,
+ int sstride, int width, int height, int use_hbd) {
+#if CONFIG_AV1_HIGHBITDEPTH
+ if (use_hbd) {
+ aom_highbd_convolve_copy(CONVERT_TO_SHORTPTR(src), sstride,
+ CONVERT_TO_SHORTPTR(dst), dstride, width, height);
+ } else {
+ aom_convolve_copy(src, sstride, dst, dstride, width, height);
+ }
+#else
+ (void)use_hbd;
+ aom_convolve_copy(src, sstride, dst, dstride, width, height);
+#endif
+}
+
+static void first_pass_intra_pred_and_calc_diff(int plane, int block,
+ int blk_row, int blk_col,
+ BLOCK_SIZE plane_bsize,
+ TX_SIZE tx_size, void *arg) {
+ (void)block;
+ struct intra_pred_block_pass1_args *const args = arg;
+ MACROBLOCK *const x = args->x;
+ MACROBLOCKD *const xd = &x->e_mbd;
+ MACROBLOCKD_PLANE *const pd = &xd->plane[plane];
+ MACROBLOCK_PLANE *const p = &x->plane[plane];
+ const int dst_stride = pd->dst.stride;
+ uint8_t *dst = &pd->dst.buf[(blk_row * dst_stride + blk_col) << MI_SIZE_LOG2];
+ const MB_MODE_INFO *const mbmi = xd->mi[0];
+ const SequenceHeader *seq_params = args->seq_params;
+ const int src_stride = p->src.stride;
+ uint8_t *src = &p->src.buf[(blk_row * src_stride + blk_col) << MI_SIZE_LOG2];
+
+ av1_predict_intra_block(
+ xd, seq_params->sb_size, seq_params->enable_intra_edge_filter, pd->width,
+ pd->height, tx_size, mbmi->mode, 0, 0, FILTER_INTRA_MODES, src,
+ src_stride, dst, dst_stride, blk_col, blk_row, plane);
+
+ av1_subtract_txb(x, plane, plane_bsize, blk_col, blk_row, tx_size);
+}
+
+static void first_pass_predict_intra_block_for_luma_plane(
+ const SequenceHeader *seq_params, MACROBLOCK *x, BLOCK_SIZE bsize) {
+ assert(bsize < BLOCK_SIZES_ALL);
+ const MACROBLOCKD *const xd = &x->e_mbd;
+ const int plane = AOM_PLANE_Y;
+ const MACROBLOCKD_PLANE *const pd = &xd->plane[plane];
+ const int ss_x = pd->subsampling_x;
+ const int ss_y = pd->subsampling_y;
+ const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, ss_x, ss_y);
+ const int dst_stride = pd->dst.stride;
+ uint8_t *dst = pd->dst.buf;
+ const MACROBLOCK_PLANE *const p = &x->plane[plane];
+ const int src_stride = p->src.stride;
+ const uint8_t *src = p->src.buf;
+
+ intra_pred_block_pass1_args args = { seq_params, x };
+ av1_foreach_transformed_block_in_plane(
+ xd, plane_bsize, plane, first_pass_intra_pred_and_calc_diff, &args);
+
+  // Copy source data to the recon buffer, as the recon buffer will be used
+  // as a reference frame subsequently.
+ copy_rect(dst, dst_stride, src, src_stride, block_size_wide[bsize],
+ block_size_high[bsize], seq_params->use_highbitdepth);
+}
+
#define UL_INTRA_THRESH 50
#define INVALID_ROW -1
// Computes and returns the intra pred error of a block.
@@ -388,11 +474,10 @@ static int firstpass_intra_prediction(
const int qindex, FRAME_STATS *const stats) {
const AV1_COMMON *const cm = &cpi->common;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
MACROBLOCK *const x = &td->mb;
MACROBLOCKD *const xd = &x->e_mbd;
const int unit_scale = mi_size_wide[fp_block_size];
- const int use_dc_pred = (unit_col || unit_row) && (!unit_col || !unit_row);
const int num_planes = av1_num_planes(cm);
const BLOCK_SIZE bsize =
get_bsize(mi_params, fp_block_size, unit_row, unit_col);
@@ -412,9 +497,12 @@ static int firstpass_intra_prediction(
xd->mi[0]->segment_id = 0;
xd->lossless[xd->mi[0]->segment_id] = (qindex == 0);
xd->mi[0]->mode = DC_PRED;
- xd->mi[0]->tx_size = use_dc_pred ? max_txsize_lookup[bsize] : TX_4X4;
+ xd->mi[0]->tx_size = TX_4X4;
- av1_encode_intra_block_plane(cpi, x, bsize, 0, DRY_RUN_NORMAL, 0);
+ if (cpi->sf.fp_sf.disable_recon)
+ first_pass_predict_intra_block_for_luma_plane(seq_params, x, bsize);
+ else
+ av1_encode_intra_block_plane(cpi, x, bsize, 0, DRY_RUN_NORMAL, 0);
int this_intra_error = aom_get_mb_ss(x->plane[0].src_diff);
if (seq_params->use_highbitdepth) {
switch (seq_params->bit_depth) {
@@ -480,16 +568,22 @@ static int firstpass_intra_prediction(
// Accumulate the intra error.
stats->intra_error += (int64_t)this_intra_error;
- const int hbd = is_cur_buf_hbd(xd);
- const int stride = x->plane[0].src.stride;
- const int num_8x8_rows = block_size_high[fp_block_size] / 8;
- const int num_8x8_cols = block_size_wide[fp_block_size] / 8;
- const uint8_t *buf = x->plane[0].src.buf;
- for (int r8 = 0; r8 < num_8x8_rows; ++r8) {
- for (int c8 = 0; c8 < num_8x8_cols; ++c8) {
- stats->frame_avg_wavelet_energy += av1_haar_ac_sad_8x8_uint8_input(
- buf + c8 * 8 + r8 * 8 * stride, stride, hbd);
- }
+  // Stats based on wavelet energy are used in the following cases:
+  // 1. The ML model which predicts if a flat structure (golden-frame only
+  // structure without ALT-REF and Internal-ARFs) is better. This ML model is
+  // enabled in constant quality mode under certain conditions.
+  // 2. Delta qindex mode is set as DELTA_Q_PERCEPTUAL.
+  // Thus, wavelet energy calculation is enabled for the above cases.
+ if (calc_wavelet_energy(&cpi->oxcf)) {
+ const int hbd = is_cur_buf_hbd(xd);
+ const int stride = x->plane[0].src.stride;
+ const int num_8x8_rows = block_size_high[fp_block_size] / 8;
+ const int num_8x8_cols = block_size_wide[fp_block_size] / 8;
+ const uint8_t *buf = x->plane[0].src.buf;
+ stats->frame_avg_wavelet_energy += av1_haar_ac_sad_mxn_uint8_input(
+ buf, stride, hbd, num_8x8_rows, num_8x8_cols);
+ } else {
+ stats->frame_avg_wavelet_energy = INVALID_FP_STATS_TO_PREDICT_FLAT_GOP;
}
return this_intra_error;
@@ -516,13 +610,13 @@ static int get_prediction_error_bitdepth(const int is_high_bitdepth,
static void accumulate_mv_stats(const MV best_mv, const FULLPEL_MV mv,
const int mb_row, const int mb_col,
const int mb_rows, const int mb_cols,
- MV *last_mv, FRAME_STATS *stats) {
+ MV *last_non_zero_mv, FRAME_STATS *stats) {
if (is_zero_mv(&best_mv)) return;
++stats->mv_count;
// Non-zero vector, was it different from the last non-zero vector?
- if (!is_equal_mv(&best_mv, last_mv)) ++stats->new_mv_count;
- *last_mv = best_mv;
+ if (!is_equal_mv(&best_mv, last_non_zero_mv)) ++stats->new_mv_count;
+ *last_non_zero_mv = best_mv;
// Does the row vector point inwards or outwards?
if (mb_row < mb_rows / 2) {
@@ -555,7 +649,6 @@ static void accumulate_mv_stats(const MV best_mv, const FULLPEL_MV mv,
}
}
-#define LOW_MOTION_ERROR_THRESH 25
// Computes and returns the inter prediction error from the last frame.
// Computes inter prediction errors from the golden and alt ref frames and
// updates stats accordingly.
@@ -576,8 +669,9 @@ static void accumulate_mv_stats(const MV best_mv, const FULLPEL_MV mv,
// this_intra_error: the intra prediction error of this block.
// raw_motion_err_counts: the count of raw motion vectors.
// raw_motion_err_list: the array that records the raw motion error.
-// best_ref_mv: best reference mv found so far.
-// last_mv: last mv.
+// ref_mv: the reference mv used to start the motion search.
+// best_mv: the best mv found.
+// last_non_zero_mv: the last non-zero mv found in this tile row.
// stats: frame encoding stats.
// Modifies:
// raw_motion_err_list
@@ -593,8 +687,8 @@ static int firstpass_inter_prediction(
const int unit_col, const int recon_yoffset, const int recon_uvoffset,
const int src_yoffset, const int alt_ref_frame_yoffset,
const BLOCK_SIZE fp_block_size, const int this_intra_error,
- const int raw_motion_err_counts, int *raw_motion_err_list, MV *best_ref_mv,
- MV *last_mv, FRAME_STATS *stats) {
+ const int raw_motion_err_counts, int *raw_motion_err_list, const MV ref_mv,
+ MV *best_mv, MV *last_non_zero_mv, FRAME_STATS *stats) {
int this_inter_error = this_intra_error;
AV1_COMMON *const cm = &cpi->common;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
@@ -612,7 +706,6 @@ static int firstpass_inter_prediction(
const int unit_cols = get_unit_cols(fp_block_size, mi_params->mb_cols);
// Assume 0,0 motion with no mv overhead.
FULLPEL_MV mv = kZeroFullMv;
- FULLPEL_MV tmp_mv = kZeroFullMv;
xd->plane[0].pre[0].buf = last_frame->y_buffer + recon_yoffset;
// Set up limit values for motion vectors to prevent them extending
// outside the UMV borders.
@@ -636,15 +729,15 @@ static int firstpass_inter_prediction(
&unscaled_last_source_buf_2d);
raw_motion_err_list[raw_motion_err_counts] = raw_motion_error;
- // TODO(pengchong): Replace the hard-coded threshold
- if (raw_motion_error > LOW_MOTION_ERROR_THRESH || cpi->oxcf.speed <= 2) {
+ if (raw_motion_error > cpi->sf.fp_sf.skip_motion_search_threshold) {
// Test last reference frame using the previous best mv as the
// starting point (best reference) for the search.
- first_pass_motion_search(cpi, x, best_ref_mv, &mv, &motion_error);
+ first_pass_motion_search(cpi, x, &ref_mv, &mv, &motion_error);
// If the current best reference mv is not centered on 0,0 then do a
// 0,0 based search as well.
- if (!is_zero_mv(best_ref_mv)) {
+ if (!is_zero_mv(&ref_mv)) {
+ FULLPEL_MV tmp_mv = kZeroFullMv;
int tmp_err = INT_MAX;
first_pass_motion_search(cpi, x, &kZeroMv, &tmp_mv, &tmp_err);
@@ -657,6 +750,7 @@ static int firstpass_inter_prediction(
// Motion search in 2nd reference frame.
int gf_motion_error = motion_error;
if ((current_frame->frame_number > 1) && golden_frame != NULL) {
+ FULLPEL_MV tmp_mv = kZeroFullMv;
// Assume 0,0 motion with no mv overhead.
xd->plane[0].pre[0].buf = golden_frame->y_buffer + recon_yoffset;
xd->plane[0].pre[0].stride = golden_frame->y_stride;
@@ -682,13 +776,22 @@ static int firstpass_inter_prediction(
// Motion search in 3rd reference frame.
int alt_motion_error = motion_error;
- if (alt_ref_frame != NULL) {
- xd->plane[0].pre[0].buf = alt_ref_frame->y_buffer + alt_ref_frame_yoffset;
- xd->plane[0].pre[0].stride = alt_ref_frame->y_stride;
- alt_motion_error =
- get_prediction_error_bitdepth(is_high_bitdepth, bitdepth, bsize,
- &x->plane[0].src, &xd->plane[0].pre[0]);
- first_pass_motion_search(cpi, x, &kZeroMv, &tmp_mv, &alt_motion_error);
+ // The ML model to predict if a flat structure (golden-frame only structure
+ // without ALT-REF and Internal-ARFs) is better requires stats based on
+ // motion search w.r.t 3rd reference frame in the first pass. As the ML
+ // model is enabled under certain conditions, motion search in 3rd reference
+ // frame is also enabled for those cases.
+ if (do_third_ref_motion_search(&cpi->oxcf.rc_cfg, &cpi->oxcf.gf_cfg)) {
+ if (alt_ref_frame != NULL) {
+ FULLPEL_MV tmp_mv = kZeroFullMv;
+ xd->plane[0].pre[0].buf =
+ alt_ref_frame->y_buffer + alt_ref_frame_yoffset;
+ xd->plane[0].pre[0].stride = alt_ref_frame->y_stride;
+ alt_motion_error = get_prediction_error_bitdepth(
+ is_high_bitdepth, bitdepth, bsize, &x->plane[0].src,
+ &xd->plane[0].pre[0]);
+ first_pass_motion_search(cpi, x, &kZeroMv, &tmp_mv, &alt_motion_error);
+ }
}
if (alt_motion_error < motion_error && alt_motion_error < gf_motion_error &&
alt_motion_error < this_intra_error) {
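The predicate do_third_ref_motion_search() is referenced in this hunk but its body is not shown here. As a rough, hypothetical sketch of the kind of gating it implies, the extra search against the 3rd (ALT-REF) reference only pays off when the flat-GOP ML model will later consume the resulting stats; every type and field name below is an illustrative stand-in, not the libaom one.

#include <stdbool.h>

struct demo_rc_cfg { bool two_pass; };
struct demo_gf_cfg { bool enable_auto_altref; };

/* Only gather 3rd-ref stats when a flat (GF-only) structure could actually be
 * chosen, i.e. in two-pass encodes that are allowed to build an ALT-REF
 * pyramid. This mirrors the intent described in the comment above, not the
 * real libaom predicate. */
static bool demo_do_third_ref_motion_search(const struct demo_rc_cfg *rc,
                                            const struct demo_gf_cfg *gf) {
  return rc->two_pass && gf->enable_auto_altref;
}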
@@ -716,8 +819,7 @@ static int firstpass_inter_prediction(
}
// Start by assuming that intra mode is best.
- best_ref_mv->row = 0;
- best_ref_mv->col = 0;
+ *best_mv = kZeroMv;
if (motion_error <= this_intra_error) {
aom_clear_system_state();
@@ -736,28 +838,30 @@ static int firstpass_inter_prediction(
(double)motion_error / DOUBLE_DIVIDE_CHECK((double)this_intra_error);
}
- const MV best_mv = get_mv_from_fullmv(&mv);
+ *best_mv = get_mv_from_fullmv(&mv);
this_inter_error = motion_error;
xd->mi[0]->mode = NEWMV;
- xd->mi[0]->mv[0].as_mv = best_mv;
+ xd->mi[0]->mv[0].as_mv = *best_mv;
xd->mi[0]->tx_size = TX_4X4;
xd->mi[0]->ref_frame[0] = LAST_FRAME;
xd->mi[0]->ref_frame[1] = NONE_FRAME;
- av1_enc_build_inter_predictor(cm, xd, unit_row * unit_scale,
- unit_col * unit_scale, NULL, bsize,
- AOM_PLANE_Y, AOM_PLANE_Y);
- av1_encode_sby_pass1(cpi, x, bsize);
- stats->sum_mvr += best_mv.row;
- stats->sum_mvr_abs += abs(best_mv.row);
- stats->sum_mvc += best_mv.col;
- stats->sum_mvc_abs += abs(best_mv.col);
- stats->sum_mvrs += best_mv.row * best_mv.row;
- stats->sum_mvcs += best_mv.col * best_mv.col;
+
+ if (cpi->sf.fp_sf.disable_recon == 0) {
+ av1_enc_build_inter_predictor(cm, xd, unit_row * unit_scale,
+ unit_col * unit_scale, NULL, bsize,
+ AOM_PLANE_Y, AOM_PLANE_Y);
+ av1_encode_sby_pass1(cpi, x, bsize);
+ }
+ stats->sum_mvr += best_mv->row;
+ stats->sum_mvr_abs += abs(best_mv->row);
+ stats->sum_mvc += best_mv->col;
+ stats->sum_mvc_abs += abs(best_mv->col);
+ stats->sum_mvrs += best_mv->row * best_mv->row;
+ stats->sum_mvcs += best_mv->col * best_mv->col;
++stats->inter_count;
- *best_ref_mv = best_mv;
- accumulate_mv_stats(best_mv, mv, unit_row, unit_col, unit_rows, unit_cols,
- last_mv, stats);
+ accumulate_mv_stats(*best_mv, mv, unit_row, unit_col, unit_rows, unit_cols,
+ last_non_zero_mv, stats);
}
return this_inter_error;
@@ -783,7 +887,7 @@ static void update_firstpass_stats(AV1_COMP *cpi,
const int frame_number,
const int64_t ts_duration,
const BLOCK_SIZE fp_block_size) {
- TWO_PASS *twopass = &cpi->twopass;
+ TWO_PASS *twopass = &cpi->ppi->twopass;
AV1_COMMON *const cm = &cpi->common;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
FIRSTPASS_STATS *this_frame_stats = twopass->stats_buf_ctx->stats_in_end;
@@ -817,6 +921,9 @@ static void update_firstpass_stats(AV1_COMP *cpi,
fps.inactive_zone_rows = (double)stats->image_data_start_row;
fps.inactive_zone_cols = (double)0; // TODO(paulwilkins): fix
fps.raw_error_stdev = raw_err_stdev;
+ fps.is_flash = 0;
+ fps.noise_var = (double)0;
+ fps.cor_coeff = (double)1.0;
if (stats->mv_count > 0) {
fps.MVr = (double)stats->sum_mvr / stats->mv_count;
@@ -849,12 +956,20 @@ static void update_firstpass_stats(AV1_COMP *cpi,
// cpi->source_time_stamp.
fps.duration = (double)ts_duration;
+ // Invalidate the stats related to the third ref motion search if it was skipped.
+ // This helps to print a warning in second pass encoding.
+ if (do_third_ref_motion_search(&cpi->oxcf.rc_cfg, &cpi->oxcf.gf_cfg) == 0) {
+ fps.pcnt_third_ref = INVALID_FP_STATS_TO_PREDICT_FLAT_GOP;
+ fps.tr_coded_error = INVALID_FP_STATS_TO_PREDICT_FLAT_GOP;
+ }
+
// We will store the stats inside the persistent twopass struct (and NOT the
// local variable 'fps'), and then cpi->output_pkt_list will point to it.
*this_frame_stats = fps;
- output_stats(this_frame_stats, cpi->output_pkt_list);
- if (cpi->twopass.stats_buf_ctx->total_stats != NULL) {
- av1_accumulate_stats(cpi->twopass.stats_buf_ctx->total_stats, &fps);
+ if (!cpi->ppi->lap_enabled)
+ output_stats(this_frame_stats, cpi->ppi->output_pkt_list);
+ if (cpi->ppi->twopass.stats_buf_ctx->total_stats != NULL) {
+ av1_accumulate_stats(cpi->ppi->twopass.stats_buf_ctx->total_stats, &fps);
}
/*In the case of two pass, first pass uses it as a circular buffer,
* when LAP is enabled it is used as a linear buffer*/
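When the third-reference search is skipped, pcnt_third_ref and tr_coded_error are overwritten with the INVALID_FP_STATS_TO_PREDICT_FLAT_GOP sentinel so the second pass can detect the gap. A hedged sketch of a consumer-side check follows; only the constant's name comes from this diff, while its value, the struct layout and the helper are made up for illustration.

#include <stdio.h>

/* Assumed stand-in; the real INVALID_FP_STATS_TO_PREDICT_FLAT_GOP is defined
 * elsewhere in libaom and its value may differ. */
#define DEMO_INVALID_FP_STAT (-1.0)

struct demo_fp_stats { double pcnt_third_ref, tr_coded_error; };

static int demo_third_ref_stats_usable(const struct demo_fp_stats *s) {
  if (s->pcnt_third_ref == DEMO_INVALID_FP_STAT ||
      s->tr_coded_error == DEMO_INVALID_FP_STAT) {
    fprintf(stderr,
            "first-pass 3rd-ref stats unavailable; skipping flat-GOP model\n");
    return 0;
  }
  return 1;
}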
@@ -982,6 +1097,17 @@ static void first_pass_tiles(AV1_COMP *cpi, const BLOCK_SIZE fp_block_size) {
AV1_COMMON *const cm = &cpi->common;
const int tile_cols = cm->tiles.cols;
const int tile_rows = cm->tiles.rows;
+ const int num_planes = av1_num_planes(&cpi->common);
+ for (int plane = 0; plane < num_planes; plane++) {
+ const int subsampling_xy =
+ plane ? cm->seq_params->subsampling_x + cm->seq_params->subsampling_y
+ : 0;
+ const int sb_size = MAX_SB_SQUARE >> subsampling_xy;
+ CHECK_MEM_ERROR(
+ cm, cpi->td.mb.plane[plane].src_diff,
+ (int16_t *)aom_memalign(
+ 32, sizeof(*cpi->td.mb.plane[plane].src_diff) * sb_size));
+ }
for (int tile_row = 0; tile_row < tile_rows; ++tile_row) {
for (int tile_col = 0; tile_col < tile_cols; ++tile_col) {
TileDataEnc *const tile_data =
@@ -989,6 +1115,12 @@ static void first_pass_tiles(AV1_COMP *cpi, const BLOCK_SIZE fp_block_size) {
first_pass_tile(cpi, &cpi->td, tile_data, fp_block_size);
}
}
+ for (int plane = 0; plane < num_planes; plane++) {
+ if (cpi->td.mb.plane[plane].src_diff) {
+ aom_free(cpi->td.mb.plane[plane].src_diff);
+ cpi->td.mb.plane[plane].src_diff = NULL;
+ }
+ }
}
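The per-plane src_diff buffers allocated above are sized by MAX_SB_SQUARE shifted down by the plane's combined subsampling, so 4:2:0 chroma planes get a quarter of the luma buffer. A small worked example of that sizing, assuming a 128x128 maximum superblock and 4:2:0 subsampling:

#include <stdint.h>
#include <stdio.h>

int main(void) {
  const int max_sb_square = 128 * 128; /* assumed AV1 maximum superblock area */
  for (int plane = 0; plane < 3; ++plane) {
    /* Luma is never subsampled; with 4:2:0 each chroma plane is subsampled by
     * one in both directions, so subsampling_x + subsampling_y == 2. */
    const int subsampling_xy = plane ? 2 : 0;
    const int sb_size = max_sb_square >> subsampling_xy;
    printf("plane %d: %d residual samples (%zu bytes)\n", plane, sb_size,
           sb_size * sizeof(int16_t));
  }
  return 0; /* 16384 / 4096 / 4096 samples, i.e. 32 KiB / 8 KiB / 8 KiB */
}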
void av1_first_pass_row(AV1_COMP *cpi, ThreadData *td, TileDataEnc *tile_data,
@@ -997,7 +1129,7 @@ void av1_first_pass_row(AV1_COMP *cpi, ThreadData *td, TileDataEnc *tile_data,
AV1_COMMON *const cm = &cpi->common;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
CurrentFrame *const current_frame = &cm->current_frame;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int num_planes = av1_num_planes(cm);
MACROBLOCKD *const xd = &x->e_mbd;
TileInfo *tile = &tile_data->tile_info;
@@ -1105,7 +1237,7 @@ void av1_first_pass_row(AV1_COMP *cpi, ThreadData *td, TileDataEnc *tile_data,
cpi, td, last_frame, golden_frame, alt_ref_frame, unit_row, unit_col,
recon_yoffset, recon_uvoffset, src_yoffset, alt_ref_frame_yoffset,
fp_block_size, this_intra_error, raw_motion_err_counts,
- raw_motion_err_list, &best_ref_mv, &last_mv, mb_stats);
+ raw_motion_err_list, best_ref_mv, &best_ref_mv, &last_mv, mb_stats);
if (unit_col_in_tile == 0) {
*first_top_mv = last_mv;
}
@@ -1138,7 +1270,7 @@ void av1_first_pass(AV1_COMP *cpi, const int64_t ts_duration) {
AV1_COMMON *const cm = &cpi->common;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
CurrentFrame *const current_frame = &cm->current_frame;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int num_planes = av1_num_planes(cm);
MACROBLOCKD *const xd = &x->e_mbd;
const int qindex = find_fp_qindex(seq_params->bit_depth);
@@ -1147,9 +1279,14 @@ void av1_first_pass(AV1_COMP *cpi, const int64_t ts_duration) {
FeatureFlags *const features = &cm->features;
av1_set_screen_content_options(cpi, features);
}
+
+ // Prepare the speed features
+ av1_set_speed_features_framesize_independent(cpi, cpi->oxcf.speed);
+
// Unit size for the first pass encoding.
const BLOCK_SIZE fp_block_size =
- cpi->is_screen_content_type ? BLOCK_8X8 : BLOCK_16X16;
+ get_fp_block_size(cpi->is_screen_content_type);
+
// Number of rows in the unit size.
// Note mi_params->mb_rows and mi_params->mb_cols are in the unit of 16x16.
const int unit_rows = get_unit_rows(fp_block_size, mi_params->mb_rows);
@@ -1250,7 +1387,7 @@ void av1_first_pass(AV1_COMP *cpi, const int64_t ts_duration) {
(stats.image_data_start_row * unit_cols * 2));
}
- TWO_PASS *twopass = &cpi->twopass;
+ TWO_PASS *twopass = &cpi->ppi->twopass;
const int num_mbs_16X16 = (cpi->oxcf.resize_cfg.resize_mode != RESIZE_NONE)
? cpi->initial_mbs
: mi_params->MBs;
diff --git a/third_party/libaom/source/libaom/av1/encoder/firstpass.h b/third_party/libaom/source/libaom/av1/encoder/firstpass.h
index 22969e885b..122912f72a 100644
--- a/third_party/libaom/source/libaom/av1/encoder/firstpass.h
+++ b/third_party/libaom/source/libaom/av1/encoder/firstpass.h
@@ -152,6 +152,18 @@ typedef struct {
* standard deviation for (0, 0) motion prediction error
*/
double raw_error_stdev;
+ /*!
+ * Whether the frame contains a flash
+ */
+ int64_t is_flash;
+ /*!
+ * Estimated noise variance
+ */
+ double noise_var;
+ /*!
+ * Correlation coefficient with the previous frame
+ */
+ double cor_coeff;
} FIRSTPASS_STATS;
/*!\cond */
@@ -170,8 +182,6 @@ enum {
*/
typedef struct {
/*!\cond */
- // The frame processing order within a GOP
- unsigned char index;
// Frame update type, e.g. ARF/GF/LF/Overlay
FRAME_UPDATE_TYPE update_type[MAX_STATIC_GF_GROUP_LENGTH];
unsigned char arf_src_offset[MAX_STATIC_GF_GROUP_LENGTH];
@@ -191,6 +201,21 @@ typedef struct {
REFBUF_STATE refbuf_state[MAX_STATIC_GF_GROUP_LENGTH];
int arf_index; // the index in the gf group of ARF, if no arf, then -1
int size; // The total length of a GOP
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Indicates the level of parallelism in frame parallel encodes.
+ // 0 : frame is independently encoded (not part of parallel encodes).
+ // 1 : frame is the first in encode order in a given parallel encode set.
+ // 2 : frame occurs later in encode order in a given parallel encode set.
+ int frame_parallel_level[MAX_STATIC_GF_GROUP_LENGTH];
+ // Indicates whether a frame should act as non-reference frame.
+ // 0 : frame is a reference frame.
+ // 1 : frame is a non-reference frame.
+ int is_frame_non_ref[MAX_STATIC_GF_GROUP_LENGTH];
+
+ // The offset into lookahead_ctx for choosing
+ // source of frame parallel encodes.
+ int src_offset[MAX_STATIC_GF_GROUP_LENGTH];
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
/*!\endcond */
} GF_GROUP;
/*!\cond */
@@ -327,6 +352,15 @@ struct EncodeFrameParams;
struct AV1EncoderConfig;
struct TileDataEnc;
+static INLINE int is_fp_wavelet_energy_invalid(
+ const FIRSTPASS_STATS *fp_stats) {
+ return (fp_stats->frame_avg_wavelet_energy < 0);
+}
+
+static INLINE BLOCK_SIZE get_fp_block_size(int is_screen_content_type) {
+ return (is_screen_content_type ? BLOCK_8X8 : BLOCK_16X16);
+}
+
int av1_get_unit_rows_in_tile(TileInfo tile, const BLOCK_SIZE fp_block_size);
int av1_get_unit_cols_in_tile(TileInfo tile, const BLOCK_SIZE fp_block_size);
diff --git a/third_party/libaom/source/libaom/av1/encoder/global_motion_facade.c b/third_party/libaom/source/libaom/av1/encoder/global_motion_facade.c
index 31c69da7eb..01ef7b0843 100644
--- a/third_party/libaom/source/libaom/av1/encoder/global_motion_facade.c
+++ b/third_party/libaom/source/libaom/av1/encoder/global_motion_facade.c
@@ -108,10 +108,10 @@ static AOM_INLINE void compute_global_motion_for_ref_frame(
const int do_adaptive_gm_estimation = 0;
const int ref_frame_dist = get_relative_dist(
- &cm->seq_params.order_hint_info, cm->current_frame.order_hint,
+ &cm->seq_params->order_hint_info, cm->current_frame.order_hint,
cm->cur_frame->ref_order_hints[frame - LAST_FRAME]);
const GlobalMotionEstimationType gm_estimation_type =
- cm->seq_params.order_hint_info.enable_order_hint &&
+ cm->seq_params->order_hint_info.enable_order_hint &&
abs(ref_frame_dist) <= 2 && do_adaptive_gm_estimation
? GLOBAL_MOTION_DISFLOW_BASED
: GLOBAL_MOTION_FEATURE_BASED;
@@ -126,7 +126,7 @@ static AOM_INLINE void compute_global_motion_for_ref_frame(
av1_compute_global_motion(model, src_buffer, src_width, src_height,
src_stride, src_corners, num_src_corners,
- ref_buf[frame], cpi->common.seq_params.bit_depth,
+ ref_buf[frame], cpi->common.seq_params->bit_depth,
gm_estimation_type, inliers_by_motion,
params_by_motion, RANSAC_NUM_MOTIONS);
int64_t ref_frame_error = 0;
@@ -284,9 +284,9 @@ static AOM_INLINE void update_valid_ref_frames_for_gm(
AV1_COMMON *const cm = &cpi->common;
int *num_past_ref_frames = &num_ref_frames[0];
int *num_future_ref_frames = &num_ref_frames[1];
- const GF_GROUP *gf_group = &cpi->gf_group;
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
int ref_pruning_enabled = is_frame_eligible_for_ref_pruning(
- gf_group, cpi->sf.inter_sf.selective_ref_frame, 1, gf_group->index);
+ gf_group, cpi->sf.inter_sf.selective_ref_frame, 1, cpi->gf_frame_index);
for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; --frame) {
const MV_REFERENCE_FRAME ref_frame[2] = { frame, NONE_FRAME };
@@ -368,7 +368,7 @@ static AOM_INLINE void setup_global_motion_info_params(AV1_COMP *cpi) {
// The source buffer is 16-bit, so we need to convert to 8 bits for the
// following code. We cache the result until the source frame is released.
gm_info->src_buffer =
- av1_downconvert_frame(source, cpi->common.seq_params.bit_depth);
+ av1_downconvert_frame(source, cpi->common.seq_params->bit_depth);
}
gm_info->segment_map_w =
diff --git a/third_party/libaom/source/libaom/av1/encoder/gop_structure.c b/third_party/libaom/source/libaom/av1/encoder/gop_structure.c
index 0e4968a72f..9cf72d2733 100644
--- a/third_party/libaom/source/libaom/av1/encoder/gop_structure.c
+++ b/third_party/libaom/source/libaom/av1/encoder/gop_structure.c
@@ -26,12 +26,51 @@
#include "av1/encoder/firstpass.h"
#include "av1/encoder/gop_structure.h"
+#if CONFIG_FRAME_PARALLEL_ENCODE
+// This function sets gf_group->frame_parallel_level for LF_UPDATE frames based
+// on the value of parallel_frame_count.
+static void set_frame_parallel_level(int *frame_parallel_level,
+ int *parallel_frame_count,
+ int max_parallel_frames) {
+ assert(*parallel_frame_count > 0);
+ // parallel_frame_count > 1 indicates subsequent frame(s) in the current
+ // parallel encode set.
+ *frame_parallel_level = 1 + (*parallel_frame_count > 1);
+ // Update the count of no. of parallel frames.
+ (*parallel_frame_count)++;
+ if (*parallel_frame_count > max_parallel_frames) *parallel_frame_count = 1;
+}
+
+// This function sets gf_group->src_offset based on frame_parallel_level.
+// Outputs are gf_group->src_offset and first_frame_index
+static void set_src_offset(GF_GROUP *const gf_group, int *first_frame_index,
+ int cur_frame_idx, int frame_ind) {
+ if (gf_group->frame_parallel_level[frame_ind] > 0) {
+ if (gf_group->frame_parallel_level[frame_ind] == 1) {
+ *first_frame_index = cur_frame_idx;
+ }
+
+ // Obtain the offset of the frame at frame_ind in the lookahead queue by
+ // subtracting the display order hint of the first frame in the parallel
+ // encoding set (at first_frame_index) from the display order hint of the
+ // current frame.
+ gf_group->src_offset[frame_ind] =
+ (cur_frame_idx + gf_group->arf_src_offset[frame_ind]) -
+ *first_frame_index;
+ }
+}
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
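A standalone copy of the arithmetic in set_frame_parallel_level() shows the pattern it produces: with max_parallel_frames == 2, consecutive LF_UPDATE frames receive levels 1, 2, 1, 2, ... (level 1 opens a parallel set, level 2 joins the one just opened). set_src_offset() then records, for frames inside a set, how far their source sits from the frame that opened the set.

#include <stdio.h>

static void demo_set_level(int *level, int *count, int max_parallel) {
  *level = 1 + (*count > 1); /* 1 for the opener, 2 for followers */
  (*count)++;
  if (*count > max_parallel) *count = 1;
}

int main(void) {
  int count = 1; /* value 1 means "no parallel set currently open" */
  for (int frame = 0; frame < 6; ++frame) {
    int level;
    demo_set_level(&level, &count, /*max_parallel=*/2);
    printf("LF_UPDATE %d -> frame_parallel_level %d\n", frame, level);
  }
  return 0; /* prints levels 1 2 1 2 1 2 */
}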
+
// Set parameters for frames between 'start' and 'end' (excluding both).
-static void set_multi_layer_params(const TWO_PASS *twopass,
- GF_GROUP *const gf_group, RATE_CONTROL *rc,
- FRAME_INFO *frame_info, int start, int end,
- int *cur_frame_idx, int *frame_ind,
- int layer_depth) {
+static void set_multi_layer_params(
+ const TWO_PASS *twopass, GF_GROUP *const gf_group,
+ const PRIMARY_RATE_CONTROL *p_rc, RATE_CONTROL *rc, FRAME_INFO *frame_info,
+ int start, int end, int *cur_frame_idx, int *frame_ind,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ int *parallel_frame_count, int max_parallel_frames,
+ int do_frame_parallel_encode, int *first_frame_index,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ int layer_depth) {
const int num_frames_to_process = end - start;
// Either we are at the last level of the pyramid, or we don't have enough
@@ -45,11 +84,21 @@ static void set_multi_layer_params(const TWO_PASS *twopass,
gf_group->cur_frame_idx[*frame_ind] = *cur_frame_idx;
gf_group->layer_depth[*frame_ind] = MAX_ARF_LAYERS;
gf_group->arf_boost[*frame_ind] = av1_calc_arf_boost(
- twopass, rc, frame_info, start, end - start, 0, NULL, NULL);
+ twopass, p_rc, rc, frame_info, start, end - start, 0, NULL, NULL, 0);
gf_group->frame_type[*frame_ind] = INTER_FRAME;
gf_group->refbuf_state[*frame_ind] = REFBUF_UPDATE;
gf_group->max_layer_depth =
AOMMAX(gf_group->max_layer_depth, layer_depth);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Set the level of parallelism for the LF_UPDATE frame.
+ if (do_frame_parallel_encode) {
+ set_frame_parallel_level(&gf_group->frame_parallel_level[*frame_ind],
+ parallel_frame_count, max_parallel_frames);
+ // Set LF_UPDATE frames as non-reference frames.
+ gf_group->is_frame_non_ref[*frame_ind] = 1;
+ }
+ set_src_offset(gf_group, first_frame_index, *cur_frame_idx, *frame_ind);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
++(*frame_ind);
++(*cur_frame_idx);
++start;
@@ -65,14 +114,32 @@ static void set_multi_layer_params(const TWO_PASS *twopass,
gf_group->frame_type[*frame_ind] = INTER_FRAME;
gf_group->refbuf_state[*frame_ind] = REFBUF_UPDATE;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ if (do_frame_parallel_encode) {
+ // If max_parallel_frames is not exceeded, encode the next internal ARF
+ // frame in parallel.
+ if (*parallel_frame_count > 1 &&
+ *parallel_frame_count <= max_parallel_frames) {
+ gf_group->frame_parallel_level[*frame_ind] = 2;
+ *parallel_frame_count = 1;
+ }
+ }
+ set_src_offset(gf_group, first_frame_index, *cur_frame_idx, *frame_ind);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
// Get the boost factor for intermediate ARF frames.
gf_group->arf_boost[*frame_ind] = av1_calc_arf_boost(
- twopass, rc, frame_info, m, end - m, m - start, NULL, NULL);
+ twopass, p_rc, rc, frame_info, m, end - m, m - start, NULL, NULL, 0);
++(*frame_ind);
// Frames displayed before this internal ARF.
- set_multi_layer_params(twopass, gf_group, rc, frame_info, start, m,
- cur_frame_idx, frame_ind, layer_depth + 1);
+ set_multi_layer_params(twopass, gf_group, p_rc, rc, frame_info, start, m,
+ cur_frame_idx, frame_ind,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ parallel_frame_count, max_parallel_frames,
+ do_frame_parallel_encode, first_frame_index,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ layer_depth + 1);
// Overlay for internal ARF.
gf_group->update_type[*frame_ind] = INTNL_OVERLAY_UPDATE;
@@ -82,12 +149,21 @@ static void set_multi_layer_params(const TWO_PASS *twopass,
gf_group->layer_depth[*frame_ind] = layer_depth;
gf_group->frame_type[*frame_ind] = INTER_FRAME;
gf_group->refbuf_state[*frame_ind] = REFBUF_UPDATE;
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ set_src_offset(gf_group, first_frame_index, *cur_frame_idx, *frame_ind);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
++(*frame_ind);
++(*cur_frame_idx);
// Frames displayed after this internal ARF.
- set_multi_layer_params(twopass, gf_group, rc, frame_info, m + 1, end,
- cur_frame_idx, frame_ind, layer_depth + 1);
+ set_multi_layer_params(twopass, gf_group, p_rc, rc, frame_info, m + 1, end,
+ cur_frame_idx, frame_ind,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ parallel_frame_count, max_parallel_frames,
+ do_frame_parallel_encode, first_frame_index,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ layer_depth + 1);
}
}
@@ -95,6 +171,7 @@ static int construct_multi_layer_gf_structure(
AV1_COMP *cpi, TWO_PASS *twopass, GF_GROUP *const gf_group,
RATE_CONTROL *rc, FRAME_INFO *const frame_info, int gf_interval,
FRAME_UPDATE_TYPE first_frame_update_type) {
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
int frame_index = 0;
int cur_frame_index = 0;
@@ -103,6 +180,18 @@ static int construct_multi_layer_gf_structure(
first_frame_update_type == OVERLAY_UPDATE ||
first_frame_update_type == GF_UPDATE);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Initialize gf_group->frame_parallel_level, gf_group->is_frame_non_ref and
+ // gf_group->src_offset to 0.
+ memset(
+ gf_group->frame_parallel_level, 0,
+ sizeof(gf_group->frame_parallel_level[0]) * MAX_STATIC_GF_GROUP_LENGTH);
+ memset(gf_group->is_frame_non_ref, 0,
+ sizeof(gf_group->is_frame_non_ref[0]) * MAX_STATIC_GF_GROUP_LENGTH);
+ memset(gf_group->src_offset, 0,
+ sizeof(gf_group->src_offset[0]) * MAX_STATIC_GF_GROUP_LENGTH);
+#endif
+
if (first_frame_update_type == KF_UPDATE &&
cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1) {
gf_group->update_type[frame_index] = ARF_UPDATE;
@@ -146,7 +235,7 @@ static int construct_multi_layer_gf_structure(
gf_group->arf_src_offset[frame_index] = gf_interval - cur_frame_index;
gf_group->cur_frame_idx[frame_index] = cur_frame_index;
gf_group->layer_depth[frame_index] = 1;
- gf_group->arf_boost[frame_index] = cpi->rc.gfu_boost;
+ gf_group->arf_boost[frame_index] = cpi->ppi->p_rc.gfu_boost;
gf_group->frame_type[frame_index] = is_fwd_kf ? KEY_FRAME : INTER_FRAME;
gf_group->refbuf_state[frame_index] = REFBUF_UPDATE;
gf_group->max_layer_depth = 1;
@@ -156,9 +245,25 @@ static int construct_multi_layer_gf_structure(
gf_group->arf_index = -1;
}
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Running count of the number of frames that are part of a given parallel
+ // encode set in a gf_group. A value of 1 indicates no parallel encode.
+ int parallel_frame_count = 1;
+ // Enable parallel encode of frames if gf_group has a multi-layer pyramid
+ // structure.
+ int do_frame_parallel_encode = (cpi->ppi->num_fp_contexts > 1 && use_altref);
+
+ int first_frame_index = cur_frame_index;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
// Rest of the frames.
- set_multi_layer_params(twopass, gf_group, rc, frame_info, cur_frame_index,
- gf_interval, &cur_frame_index, &frame_index,
+ set_multi_layer_params(twopass, gf_group, p_rc, rc, frame_info,
+ cur_frame_index, gf_interval, &cur_frame_index,
+ &frame_index,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ &parallel_frame_count, cpi->ppi->num_fp_contexts,
+ do_frame_parallel_encode, &first_frame_index,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
use_altref + 1);
if (use_altref) {
@@ -181,25 +286,41 @@ static int construct_multi_layer_gf_structure(
gf_group->frame_type[frame_index] = INTER_FRAME;
gf_group->refbuf_state[frame_index] = REFBUF_UPDATE;
gf_group->max_layer_depth = AOMMAX(gf_group->max_layer_depth, 2);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ set_src_offset(gf_group, &first_frame_index, cur_frame_index,
+ frame_index);
+#endif
++frame_index;
}
}
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ if (do_frame_parallel_encode) {
+ // If frame_parallel_level is set to 1 for the last LF_UPDATE
+ // frame in the gf_group, reset it to zero since there are no subsequent
+ // frames in the gf_group.
+ if (gf_group->frame_parallel_level[frame_index - 2] == 1) {
+ assert(gf_group->update_type[frame_index - 2] == LF_UPDATE);
+ gf_group->frame_parallel_level[frame_index - 2] = 0;
+ }
+ }
+#endif
return frame_index;
}
void av1_gop_setup_structure(AV1_COMP *cpi) {
RATE_CONTROL *const rc = &cpi->rc;
- GF_GROUP *const gf_group = &cpi->gf_group;
- TWO_PASS *const twopass = &cpi->twopass;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
FRAME_INFO *const frame_info = &cpi->frame_info;
const int key_frame = rc->frames_since_key == 0;
const FRAME_UPDATE_TYPE first_frame_update_type =
- key_frame
- ? KF_UPDATE
- : cpi->gf_state.arf_gf_boost_lst || (rc->baseline_gf_interval == 1)
- ? OVERLAY_UPDATE
- : GF_UPDATE;
+ key_frame ? KF_UPDATE
+ : cpi->ppi->gf_state.arf_gf_boost_lst ||
+ (p_rc->baseline_gf_interval == 1)
+ ? OVERLAY_UPDATE
+ : GF_UPDATE;
gf_group->size = construct_multi_layer_gf_structure(
- cpi, twopass, gf_group, rc, frame_info, rc->baseline_gf_interval - 1,
+ cpi, twopass, gf_group, rc, frame_info, p_rc->baseline_gf_interval - 1,
first_frame_update_type);
}
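The reformatted nested conditional that selects first_frame_update_type above is compact but easy to misread. An equivalent spelled-out form, using a stand-in enum rather than libaom's FRAME_UPDATE_TYPE:

typedef enum { DEMO_KF_UPDATE, DEMO_OVERLAY_UPDATE, DEMO_GF_UPDATE } demo_update_type;

/* Same decision as the ternary above, with the operator precedence made
 * explicit. */
static demo_update_type demo_first_update_type(int key_frame,
                                               int arf_gf_boost_lst,
                                               int baseline_gf_interval) {
  if (key_frame) return DEMO_KF_UPDATE;
  if (arf_gf_boost_lst || baseline_gf_interval == 1) return DEMO_OVERLAY_UPDATE;
  return DEMO_GF_UPDATE;
}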
diff --git a/third_party/libaom/source/libaom/av1/encoder/gop_structure.h b/third_party/libaom/source/libaom/av1/encoder/gop_structure.h
index 6cfca22862..aeffb40acb 100644
--- a/third_party/libaom/source/libaom/av1/encoder/gop_structure.h
+++ b/third_party/libaom/source/libaom/av1/encoder/gop_structure.h
@@ -66,10 +66,11 @@ void av1_gop_bit_allocation(const AV1_COMP *cpi, RATE_CONTROL *const rc,
int64_t gf_group_bits);
/*!\cond */
-int av1_calc_arf_boost(const TWO_PASS *twopass, const RATE_CONTROL *rc,
+int av1_calc_arf_boost(const TWO_PASS *twopass,
+ const PRIMARY_RATE_CONTROL *p_rc, const RATE_CONTROL *rc,
FRAME_INFO *frame_info, int offset, int f_frames,
int b_frames, int *num_fpstats_used,
- int *num_fpstats_required);
+ int *num_fpstats_required, int project_gfu_boost);
/*!\endcond */
#ifdef __cplusplus
diff --git a/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.c b/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.c
index 08c167a9d6..eda5ddf78c 100644
--- a/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.c
+++ b/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.c
@@ -14,6 +14,7 @@
#include "config/aom_dsp_rtcd.h"
#include "av1/common/idct.h"
+#include "av1/common/blockd.h"
#include "av1/encoder/hybrid_fwd_txfm.h"
/* 4-point reversible, orthonormal Walsh-Hadamard in 3.5 adds, 0.5 shifts per
@@ -313,3 +314,26 @@ void av1_highbd_fwd_txfm(const int16_t *src_diff, tran_low_t *coeff,
default: assert(0); break;
}
}
+
+void av1_quick_txfm(int use_hadamard, TX_SIZE tx_size, BitDepthInfo bd_info,
+ const int16_t *src_diff, int src_stride,
+ tran_low_t *coeff) {
+ if (use_hadamard) {
+ switch (tx_size) {
+ case TX_4X4: aom_hadamard_4x4(src_diff, src_stride, coeff); break;
+ case TX_8X8: aom_hadamard_8x8(src_diff, src_stride, coeff); break;
+ case TX_16X16: aom_hadamard_16x16(src_diff, src_stride, coeff); break;
+ case TX_32X32: aom_hadamard_32x32(src_diff, src_stride, coeff); break;
+ default: assert(0);
+ }
+ } else {
+ TxfmParam txfm_param;
+ txfm_param.tx_type = DCT_DCT;
+ txfm_param.tx_size = tx_size;
+ txfm_param.lossless = 0;
+ txfm_param.bd = bd_info.bit_depth;
+ txfm_param.is_hbd = bd_info.use_highbitdepth_buf;
+ txfm_param.tx_set_type = EXT_TX_SET_ALL16;
+ av1_fwd_txfm(src_diff, coeff, src_stride, &txfm_param);
+ }
+}
diff --git a/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.h b/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.h
index daabc7119a..30f8a2258b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.h
+++ b/third_party/libaom/source/libaom/av1/encoder/hybrid_fwd_txfm.h
@@ -24,6 +24,15 @@ void av1_fwd_txfm(const int16_t *src_diff, tran_low_t *coeff, int diff_stride,
void av1_highbd_fwd_txfm(const int16_t *src_diff, tran_low_t *coeff,
int diff_stride, TxfmParam *txfm_param);
+/*!\brief Apply Hadamard or DCT transform
+ *
+ * \callergraph
+ * DCT and Hadamard transforms are commonly used for quick RD score estimation.
+ * The coeff buffer's size should be equal to the number of pixels
+ * corresponding to tx_size.
+ */
+void av1_quick_txfm(int use_hadamard, TX_SIZE tx_size, BitDepthInfo bd_info,
+ const int16_t *src_diff, int src_stride, tran_low_t *coeff);
#ifdef __cplusplus
} // extern "C"
#endif
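av1_quick_txfm() exists so callers can run a cheap transform on a residual block and use the magnitude of the resulting coefficients as an RD proxy. A self-contained sketch of that idea, using an unnormalized 4x4 Hadamard and a SATD (sum of absolute transformed differences) score; this is illustrative only and is not the aom_hadamard_4x4 kernel.

#include <stdio.h>
#include <stdlib.h>

static void hadamard4(const int in[4], int out[4]) {
  const int a = in[0] + in[1], b = in[0] - in[1];
  const int c = in[2] + in[3], d = in[2] - in[3];
  out[0] = a + c; out[1] = b + d; out[2] = a - c; out[3] = b - d;
}

static int satd4x4(const int residual[4][4]) {
  int tmp[4][4], coeff[4][4], satd = 0;
  for (int r = 0; r < 4; ++r) hadamard4(residual[r], tmp[r]); /* rows */
  for (int c = 0; c < 4; ++c) {                               /* columns */
    int col[4] = { tmp[0][c], tmp[1][c], tmp[2][c], tmp[3][c] }, t[4];
    hadamard4(col, t);
    for (int r = 0; r < 4; ++r) coeff[r][c] = t[r];
  }
  for (int r = 0; r < 4; ++r)
    for (int c = 0; c < 4; ++c) satd += abs(coeff[r][c]);
  return satd;
}

int main(void) {
  const int flat[4][4] = { { 3, 3, 3, 3 }, { 3, 3, 3, 3 },
                           { 3, 3, 3, 3 }, { 3, 3, 3, 3 } };
  /* A flat residual concentrates everything in the DC coefficient: 16 * 3. */
  printf("SATD of a flat residual: %d\n", satd4x4(flat)); /* 48 */
  return 0;
}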
diff --git a/third_party/libaom/source/libaom/av1/encoder/interp_search.c b/third_party/libaom/source/libaom/av1/encoder/interp_search.c
index 0066c35434..dd77f6a1c0 100644
--- a/third_party/libaom/source/libaom/av1/encoder/interp_search.c
+++ b/third_party/libaom/source/libaom/av1/encoder/interp_search.c
@@ -178,7 +178,7 @@ static INLINE int64_t interpolation_filter_rd(
mbmi->interp_filters = filter_sets[filter_idx];
const int tmp_rs =
get_switchable_rate(x, mbmi->interp_filters, switchable_ctx,
- cm->seq_params.enable_dual_filter);
+ cm->seq_params->enable_dual_filter);
int64_t min_rd = RDCOST(x->rdmult, tmp_rs, 0);
if (min_rd > *rd) {
@@ -449,14 +449,23 @@ static INLINE void find_best_non_dual_interp_filter(
interp_search_flags->interp_filter_search_mask;
if (cpi->sf.interp_sf.adaptive_interp_filter_search == 2) {
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
const int ctx0 = av1_get_pred_context_switchable_interp(xd, 0);
const int ctx1 = av1_get_pred_context_switchable_interp(xd, 1);
- const int *switchable_interp_p0 =
- cpi->frame_probs.switchable_interp_probs[update_type][ctx0];
- const int *switchable_interp_p1 =
- cpi->frame_probs.switchable_interp_probs[update_type][ctx1];
-
+ int *switchable_interp_p0;
+ int *switchable_interp_p1;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ switchable_interp_p0 = (int *)cpi->ppi->temp_frame_probs
+ .switchable_interp_probs[update_type][ctx0];
+ switchable_interp_p1 = (int *)cpi->ppi->temp_frame_probs
+ .switchable_interp_probs[update_type][ctx1];
+#else
+ switchable_interp_p0 =
+ (int *)cpi->frame_probs.switchable_interp_probs[update_type][ctx0];
+ switchable_interp_p1 =
+ (int *)cpi->frame_probs.switchable_interp_probs[update_type][ctx1];
+#endif
static const int thr[7] = { 0, 8, 8, 8, 8, 0, 8 };
const int thresh = thr[update_type];
for (i = 0; i < SWITCHABLE_FILTERS; i++) {
@@ -683,7 +692,7 @@ int64_t av1_interpolation_filter_search(
switchable_ctx[1] = av1_get_pred_context_switchable_interp(xd, 1);
*switchable_rate =
get_switchable_rate(x, mbmi->interp_filters, switchable_ctx,
- cm->seq_params.enable_dual_filter);
+ cm->seq_params->enable_dual_filter);
// Do MC evaluation for default filter_type.
// Luma MC
@@ -747,7 +756,7 @@ int64_t av1_interpolation_filter_search(
restore_dst_buf(xd, *tmp_dst, num_planes);
const BUFFER_SET *dst_bufs[2] = { tmp_dst, orig_dst };
// Evaluate dual interp filters
- if (cm->seq_params.enable_dual_filter) {
+ if (cm->seq_params->enable_dual_filter) {
if (cpi->sf.interp_sf.use_fast_interpolation_filter_search) {
fast_dual_interp_filter_rd(x, cpi, tile_data, bsize, orig_dst, rd,
&rd_stats_luma, &rd_stats, switchable_rate,
diff --git a/third_party/libaom/source/libaom/av1/encoder/interp_search.h b/third_party/libaom/source/libaom/av1/encoder/interp_search.h
index 1ee26d11ba..902b69960a 100644
--- a/third_party/libaom/source/libaom/av1/encoder/interp_search.h
+++ b/third_party/libaom/source/libaom/av1/encoder/interp_search.h
@@ -37,7 +37,7 @@ typedef struct {
/*!\brief Miscellaneous arguments for inter mode search.
*/
-typedef struct {
+typedef struct HandleInterModeArgs {
/*!
* Buffer for the above predictor in OBMC
*/
@@ -139,6 +139,16 @@ typedef struct {
* Estimated cmp mode.
*/
int cmp_mode[MODE_CTX_REF_FRAMES];
+ /*!
+ * The best sse during single new_mv search. Note that the sse here comes from
+ * single_motion_search, and not from interpolation_filter_search. This has
+ * two implications:
+ * 1. The mv used to calculate the sse here is not necessarily the one that
+ * yields the best sse found in handle_inter_mode.
+ * 2. Even if the mvs agree, the sse here can differ from the sse in \ref
+ * MACROBLOCK::pred_sse due to the different interpolation filters used.
+ */
+ unsigned int best_single_sse_in_refs[REF_FRAMES];
} HandleInterModeArgs;
/*!\cond */
diff --git a/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.c b/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.c
index 9cb0f4a118..50e53fdde1 100644
--- a/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.c
+++ b/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.c
@@ -32,6 +32,31 @@ static const UV_PREDICTION_MODE uv_rd_search_mode_order[UV_INTRA_MODES] = {
UV_D113_PRED, UV_D45_PRED,
};
+// The bitmask corresponds to the filter intra modes as defined in enums.h
+// FILTER_INTRA_MODE enumeration type. Setting a bit to 0 in the mask means to
+// disable the evaluation of corresponding filter intra mode. The table
+// av1_derived_filter_intra_mode_used_flag is used when speed feature
+// prune_filter_intra_level is 1. The evaluated filter intra modes are the
+// union of the following:
+// 1) FILTER_DC_PRED
+// 2) the mode that corresponds to the best mode so far among DC_PRED, V_PRED,
+// H_PRED, D157_PRED and PAETH_PRED (e.g. FILTER_V_PRED if the best mode so
+// far is V_PRED).
+static const uint8_t av1_derived_filter_intra_mode_used_flag[INTRA_MODES] = {
+ 0x01, // DC_PRED: 0000 0001
+ 0x03, // V_PRED: 0000 0011
+ 0x05, // H_PRED: 0000 0101
+ 0x01, // D45_PRED: 0000 0001
+ 0x01, // D135_PRED: 0000 0001
+ 0x01, // D113_PRED: 0000 0001
+ 0x09, // D157_PRED: 0000 1001
+ 0x01, // D203_PRED: 0000 0001
+ 0x01, // D67_PRED: 0000 0001
+ 0x01, // SMOOTH_PRED: 0000 0001
+ 0x01, // SMOOTH_V_PRED: 0000 0001
+ 0x01, // SMOOTH_H_PRED: 0000 0001
+ 0x11 // PAETH_PRED: 0001 0001
+};
+
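A worked decode of one entry in the table above, assuming the FILTER_INTRA_MODE order FILTER_DC_PRED, FILTER_V_PRED, FILTER_H_PRED, FILTER_D157_PRED, FILTER_PAETH_PRED (bit 0 through bit 4; this ordering is an assumption here). For best_mode_so_far == D157_PRED the entry 0x09 keeps only FILTER_DC_PRED and FILTER_D157_PRED, which is exactly what the later (1 << mode) test in this file checks.

#include <stdio.h>

int main(void) {
  const char *names[5] = { "FILTER_DC_PRED", "FILTER_V_PRED", "FILTER_H_PRED",
                           "FILTER_D157_PRED", "FILTER_PAETH_PRED" };
  const unsigned mask = 0x09; /* table entry for D157_PRED */
  for (int mode = 0; mode < 5; ++mode)
    printf("%-18s %s\n", names[mode],
           (mask & (1u << mode)) ? "evaluated" : "pruned");
  return 0;
}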
// The bitmask corresponds to the chroma intra modes as defined in enums.h
// UV_PREDICTION_MODE enumeration type. Setting a bit to 0 in the mask means to
// disable the evaluation of corresponding chroma intra mode. The table
@@ -60,59 +85,6 @@ static const uint16_t av1_derived_chroma_intra_mode_used_flag[INTRA_MODES] = {
};
/*!\endcond */
-/*!\brief Calculate the rdcost of a given luma intra angle
- *
- * \ingroup intra_mode_search
- * \callergraph
- * This function runs rd calculation for a given luma intra prediction angle.
- * This is used to select the best angle delta.
- *
- * \return Returns the rdcost of the angle and updates the mbmi if the
- * new rdcost is better.
- */
-static int64_t calc_rd_given_intra_angle(
- const AV1_COMP *const cpi, MACROBLOCK *x, BLOCK_SIZE bsize, int mode_cost,
- int64_t best_rd_in, int8_t angle_delta, int max_angle_delta, int *rate,
- RD_STATS *rd_stats, int *best_angle_delta, TX_SIZE *best_tx_size,
- int64_t *best_rd, int64_t *best_model_rd, uint8_t *best_tx_type_map,
- uint8_t *best_blk_skip, int skip_model_rd) {
- RD_STATS tokenonly_rd_stats;
- int64_t this_rd;
- MACROBLOCKD *xd = &x->e_mbd;
- MB_MODE_INFO *mbmi = xd->mi[0];
- const int n4 = bsize_to_num_blk(bsize);
- assert(!is_inter_block(mbmi));
- mbmi->angle_delta[PLANE_TYPE_Y] = angle_delta;
- if (!skip_model_rd) {
- if (model_intra_yrd_and_prune(cpi, x, bsize, best_model_rd)) {
- return INT64_MAX;
- }
- }
- av1_pick_uniform_tx_size_type_yrd(cpi, x, &tokenonly_rd_stats, bsize,
- best_rd_in);
- if (tokenonly_rd_stats.rate == INT_MAX) return INT64_MAX;
-
- int this_rate =
- mode_cost + tokenonly_rd_stats.rate +
- x->mode_costs
- .angle_delta_cost[mbmi->mode - V_PRED][max_angle_delta + angle_delta];
- this_rd = RDCOST(x->rdmult, this_rate, tokenonly_rd_stats.dist);
-
- if (this_rd < *best_rd) {
- memcpy(best_blk_skip, x->txfm_search_info.blk_skip,
- sizeof(best_blk_skip[0]) * n4);
- av1_copy_array(best_tx_type_map, xd->tx_type_map, n4);
- *best_rd = this_rd;
- *best_angle_delta = mbmi->angle_delta[PLANE_TYPE_Y];
- *best_tx_size = mbmi->tx_size;
- *rate = this_rate;
- rd_stats->rate = tokenonly_rd_stats.rate;
- rd_stats->dist = tokenonly_rd_stats.dist;
- rd_stats->skip_txfm = tokenonly_rd_stats.skip_txfm;
- }
- return this_rd;
-}
-
/*!\brief Search for the best filter_intra mode when coding intra frame.
*
* \ingroup intra_mode_search
@@ -125,8 +97,12 @@ static int rd_pick_filter_intra_sby(const AV1_COMP *const cpi, MACROBLOCK *x,
int *rate, int *rate_tokenonly,
int64_t *distortion, int *skippable,
BLOCK_SIZE bsize, int mode_cost,
+ PREDICTION_MODE best_mode_so_far,
int64_t *best_rd, int64_t *best_model_rd,
PICK_MODE_CONTEXT *ctx) {
+ // Skip the evaluation of filter intra modes.
+ if (cpi->sf.intra_sf.prune_filter_intra_level == 2) return 0;
+
MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *mbmi = xd->mi[0];
int filter_intra_selected_flag = 0;
@@ -134,17 +110,33 @@ static int rd_pick_filter_intra_sby(const AV1_COMP *const cpi, MACROBLOCK *x,
TX_SIZE best_tx_size = TX_8X8;
FILTER_INTRA_MODE_INFO filter_intra_mode_info;
uint8_t best_tx_type_map[MAX_MIB_SIZE * MAX_MIB_SIZE];
- (void)ctx;
av1_zero(filter_intra_mode_info);
mbmi->filter_intra_mode_info.use_filter_intra = 1;
mbmi->mode = DC_PRED;
mbmi->palette_mode_info.palette_size[0] = 0;
+ // Skip the evaluation of filter-intra if cached MB_MODE_INFO does not have
+ // filter-intra as winner.
+ if (x->use_mb_mode_cache &&
+ !x->mb_mode_cache->filter_intra_mode_info.use_filter_intra)
+ return 0;
+
for (mode = 0; mode < FILTER_INTRA_MODES; ++mode) {
int64_t this_rd;
RD_STATS tokenonly_rd_stats;
mbmi->filter_intra_mode_info.filter_intra_mode = mode;
+ if ((cpi->sf.intra_sf.prune_filter_intra_level == 1) &&
+ !(av1_derived_filter_intra_mode_used_flag[best_mode_so_far] &
+ (1 << mode)))
+ continue;
+
+ // Skip the evaluation of modes that do not match with the winner mode in
+ // x->mb_mode_cache.
+ if (x->use_mb_mode_cache &&
+ mode != x->mb_mode_cache->filter_intra_mode_info.filter_intra_mode)
+ continue;
+
if (model_intra_yrd_and_prune(cpi, x, bsize, best_model_rd)) {
continue;
}
@@ -248,6 +240,42 @@ void av1_count_colors_highbd(const uint8_t *src8, int stride, int rows,
}
}
+void set_y_mode_and_delta_angle(const int mode_idx, MB_MODE_INFO *const mbmi) {
+ if (mode_idx < INTRA_MODE_END) {
+ mbmi->mode = intra_rd_search_mode_order[mode_idx];
+ mbmi->angle_delta[PLANE_TYPE_Y] = 0;
+ } else {
+ mbmi->mode = (mode_idx - INTRA_MODE_END) / (MAX_ANGLE_DELTA * 2) + V_PRED;
+ int angle_delta = (mode_idx - INTRA_MODE_END) % (MAX_ANGLE_DELTA * 2);
+ mbmi->angle_delta[PLANE_TYPE_Y] =
+ (angle_delta < 3 ? (angle_delta - 3) : (angle_delta - 2));
+ }
+}
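set_y_mode_and_delta_angle() linearizes (mode, angle_delta) into a single mode_idx: the first INTRA_MODE_END indices are the plain intra modes with delta 0, and each directional mode then contributes six further indices for the non-zero deltas. A standalone check of that mapping, treating INTRA_MODE_END == 13, MAX_ANGLE_DELTA == 3 and V_PRED == 1 as assumptions about the libaom enums:

#include <stdio.h>

int main(void) {
  const int kIntraModeEnd = 13, kMaxAngleDelta = 3, kVPred = 1;
  for (int mode_idx = 13; mode_idx <= 18; ++mode_idx) {
    const int mode = (mode_idx - kIntraModeEnd) / (kMaxAngleDelta * 2) + kVPred;
    const int a = (mode_idx - kIntraModeEnd) % (kMaxAngleDelta * 2);
    const int delta = (a < 3) ? (a - 3) : (a - 2);
    printf("mode_idx %d -> mode %d, angle_delta %+d\n", mode_idx, mode, delta);
  }
  /* Indices 13..18 all map to V_PRED with deltas -3 -2 -1 1 2 3; delta 0 is
   * already covered by the plain V_PRED entry below INTRA_MODE_END. */
  return 0;
}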
+
+int prune_intra_y_mode(int64_t this_model_rd, int64_t *best_model_rd,
+ int64_t top_intra_model_rd[], int model_cnt_allowed) {
+ const double thresh_best = 1.50;
+ const double thresh_top = 1.00;
+ for (int i = 0; i < model_cnt_allowed; i++) {
+ if (this_model_rd < top_intra_model_rd[i]) {
+ for (int j = model_cnt_allowed - 1; j > i; j--) {
+ top_intra_model_rd[j] = top_intra_model_rd[j - 1];
+ }
+ top_intra_model_rd[i] = this_model_rd;
+ break;
+ }
+ }
+ if (top_intra_model_rd[model_cnt_allowed - 1] != INT64_MAX &&
+ this_model_rd > thresh_top * top_intra_model_rd[model_cnt_allowed - 1])
+ return 1;
+
+ if (this_model_rd != INT64_MAX &&
+ this_model_rd > thresh_best * (*best_model_rd))
+ return 1;
+ if (this_model_rd < *best_model_rd) *best_model_rd = this_model_rd;
+ return 0;
+}
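prune_intra_y_mode() keeps a sorted list of the N best model RD scores seen so far plus the overall best, and prunes any mode whose score exceeds 1.0x the Nth best or 1.5x the best. A condensed standalone copy of that logic with N == 2 and made-up model RD values:

#include <stdint.h>
#include <stdio.h>

#define N 2 /* stand-in for model_cnt_allowed */

static int demo_prune(int64_t rd, int64_t *best, int64_t top[N]) {
  for (int i = 0; i < N; i++) {
    if (rd < top[i]) {
      for (int j = N - 1; j > i; j--) top[j] = top[j - 1];
      top[i] = rd;
      break;
    }
  }
  if (top[N - 1] != INT64_MAX && rd > 1.00 * top[N - 1]) return 1;
  if (rd != INT64_MAX && rd > 1.50 * (*best)) return 1;
  if (rd < *best) *best = rd;
  return 0;
}

int main(void) {
  int64_t best = INT64_MAX, top[N] = { INT64_MAX, INT64_MAX };
  const int64_t model_rd[4] = { 1000, 1400, 1600, 900 };
  for (int m = 0; m < 4; m++)
    printf("mode %d (model rd %lld): %s\n", m, (long long)model_rd[m],
           demo_prune(model_rd[m], &best, top) ? "pruned" : "searched");
  return 0; /* only the 1600 entry is pruned */
}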
+
// Run RD calculation with the given chroma intra prediction angle, and return
// the RD cost. Update the best mode info if the RD cost is the best so far.
static int64_t pick_intra_angle_routine_sbuv(
@@ -342,125 +370,199 @@ static int rd_pick_intra_angle_sbuv(const AV1_COMP *const cpi, MACROBLOCK *x,
#define PLANE_SIGN_TO_JOINT_SIGN(plane, a, b) \
(plane == CFL_PRED_U ? a * CFL_SIGNS + b - 1 : b * CFL_SIGNS + a - 1)
-static int cfl_rd_pick_alpha(MACROBLOCK *const x, const AV1_COMP *const cpi,
- TX_SIZE tx_size, int64_t best_rd) {
+
+static void cfl_idx_to_sign_and_alpha(int cfl_idx, CFL_SIGN_TYPE *cfl_sign,
+ int *cfl_alpha) {
+ int cfl_linear_idx = cfl_idx - CFL_INDEX_ZERO;
+ if (cfl_linear_idx == 0) {
+ *cfl_sign = CFL_SIGN_ZERO;
+ *cfl_alpha = 0;
+ } else {
+ *cfl_sign = cfl_linear_idx > 0 ? CFL_SIGN_POS : CFL_SIGN_NEG;
+ *cfl_alpha = abs(cfl_linear_idx) - 1;
+ }
+}
+
+static int64_t cfl_compute_rd(const AV1_COMP *const cpi, MACROBLOCK *x,
+ int plane, TX_SIZE tx_size,
+ BLOCK_SIZE plane_bsize, int cfl_idx,
+ int fast_mode, RD_STATS *rd_stats) {
+ assert(IMPLIES(fast_mode, rd_stats == NULL));
+ const AV1_COMMON *const cm = &cpi->common;
MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *const mbmi = xd->mi[0];
- const MACROBLOCKD_PLANE *pd = &xd->plane[AOM_PLANE_U];
- const ModeCosts *mode_costs = &x->mode_costs;
- const BLOCK_SIZE plane_bsize =
- get_plane_block_size(mbmi->bsize, pd->subsampling_x, pd->subsampling_y);
-
- assert(is_cfl_allowed(xd) && cpi->oxcf.intra_mode_cfg.enable_cfl_intra);
- assert(plane_bsize < BLOCK_SIZES_ALL);
- if (!xd->lossless[mbmi->segment_id]) {
- assert(block_size_wide[plane_bsize] == tx_size_wide[tx_size]);
- assert(block_size_high[plane_bsize] == tx_size_high[tx_size]);
+ int cfl_plane = get_cfl_pred_type(plane);
+ CFL_SIGN_TYPE cfl_sign;
+ int cfl_alpha;
+ cfl_idx_to_sign_and_alpha(cfl_idx, &cfl_sign, &cfl_alpha);
+ // We only build CFL for the given plane; the other plane's sign is a dummy.
+ int dummy_sign = CFL_SIGN_NEG;
+ const int8_t orig_cfl_alpha_signs = mbmi->cfl_alpha_signs;
+ const uint8_t orig_cfl_alpha_idx = mbmi->cfl_alpha_idx;
+ mbmi->cfl_alpha_signs =
+ PLANE_SIGN_TO_JOINT_SIGN(cfl_plane, cfl_sign, dummy_sign);
+ mbmi->cfl_alpha_idx = (cfl_alpha << CFL_ALPHABET_SIZE_LOG2) + cfl_alpha;
+ int64_t cfl_cost;
+ if (fast_mode) {
+ cfl_cost =
+ intra_model_rd(cm, x, plane, plane_bsize, tx_size, /*use_hadamard=*/0);
+ } else {
+ av1_init_rd_stats(rd_stats);
+ av1_txfm_rd_in_plane(x, cpi, rd_stats, INT64_MAX, 0, plane, plane_bsize,
+ tx_size, FTXS_NONE, 0);
+ av1_rd_cost_update(x->rdmult, rd_stats);
+ cfl_cost = rd_stats->rdcost;
}
+ mbmi->cfl_alpha_signs = orig_cfl_alpha_signs;
+ mbmi->cfl_alpha_idx = orig_cfl_alpha_idx;
+ return cfl_cost;
+}
+
+static void cfl_pick_plane_parameter(const AV1_COMP *const cpi, MACROBLOCK *x,
+ int plane, TX_SIZE tx_size,
+ int cfl_search_range,
+ RD_STATS cfl_rd_arr[CFL_MAGS_SIZE]) {
+ assert(cfl_search_range >= 1 && cfl_search_range <= CFL_MAGS_SIZE);
+ MACROBLOCKD *const xd = &x->e_mbd;
xd->cfl.use_dc_pred_cache = 1;
- const int64_t mode_rd = RDCOST(
- x->rdmult,
- mode_costs->intra_uv_mode_cost[CFL_ALLOWED][mbmi->mode][UV_CFL_PRED], 0);
- int64_t best_rd_uv[CFL_JOINT_SIGNS][CFL_PRED_PLANES];
- int best_c[CFL_JOINT_SIGNS][CFL_PRED_PLANES];
-#if CONFIG_DEBUG
- int best_rate_uv[CFL_JOINT_SIGNS][CFL_PRED_PLANES];
-#endif // CONFIG_DEBUG
-
- const int skip_trellis = 0;
- for (int plane = 0; plane < CFL_PRED_PLANES; plane++) {
- RD_STATS rd_stats;
- av1_init_rd_stats(&rd_stats);
- for (int joint_sign = 0; joint_sign < CFL_JOINT_SIGNS; joint_sign++) {
- best_rd_uv[joint_sign][plane] = INT64_MAX;
- best_c[joint_sign][plane] = 0;
- }
- // Collect RD stats for an alpha value of zero in this plane.
- // Skip i == CFL_SIGN_ZERO as (0, 0) is invalid.
- for (int i = CFL_SIGN_NEG; i < CFL_SIGNS; i++) {
- const int8_t joint_sign =
- PLANE_SIGN_TO_JOINT_SIGN(plane, CFL_SIGN_ZERO, i);
- if (i == CFL_SIGN_NEG) {
- mbmi->cfl_alpha_idx = 0;
- mbmi->cfl_alpha_signs = joint_sign;
- av1_txfm_rd_in_plane(x, cpi, &rd_stats, best_rd, 0, plane + 1,
- plane_bsize, tx_size, FTXS_NONE, skip_trellis);
- if (rd_stats.rate == INT_MAX) break;
- }
- const int alpha_rate = mode_costs->cfl_cost[joint_sign][plane][0];
- best_rd_uv[joint_sign][plane] =
- RDCOST(x->rdmult, rd_stats.rate + alpha_rate, rd_stats.dist);
-#if CONFIG_DEBUG
- best_rate_uv[joint_sign][plane] = rd_stats.rate;
-#endif // CONFIG_DEBUG
- }
- }
- int8_t best_joint_sign = -1;
-
- for (int plane = 0; plane < CFL_PRED_PLANES; plane++) {
- for (int pn_sign = CFL_SIGN_NEG; pn_sign < CFL_SIGNS; pn_sign++) {
- int progress = 0;
- for (int c = 0; c < CFL_ALPHABET_SIZE; c++) {
- int flag = 0;
- RD_STATS rd_stats;
- if (c > 2 && progress < c) break;
- av1_init_rd_stats(&rd_stats);
- for (int i = 0; i < CFL_SIGNS; i++) {
- const int8_t joint_sign = PLANE_SIGN_TO_JOINT_SIGN(plane, pn_sign, i);
- if (i == 0) {
- mbmi->cfl_alpha_idx = (c << CFL_ALPHABET_SIZE_LOG2) + c;
- mbmi->cfl_alpha_signs = joint_sign;
- av1_txfm_rd_in_plane(x, cpi, &rd_stats, best_rd, 0, plane + 1,
- plane_bsize, tx_size, FTXS_NONE, skip_trellis);
- if (rd_stats.rate == INT_MAX) break;
- }
- const int alpha_rate = mode_costs->cfl_cost[joint_sign][plane][c];
- int64_t this_rd =
- RDCOST(x->rdmult, rd_stats.rate + alpha_rate, rd_stats.dist);
- if (this_rd >= best_rd_uv[joint_sign][plane]) continue;
- best_rd_uv[joint_sign][plane] = this_rd;
- best_c[joint_sign][plane] = c;
-#if CONFIG_DEBUG
- best_rate_uv[joint_sign][plane] = rd_stats.rate;
-#endif // CONFIG_DEBUG
- flag = 2;
- if (best_rd_uv[joint_sign][!plane] == INT64_MAX) continue;
- this_rd += mode_rd + best_rd_uv[joint_sign][!plane];
- if (this_rd >= best_rd) continue;
- best_rd = this_rd;
- best_joint_sign = joint_sign;
+ MB_MODE_INFO *const mbmi = xd->mi[0];
+ assert(mbmi->uv_mode == UV_CFL_PRED);
+ const MACROBLOCKD_PLANE *pd = &xd->plane[plane];
+ const BLOCK_SIZE plane_bsize =
+ get_plane_block_size(mbmi->bsize, pd->subsampling_x, pd->subsampling_y);
+
+ const int dir_ls[2] = { 1, -1 };
+
+ int est_best_cfl_idx = CFL_INDEX_ZERO;
+ if (cfl_search_range < CFL_MAGS_SIZE) {
+ int fast_mode = 1;
+ int start_cfl_idx = CFL_INDEX_ZERO;
+ int64_t best_cfl_cost = cfl_compute_rd(cpi, x, plane, tx_size, plane_bsize,
+ start_cfl_idx, fast_mode, NULL);
+ for (int si = 0; si < 2; ++si) {
+ const int dir = dir_ls[si];
+ for (int i = 1; i < CFL_MAGS_SIZE; ++i) {
+ int cfl_idx = start_cfl_idx + dir * i;
+ if (cfl_idx < 0 || cfl_idx >= CFL_MAGS_SIZE) break;
+ int64_t cfl_cost = cfl_compute_rd(cpi, x, plane, tx_size, plane_bsize,
+ cfl_idx, fast_mode, NULL);
+ if (cfl_cost < best_cfl_cost) {
+ best_cfl_cost = cfl_cost;
+ est_best_cfl_idx = cfl_idx;
+ } else {
+ break;
}
- progress += flag;
}
}
}
- int best_rate_overhead = INT_MAX;
- uint8_t ind = 0;
- if (best_joint_sign >= 0) {
- const int u = best_c[best_joint_sign][CFL_PRED_U];
- const int v = best_c[best_joint_sign][CFL_PRED_V];
- ind = (u << CFL_ALPHABET_SIZE_LOG2) + v;
- best_rate_overhead = mode_costs->cfl_cost[best_joint_sign][CFL_PRED_U][u] +
- mode_costs->cfl_cost[best_joint_sign][CFL_PRED_V][v];
-#if CONFIG_DEBUG
- xd->cfl.rate =
- mode_costs->intra_uv_mode_cost[CFL_ALLOWED][mbmi->mode][UV_CFL_PRED] +
- best_rate_overhead + best_rate_uv[best_joint_sign][CFL_PRED_U] +
- best_rate_uv[best_joint_sign][CFL_PRED_V];
-#endif // CONFIG_DEBUG
- } else {
- best_joint_sign = 0;
+ for (int cfl_idx = 0; cfl_idx < CFL_MAGS_SIZE; ++cfl_idx) {
+ av1_invalid_rd_stats(&cfl_rd_arr[cfl_idx]);
}
- mbmi->cfl_alpha_idx = ind;
- mbmi->cfl_alpha_signs = best_joint_sign;
+ int fast_mode = 0;
+ int start_cfl_idx = est_best_cfl_idx;
+ cfl_compute_rd(cpi, x, plane, tx_size, plane_bsize, start_cfl_idx, fast_mode,
+ &cfl_rd_arr[start_cfl_idx]);
+ for (int si = 0; si < 2; ++si) {
+ const int dir = dir_ls[si];
+ for (int i = 1; i < cfl_search_range; ++i) {
+ int cfl_idx = start_cfl_idx + dir * i;
+ if (cfl_idx < 0 || cfl_idx >= CFL_MAGS_SIZE) break;
+ cfl_compute_rd(cpi, x, plane, tx_size, plane_bsize, cfl_idx, fast_mode,
+ &cfl_rd_arr[cfl_idx]);
+ }
+ }
xd->cfl.use_dc_pred_cache = 0;
xd->cfl.dc_pred_is_cached[0] = 0;
xd->cfl.dc_pred_is_cached[1] = 0;
- return best_rate_overhead;
+}
+
+/*!\brief Pick the optimal parameters for Chroma to Luma (CFL) component
+ *
+ * \ingroup intra_mode_search
+ * \callergraph
+ *
+ * This function will use DCT_DCT followed by a SATD (sum of absolute
+ * transformed differences) computation to estimate the RD score and find the
+ * best possible CFL parameter.
+ *
+ * Then the function will apply a full RD search near the best possible CFL
+ * parameter to find the best actual CFL parameter.
+ *
+ * Side effect:
+ * We use the buffers in x->plane[] and xd->plane[] as throw-away buffers for RD
+ * search.
+ *
+ * \param[in] x Encoder prediction block structure.
+ * \param[in] cpi Top-level encoder instance structure.
+ * \param[in] tx_size Transform size.
+ * \param[in] ref_best_rd Reference best RD.
+ * \param[in] cfl_search_range The search range of full RD search near the
+ * estimated best CFL parameter.
+ *
+ * \param[out] best_rd_stats RD stats of the best CFL parameter
+ * \param[out] best_cfl_alpha_idx Best CFL alpha index
+ * \param[out] best_cfl_alpha_signs Best CFL joint signs
+ *
+ */
+static int cfl_rd_pick_alpha(MACROBLOCK *const x, const AV1_COMP *const cpi,
+ TX_SIZE tx_size, int64_t ref_best_rd,
+ int cfl_search_range, RD_STATS *best_rd_stats,
+ uint8_t *best_cfl_alpha_idx,
+ int8_t *best_cfl_alpha_signs) {
+ assert(cfl_search_range >= 1 && cfl_search_range <= CFL_MAGS_SIZE);
+ const ModeCosts *mode_costs = &x->mode_costs;
+ RD_STATS cfl_rd_arr_u[CFL_MAGS_SIZE];
+ RD_STATS cfl_rd_arr_v[CFL_MAGS_SIZE];
+
+ av1_invalid_rd_stats(best_rd_stats);
+
+ cfl_pick_plane_parameter(cpi, x, 1, tx_size, cfl_search_range, cfl_rd_arr_u);
+ cfl_pick_plane_parameter(cpi, x, 2, tx_size, cfl_search_range, cfl_rd_arr_v);
+
+ for (int ui = 0; ui < CFL_MAGS_SIZE; ++ui) {
+ if (cfl_rd_arr_u[ui].rate == INT_MAX) continue;
+ int cfl_alpha_u;
+ CFL_SIGN_TYPE cfl_sign_u;
+ cfl_idx_to_sign_and_alpha(ui, &cfl_sign_u, &cfl_alpha_u);
+ for (int vi = 0; vi < CFL_MAGS_SIZE; ++vi) {
+ if (cfl_rd_arr_v[vi].rate == INT_MAX) continue;
+ int cfl_alpha_v;
+ CFL_SIGN_TYPE cfl_sign_v;
+ cfl_idx_to_sign_and_alpha(vi, &cfl_sign_v, &cfl_alpha_v);
+ // cfl_sign_u == CFL_SIGN_ZERO && cfl_sign_v == CFL_SIGN_ZERO is not a
+ // valid parameter for CFL
+ if (cfl_sign_u == CFL_SIGN_ZERO && cfl_sign_v == CFL_SIGN_ZERO) continue;
+ int joint_sign = cfl_sign_u * CFL_SIGNS + cfl_sign_v - 1;
+ RD_STATS rd_stats = cfl_rd_arr_u[ui];
+ av1_merge_rd_stats(&rd_stats, &cfl_rd_arr_v[vi]);
+ if (rd_stats.rate != INT_MAX) {
+ rd_stats.rate +=
+ mode_costs->cfl_cost[joint_sign][CFL_PRED_U][cfl_alpha_u];
+ rd_stats.rate +=
+ mode_costs->cfl_cost[joint_sign][CFL_PRED_V][cfl_alpha_v];
+ }
+ av1_rd_cost_update(x->rdmult, &rd_stats);
+ if (rd_stats.rdcost < best_rd_stats->rdcost) {
+ *best_rd_stats = rd_stats;
+ *best_cfl_alpha_idx =
+ (cfl_alpha_u << CFL_ALPHABET_SIZE_LOG2) + cfl_alpha_v;
+ *best_cfl_alpha_signs = joint_sign;
+ }
+ }
+ }
+ if (best_rd_stats->rdcost >= ref_best_rd) {
+ av1_invalid_rd_stats(best_rd_stats);
+ // Set invalid CFL parameters here since the rdcost is not better than
+ // ref_best_rd.
+ *best_cfl_alpha_idx = 0;
+ *best_cfl_alpha_signs = 0;
+ return 0;
+ }
+ return 1;
}
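The new CFL search works per plane: a fast SATD-driven hill climb from alpha zero picks an estimated best cfl_idx, a full RD refinement is then run within cfl_search_range of it, and the U/V candidates are finally combined per joint sign. The cfl_idx itself encodes sign and magnitude around a central zero index; a standalone decode mirroring cfl_idx_to_sign_and_alpha(), with CFL_MAGS_SIZE == 33 and CFL_INDEX_ZERO == 16 taken as assumptions about the libaom constants:

#include <stdio.h>

int main(void) {
  const int kIndexZero = 16;
  const int samples[5] = { 14, 15, 16, 17, 18 };
  for (int i = 0; i < 5; ++i) {
    const int linear = samples[i] - kIndexZero;
    const char *sign = linear == 0 ? "ZERO" : (linear > 0 ? "POS" : "NEG");
    const int alpha = linear == 0 ? 0 : (linear > 0 ? linear : -linear) - 1;
    printf("cfl_idx %2d -> sign %-4s alpha %d\n", samples[i], sign, alpha);
  }
  return 0; /* 14 -> NEG 1, 15 -> NEG 0, 16 -> ZERO 0, 17 -> POS 0, 18 -> POS 1 */
}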
int64_t av1_rd_pick_intra_sbuv_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
@@ -532,19 +634,19 @@ int64_t av1_rd_pick_intra_sbuv_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
mbmi->uv_mode = mode;
// Init variables for cfl and angle delta
- int cfl_alpha_rate = 0;
+ const SPEED_FEATURES *sf = &cpi->sf;
+ mbmi->angle_delta[PLANE_TYPE_UV] = 0;
if (mode == UV_CFL_PRED) {
if (!is_cfl_allowed(xd) || !intra_mode_cfg->enable_cfl_intra) continue;
assert(!is_directional_mode);
const TX_SIZE uv_tx_size = av1_get_tx_size(AOM_PLANE_U, xd);
- cfl_alpha_rate = cfl_rd_pick_alpha(x, cpi, uv_tx_size, best_rd);
- if (cfl_alpha_rate == INT_MAX) continue;
- }
- mbmi->angle_delta[PLANE_TYPE_UV] = 0;
-
- if (is_directional_mode && av1_use_angle_delta(mbmi->bsize) &&
- intra_mode_cfg->enable_angle_delta) {
- const SPEED_FEATURES *sf = &cpi->sf;
+ if (!cfl_rd_pick_alpha(x, cpi, uv_tx_size, best_rd,
+ sf->intra_sf.cfl_search_range, &tokenonly_rd_stats,
+ &mbmi->cfl_alpha_idx, &mbmi->cfl_alpha_signs)) {
+ continue;
+ }
+ } else if (is_directional_mode && av1_use_angle_delta(mbmi->bsize) &&
+ intra_mode_cfg->enable_angle_delta) {
if (sf->intra_sf.chroma_intra_pruning_with_hog &&
!intra_search_state.dir_mode_skip_mask_ready) {
static const float thresh[2][4] = {
@@ -554,7 +656,7 @@ int64_t av1_rd_pick_intra_sbuv_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
const int is_chroma = 1;
const int is_intra_frame = frame_is_intra_only(cm);
prune_intra_mode_with_hog(
- x, bsize,
+ x, bsize, cm->seq_params->sb_size,
thresh[is_intra_frame]
[sf->intra_sf.chroma_intra_pruning_with_hog - 1],
intra_search_state.directional_mode_skip_mask, is_chroma);
@@ -577,17 +679,9 @@ int64_t av1_rd_pick_intra_sbuv_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
}
}
const int mode_cost =
- mode_costs->intra_uv_mode_cost[is_cfl_allowed(xd)][mbmi->mode][mode] +
- cfl_alpha_rate;
+ mode_costs->intra_uv_mode_cost[is_cfl_allowed(xd)][mbmi->mode][mode];
this_rate = tokenonly_rd_stats.rate +
intra_mode_info_cost_uv(cpi, x, mbmi, bsize, mode_cost);
- if (mode == UV_CFL_PRED) {
- assert(is_cfl_allowed(xd) && intra_mode_cfg->enable_cfl_intra);
-#if CONFIG_DEBUG
- if (!xd->lossless[mbmi->segment_id])
- assert(xd->cfl.rate == tokenonly_rd_stats.rate + mode_cost);
-#endif // CONFIG_DEBUG
- }
this_rd = RDCOST(x->rdmult, this_rate, tokenonly_rd_stats.dist);
if (this_rd < best_rd) {
@@ -633,8 +727,7 @@ int av1_search_palette_mode(IntraModeSearchState *intra_search_state,
const int num_planes = av1_num_planes(cm);
MACROBLOCKD *const xd = &x->e_mbd;
int rate2 = 0;
- int64_t distortion2 = 0, best_rd_palette = best_rd, this_rd,
- best_model_rd_palette = INT64_MAX;
+ int64_t distortion2 = 0, best_rd_palette = best_rd, this_rd;
int skippable = 0;
uint8_t *const best_palette_color_map =
x->palette_buffer->best_palette_color_map;
@@ -656,11 +749,11 @@ int av1_search_palette_mode(IntraModeSearchState *intra_search_state,
RD_STATS rd_stats_y;
av1_invalid_rd_stats(&rd_stats_y);
- av1_rd_pick_palette_intra_sby(
- cpi, x, bsize, intra_mode_cost[DC_PRED], &best_mbmi_palette,
- best_palette_color_map, &best_rd_palette, &best_model_rd_palette,
- &rd_stats_y.rate, NULL, &rd_stats_y.dist, &rd_stats_y.skip_txfm, NULL,
- ctx, best_blk_skip, best_tx_type_map);
+ av1_rd_pick_palette_intra_sby(cpi, x, bsize, intra_mode_cost[DC_PRED],
+ &best_mbmi_palette, best_palette_color_map,
+ &best_rd_palette, &rd_stats_y.rate, NULL,
+ &rd_stats_y.dist, &rd_stats_y.skip_txfm, NULL,
+ ctx, best_blk_skip, best_tx_type_map);
if (rd_stats_y.rate == INT_MAX || pmi->palette_size[0] == 0) {
this_rd_cost->rdcost = INT64_MAX;
return skippable;
@@ -766,81 +859,6 @@ static AOM_INLINE int intra_block_yrd(const AV1_COMP *const cpi, MACROBLOCK *x,
return 0;
}
-/*!\brief Search for the best angle delta for luma prediction
- *
- * \ingroup intra_mode_search
- * \callergraph
- * Given a luma directional intra prediction mode, this function will try to
- * estimate the best delta_angle.
- *
- * \return Returns the new rdcost of the best intra angle.
- */
-static int64_t rd_pick_intra_angle_sby(const AV1_COMP *const cpi, MACROBLOCK *x,
- int *rate, RD_STATS *rd_stats,
- BLOCK_SIZE bsize, int mode_cost,
- int64_t best_rd, int64_t *best_model_rd,
- int skip_model_rd_for_zero_deg) {
- MACROBLOCKD *xd = &x->e_mbd;
- MB_MODE_INFO *mbmi = xd->mi[0];
- assert(!is_inter_block(mbmi));
-
- int best_angle_delta = 0;
- int64_t rd_cost[2 * (MAX_ANGLE_DELTA + 2)];
- TX_SIZE best_tx_size = mbmi->tx_size;
- uint8_t best_blk_skip[MAX_MIB_SIZE * MAX_MIB_SIZE];
- uint8_t best_tx_type_map[MAX_MIB_SIZE * MAX_MIB_SIZE];
-
- for (int i = 0; i < 2 * (MAX_ANGLE_DELTA + 2); ++i) rd_cost[i] = INT64_MAX;
-
- int first_try = 1;
- for (int angle_delta = 0; angle_delta <= MAX_ANGLE_DELTA; angle_delta += 2) {
- for (int i = 0; i < 2; ++i) {
- const int64_t best_rd_in =
- (best_rd == INT64_MAX) ? INT64_MAX
- : (best_rd + (best_rd >> (first_try ? 3 : 5)));
- const int64_t this_rd = calc_rd_given_intra_angle(
- cpi, x, bsize, mode_cost, best_rd_in, (1 - 2 * i) * angle_delta,
- MAX_ANGLE_DELTA, rate, rd_stats, &best_angle_delta, &best_tx_size,
- &best_rd, best_model_rd, best_tx_type_map, best_blk_skip,
- (skip_model_rd_for_zero_deg & !angle_delta));
- rd_cost[2 * angle_delta + i] = this_rd;
- if (first_try && this_rd == INT64_MAX) return best_rd;
- first_try = 0;
- if (angle_delta == 0) {
- rd_cost[1] = this_rd;
- break;
- }
- }
- }
-
- assert(best_rd != INT64_MAX);
- for (int angle_delta = 1; angle_delta <= MAX_ANGLE_DELTA; angle_delta += 2) {
- for (int i = 0; i < 2; ++i) {
- int skip_search = 0;
- const int64_t rd_thresh = best_rd + (best_rd >> 5);
- if (rd_cost[2 * (angle_delta + 1) + i] > rd_thresh &&
- rd_cost[2 * (angle_delta - 1) + i] > rd_thresh)
- skip_search = 1;
- if (!skip_search) {
- calc_rd_given_intra_angle(
- cpi, x, bsize, mode_cost, best_rd, (1 - 2 * i) * angle_delta,
- MAX_ANGLE_DELTA, rate, rd_stats, &best_angle_delta, &best_tx_size,
- &best_rd, best_model_rd, best_tx_type_map, best_blk_skip, 0);
- }
- }
- }
-
- if (rd_stats->rate != INT_MAX) {
- mbmi->tx_size = best_tx_size;
- mbmi->angle_delta[PLANE_TYPE_Y] = best_angle_delta;
- const int n4 = bsize_to_num_blk(bsize);
- memcpy(x->txfm_search_info.blk_skip, best_blk_skip,
- sizeof(best_blk_skip[0]) * n4);
- av1_copy_array(xd->tx_type_map, best_tx_type_map, n4);
- }
- return best_rd;
-}
-
/*!\brief Search for the best filter_intra mode when coding inter frame.
*
* \ingroup intra_mode_search
@@ -909,11 +927,14 @@ static INLINE void handle_filter_intra_mode(const AV1_COMP *cpi, MACROBLOCK *x,
}
}
+// Evaluate a given luma intra-mode in inter frames.
int av1_handle_intra_y_mode(IntraModeSearchState *intra_search_state,
const AV1_COMP *cpi, MACROBLOCK *x,
BLOCK_SIZE bsize, unsigned int ref_frame_cost,
const PICK_MODE_CONTEXT *ctx, RD_STATS *rd_stats_y,
- int64_t best_rd, int *mode_cost_y, int64_t *rd_y) {
+ int64_t best_rd, int *mode_cost_y, int64_t *rd_y,
+ int64_t *best_model_rd,
+ int64_t top_intra_model_rd[]) {
const AV1_COMMON *cm = &cpi->common;
const SPEED_FEATURES *const sf = &cpi->sf;
MACROBLOCKD *const xd = &x->e_mbd;
@@ -928,7 +949,7 @@ int av1_handle_intra_y_mode(IntraModeSearchState *intra_search_state,
int known_rate = mode_cost;
const int intra_cost_penalty = av1_get_intra_cost_penalty(
cm->quant_params.base_qindex, cm->quant_params.y_dc_delta_q,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
if (mode != DC_PRED && mode != PAETH_PRED) known_rate += intra_cost_penalty;
known_rate += AOMMIN(mode_costs->skip_txfm_cost[skip_ctx][0],
@@ -946,32 +967,34 @@ int av1_handle_intra_y_mode(IntraModeSearchState *intra_search_state,
!intra_search_state->dir_mode_skip_mask_ready) {
const float thresh[4] = { -1.2f, 0.0f, 0.0f, 1.2f };
const int is_chroma = 0;
- prune_intra_mode_with_hog(
- x, bsize, thresh[sf->intra_sf.intra_pruning_with_hog - 1],
- intra_search_state->directional_mode_skip_mask, is_chroma);
+ prune_intra_mode_with_hog(x, bsize, cm->seq_params->sb_size,
+ thresh[sf->intra_sf.intra_pruning_with_hog - 1],
+ intra_search_state->directional_mode_skip_mask,
+ is_chroma);
intra_search_state->dir_mode_skip_mask_ready = 1;
}
if (intra_search_state->directional_mode_skip_mask[mode]) return 0;
- av1_init_rd_stats(rd_stats_y);
- rd_stats_y->rate = INT_MAX;
- int64_t model_rd = INT64_MAX;
- int rate_dummy;
- rd_pick_intra_angle_sby(cpi, x, &rate_dummy, rd_stats_y, bsize, mode_cost,
- best_rd, &model_rd, 0);
-
- } else {
- av1_init_rd_stats(rd_stats_y);
- mbmi->angle_delta[PLANE_TYPE_Y] = 0;
- av1_pick_uniform_tx_size_type_yrd(cpi, x, rd_stats_y, bsize, best_rd);
}
+ const TX_SIZE tx_size = AOMMIN(TX_32X32, max_txsize_lookup[bsize]);
+ const int64_t this_model_rd =
+ intra_model_rd(&cpi->common, x, 0, bsize, tx_size, /*use_hadamard=*/1);
+ if (prune_intra_y_mode(this_model_rd, best_model_rd, top_intra_model_rd,
+ sf->intra_sf.top_intra_model_count_allowed))
+ return 0;
+ av1_init_rd_stats(rd_stats_y);
+ av1_pick_uniform_tx_size_type_yrd(cpi, x, rd_stats_y, bsize, best_rd);
// Pick filter intra modes.
if (mode == DC_PRED && av1_filter_intra_allowed_bsize(cm, bsize)) {
int try_filter_intra = 1;
int64_t best_rd_so_far = INT64_MAX;
if (rd_stats_y->rate != INT_MAX) {
- const int tmp_rate = rd_stats_y->rate +
- mode_costs->filter_intra_cost[bsize][0] + mode_cost;
+ // best_rd_so_far is the rdcost of DC_PRED without using filter_intra.
+ // Later, in filter intra search, best_rd_so_far is used for comparison.
+ mbmi->filter_intra_mode_info.use_filter_intra = 0;
+ const int tmp_rate =
+ rd_stats_y->rate +
+ intra_mode_info_cost_y(cpi, x, mbmi, bsize, mode_cost);
best_rd_so_far = RDCOST(x->rdmult, tmp_rate, rd_stats_y->dist);
try_filter_intra = (best_rd_so_far / 2) <= best_rd;
}
@@ -1095,7 +1118,8 @@ int64_t av1_rd_pick_intra_sby_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
const float thresh[4] = { -1.2f, -1.2f, -0.6f, 0.4f };
const int is_chroma = 0;
prune_intra_mode_with_hog(
- x, bsize, thresh[cpi->sf.intra_sf.intra_pruning_with_hog - 1],
+ x, bsize, cpi->common.seq_params->sb_size,
+ thresh[cpi->sf.intra_sf.intra_pruning_with_hog - 1],
directional_mode_skip_mask, is_chroma);
}
mbmi->filter_intra_mode_info.use_filter_intra = 0;
@@ -1105,16 +1129,21 @@ int64_t av1_rd_pick_intra_sby_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
set_mode_eval_params(cpi, x, MODE_EVAL);
MB_MODE_INFO best_mbmi = *mbmi;
- av1_zero(x->winner_mode_stats);
+ av1_zero_array(x->winner_mode_stats, MAX_WINNER_MODE_COUNT_INTRA);
x->winner_mode_count = 0;
// Searches the intra-modes except for intrabc, palette, and filter_intra.
- for (int mode_idx = INTRA_MODE_START; mode_idx < INTRA_MODE_END; ++mode_idx) {
+ int64_t top_intra_model_rd[TOP_INTRA_MODEL_COUNT];
+ for (int i = 0; i < TOP_INTRA_MODEL_COUNT; i++) {
+ top_intra_model_rd[i] = INT64_MAX;
+ }
+ for (int mode_idx = INTRA_MODE_START; mode_idx < LUMA_MODE_COUNT;
+ ++mode_idx) {
+ set_y_mode_and_delta_angle(mode_idx, mbmi);
RD_STATS this_rd_stats;
int this_rate, this_rate_tokenonly, s;
int is_diagonal_mode;
int64_t this_distortion, this_rd;
- mbmi->mode = intra_rd_search_mode_order[mode_idx];
is_diagonal_mode = av1_is_diagonal_mode(mbmi->mode);
if (is_diagonal_mode && !cpi->oxcf.intra_mode_cfg.enable_diagonal_intra)
@@ -1132,36 +1161,43 @@ int64_t av1_rd_pick_intra_sby_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
continue;
// The functionality of filter intra modes and smooth prediction
- // overlap. Retain the smooth prediction if filter intra modes are
- // disabled.
+ // overlap. Hence smooth prediction is pruned only if all the
+ // filter intra modes are enabled.
if (cpi->sf.intra_sf.disable_smooth_intra &&
- !cpi->sf.intra_sf.disable_filter_intra && mbmi->mode == SMOOTH_PRED)
+ cpi->sf.intra_sf.prune_filter_intra_level == 0 &&
+ mbmi->mode == SMOOTH_PRED)
continue;
if (!cpi->oxcf.intra_mode_cfg.enable_paeth_intra &&
mbmi->mode == PAETH_PRED)
continue;
- mbmi->angle_delta[PLANE_TYPE_Y] = 0;
+
+ // Skip the evaluation of modes that do not match with the winner mode in
+ // x->mb_mode_cache.
+ if (x->use_mb_mode_cache && mbmi->mode != x->mb_mode_cache->mode) continue;
is_directional_mode = av1_is_directional_mode(mbmi->mode);
if (is_directional_mode && directional_mode_skip_mask[mbmi->mode]) continue;
- if (is_directional_mode && av1_use_angle_delta(bsize) &&
- cpi->oxcf.intra_mode_cfg.enable_angle_delta) {
- // Searches through the best angle_delta if this option is available.
- this_rd_stats.rate = INT_MAX;
- rd_pick_intra_angle_sby(cpi, x, &this_rate, &this_rd_stats, bsize,
- bmode_costs[mbmi->mode], best_rd, &best_model_rd,
- 1);
- } else {
- if (model_intra_yrd_and_prune(cpi, x, bsize, &best_model_rd)) {
- continue;
- }
+ if (is_directional_mode && av1_use_angle_delta(bsize) == 0 &&
+ mbmi->angle_delta[PLANE_TYPE_Y] != 0)
+ continue;
- // Builds the actual prediction. The prediction from
- // model_intra_yrd_and_prune was just an estimation that did not take into
- // account the effect of txfm pipeline, so we need to redo it for real
- // here.
- av1_pick_uniform_tx_size_type_yrd(cpi, x, &this_rd_stats, bsize, best_rd);
- }
+ // Use intra_y_mode_mask speed feature to skip intra mode evaluation.
+ if (!(cpi->sf.intra_sf.intra_y_mode_mask[max_txsize_lookup[bsize]] &
+ (1 << mbmi->mode)))
+ continue;
+
+ const TX_SIZE tx_size = AOMMIN(TX_32X32, max_txsize_lookup[bsize]);
+ const int64_t this_model_rd =
+ intra_model_rd(&cpi->common, x, 0, bsize, tx_size, /*use_hadamard=*/1);
+ if (prune_intra_y_mode(this_model_rd, &best_model_rd, top_intra_model_rd,
+ cpi->sf.intra_sf.top_intra_model_count_allowed))
+ continue;
+
+ // Builds the actual prediction. The prediction from
+ // model_intra_yrd_and_prune was just an estimation that did not take into
+ // account the effect of txfm pipeline, so we need to redo it for real
+ // here.
+ av1_pick_uniform_tx_size_type_yrd(cpi, x, &this_rd_stats, bsize, best_rd);
this_rate_tokenonly = this_rd_stats.rate;
this_distortion = this_rd_stats.dist;
s = this_rd_stats.skip_txfm;
@@ -1204,16 +1240,16 @@ int64_t av1_rd_pick_intra_sby_mode(const AV1_COMP *const cpi, MACROBLOCK *x,
if (try_palette) {
av1_rd_pick_palette_intra_sby(
cpi, x, bsize, bmode_costs[DC_PRED], &best_mbmi, best_palette_color_map,
- &best_rd, &best_model_rd, rate, rate_tokenonly, distortion, skippable,
- &beat_best_rd, ctx, ctx->blk_skip, ctx->tx_type_map);
+ &best_rd, rate, rate_tokenonly, distortion, skippable, &beat_best_rd,
+ ctx, ctx->blk_skip, ctx->tx_type_map);
}
// Searches filter_intra
- if (beat_best_rd && av1_filter_intra_allowed_bsize(&cpi->common, bsize) &&
- !cpi->sf.intra_sf.disable_filter_intra) {
+ if (beat_best_rd && av1_filter_intra_allowed_bsize(&cpi->common, bsize)) {
if (rd_pick_filter_intra_sby(cpi, x, rate, rate_tokenonly, distortion,
skippable, bsize, bmode_costs[DC_PRED],
- &best_rd, &best_model_rd, ctx)) {
+ best_mbmi.mode, &best_rd, &best_model_rd,
+ ctx)) {
best_mbmi = *mbmi;
}
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.h b/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.h
index cc2a87b098..5a52440909 100644
--- a/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.h
+++ b/third_party/libaom/source/libaom/av1/encoder/intra_mode_search.h
@@ -95,6 +95,9 @@ typedef struct IntraModeSearchState {
* \param[out] mode_cost_y The cost needed to signal the current
* intra mode.
* \param[out] rd_y The rdcost of the chosen mode.
+ * \param[in] best_model_rd Best model RD seen for this block so far
+ * \param[in] top_intra_model_rd Top intra model RD seen for this
+ * block so far.
*
* \return Returns 1 if a valid intra mode is found, 0 otherwise.
* The corresponding values in x->e_mbd.mi[0], rd_stats_y, mode_cost_y, and
@@ -106,7 +109,9 @@ int av1_handle_intra_y_mode(IntraModeSearchState *intra_search_state,
const AV1_COMP *cpi, MACROBLOCK *x,
BLOCK_SIZE bsize, unsigned int ref_frame_cost,
const PICK_MODE_CONTEXT *ctx, RD_STATS *rd_stats_y,
- int64_t best_rd, int *mode_cost_y, int64_t *rd_y);
+ int64_t best_rd, int *mode_cost_y, int64_t *rd_y,
+ int64_t *best_model_rd,
+ int64_t top_intra_model_rd[]);
/*!\brief Search through all chroma intra-modes for inter frames.
*
@@ -262,6 +267,29 @@ static AOM_INLINE void init_intra_mode_search_state(
intra_search_state->rate_uv_intra = INT_MAX;
}
+/*! \brief set the luma intra mode and delta angles for a given mode index.
+ * The total number of luma intra modes is LUMA_MODE_COUNT = 61.
+ * The first 13 modes are from DC_PRED to PAETH_PRED, followed by directional
+ * modes. Each of the main 8 directional modes has 6 = MAX_ANGLE_DELTA * 2
+ * delta angles.
+ * \param[in] mode_idx mode index in intra mode decision
+ * process.
+ * \param[in] mbmi Pointer to structure holding
+ * the mode info for the current macroblock.
+ */
+void set_y_mode_and_delta_angle(const int mode_idx, MB_MODE_INFO *const mbmi);
+
+/*! \brief prune luma intra mode based on the model rd.
+ * \param[in] this_model_rd model rd for current mode.
+ * \param[in] best_model_rd Best model RD seen for this block so
+ * far.
+ * \param[in] top_intra_model_rd Top intra model RD seen for this
+ * block so far.
+ * \param[in] model_cnt_allowed The number of top intra model RD allowed.
+ */
+int prune_intra_y_mode(int64_t this_model_rd, int64_t *best_model_rd,
+ int64_t top_intra_model_rd[], int model_cnt_allowed);
+
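For reference, the mode_idx layout described in the comment above implies a decomposition along the following lines. This is only an illustrative sketch of the 13 + 8 * (2 * MAX_ANGLE_DELTA) ordering, not the implementation added by this patch, and the assumption that the eight directional modes sit contiguously at V_PRED .. D67_PRED is exactly that, an assumption:

    // Sketch only: decompose mode_idx per the 61-entry layout described above.
    static void sketch_set_y_mode_and_delta_angle(int mode_idx,
                                                  MB_MODE_INFO *mbmi) {
      if (mode_idx < 13) {
        // First 13 entries: DC_PRED .. PAETH_PRED, zero angle delta.
        mbmi->mode = (PREDICTION_MODE)mode_idx;
        mbmi->angle_delta[PLANE_TYPE_Y] = 0;
      } else {
        // Remaining 48 entries: 8 directional modes, 6 non-zero deltas each.
        const int dir = (mode_idx - 13) / (2 * MAX_ANGLE_DELTA);
        const int d = (mode_idx - 13) % (2 * MAX_ANGLE_DELTA);
        mbmi->mode = (PREDICTION_MODE)(V_PRED + dir);  // assumed enum ordering
        // Map d = 0..5 onto the non-zero deltas -3, -2, -1, 1, 2, 3.
        mbmi->angle_delta[PLANE_TYPE_Y] =
            (d < MAX_ANGLE_DELTA) ? d - MAX_ANGLE_DELTA
                                  : d - MAX_ANGLE_DELTA + 1;
      }
    }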
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/third_party/libaom/source/libaom/av1/encoder/intra_mode_search_utils.h b/third_party/libaom/source/libaom/av1/encoder/intra_mode_search_utils.h
index 532482896a..0bf77ac9f5 100644
--- a/third_party/libaom/source/libaom/av1/encoder/intra_mode_search_utils.h
+++ b/third_party/libaom/source/libaom/av1/encoder/intra_mode_search_utils.h
@@ -22,8 +22,10 @@
#include "av1/common/reconintra.h"
#include "av1/encoder/encoder.h"
+#include "av1/encoder/encodeframe.h"
#include "av1/encoder/model_rd.h"
#include "av1/encoder/palette.h"
+#include "av1/encoder/hybrid_fwd_txfm.h"
#ifdef __cplusplus
extern "C" {
@@ -134,8 +136,13 @@ static AOM_INLINE int get_hist_bin_idx(int dx, int dy) {
}
#undef FIX_PREC_BITS
-static AOM_INLINE void generate_hog(const uint8_t *src, int stride, int rows,
- int cols, float *hist) {
+// Normalizes the hog data.
+static AOM_INLINE void normalize_hog(float total, float *hist) {
+ for (int i = 0; i < BINS; ++i) hist[i] /= total;
+}
+
+static AOM_INLINE void lowbd_generate_hog(const uint8_t *src, int stride,
+ int rows, int cols, float *hist) {
float total = 0.1f;
src += stride;
for (int r = 1; r < rows - 1; ++r) {
@@ -144,7 +151,7 @@ static AOM_INLINE void generate_hog(const uint8_t *src, int stride, int rows,
const uint8_t *below = &src[c + stride];
const uint8_t *left = &src[c - 1];
const uint8_t *right = &src[c + 1];
- // Calculate gradient using Sobel fitlers.
+ // Calculate gradient using Sobel filters.
const int dx = (right[-stride] + 2 * right[0] + right[stride]) -
(left[-stride] + 2 * left[0] + left[stride]);
const int dy = (below[-1] + 2 * below[0] + below[1]) -
@@ -165,13 +172,49 @@ static AOM_INLINE void generate_hog(const uint8_t *src, int stride, int rows,
src += stride;
}
- for (int i = 0; i < BINS; ++i) hist[i] /= total;
+ normalize_hog(total, hist);
}
-static AOM_INLINE void generate_hog_hbd(const uint8_t *src8, int stride,
- int rows, int cols, float *hist) {
+// Computes and stores pixel level gradient information of a given superblock
+// for LBD encode.
+static AOM_INLINE void lowbd_compute_gradient_info_sb(MACROBLOCK *const x,
+ BLOCK_SIZE sb_size,
+ PLANE_TYPE plane) {
+ PixelLevelGradientInfo *const grad_info_sb =
+ x->pixel_gradient_info + plane * MAX_SB_SQUARE;
+ const uint8_t *src = x->plane[plane].src.buf;
+ const int stride = x->plane[plane].src.stride;
+ const int ss_x = x->e_mbd.plane[plane].subsampling_x;
+ const int ss_y = x->e_mbd.plane[plane].subsampling_y;
+ const int sb_height = block_size_high[sb_size] >> ss_y;
+ const int sb_width = block_size_wide[sb_size] >> ss_x;
+ src += stride;
+ for (int r = 1; r < sb_height - 1; ++r) {
+ for (int c = 1; c < sb_width - 1; ++c) {
+ const uint8_t *above = &src[c - stride];
+ const uint8_t *below = &src[c + stride];
+ const uint8_t *left = &src[c - 1];
+ const uint8_t *right = &src[c + 1];
+ // Calculate gradient using Sobel filters.
+ const int dx = (right[-stride] + 2 * right[0] + right[stride]) -
+ (left[-stride] + 2 * left[0] + left[stride]);
+ const int dy = (below[-1] + 2 * below[0] + below[1]) -
+ (above[-1] + 2 * above[0] + above[1]);
+ grad_info_sb[r * sb_width + c].is_dx_zero = (dx == 0);
+ grad_info_sb[r * sb_width + c].abs_dx_abs_dy_sum =
+ (uint16_t)(abs(dx) + abs(dy));
+ grad_info_sb[r * sb_width + c].hist_bin_idx =
+ (dx != 0) ? get_hist_bin_idx(dx, dy) : -1;
+ }
+ src += stride;
+ }
+}
+
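For readers following the gradient step: the dx and dy expressions above are the standard 3x3 Sobel responses, conceptually

    dx = [ -1  0 +1 ]        dy = [ -1 -2 -1 ]
         [ -2  0 +2 ] * P         [  0  0  0 ] * P
         [ -1  0 +1 ]             [ +1 +2 +1 ]

applied to the 3x3 neighbourhood P of each interior pixel. is_dx_zero, |dx| + |dy| and the histogram bin index are cached per pixel so the per-block HOG pass further down can reuse them without touching the source pixels again.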
+#if CONFIG_AV1_HIGHBITDEPTH
+static AOM_INLINE void highbd_generate_hog(const uint8_t *src8, int stride,
+ int rows, int cols, float *hist) {
float total = 0.1f;
- uint16_t *src = CONVERT_TO_SHORTPTR(src8);
+ const uint16_t *src = CONVERT_TO_SHORTPTR(src8);
src += stride;
for (int r = 1; r < rows - 1; ++r) {
for (int c = 1; c < cols - 1; ++c) {
@@ -179,7 +222,7 @@ static AOM_INLINE void generate_hog_hbd(const uint8_t *src8, int stride,
const uint16_t *below = &src[c + stride];
const uint16_t *left = &src[c - 1];
const uint16_t *right = &src[c + 1];
- // Calculate gradient using Sobel fitlers.
+ // Calculate gradient using Sobel filters.
const int dx = (right[-stride] + 2 * right[0] + right[stride]) -
(left[-stride] + 2 * left[0] + left[stride]);
const int dy = (below[-1] + 2 * below[0] + below[1]) -
@@ -200,11 +243,151 @@ static AOM_INLINE void generate_hog_hbd(const uint8_t *src8, int stride,
src += stride;
}
- for (int i = 0; i < BINS; ++i) hist[i] /= total;
+ normalize_hog(total, hist);
+}
+
+// Computes and stores pixel level gradient information of a given superblock
+// for HBD encode.
+static AOM_INLINE void highbd_compute_gradient_info_sb(MACROBLOCK *const x,
+ BLOCK_SIZE sb_size,
+ PLANE_TYPE plane) {
+ PixelLevelGradientInfo *const grad_info_sb =
+ x->pixel_gradient_info + plane * MAX_SB_SQUARE;
+ const uint16_t *src = CONVERT_TO_SHORTPTR(x->plane[plane].src.buf);
+ const int stride = x->plane[plane].src.stride;
+ const int ss_x = x->e_mbd.plane[plane].subsampling_x;
+ const int ss_y = x->e_mbd.plane[plane].subsampling_y;
+ const int sb_height = block_size_high[sb_size] >> ss_y;
+ const int sb_width = block_size_wide[sb_size] >> ss_x;
+ src += stride;
+ for (int r = 1; r < sb_height - 1; ++r) {
+ for (int c = 1; c < sb_width - 1; ++c) {
+ const uint16_t *above = &src[c - stride];
+ const uint16_t *below = &src[c + stride];
+ const uint16_t *left = &src[c - 1];
+ const uint16_t *right = &src[c + 1];
+ // Calculate gradient using Sobel filters.
+ const int dx = (right[-stride] + 2 * right[0] + right[stride]) -
+ (left[-stride] + 2 * left[0] + left[stride]);
+ const int dy = (below[-1] + 2 * below[0] + below[1]) -
+ (above[-1] + 2 * above[0] + above[1]);
+ grad_info_sb[r * sb_width + c].is_dx_zero = (dx == 0);
+ grad_info_sb[r * sb_width + c].abs_dx_abs_dy_sum =
+ (uint16_t)(abs(dx) + abs(dy));
+ grad_info_sb[r * sb_width + c].hist_bin_idx =
+ (dx != 0) ? get_hist_bin_idx(dx, dy) : -1;
+ }
+ src += stride;
+ }
+}
+#endif // CONFIG_AV1_HIGHBITDEPTH
+
+static AOM_INLINE void generate_hog(const uint8_t *src8, int stride, int rows,
+ int cols, float *hist, int highbd) {
+#if CONFIG_AV1_HIGHBITDEPTH
+ if (highbd) {
+ highbd_generate_hog(src8, stride, rows, cols, hist);
+ return;
+ }
+#else
+ (void)highbd;
+#endif // CONFIG_AV1_HIGHBITDEPTH
+ lowbd_generate_hog(src8, stride, rows, cols, hist);
+}
+
+static AOM_INLINE void compute_gradient_info_sb(MACROBLOCK *const x,
+ BLOCK_SIZE sb_size,
+ PLANE_TYPE plane) {
+#if CONFIG_AV1_HIGHBITDEPTH
+ if (is_cur_buf_hbd(&x->e_mbd)) {
+ highbd_compute_gradient_info_sb(x, sb_size, plane);
+ return;
+ }
+#endif // CONFIG_AV1_HIGHBITDEPTH
+ lowbd_compute_gradient_info_sb(x, sb_size, plane);
+}
+
+// Function to generate pixel level gradient information for a given superblock.
+// Sets the 'is_sb_gradient_cached' flag for a plane type if gradient info is
+// generated for that plane.
+static AOM_INLINE void produce_gradients_for_sb(AV1_COMP *cpi, MACROBLOCK *x,
+ BLOCK_SIZE sb_size, int mi_row,
+ int mi_col) {
+ const SPEED_FEATURES *sf = &cpi->sf;
+ // Initialise flags related to hog data caching.
+ x->is_sb_gradient_cached[PLANE_TYPE_Y] = false;
+ x->is_sb_gradient_cached[PLANE_TYPE_UV] = false;
+
+ // SB level caching of gradient data may not give a speedup in the following
+ // cases:
+ // (1) Inter frames (due to early intra gating)
+ // (2) When partition_search_type is not SEARCH_PARTITION
+ // Hence, gradient data is computed at block level in such cases.
+
+ if (!frame_is_intra_only(&cpi->common) ||
+ sf->part_sf.partition_search_type != SEARCH_PARTITION)
+ return;
+
+ const int num_planes = av1_num_planes(&cpi->common);
+
+ av1_setup_src_planes(x, cpi->source, mi_row, mi_col, num_planes, sb_size);
+
+ if (sf->intra_sf.intra_pruning_with_hog) {
+ compute_gradient_info_sb(x, sb_size, PLANE_TYPE_Y);
+ x->is_sb_gradient_cached[PLANE_TYPE_Y] = true;
+ }
+ if (sf->intra_sf.chroma_intra_pruning_with_hog && num_planes > 1) {
+ compute_gradient_info_sb(x, sb_size, PLANE_TYPE_UV);
+ x->is_sb_gradient_cached[PLANE_TYPE_UV] = true;
+ }
+}
+
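The function above is presumably called once per superblock from the encoding loop (the call site is outside this hunk); a hypothetical use looks like:

    // Hypothetical caller: cache the gradients once per superblock, so the
    // per-block HOG collection below can reuse them via
    // x->is_sb_gradient_cached[plane].
    produce_gradients_for_sb(cpi, x, cm->seq_params->sb_size, mi_row, mi_col);

collect_hog_data below checks is_sb_gradient_cached and falls back to generate_hog whenever the cache has not been populated.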
+// Reuses the pixel level gradient data generated at superblock level for block
+// level histogram computation.
+static AOM_INLINE void generate_hog_using_gradient_cache(const MACROBLOCK *x,
+ int rows, int cols,
+ BLOCK_SIZE sb_size,
+ PLANE_TYPE plane,
+ float *hist) {
+ float total = 0.1f;
+ const int ss_x = x->e_mbd.plane[plane].subsampling_x;
+ const int ss_y = x->e_mbd.plane[plane].subsampling_y;
+ const int sb_width = block_size_wide[sb_size] >> ss_x;
+
+ // Derive the offset from the start of the superblock in order to locate
+ // the block level gradient data in the cache.
+ const int mi_row_in_sb = x->e_mbd.mi_row & (mi_size_high[sb_size] - 1);
+ const int mi_col_in_sb = x->e_mbd.mi_col & (mi_size_wide[sb_size] - 1);
+ const int block_offset_in_grad_cache =
+ sb_width * (mi_row_in_sb << (MI_SIZE_LOG2 - ss_y)) +
+ (mi_col_in_sb << (MI_SIZE_LOG2 - ss_x));
+ const PixelLevelGradientInfo *grad_info_blk = x->pixel_gradient_info +
+ plane * MAX_SB_SQUARE +
+ block_offset_in_grad_cache;
+
+ // Retrieve the cached gradient information and generate the histogram.
+ for (int r = 1; r < rows - 1; ++r) {
+ for (int c = 1; c < cols - 1; ++c) {
+ const uint16_t abs_dx_abs_dy_sum =
+ grad_info_blk[r * sb_width + c].abs_dx_abs_dy_sum;
+ if (!abs_dx_abs_dy_sum) continue;
+ total += abs_dx_abs_dy_sum;
+ const bool is_dx_zero = grad_info_blk[r * sb_width + c].is_dx_zero;
+ if (is_dx_zero) {
+ hist[0] += abs_dx_abs_dy_sum >> 1;
+ hist[BINS - 1] += abs_dx_abs_dy_sum >> 1;
+ } else {
+ const int8_t idx = grad_info_blk[r * sb_width + c].hist_bin_idx;
+ assert(idx >= 0 && idx < BINS);
+ hist[idx] += abs_dx_abs_dy_sum;
+ }
+ }
+ }
+ normalize_hog(total, hist);
}
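As a quick check of the cache indexing above (an illustrative example, assuming MI_SIZE_LOG2 == 2, i.e. 4x4 mi units): for a 128x128 luma superblock (sb_width = 128, ss_x = ss_y = 0), a block whose mi offset inside the superblock is (mi_row_in_sb, mi_col_in_sb) = (4, 8) starts at pixel (16, 32), so

    block_offset_in_grad_cache = 128 * (4 << 2) + (8 << 2) = 2048 + 32 = 2080

which is row 16 * sb_width + column 32 of the per-plane gradient array, i.e. exactly where the compute_gradient_info_sb pass stored that pixel's entry.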
static INLINE void collect_hog_data(const MACROBLOCK *x, BLOCK_SIZE bsize,
- int plane, float *hog) {
+ BLOCK_SIZE sb_size, int plane, float *hog) {
const MACROBLOCKD *xd = &x->e_mbd;
const struct macroblockd_plane *const pd = &xd->plane[plane];
const int ss_x = pd->subsampling_x;
@@ -217,12 +400,15 @@ static INLINE void collect_hog_data(const MACROBLOCK *x, BLOCK_SIZE bsize,
const int cols =
((xd->mb_to_right_edge >= 0) ? bw : (xd->mb_to_right_edge >> 3) + bw) >>
ss_x;
- const int src_stride = x->plane[plane].src.stride;
- const uint8_t *src = x->plane[plane].src.buf;
- if (is_cur_buf_hbd(xd)) {
- generate_hog_hbd(src, src_stride, rows, cols, hog);
+
+ // If gradient data is already generated at SB level, reuse the cached data.
+ // Otherwise, compute the data.
+ if (x->is_sb_gradient_cached[plane]) {
+ generate_hog_using_gradient_cache(x, rows, cols, sb_size, plane, hog);
} else {
- generate_hog(src, src_stride, rows, cols, hog);
+ const uint8_t *src = x->plane[plane].src.buf;
+ const int src_stride = x->plane[plane].src.stride;
+ generate_hog(src, src_stride, rows, cols, hog, is_cur_buf_hbd(xd));
}
// Scale the hog so the luma and chroma are on the same scale
@@ -232,13 +418,13 @@ static INLINE void collect_hog_data(const MACROBLOCK *x, BLOCK_SIZE bsize,
}
static AOM_INLINE void prune_intra_mode_with_hog(
- const MACROBLOCK *x, BLOCK_SIZE bsize, float th,
+ const MACROBLOCK *x, BLOCK_SIZE bsize, BLOCK_SIZE sb_size, float th,
uint8_t *directional_mode_skip_mask, int is_chroma) {
aom_clear_system_state();
const int plane = is_chroma ? AOM_PLANE_U : AOM_PLANE_Y;
float hist[BINS] = { 0.0f };
- collect_hog_data(x, bsize, plane, hist);
+ collect_hog_data(x, bsize, sb_size, plane, hist);
// Make prediction for each of the mode
float scores[DIRECTIONAL_MODES] = { 0.0f };
@@ -305,7 +491,7 @@ static AOM_INLINE int intra_mode_info_cost_y(const AV1_COMP *cpi,
const int n_cache = av1_get_palette_cache(xd, 0, color_cache);
palette_mode_cost +=
av1_palette_color_cost_y(&mbmi->palette_mode_info, color_cache,
- n_cache, cpi->common.seq_params.bit_depth);
+ n_cache, cpi->common.seq_params->bit_depth);
palette_mode_cost +=
av1_cost_color_map(x, 0, bsize, mbmi->tx_size, PALETTE_MAP);
total_rate += palette_mode_cost;
@@ -365,7 +551,7 @@ static AOM_INLINE int intra_mode_info_cost_uv(const AV1_COMP *cpi,
uint16_t color_cache[2 * PALETTE_MAX_SIZE];
const int n_cache = av1_get_palette_cache(xd, 1, color_cache);
palette_mode_cost += av1_palette_color_cost_uv(
- pmi, color_cache, n_cache, cpi->common.seq_params.bit_depth);
+ pmi, color_cache, n_cache, cpi->common.seq_params->bit_depth);
palette_mode_cost +=
av1_cost_color_map(x, 1, bsize, mbmi->tx_size, PALETTE_MAP);
total_rate += palette_mode_cost;
@@ -385,11 +571,11 @@ static AOM_INLINE int intra_mode_info_cost_uv(const AV1_COMP *cpi,
/*!\cond */
// Makes a quick intra prediction and estimates the rdcost with a model without
// going through the whole txfm/quantize/itxfm process.
-static int64_t intra_model_rd(const AV1_COMP *const cpi, MACROBLOCK *const x,
+static int64_t intra_model_rd(const AV1_COMMON *cm, MACROBLOCK *const x,
int plane, BLOCK_SIZE plane_bsize,
- TX_SIZE tx_size) {
- const AV1_COMMON *cm = &cpi->common;
+ TX_SIZE tx_size, int use_hadamard) {
MACROBLOCKD *const xd = &x->e_mbd;
+ const BitDepthInfo bd_info = get_bit_depth_info(xd);
int row, col;
assert(!is_inter_block(xd->mi[0]));
const int stepr = tx_size_high_unit[tx_size];
@@ -405,27 +591,16 @@ static int64_t intra_model_rd(const AV1_COMP *const cpi, MACROBLOCK *const x,
for (row = 0; row < max_blocks_high; row += stepr) {
for (col = 0; col < max_blocks_wide; col += stepc) {
av1_predict_intra_block_facade(cm, xd, plane, col, row, tx_size);
+ // Here we use p->src_diff and p->coeff as temporary buffers for the
+ // prediction residue and transform coefficients. The buffers are only
+ // used inside this loop, so we do not need to add the proper offsets
+ // to them.
av1_subtract_block(
- xd, txbh, txbw, p->src_diff, block_size_wide[plane_bsize],
+ bd_info, txbh, txbw, p->src_diff, block_size_wide[plane_bsize],
p->src.buf + (((row * p->src.stride) + col) << 2), p->src.stride,
pd->dst.buf + (((row * pd->dst.stride) + col) << 2), pd->dst.stride);
- switch (tx_size) {
- case TX_4X4:
- aom_hadamard_4x4(p->src_diff, block_size_wide[plane_bsize], p->coeff);
- break;
- case TX_8X8:
- aom_hadamard_8x8(p->src_diff, block_size_wide[plane_bsize], p->coeff);
- break;
- case TX_16X16:
- aom_hadamard_16x16(p->src_diff, block_size_wide[plane_bsize],
- p->coeff);
- break;
- case TX_32X32:
- aom_hadamard_32x32(p->src_diff, block_size_wide[plane_bsize],
- p->coeff);
- break;
- default: assert(0);
- }
+ av1_quick_txfm(use_hadamard, tx_size, bd_info, p->src_diff,
+ block_size_wide[plane_bsize], p->coeff);
satd_cost += aom_satd(p->coeff, tx_size_2d[tx_size]);
}
}
@@ -448,7 +623,9 @@ static AOM_INLINE int model_intra_yrd_and_prune(const AV1_COMP *const cpi,
int64_t *best_model_rd) {
const TX_SIZE tx_size = AOMMIN(TX_32X32, max_txsize_lookup[bsize]);
const int plane = 0;
- const int64_t this_model_rd = intra_model_rd(cpi, x, plane, bsize, tx_size);
+ const AV1_COMMON *cm = &cpi->common;
+ const int64_t this_model_rd =
+ intra_model_rd(cm, x, plane, bsize, tx_size, /*use_hadamard=*/1);
if (*best_model_rd != INT64_MAX &&
this_model_rd > *best_model_rd + (*best_model_rd >> 2)) {
return 1;
diff --git a/third_party/libaom/source/libaom/av1/encoder/level.c b/third_party/libaom/source/libaom/av1/encoder/level.c
index 7a74c460e4..4e1749a1dd 100644
--- a/third_party/libaom/source/libaom/av1/encoder/level.c
+++ b/third_party/libaom/source/libaom/av1/encoder/level.c
@@ -353,7 +353,7 @@ static double time_to_decode_frame(const AV1_COMMON *const cm,
if (spatial_layer_dimensions_present_flag) {
assert(0 && "Spatial layer dimensions not supported yet.");
} else {
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const int max_frame_width = seq_params->max_frame_width;
const int max_frame_height = seq_params->max_frame_height;
luma_samples = max_frame_width * max_frame_height;
@@ -473,7 +473,7 @@ void av1_decoder_model_init(const AV1_COMP *const cpi, AV1_LEVEL level,
decoder_model->level = level;
const AV1_COMMON *const cm = &cpi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
decoder_model->bit_rate = get_max_bitrate(
av1_level_defs + level, seq_params->tier[op_index], seq_params->profile);
@@ -690,7 +690,7 @@ void av1_decoder_model_process_frame(const AV1_COMP *const cpi,
void av1_init_level_info(AV1_COMP *cpi) {
for (int op_index = 0; op_index < MAX_NUM_OPERATING_POINTS; ++op_index) {
AV1LevelInfo *const this_level_info =
- cpi->level_params.level_info[op_index];
+ cpi->ppi->level_params.level_info[op_index];
if (!this_level_info) continue;
memset(this_level_info, 0, sizeof(*this_level_info));
AV1LevelSpec *const level_spec = &this_level_info->level_spec;
@@ -1048,7 +1048,7 @@ static void scan_past_frames(const FrameWindowBuffer *const buffer,
void av1_update_level_info(AV1_COMP *cpi, size_t size, int64_t ts_start,
int64_t ts_end) {
AV1_COMMON *const cm = &cpi->common;
- const AV1LevelParams *const level_params = &cpi->level_params;
+ const AV1LevelParams *const level_params = &cpi->ppi->level_params;
const int upscaled_width = cm->superres_upscaled_width;
const int width = cm->width;
@@ -1057,7 +1057,7 @@ void av1_update_level_info(AV1_COMP *cpi, size_t size, int64_t ts_start,
const int tile_rows = cm->tiles.rows;
const int tiles = tile_cols * tile_rows;
const int luma_pic_size = upscaled_width * height;
- const int frame_header_count = level_params->frame_header_count;
+ const int frame_header_count = cpi->frame_header_count;
const int show_frame = cm->show_frame;
const int show_existing_frame = cm->show_existing_frame;
@@ -1075,7 +1075,7 @@ void av1_update_level_info(AV1_COMP *cpi, size_t size, int64_t ts_start,
const int temporal_layer_id = cm->temporal_layer_id;
const int spatial_layer_id = cm->spatial_layer_id;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
const BITSTREAM_PROFILE profile = seq_params->profile;
const int is_still_picture = seq_params->still_picture;
// update level_stats
@@ -1148,7 +1148,7 @@ void av1_update_level_info(AV1_COMP *cpi, size_t size, int64_t ts_start,
if (fail_id != TARGET_LEVEL_OK) {
const int target_level_major = 2 + (target_level >> 2);
const int target_level_minor = target_level & 3;
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Failed to encode to the target level %d_%d. %s",
target_level_major, target_level_minor,
level_fail_messages[fail_id]);
diff --git a/third_party/libaom/source/libaom/av1/encoder/level.h b/third_party/libaom/source/libaom/av1/encoder/level.h
index 5e0cce2007..2800e3d40d 100644
--- a/third_party/libaom/source/libaom/av1/encoder/level.h
+++ b/third_party/libaom/source/libaom/av1/encoder/level.h
@@ -164,8 +164,6 @@ typedef struct AV1LevelParams {
uint32_t keep_level_stats;
// Level information for each operating point.
AV1LevelInfo *level_info[MAX_NUM_OPERATING_POINTS];
- // Count the number of OBU_FRAME and OBU_FRAME_HEADER for level calculation.
- int frame_header_count;
} AV1LevelParams;
static INLINE int is_in_operating_point(int operating_point,
diff --git a/third_party/libaom/source/libaom/av1/encoder/mcomp.c b/third_party/libaom/source/libaom/av1/encoder/mcomp.c
index 06f9386102..1a53c23c74 100644
--- a/third_party/libaom/source/libaom/av1/encoder/mcomp.c
+++ b/third_party/libaom/source/libaom/av1/encoder/mcomp.c
@@ -95,7 +95,7 @@ void av1_make_default_fullpel_ms_params(
// High level params
ms_params->bsize = bsize;
- ms_params->vfp = &cpi->fn_ptr[bsize];
+ ms_params->vfp = &cpi->ppi->fn_ptr[bsize];
init_ms_buffers(&ms_params->ms_buffers, x);
@@ -145,8 +145,8 @@ void av1_set_ms_to_intra_mode(FULLPEL_MOTION_SEARCH_PARAMS *ms_params,
MV_COST_PARAMS *mv_cost_params = &ms_params->mv_cost_params;
mv_cost_params->mvjcost = dv_costs->joint_mv;
- mv_cost_params->mvcost[0] = &dv_costs->mv_component[0][MV_MAX];
- mv_cost_params->mvcost[1] = &dv_costs->mv_component[1][MV_MAX];
+ mv_cost_params->mvcost[0] = dv_costs->dv_costs[0];
+ mv_cost_params->mvcost[1] = dv_costs->dv_costs[1];
}
void av1_make_default_subpel_ms_params(SUBPEL_MOTION_SEARCH_PARAMS *ms_params,
@@ -167,7 +167,7 @@ void av1_make_default_subpel_ms_params(SUBPEL_MOTION_SEARCH_PARAMS *ms_params,
x->errorperbit, x->sadperbit);
// Subpel variance params
- ms_params->var_params.vfp = &cpi->fn_ptr[bsize];
+ ms_params->var_params.vfp = &cpi->ppi->fn_ptr[bsize];
ms_params->var_params.subpel_search_type =
cpi->sf.mv_sf.use_accurate_subpel_search;
ms_params->var_params.w = block_size_wide[bsize];
@@ -253,7 +253,7 @@ static INLINE int mv_cost(const MV *mv, const int *joint_cost,
// nearest 2 ** 7.
// This is NOT used during motion compensation.
int av1_mv_bit_cost(const MV *mv, const MV *ref_mv, const int *mvjcost,
- int *mvcost[2], int weight) {
+ int *const mvcost[2], int weight) {
const MV diff = { mv->row - ref_mv->row, mv->col - ref_mv->col };
return ROUND_POWER_OF_TWO(
mv_cost(&diff, mvjcost, CONVERT_TO_CONST_MVCOST(mvcost)) * weight, 7);
@@ -290,6 +290,9 @@ static INLINE int mv_err_cost(const MV *mv, const MV *ref_mv,
static INLINE int mv_err_cost_(const MV *mv,
const MV_COST_PARAMS *mv_cost_params) {
+ if (mv_cost_params->mv_cost_type == MV_COST_NONE) {
+ return 0;
+ }
return mv_err_cost(mv, mv_cost_params->ref_mv, mv_cost_params->mvjcost,
mv_cost_params->mvcost, mv_cost_params->error_per_bit,
mv_cost_params->mv_cost_type);
@@ -1830,7 +1833,7 @@ int av1_intrabc_hash_search(const AV1_COMP *cpi, const MACROBLOCKD *xd,
const MV dv = { GET_MV_SUBPEL(ref_block_hash.y - y_pos),
GET_MV_SUBPEL(ref_block_hash.x - x_pos) };
if (!av1_is_dv_valid(dv, &cpi->common, xd, mi_row, mi_col, bsize,
- cpi->common.seq_params.mib_size_log2))
+ cpi->common.seq_params->mib_size_log2))
continue;
FULLPEL_MV hash_mv;
@@ -1957,8 +1960,8 @@ unsigned int av1_int_pro_motion_estimation(const AV1_COMP *cpi, MACROBLOCK *x,
if (xd->bd != 8) {
unsigned int sad;
best_int_mv->as_fullmv = kZeroFullMv;
- sad = cpi->fn_ptr[bsize].sdf(x->plane[0].src.buf, src_stride,
- xd->plane[0].pre[0].buf, ref_stride);
+ sad = cpi->ppi->fn_ptr[bsize].sdf(x->plane[0].src.buf, src_stride,
+ xd->plane[0].pre[0].buf, ref_stride);
if (scaled_ref_frame) {
int i;
@@ -2001,7 +2004,8 @@ unsigned int av1_int_pro_motion_estimation(const AV1_COMP *cpi, MACROBLOCK *x,
FULLPEL_MV this_mv = best_int_mv->as_fullmv;
src_buf = x->plane[0].src.buf;
ref_buf = get_buf_from_fullmv(&xd->plane[0].pre[0], &this_mv);
- best_sad = cpi->fn_ptr[bsize].sdf(src_buf, src_stride, ref_buf, ref_stride);
+ best_sad =
+ cpi->ppi->fn_ptr[bsize].sdf(src_buf, src_stride, ref_buf, ref_stride);
{
const uint8_t *const pos[4] = {
@@ -2011,7 +2015,8 @@ unsigned int av1_int_pro_motion_estimation(const AV1_COMP *cpi, MACROBLOCK *x,
ref_buf + ref_stride,
};
- cpi->fn_ptr[bsize].sdx4df(src_buf, src_stride, pos, ref_stride, this_sad);
+ cpi->ppi->fn_ptr[bsize].sdx4df(src_buf, src_stride, pos, ref_stride,
+ this_sad);
}
for (idx = 0; idx < 4; ++idx) {
@@ -2034,7 +2039,8 @@ unsigned int av1_int_pro_motion_estimation(const AV1_COMP *cpi, MACROBLOCK *x,
ref_buf = get_buf_from_fullmv(&xd->plane[0].pre[0], &this_mv);
- tmp_sad = cpi->fn_ptr[bsize].sdf(src_buf, src_stride, ref_buf, ref_stride);
+ tmp_sad =
+ cpi->ppi->fn_ptr[bsize].sdf(src_buf, src_stride, ref_buf, ref_stride);
if (best_sad > tmp_sad) {
best_int_mv->as_fullmv = this_mv;
best_sad = tmp_sad;
@@ -2265,7 +2271,6 @@ static INLINE int get_subpel_part(int x) { return x & 7; }
// Gets the address of the ref buffer at subpel location (r, c), rounded to the
// nearest fullpel precision toward - \infty
-
static INLINE const uint8_t *get_buf_from_mv(const struct buf_2d *buf,
const MV mv) {
const int offset = (mv.row >> 3) * buf->stride + (mv.col >> 3);
diff --git a/third_party/libaom/source/libaom/av1/encoder/mcomp.h b/third_party/libaom/source/libaom/av1/encoder/mcomp.h
index 901671e27f..b2539f5100 100644
--- a/third_party/libaom/source/libaom/av1/encoder/mcomp.h
+++ b/third_party/libaom/source/libaom/av1/encoder/mcomp.h
@@ -84,7 +84,7 @@ typedef struct {
} MV_COST_PARAMS;
int av1_mv_bit_cost(const MV *mv, const MV *ref_mv, const int *mvjcost,
- int *mvcost[2], int weight);
+ int *const mvcost[2], int weight);
int av1_get_mvpred_sse(const MV_COST_PARAMS *mv_cost_params,
const FULLPEL_MV best_mv,
diff --git a/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.c b/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.c
index 96b77b754d..07485bd68c 100644
--- a/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.c
+++ b/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.c
@@ -15,6 +15,7 @@
#include "av1/encoder/encodemv.h"
#include "av1/encoder/encoder.h"
+#include "av1/encoder/interp_search.h"
#include "av1/encoder/mcomp.h"
#include "av1/encoder/motion_search_facade.h"
#include "av1/encoder/partition_strategy.h"
@@ -41,7 +42,7 @@ static int compare_weight(const void *a, const void *b) {
// Allow more mesh searches for screen content type on the ARF.
static int use_fine_search_interval(const AV1_COMP *const cpi) {
return cpi->is_screen_content_type &&
- cpi->gf_group.update_type[cpi->gf_group.index] == ARF_UPDATE &&
+ cpi->ppi->gf_group.update_type[cpi->gf_frame_index] == ARF_UPDATE &&
cpi->oxcf.speed <= 2;
}
@@ -62,15 +63,15 @@ static INLINE void get_mv_candidate_from_tpl(const AV1_COMP *const cpi,
const int mi_col = xd->mi_col;
const BLOCK_SIZE tpl_bsize =
- convert_length_to_bsize(cpi->tpl_data.tpl_bsize_1d);
+ convert_length_to_bsize(cpi->ppi->tpl_data.tpl_bsize_1d);
const int tplw = mi_size_wide[tpl_bsize];
const int tplh = mi_size_high[tpl_bsize];
const int nw = mi_size_wide[bsize] / tplw;
const int nh = mi_size_high[bsize] / tplh;
if (nw >= 1 && nh >= 1) {
- const int of_h = mi_row % mi_size_high[cm->seq_params.sb_size];
- const int of_w = mi_col % mi_size_wide[cm->seq_params.sb_size];
+ const int of_h = mi_row % mi_size_high[cm->seq_params->sb_size];
+ const int of_w = mi_col % mi_size_wide[cm->seq_params->sb_size];
const int start = of_h / tplh * sb_enc->tpl_stride + of_w / tplw;
int valid = 1;
@@ -119,7 +120,8 @@ static INLINE void get_mv_candidate_from_tpl(const AV1_COMP *const cpi,
void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
BLOCK_SIZE bsize, int ref_idx, int *rate_mv,
int search_range, inter_mode_info *mode_info,
- int_mv *best_mv) {
+ int_mv *best_mv,
+ struct HandleInterModeArgs *const args) {
MACROBLOCKD *xd = &x->e_mbd;
const AV1_COMMON *cm = &cpi->common;
const MotionVectorSearchParams *mv_search_params = &cpi->mv_search_params;
@@ -243,13 +245,9 @@ void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
}
}
- // Terminate search with the current ref_idx if we have already encountered
- // another ref_mv in the drl such that:
- // 1. The other drl has the same fullpel_mv during the SIMPLE_TRANSLATION
- // search process as the current fullpel_mv.
- // 2. The rate needed to encode the current fullpel_mv is larger than that
- // for the other ref_mv.
- if (cpi->sf.inter_sf.skip_repeated_full_newmv &&
+ // Terminate search with the current ref_idx based on fullpel mv, rate cost,
+ // and other known costs.
+ if (cpi->sf.inter_sf.skip_newmv_in_drl >= 2 &&
mbmi->motion_mode == SIMPLE_TRANSLATION &&
best_mv->as_int != INVALID_MV) {
int_mv this_mv;
@@ -260,6 +258,7 @@ void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
mv_costs->mv_cost_stack, MV_COST_WEIGHT);
mode_info[ref_mv_idx].full_search_mv.as_int = this_mv.as_int;
mode_info[ref_mv_idx].full_mv_rate = this_mv_rate;
+ mode_info[ref_mv_idx].full_mv_bestsme = bestsme;
for (int prev_ref_idx = 0; prev_ref_idx < ref_mv_idx; ++prev_ref_idx) {
// Check if the motion search result same as previous results
@@ -280,6 +279,19 @@ void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
return;
}
}
+
+ // Terminate the evaluation of current ref_mv_idx based on bestsme and
+ // drl_cost.
+ const int psme = mode_info[prev_ref_idx].full_mv_bestsme;
+ if (psme == INT_MAX) continue;
+ const int thr =
+ cpi->sf.inter_sf.skip_newmv_in_drl == 3 ? (psme + (psme >> 2)) : psme;
+ if (cpi->sf.inter_sf.skip_newmv_in_drl >= 3 &&
+ mode_info[ref_mv_idx].full_mv_bestsme > thr &&
+ mode_info[prev_ref_idx].drl_cost < mode_info[ref_mv_idx].drl_cost) {
+ best_mv->as_int = INVALID_MV;
+ return;
+ }
}
}
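A worked reading of the early-termination block added above (no behavior beyond what the code shows): at skip_newmv_in_drl level 3 the threshold allows a 25% margin over an earlier ref_mv_idx's full-pel error, e.g. with mode_info[prev_ref_idx].full_mv_bestsme = 800 the threshold is 800 + (800 >> 2) = 1000, and the current ref_mv_idx is abandoned only if its own full_mv_bestsme exceeds that and its drl_cost is also higher than the earlier one's.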
@@ -289,6 +301,8 @@ void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
const int use_fractional_mv =
bestsme < INT_MAX && cpi->common.features.cur_frame_force_integer_mv == 0;
+ int best_mv_rate = 0;
+ int mv_rate_calculated = 0;
if (use_fractional_mv) {
int_mv fractional_ms_list[3];
av1_set_fractional_mv(fractional_ms_list);
@@ -337,9 +351,10 @@ void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
subpel_start_mv = get_mv_from_fullmv(&second_best_mv.as_fullmv);
if (av1_is_subpelmv_in_range(&ms_params.mv_limits,
subpel_start_mv)) {
+ unsigned int sse;
const int this_var = mv_search_params->find_fractional_mv_step(
xd, cm, &ms_params, subpel_start_mv, &this_best_mv, &dis,
- &x->pred_sse[ref], fractional_ms_list);
+ &sse, fractional_ms_list);
if (!cpi->sf.mv_sf.disable_second_mv) {
// If cpi->sf.mv_sf.disable_second_mv is 0, use actual rd cost
@@ -358,11 +373,17 @@ void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
int64_t tmp_rd =
RDCOST(x->rdmult, tmp_rd_stats.rate + tmp_mv_rate,
tmp_rd_stats.dist);
- if (tmp_rd < rd) best_mv->as_mv = this_best_mv;
+ if (tmp_rd < rd) {
+ best_mv->as_mv = this_best_mv;
+ x->pred_sse[ref] = sse;
+ }
} else {
// If cpi->sf.mv_sf.disable_second_mv = 1, use var to decide the
// best MV.
- if (this_var < best_mv_var) best_mv->as_mv = this_best_mv;
+ if (this_var < best_mv_var) {
+ best_mv->as_mv = this_best_mv;
+ x->pred_sse[ref] = sse;
+ }
}
}
}
@@ -379,9 +400,52 @@ void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
break;
default: assert(0 && "Invalid motion mode!\n");
}
+
+ // Terminate search with the current ref_idx based on subpel mv and rate
+ // cost.
+ if (cpi->sf.inter_sf.skip_newmv_in_drl >= 1 && args != NULL &&
+ mbmi->motion_mode == SIMPLE_TRANSLATION &&
+ best_mv->as_int != INVALID_MV) {
+ const int ref_mv_idx = mbmi->ref_mv_idx;
+ best_mv_rate =
+ av1_mv_bit_cost(&best_mv->as_mv, &ref_mv, mv_costs->nmv_joint_cost,
+ mv_costs->mv_cost_stack, MV_COST_WEIGHT);
+ mv_rate_calculated = 1;
+
+ for (int prev_ref_idx = 0; prev_ref_idx < ref_mv_idx; ++prev_ref_idx) {
+ if (!args->single_newmv_valid[prev_ref_idx][ref]) continue;
+ // Check if the motion vectors are the same.
+ if (best_mv->as_int == args->single_newmv[prev_ref_idx][ref].as_int) {
+ // Skip this evaluation if the previous one is skipped.
+ if (mode_info[prev_ref_idx].skip) {
+ mode_info[ref_mv_idx].skip = 1;
+ break;
+ }
+ // Compare the rate costs that we currently know.
+ const int prev_rate_cost =
+ args->single_newmv_rate[prev_ref_idx][ref] +
+ mode_info[prev_ref_idx].drl_cost;
+ const int this_rate_cost =
+ best_mv_rate + mode_info[ref_mv_idx].drl_cost;
+
+ if (prev_rate_cost <= this_rate_cost) {
+ // If the current rate_cost is no better than the previous rate_cost,
+ // then we terminate the search for this ref_mv_idx.
+ mode_info[ref_mv_idx].skip = 1;
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ if (mv_rate_calculated) {
+ *rate_mv = best_mv_rate;
+ } else {
+ *rate_mv =
+ av1_mv_bit_cost(&best_mv->as_mv, &ref_mv, mv_costs->nmv_joint_cost,
+ mv_costs->mv_cost_stack, MV_COST_WEIGHT);
}
- *rate_mv = av1_mv_bit_cost(&best_mv->as_mv, &ref_mv, mv_costs->nmv_joint_cost,
- mv_costs->mv_cost_stack, MV_COST_WEIGHT);
}
int av1_joint_motion_search(const AV1_COMP *cpi, MACROBLOCK *x,
@@ -920,7 +984,7 @@ int_mv av1_simple_motion_sse_var(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
const uint8_t *dst = xd->plane[0].dst.buf;
const int dst_stride = xd->plane[0].dst.stride;
- *var = cpi->fn_ptr[bsize].vf(src, src_stride, dst, dst_stride, sse);
+ *var = cpi->ppi->fn_ptr[bsize].vf(src, src_stride, dst, dst_stride, sse);
return best_mv;
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.h b/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.h
index 5736f2b756..bf81fe243a 100644
--- a/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.h
+++ b/third_party/libaom/source/libaom/av1/encoder/motion_search_facade.h
@@ -21,20 +21,19 @@ extern "C" {
// TODO(any): rename this struct to something else. There is already another
// struct called inter_modes_info, which makes this terribly confusing.
typedef struct {
- int64_t rd;
int drl_cost;
-
- int rate_mv;
- int_mv mv;
-
int_mv full_search_mv;
int full_mv_rate;
+ int full_mv_bestsme;
+ int skip;
} inter_mode_info;
+struct HandleInterModeArgs;
void av1_single_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
BLOCK_SIZE bsize, int ref_idx, int *rate_mv,
int search_range, inter_mode_info *mode_info,
- int_mv *best_mv);
+ int_mv *best_mv,
+ struct HandleInterModeArgs *const args);
int av1_joint_motion_search(const AV1_COMP *cpi, MACROBLOCK *x,
BLOCK_SIZE bsize, int_mv *cur_mv,
diff --git a/third_party/libaom/source/libaom/av1/encoder/mv_prec.c b/third_party/libaom/source/libaom/av1/encoder/mv_prec.c
index cc81d72170..ae9dc35af4 100644
--- a/third_party/libaom/source/libaom/av1/encoder/mv_prec.c
+++ b/third_party/libaom/source/libaom/av1/encoder/mv_prec.c
@@ -230,7 +230,7 @@ static AOM_INLINE void collect_mv_stats_b(MV_STATS *mv_stats,
const int y_stride = cpi->source->y_stride;
const int px_row = 4 * mi_row, px_col = 4 * mi_col;
const int buf_is_hbd = cpi->source->flags & YV12_FLAG_HIGHBITDEPTH;
- const int bd = cm->seq_params.bit_depth;
+ const int bd = cm->seq_params->bit_depth;
if (buf_is_hbd) {
uint16_t *source_buf =
CONVERT_TO_SHORTPTR(cpi->source->y_buffer) + px_row * y_stride + px_col;
@@ -339,8 +339,8 @@ static AOM_INLINE void collect_mv_stats_tile(MV_STATS *mv_stats,
const int mi_row_end = tile_info->mi_row_end;
const int mi_col_start = tile_info->mi_col_start;
const int mi_col_end = tile_info->mi_col_end;
- const int sb_size_mi = cm->seq_params.mib_size;
- BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const int sb_size_mi = cm->seq_params->mib_size;
+ BLOCK_SIZE sb_size = cm->seq_params->sb_size;
for (int mi_row = mi_row_start; mi_row < mi_row_end; mi_row += sb_size_mi) {
for (int mi_col = mi_col_start; mi_col < mi_col_end; mi_col += sb_size_mi) {
collect_mv_stats_sb(mv_stats, cpi, mi_row, mi_col, sb_size);
@@ -349,7 +349,7 @@ static AOM_INLINE void collect_mv_stats_tile(MV_STATS *mv_stats,
}
void av1_collect_mv_stats(AV1_COMP *cpi, int current_q) {
- MV_STATS *mv_stats = &cpi->mv_stats;
+ MV_STATS *mv_stats = &cpi->ppi->mv_stats;
const AV1_COMMON *cm = &cpi->common;
const int tile_cols = cm->tiles.cols;
const int tile_rows = cm->tiles.rows;
@@ -420,8 +420,8 @@ void av1_pick_and_set_high_precision_mv(AV1_COMP *cpi, int qindex) {
}
#if !CONFIG_REALTIME_ONLY
else if (cpi->sf.hl_sf.high_precision_mv_usage == LAST_MV_DATA &&
- av1_frame_allows_smart_mv(cpi) && cpi->mv_stats.valid) {
- use_hp = get_smart_mv_prec(cpi, &cpi->mv_stats, qindex);
+ av1_frame_allows_smart_mv(cpi) && cpi->ppi->mv_stats.valid) {
+ use_hp = get_smart_mv_prec(cpi, &cpi->ppi->mv_stats, qindex);
}
#endif // !CONFIG_REALTIME_ONLY
diff --git a/third_party/libaom/source/libaom/av1/encoder/mv_prec.h b/third_party/libaom/source/libaom/av1/encoder/mv_prec.h
index 89f95f553e..11dcdd8806 100644
--- a/third_party/libaom/source/libaom/av1/encoder/mv_prec.h
+++ b/third_party/libaom/source/libaom/av1/encoder/mv_prec.h
@@ -21,8 +21,8 @@
void av1_collect_mv_stats(AV1_COMP *cpi, int current_q);
static AOM_INLINE int av1_frame_allows_smart_mv(const AV1_COMP *cpi) {
- const int gf_group_index = cpi->gf_group.index;
- const int gf_update_type = cpi->gf_group.update_type[gf_group_index];
+ const int gf_group_index = cpi->gf_frame_index;
+ const int gf_update_type = cpi->ppi->gf_group.update_type[gf_group_index];
return !frame_is_intra_only(&cpi->common) &&
!(gf_update_type == INTNL_OVERLAY_UPDATE ||
gf_update_type == OVERLAY_UPDATE);
diff --git a/third_party/libaom/source/libaom/av1/encoder/nonrd_pickmode.c b/third_party/libaom/source/libaom/av1/encoder/nonrd_pickmode.c
index 279fd922dd..088135a2dd 100644
--- a/third_party/libaom/source/libaom/av1/encoder/nonrd_pickmode.c
+++ b/third_party/libaom/source/libaom/av1/encoder/nonrd_pickmode.c
@@ -353,6 +353,8 @@ static INLINE void find_predictors(AV1_COMP *cpi, MACROBLOCK *x,
(void)tile_data;
x->pred_mv_sad[ref_frame] = INT_MAX;
+ x->pred_mv0_sad[ref_frame] = INT_MAX;
+ x->pred_mv1_sad[ref_frame] = INT_MAX;
frame_mv[NEWMV][ref_frame].as_int = INVALID_MV;
// TODO(kyslov) this needs various further optimizations. to be continued..
assert(yv12 != NULL);
@@ -518,7 +520,7 @@ static TX_SIZE calculate_tx_size(const AV1_COMP *const cpi, BLOCK_SIZE bsize,
TX_SIZE tx_size;
const TxfmSearchParams *txfm_params = &x->txfm_search_params;
if (txfm_params->tx_mode_search_type == TX_MODE_SELECT) {
- if (sse > (var << 2))
+ if (sse > (var << 1))
tx_size =
AOMMIN(max_txsize_lookup[bsize],
tx_mode_to_biggest_tx_size[txfm_params->tx_mode_search_type]);
@@ -729,9 +731,9 @@ static void model_skip_for_sb_y_large(AV1_COMP *cpi, BLOCK_SIZE bsize,
(puv->dequant_QTX[1] * puv->dequant_QTX[1]) >> 3;
av1_enc_build_inter_predictor(cm, xd, mi_row, mi_col, NULL, bsize, i,
i);
- var_uv[j] = cpi->fn_ptr[uv_bsize].vf(puv->src.buf, puv->src.stride,
- puvd->dst.buf, puvd->dst.stride,
- &sse_uv[j]);
+ var_uv[j] = cpi->ppi->fn_ptr[uv_bsize].vf(
+ puv->src.buf, puv->src.stride, puvd->dst.buf, puvd->dst.stride,
+ &sse_uv[j]);
if ((var_uv[j] < uv_ac_thr || var_uv[j] == 0) &&
(sse_uv[j] - var_uv[j] < uv_dc_thr || sse_uv[j] == var_uv[j]))
skip_uv[j] = 1;
@@ -776,8 +778,8 @@ static void model_rd_for_sb_y(const AV1_COMP *const cpi, BLOCK_SIZE bsize,
int rate;
int64_t dist;
- unsigned int var = cpi->fn_ptr[bsize].vf(p->src.buf, p->src.stride,
- pd->dst.buf, pd->dst.stride, &sse);
+ unsigned int var = cpi->ppi->fn_ptr[bsize].vf(
+ p->src.buf, p->src.stride, pd->dst.buf, pd->dst.stride, &sse);
xd->mi[0]->tx_size = calculate_tx_size(cpi, bsize, x, var, sse);
if (calculate_rd) {
@@ -1171,8 +1173,8 @@ static void model_rd_for_sb_uv(AV1_COMP *cpi, BLOCK_SIZE plane_bsize,
unsigned int var;
if (!x->color_sensitivity[i - 1]) continue;
- var = cpi->fn_ptr[bs].vf(p->src.buf, p->src.stride, pd->dst.buf,
- pd->dst.stride, &sse);
+ var = cpi->ppi->fn_ptr[bs].vf(p->src.buf, p->src.stride, pd->dst.buf,
+ pd->dst.stride, &sse);
assert(sse >= var);
tot_sse += sse;
@@ -1251,12 +1253,12 @@ static void estimate_block_intra(int plane, int block, int row, int col,
(void)block;
- p->src.buf = &src_buf_base[4 * (row * src_stride + col)];
- pd->dst.buf = &dst_buf_base[4 * (row * dst_stride + col)];
-
av1_predict_intra_block_facade(cm, xd, plane, col, row, tx_size);
av1_invalid_rd_stats(&this_rdc);
+ p->src.buf = &src_buf_base[4 * (row * src_stride + col)];
+ pd->dst.buf = &dst_buf_base[4 * (row * dst_stride + col)];
+
if (plane == 0) {
block_yrd(cpi, x, 0, 0, &this_rdc, &args->skippable, bsize_tx,
AOMMIN(tx_size, TX_16X16));
@@ -1562,7 +1564,7 @@ static void search_filter_ref(AV1_COMP *cpi, MACROBLOCK *x, RD_STATS *this_rdc,
else
model_rd_for_sb_y(cpi, bsize, x, xd, &pf_rd_stats[i], 1);
pf_rd_stats[i].rate += av1_get_switchable_rate(
- x, xd, cm->features.interp_filter, cm->seq_params.enable_dual_filter);
+ x, xd, cm->features.interp_filter, cm->seq_params->enable_dual_filter);
cost = RDCOST(x->rdmult, pf_rd_stats[i].rate, pf_rd_stats[i].dist);
pf_tx_size[i] = mi->tx_size;
if (cost < best_cost) {
@@ -1618,6 +1620,7 @@ typedef struct _mode_search_stat {
static void compute_intra_yprediction(const AV1_COMMON *cm,
PREDICTION_MODE mode, BLOCK_SIZE bsize,
MACROBLOCK *x, MACROBLOCKD *xd) {
+ const SequenceHeader *seq_params = cm->seq_params;
struct macroblockd_plane *const pd = &xd->plane[0];
struct macroblock_plane *const p = &x->plane[0];
uint8_t *const src_buf_base = p->src.buf;
@@ -1644,10 +1647,11 @@ static void compute_intra_yprediction(const AV1_COMMON *cm,
for (col = 0; col < max_blocks_wide; col += (1 << tx_size)) {
p->src.buf = &src_buf_base[4 * (row * (int64_t)src_stride + col)];
pd->dst.buf = &dst_buf_base[4 * (row * (int64_t)dst_stride + col)];
- av1_predict_intra_block(cm, xd, block_size_wide[bsize],
- block_size_high[bsize], tx_size, mode, 0, 0,
- FILTER_INTRA_MODES, pd->dst.buf, dst_stride,
- pd->dst.buf, dst_stride, 0, 0, plane);
+ av1_predict_intra_block(
+ xd, seq_params->sb_size, seq_params->enable_intra_edge_filter,
+ block_size_wide[bsize], block_size_high[bsize], tx_size, mode, 0, 0,
+ FILTER_INTRA_MODES, pd->dst.buf, dst_stride, pd->dst.buf, dst_stride,
+ 0, 0, plane);
}
}
p->src.buf = src_buf_base;
@@ -1671,7 +1675,9 @@ void av1_nonrd_pick_intra_mode(AV1_COMP *cpi, MACROBLOCK *x, RD_STATS *rd_cost,
const MB_MODE_INFO *left_mi = xd->left_mbmi;
const PREDICTION_MODE A = av1_above_block_mode(above_mi);
const PREDICTION_MODE L = av1_left_block_mode(left_mi);
- bmode_costs = x->mode_costs.y_mode_costs[A][L];
+ const int above_ctx = intra_mode_context[A];
+ const int left_ctx = intra_mode_context[L];
+ bmode_costs = x->mode_costs.y_mode_costs[above_ctx][left_ctx];
av1_invalid_rd_stats(&best_rdc);
av1_invalid_rd_stats(&this_rdc);
@@ -1734,10 +1740,11 @@ static AOM_INLINE void get_ref_frame_use_mask(AV1_COMP *cpi, MACROBLOCK *x,
int *force_skip_low_temp_var) {
AV1_COMMON *const cm = &cpi->common;
const struct segmentation *const seg = &cm->seg;
- const int is_small_sb = (cm->seq_params.sb_size == BLOCK_64X64);
+ const int is_small_sb = (cm->seq_params->sb_size == BLOCK_64X64);
// For SVC the usage of alt_ref is determined by the ref_frame_flags.
- int use_alt_ref_frame = cpi->use_svc || cpi->sf.rt_sf.use_nonrd_altref_frame;
+ int use_alt_ref_frame =
+ cpi->ppi->use_svc || cpi->sf.rt_sf.use_nonrd_altref_frame;
int use_golden_ref_frame = 1;
use_ref_frame[LAST_FRAME] = 1; // we never skip LAST
@@ -1832,7 +1839,7 @@ static void estimate_intra_mode(
int intra_cost_penalty = av1_get_intra_cost_penalty(
quant_params->base_qindex, quant_params->y_dc_delta_q,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
int64_t inter_mode_thresh = RDCOST(x->rdmult, intra_cost_penalty, 0);
int perform_intra_pred = cpi->sf.rt_sf.check_intra_pred_nonrd;
// For spatial enhancemanent layer: turn off intra prediction if the
@@ -1851,8 +1858,8 @@ static void estimate_intra_mode(
// Adjust thresholds to make intra mode likely tested if the other
// references (golden, alt) are skipped/not checked. For now always
// adjust for svc mode.
- if (cpi->use_svc || (cpi->sf.rt_sf.use_nonrd_altref_frame == 0 &&
- cpi->sf.rt_sf.nonrd_prune_ref_frame_search > 0)) {
+ if (cpi->ppi->use_svc || (cpi->sf.rt_sf.use_nonrd_altref_frame == 0 &&
+ cpi->sf.rt_sf.nonrd_prune_ref_frame_search > 0)) {
spatial_var_thresh = 150;
motion_thresh = 0;
}
@@ -2063,6 +2070,40 @@ static AOM_INLINE int skip_mode_by_bsize_and_ref_frame(
return 0;
}
+void set_color_sensitivity(AV1_COMP *cpi, MACROBLOCK *x, MACROBLOCKD *xd,
+ BLOCK_SIZE bsize, int y_sad,
+ unsigned int source_variance) {
+ const int factor = (bsize >= BLOCK_32X32) ? 2 : 3;
+ NOISE_LEVEL noise_level = kLow;
+ int norm_sad =
+ y_sad >> (b_width_log2_lookup[bsize] + b_height_log2_lookup[bsize]);
+ // If the spatial source variance is high and the normalized y_sad
+ // is low, then the y-channel is likely good for mode estimation, so keep
+ // color_sensitivity off. Do this only for low-noise content for now, since
+ // there is some bdrate regression for noisy color clips.
+ if (cpi->noise_estimate.enabled)
+ noise_level = av1_noise_estimate_extract_level(&cpi->noise_estimate);
+ if (noise_level == kLow && source_variance > 1000 && norm_sad < 50) {
+ x->color_sensitivity[0] = 0;
+ x->color_sensitivity[1] = 0;
+ return;
+ }
+ for (int i = 1; i <= 2; ++i) {
+ if (x->color_sensitivity[i - 1] == 2) {
+ struct macroblock_plane *const p = &x->plane[i];
+ struct macroblockd_plane *const pd = &xd->plane[i];
+ const BLOCK_SIZE bs =
+ get_plane_block_size(bsize, pd->subsampling_x, pd->subsampling_y);
+ const int uv_sad = cpi->ppi->fn_ptr[bs].sdf(p->src.buf, p->src.stride,
+ pd->dst.buf, pd->dst.stride);
+ const int norm_uv_sad =
+ uv_sad >> (b_width_log2_lookup[bs] + b_height_log2_lookup[bs]);
+ x->color_sensitivity[i - 1] =
+ uv_sad > (factor * (y_sad >> 3)) && norm_uv_sad > 40;
+ }
+ }
+}
+
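To make the thresholds in set_color_sensitivity concrete (a worked example of the code above, not additional behavior): for a BLOCK_32X32 block, factor = 2, so a chroma plane whose sensitivity is still undecided (marked 2) is switched on only when uv_sad > 2 * (y_sad >> 3), i.e. the chroma SAD exceeds roughly a quarter of the luma SAD, and its size-normalized SAD (uv_sad >> (b_width_log2 + b_height_log2)) is above 40. The whole per-plane check is bypassed, with both flags forced to 0, for low-noise content whose source_variance exceeds 1000 and whose normalized y_sad is below 50.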
void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
MACROBLOCK *x, RD_STATS *rd_cost,
BLOCK_SIZE bsize, PICK_MODE_CONTEXT *ctx) {
@@ -2104,7 +2145,7 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
DECLARE_ALIGNED(16, uint8_t, pred_buf[3 * 128 * 128]);
PRED_BUFFER *this_mode_pred = NULL;
const int reuse_inter_pred = cpi->sf.rt_sf.reuse_inter_pred_nonrd &&
- cm->seq_params.bit_depth == AOM_BITS_8;
+ cm->seq_params->bit_depth == AOM_BITS_8;
const int bh = block_size_high[bsize];
const int bw = block_size_wide[bsize];
@@ -2135,7 +2176,8 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
cpi->common.height != cpi->resize_pending_params.height));
#endif
-
+ x->color_sensitivity[0] = x->color_sensitivity_sb[0];
+ x->color_sensitivity[1] = x->color_sensitivity_sb[1];
init_best_pickmode(&best_pickmode);
const ModeCosts *mode_costs = &x->mode_costs;
@@ -2170,7 +2212,8 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
#if CONFIG_AV1_TEMPORAL_DENOISING
if (cpi->oxcf.noise_sensitivity > 0) {
- // if (cpi->use_svc) denoise_svc_pickmode = av1_denoise_svc_non_key(cpi);
+ // if (cpi->ppi->use_svc) denoise_svc_pickmode =
+ // av1_denoise_svc_non_key(cpi);
if (cpi->denoiser.denoising_level > kDenLowLow && denoise_svc_pickmode)
av1_denoiser_reset_frame_stats(ctx);
}
@@ -2183,7 +2226,7 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
// to source, so use subpel motion vector to compensate. The nonzero motion
// is half pixel shifted to left and top, so (-4, -4). This has more effect
// on higher resolutions, so condition it on that for now.
- if (cpi->use_svc && svc->spatial_layer_id > 0 &&
+ if (cpi->ppi->use_svc && svc->spatial_layer_id > 0 &&
svc->downsample_filter_phase[svc->spatial_layer_id - 1] == 8 &&
cm->width * cm->height > 640 * 480) {
svc_mv_col = -4;
@@ -2210,7 +2253,7 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
const int use_model_yrd_large =
cpi->oxcf.rc_cfg.mode == AOM_CBR && large_block &&
!cyclic_refresh_segment_id_boosted(xd->mi[0]->segment_id) &&
- quant_params->base_qindex && cm->seq_params.bit_depth == 8;
+ quant_params->base_qindex && cm->seq_params->bit_depth == 8;
const int enable_filter_search =
is_filter_search_enabled(cpi, mi_row, mi_col, bsize);
@@ -2264,7 +2307,7 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
if (!use_ref_frame_mask[ref_frame]) continue;
force_mv_inter_layer = 0;
- if (cpi->use_svc && svc->spatial_layer_id > 0 &&
+ if (cpi->ppi->use_svc && svc->spatial_layer_id > 0 &&
((ref_frame == LAST_FRAME && svc->skip_mvsearch_last) ||
(ref_frame == GOLDEN_FRAME && svc->skip_mvsearch_gf))) {
// Only test mode if NEARESTMV/NEARMV is (svc_mv_col, svc_mv_row),
@@ -2306,6 +2349,10 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
if ((int64_t)(x->pred_mv_sad[ref_frame]) > thresh_sad_pred) continue;
}
}
+ // Check for skipping NEARMV based on pred_mv_sad.
+ if (this_mode == NEARMV && x->pred_mv1_sad[ref_frame] != INT_MAX &&
+ x->pred_mv1_sad[ref_frame] > (x->pred_mv0_sad[ref_frame] << 1))
+ continue;
if (skip_mode_by_threshold(
this_mode, ref_frame, frame_mv[this_mode][ref_frame],
@@ -2357,6 +2404,22 @@ void av1_nonrd_pick_inter_mode_sb(AV1_COMP *cpi, TileDataEnc *tile_data,
#if COLLECT_PICK_MODE_STAT
ms_stat.num_nonskipped_searches[bsize][this_mode]++;
#endif
+
+ if (idx == 0) {
+ // Set color sensitivity on first tested mode only.
+ // Use y-sad already computed in find_predictors: take the sad with motion
+ // vector closest to 0; the uv-sad computed below in set_color_sensitivity
+ // is for zeromv.
+ int y_sad = x->pred_mv0_sad[LAST_FRAME];
+ if (x->pred_mv1_sad[LAST_FRAME] != INT_MAX &&
+ (abs(frame_mv[NEARMV][LAST_FRAME].as_mv.col) +
+ abs(frame_mv[NEARMV][LAST_FRAME].as_mv.row)) <
+ (abs(frame_mv[NEARESTMV][LAST_FRAME].as_mv.col) +
+ abs(frame_mv[NEARESTMV][LAST_FRAME].as_mv.row)))
+ y_sad = x->pred_mv1_sad[LAST_FRAME];
+ set_color_sensitivity(cpi, x, xd, bsize, y_sad, x->source_variance);
+ }
+
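The y_sad choice in the block above can be restated as a small sketch; the pairing of pred_mv0_sad/pred_mv1_sad with NEARESTMV/NEARMV follows the surrounding code, and MV_SKETCH is a stand-in for libaom's MV struct:

#include <limits.h>
#include <stdint.h>
#include <stdlib.h>
typedef struct { int16_t row, col; } MV_SKETCH; /* stand-in for libaom's MV */
/* Prefer the SAD whose candidate motion vector is closer to zero. */
static int pick_y_sad(int sad_nearest, int sad_near, MV_SKETCH nearest_mv,
                      MV_SKETCH near_mv) {
  const int mag_nearest = abs(nearest_mv.row) + abs(nearest_mv.col);
  const int mag_near = abs(near_mv.row) + abs(near_mv.col);
  return (sad_near != INT_MAX && mag_near < mag_nearest) ? sad_near : sad_nearest;
}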
if (enable_filter_search && !force_mv_inter_layer &&
((mi->mv[0].as_mv.row & 0x07) || (mi->mv[0].as_mv.col & 0x07)) &&
(ref_frame == LAST_FRAME || !x->nonrd_prune_ref_frame_search)) {
diff --git a/third_party/libaom/source/libaom/av1/encoder/optical_flow.c b/third_party/libaom/source/libaom/av1/encoder/optical_flow.c
index 82ae9c5774..d2f03ed641 100644
--- a/third_party/libaom/source/libaom/av1/encoder/optical_flow.c
+++ b/third_party/libaom/source/libaom/av1/encoder/optical_flow.c
@@ -819,7 +819,7 @@ static void solve_horn_schunck(const double *ix, const double *iy,
}
av1_init_sparse_mtx(row_pos, col_pos, values, c, 2 * width * height,
2 * width * height, &A);
- // substract init mv part from b
+ // subtract init mv part from b
av1_mtx_vect_multi_left(&A, mv_init_vec, temp_b, 2 * width * height);
for (int i = 0; i < 2 * width * height; i++) {
b[i] = -temp_b[i];
@@ -882,10 +882,11 @@ static void solve_horn_schunck(const double *ix, const double *iy,
}
// Calculate optical flow from from_frame to to_frame using the H-S method.
-void horn_schunck(const YV12_BUFFER_CONFIG *from_frame,
- const YV12_BUFFER_CONFIG *to_frame, const int level,
- const int mv_stride, const int mv_height, const int mv_width,
- const OPFL_PARAMS *opfl_params, LOCALMV *mvs) {
+static void horn_schunck(const YV12_BUFFER_CONFIG *from_frame,
+ const YV12_BUFFER_CONFIG *to_frame, const int level,
+ const int mv_stride, const int mv_height,
+ const int mv_width, const OPFL_PARAMS *opfl_params,
+ LOCALMV *mvs) {
// mvs are always on level 0; here we define two new mv arrays sized for
// this level.
const int fw = from_frame->y_crop_width;
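The "subtract init mv part from b" step earlier in this file corresponds to solving the Horn-Schunck linear system incrementally. A hedged reading of the visible lines, with $b_0$ denoting the right-hand side assembled from the image derivatives (not shown in this hunk):

$$A\,\Delta m = b_0 - A\,m_{\text{init}}, \qquad m = m_{\text{init}} + \Delta m$$

so the sparse solve returns the update to the initial motion field rather than the field itself.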
diff --git a/third_party/libaom/source/libaom/av1/encoder/palette.c b/third_party/libaom/source/libaom/av1/encoder/palette.c
index fd579b7f7f..fbc16ca742 100644
--- a/third_party/libaom/source/libaom/av1/encoder/palette.c
+++ b/third_party/libaom/source/libaom/av1/encoder/palette.c
@@ -218,12 +218,12 @@ static AOM_INLINE void palette_rd_y(
const AV1_COMP *const cpi, MACROBLOCK *x, MB_MODE_INFO *mbmi,
BLOCK_SIZE bsize, int dc_mode_cost, const int *data, int *centroids, int n,
uint16_t *color_cache, int n_cache, MB_MODE_INFO *best_mbmi,
- uint8_t *best_palette_color_map, int64_t *best_rd, int64_t *best_model_rd,
- int *rate, int *rate_tokenonly, int64_t *distortion, int *skippable,
- int *beat_best_rd, PICK_MODE_CONTEXT *ctx, uint8_t *blk_skip,
- uint8_t *tx_type_map, int *beat_best_palette_rd) {
+ uint8_t *best_palette_color_map, int64_t *best_rd, int *rate,
+ int *rate_tokenonly, int64_t *distortion, int *skippable, int *beat_best_rd,
+ PICK_MODE_CONTEXT *ctx, uint8_t *blk_skip, uint8_t *tx_type_map,
+ int *beat_best_palette_rd) {
optimize_palette_colors(color_cache, n_cache, n, 1, centroids,
- cpi->common.seq_params.bit_depth);
+ cpi->common.seq_params->bit_depth);
const int num_unique_colors = av1_remove_duplicates(centroids, n);
if (num_unique_colors < PALETTE_MIN_SIZE) {
// Too few unique colors to create a palette. And DC_PRED will work
@@ -231,10 +231,10 @@ static AOM_INLINE void palette_rd_y(
return;
}
PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
for (int i = 0; i < num_unique_colors; ++i) {
pmi->palette_colors[i] = clip_pixel_highbd(
- (int)centroids[i], cpi->common.seq_params.bit_depth);
+ (int)centroids[i], cpi->common.seq_params->bit_depth);
}
} else {
for (int i = 0; i < num_unique_colors; ++i) {
@@ -251,10 +251,6 @@ static AOM_INLINE void palette_rd_y(
1);
extend_palette_color_map(color_map, cols, rows, block_width, block_height);
- if (model_intra_yrd_and_prune(cpi, x, bsize, best_model_rd)) {
- return;
- }
-
RD_STATS tokenonly_rd_stats;
av1_pick_uniform_tx_size_type_yrd(cpi, x, &tokenonly_rd_stats, bsize,
*best_rd);
@@ -304,10 +300,9 @@ static AOM_INLINE int perform_top_color_palette_search(
BLOCK_SIZE bsize, int dc_mode_cost, const int *data, int *top_colors,
int start_n, int end_n, int step_size, int *last_n_searched,
uint16_t *color_cache, int n_cache, MB_MODE_INFO *best_mbmi,
- uint8_t *best_palette_color_map, int64_t *best_rd, int64_t *best_model_rd,
- int *rate, int *rate_tokenonly, int64_t *distortion, int *skippable,
- int *beat_best_rd, PICK_MODE_CONTEXT *ctx, uint8_t *best_blk_skip,
- uint8_t *tx_type_map) {
+ uint8_t *best_palette_color_map, int64_t *best_rd, int *rate,
+ int *rate_tokenonly, int64_t *distortion, int *skippable, int *beat_best_rd,
+ PICK_MODE_CONTEXT *ctx, uint8_t *best_blk_skip, uint8_t *tx_type_map) {
int centroids[PALETTE_MAX_SIZE];
int n = start_n;
int top_color_winner = end_n;
@@ -320,8 +315,8 @@ static AOM_INLINE int perform_top_color_palette_search(
memcpy(centroids, top_colors, n * sizeof(top_colors[0]));
palette_rd_y(cpi, x, mbmi, bsize, dc_mode_cost, data, centroids, n,
color_cache, n_cache, best_mbmi, best_palette_color_map,
- best_rd, best_model_rd, rate, rate_tokenonly, distortion,
- skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map,
+ best_rd, rate, rate_tokenonly, distortion, skippable,
+ beat_best_rd, ctx, best_blk_skip, tx_type_map,
&beat_best_palette_rd);
*last_n_searched = n;
if (beat_best_palette_rd) {
@@ -345,10 +340,9 @@ static AOM_INLINE int perform_k_means_palette_search(
int upper_bound, int start_n, int end_n, int step_size,
int *last_n_searched, uint16_t *color_cache, int n_cache,
MB_MODE_INFO *best_mbmi, uint8_t *best_palette_color_map, int64_t *best_rd,
- int64_t *best_model_rd, int *rate, int *rate_tokenonly, int64_t *distortion,
- int *skippable, int *beat_best_rd, PICK_MODE_CONTEXT *ctx,
- uint8_t *best_blk_skip, uint8_t *tx_type_map, uint8_t *color_map,
- int data_points) {
+ int *rate, int *rate_tokenonly, int64_t *distortion, int *skippable,
+ int *beat_best_rd, PICK_MODE_CONTEXT *ctx, uint8_t *best_blk_skip,
+ uint8_t *tx_type_map, uint8_t *color_map, int data_points) {
int centroids[PALETTE_MAX_SIZE];
const int max_itr = 50;
int n = start_n;
@@ -366,8 +360,8 @@ static AOM_INLINE int perform_k_means_palette_search(
av1_k_means(data, centroids, color_map, data_points, n, 1, max_itr);
palette_rd_y(cpi, x, mbmi, bsize, dc_mode_cost, data, centroids, n,
color_cache, n_cache, best_mbmi, best_palette_color_map,
- best_rd, best_model_rd, rate, rate_tokenonly, distortion,
- skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map,
+ best_rd, rate, rate_tokenonly, distortion, skippable,
+ beat_best_rd, ctx, best_blk_skip, tx_type_map,
&beat_best_palette_rd);
*last_n_searched = n;
if (beat_best_palette_rd) {
@@ -434,9 +428,9 @@ static AOM_INLINE void fill_data_and_get_bounds(
void av1_rd_pick_palette_intra_sby(
const AV1_COMP *cpi, MACROBLOCK *x, BLOCK_SIZE bsize, int dc_mode_cost,
MB_MODE_INFO *best_mbmi, uint8_t *best_palette_color_map, int64_t *best_rd,
- int64_t *best_model_rd, int *rate, int *rate_tokenonly, int64_t *distortion,
- int *skippable, int *beat_best_rd, PICK_MODE_CONTEXT *ctx,
- uint8_t *best_blk_skip, uint8_t *tx_type_map) {
+ int *rate, int *rate_tokenonly, int64_t *distortion, int *skippable,
+ int *beat_best_rd, PICK_MODE_CONTEXT *ctx, uint8_t *best_blk_skip,
+ uint8_t *tx_type_map) {
MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *const mbmi = xd->mi[0];
assert(!is_inter_block(mbmi));
@@ -450,7 +444,7 @@ void av1_rd_pick_palette_intra_sby(
int block_width, block_height, rows, cols;
av1_get_block_dimensions(bsize, 0, xd, &block_width, &block_height, &rows,
&cols);
- const SequenceHeader *const seq_params = &cpi->common.seq_params;
+ const SequenceHeader *const seq_params = cpi->common.seq_params;
const int is_hbd = seq_params->use_highbitdepth;
const int bit_depth = seq_params->bit_depth;
int unused;
@@ -532,8 +526,8 @@ void av1_rd_pick_palette_intra_sby(
const int top_color_winner = perform_top_color_palette_search(
cpi, x, mbmi, bsize, dc_mode_cost, data, top_colors, min_n, max_n + 1,
step_size, &unused, color_cache, n_cache, best_mbmi,
- best_palette_color_map, best_rd, best_model_rd, rate, rate_tokenonly,
- distortion, skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map);
+ best_palette_color_map, best_rd, rate, rate_tokenonly, distortion,
+ skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map);
// Evaluate neighbors for the winner color (if winner is found) in the
// above coarse search for dominant colors
if (top_color_winner <= max_n) {
@@ -544,18 +538,18 @@ void av1_rd_pick_palette_intra_sby(
perform_top_color_palette_search(
cpi, x, mbmi, bsize, dc_mode_cost, data, top_colors, stage2_min_n,
stage2_max_n + 1, stage2_step_size, &unused, color_cache, n_cache,
- best_mbmi, best_palette_color_map, best_rd, best_model_rd, rate,
- rate_tokenonly, distortion, skippable, beat_best_rd, ctx,
- best_blk_skip, tx_type_map);
+ best_mbmi, best_palette_color_map, best_rd, rate, rate_tokenonly,
+ distortion, skippable, beat_best_rd, ctx, best_blk_skip,
+ tx_type_map);
}
// K-means clustering.
// Perform k-means coarse palette search to find the winner candidate
const int k_means_winner = perform_k_means_palette_search(
cpi, x, mbmi, bsize, dc_mode_cost, data, lower_bound, upper_bound,
min_n, max_n + 1, step_size, &unused, color_cache, n_cache, best_mbmi,
- best_palette_color_map, best_rd, best_model_rd, rate, rate_tokenonly,
- distortion, skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map,
- color_map, rows * cols);
+ best_palette_color_map, best_rd, rate, rate_tokenonly, distortion,
+ skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map, color_map,
+ rows * cols);
// Evaluate neighbors for the winner color (if winner is found) in the
// above coarse search for k-means
if (k_means_winner <= max_n) {
@@ -567,9 +561,8 @@ void av1_rd_pick_palette_intra_sby(
cpi, x, mbmi, bsize, dc_mode_cost, data, lower_bound, upper_bound,
start_n_stage2, end_n_stage2 + 1, step_size_stage2, &unused,
color_cache, n_cache, best_mbmi, best_palette_color_map, best_rd,
- best_model_rd, rate, rate_tokenonly, distortion, skippable,
- beat_best_rd, ctx, best_blk_skip, tx_type_map, color_map,
- rows * cols);
+ rate, rate_tokenonly, distortion, skippable, beat_best_rd, ctx,
+ best_blk_skip, tx_type_map, color_map, rows * cols);
}
} else {
const int max_n = AOMMIN(colors, PALETTE_MAX_SIZE),
@@ -579,17 +572,16 @@ void av1_rd_pick_palette_intra_sby(
perform_top_color_palette_search(
cpi, x, mbmi, bsize, dc_mode_cost, data, top_colors, max_n, min_n - 1,
-1, &last_n_searched, color_cache, n_cache, best_mbmi,
- best_palette_color_map, best_rd, best_model_rd, rate, rate_tokenonly,
- distortion, skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map);
+ best_palette_color_map, best_rd, rate, rate_tokenonly, distortion,
+ skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map);
if (last_n_searched > min_n) {
// Search in ascending order until we get to the previous best
perform_top_color_palette_search(
cpi, x, mbmi, bsize, dc_mode_cost, data, top_colors, min_n,
last_n_searched, 1, &unused, color_cache, n_cache, best_mbmi,
- best_palette_color_map, best_rd, best_model_rd, rate,
- rate_tokenonly, distortion, skippable, beat_best_rd, ctx,
- best_blk_skip, tx_type_map);
+ best_palette_color_map, best_rd, rate, rate_tokenonly, distortion,
+ skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map);
}
// K-means clustering.
if (colors == PALETTE_MIN_SIZE) {
@@ -599,26 +591,25 @@ void av1_rd_pick_palette_intra_sby(
centroids[1] = upper_bound;
palette_rd_y(cpi, x, mbmi, bsize, dc_mode_cost, data, centroids, colors,
color_cache, n_cache, best_mbmi, best_palette_color_map,
- best_rd, best_model_rd, rate, rate_tokenonly, distortion,
- skippable, beat_best_rd, ctx, best_blk_skip, tx_type_map,
- NULL);
+ best_rd, rate, rate_tokenonly, distortion, skippable,
+ beat_best_rd, ctx, best_blk_skip, tx_type_map, NULL);
} else {
// Perform k-means palette search in descending order
last_n_searched = max_n;
perform_k_means_palette_search(
cpi, x, mbmi, bsize, dc_mode_cost, data, lower_bound, upper_bound,
max_n, min_n - 1, -1, &last_n_searched, color_cache, n_cache,
- best_mbmi, best_palette_color_map, best_rd, best_model_rd, rate,
- rate_tokenonly, distortion, skippable, beat_best_rd, ctx,
- best_blk_skip, tx_type_map, color_map, rows * cols);
+ best_mbmi, best_palette_color_map, best_rd, rate, rate_tokenonly,
+ distortion, skippable, beat_best_rd, ctx, best_blk_skip,
+ tx_type_map, color_map, rows * cols);
if (last_n_searched > min_n) {
// Search in ascending order until we get to the previous best
perform_k_means_palette_search(
cpi, x, mbmi, bsize, dc_mode_cost, data, lower_bound, upper_bound,
min_n, last_n_searched, 1, &unused, color_cache, n_cache,
- best_mbmi, best_palette_color_map, best_rd, best_model_rd, rate,
- rate_tokenonly, distortion, skippable, beat_best_rd, ctx,
- best_blk_skip, tx_type_map, color_map, rows * cols);
+ best_mbmi, best_palette_color_map, best_rd, rate, rate_tokenonly,
+ distortion, skippable, beat_best_rd, ctx, best_blk_skip,
+ tx_type_map, color_map, rows * cols);
}
}
}
@@ -645,7 +636,7 @@ void av1_rd_pick_palette_intra_sbuv(const AV1_COMP *cpi, MACROBLOCK *x,
mbmi->bsize));
PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
const BLOCK_SIZE bsize = mbmi->bsize;
- const SequenceHeader *const seq_params = &cpi->common.seq_params;
+ const SequenceHeader *const seq_params = cpi->common.seq_params;
int this_rate;
int64_t this_rd;
int colors_u, colors_v, colors;
@@ -737,7 +728,7 @@ void av1_rd_pick_palette_intra_sbuv(const AV1_COMP *cpi, MACROBLOCK *x,
}
av1_k_means(data, centroids, color_map, rows * cols, n, 2, max_itr);
optimize_palette_colors(color_cache, n_cache, n, 2, centroids,
- cpi->common.seq_params.bit_depth);
+ cpi->common.seq_params->bit_depth);
// Sort the U channel colors in ascending order.
for (i = 0; i < 2 * (n - 1); i += 2) {
int min_idx = i;
@@ -811,7 +802,7 @@ void av1_restore_uv_color_map(const AV1_COMP *cpi, MACROBLOCK *x) {
for (r = 0; r < rows; ++r) {
for (c = 0; c < cols; ++c) {
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
data[(r * cols + c) * 2] = src_u16[r * src_stride + c];
data[(r * cols + c) * 2 + 1] = src_v16[r * src_stride + c];
} else {
diff --git a/third_party/libaom/source/libaom/av1/encoder/palette.h b/third_party/libaom/source/libaom/av1/encoder/palette.h
index 85af473892..7d9a72f61d 100644
--- a/third_party/libaom/source/libaom/av1/encoder/palette.h
+++ b/third_party/libaom/source/libaom/av1/encoder/palette.h
@@ -185,10 +185,9 @@ int av1_palette_color_cost_uv(const PALETTE_MODE_INFO *const pmi,
void av1_rd_pick_palette_intra_sby(
const struct AV1_COMP *cpi, struct macroblock *x, BLOCK_SIZE bsize,
int dc_mode_cost, MB_MODE_INFO *best_mbmi, uint8_t *best_palette_color_map,
- int64_t *best_rd, int64_t *best_model_rd, int *rate, int *rate_tokenonly,
- int64_t *distortion, int *skippable, int *beat_best_rd,
- struct PICK_MODE_CONTEXT *ctx, uint8_t *best_blk_skip,
- uint8_t *tx_type_map);
+ int64_t *best_rd, int *rate, int *rate_tokenonly, int64_t *distortion,
+ int *skippable, int *beat_best_rd, struct PICK_MODE_CONTEXT *ctx,
+ uint8_t *best_blk_skip, uint8_t *tx_type_map);
/*!\brief Search for the best palette in the chroma plane.
*
diff --git a/third_party/libaom/source/libaom/av1/encoder/partition_search.c b/third_party/libaom/source/libaom/av1/encoder/partition_search.c
index 5d54a80b36..c5bfaf684f 100644
--- a/third_party/libaom/source/libaom/av1/encoder/partition_search.c
+++ b/third_party/libaom/source/libaom/av1/encoder/partition_search.c
@@ -25,6 +25,7 @@
#include "av1/encoder/encodemv.h"
#include "av1/encoder/motion_search_facade.h"
#include "av1/encoder/partition_search.h"
+#include "av1/encoder/partition_strategy.h"
#include "av1/encoder/reconinter_enc.h"
#include "av1/encoder/tokenize.h"
#include "av1/encoder/var_based_part.h"
@@ -34,6 +35,48 @@
#include "av1/encoder/tune_vmaf.h"
#endif
+void av1_reset_part_sf(PARTITION_SPEED_FEATURES *part_sf) {
+ part_sf->partition_search_type = SEARCH_PARTITION;
+ part_sf->less_rectangular_check_level = 0;
+ part_sf->use_square_partition_only_threshold = BLOCK_128X128;
+ part_sf->auto_max_partition_based_on_simple_motion = NOT_IN_USE;
+ part_sf->default_max_partition_size = BLOCK_LARGEST;
+ part_sf->default_min_partition_size = BLOCK_4X4;
+ part_sf->adjust_var_based_rd_partitioning = 0;
+ part_sf->allow_partition_search_skip = 0;
+ part_sf->max_intra_bsize = BLOCK_LARGEST;
+ // This setting only takes effect when partition_search_type is set
+ // to FIXED_PARTITION.
+ part_sf->fixed_partition_size = BLOCK_16X16;
+ // Recode loop tolerance %.
+ part_sf->partition_search_breakout_dist_thr = 0;
+ part_sf->partition_search_breakout_rate_thr = 0;
+ part_sf->prune_ext_partition_types_search_level = 0;
+ part_sf->prune_part4_search = 0;
+ part_sf->ml_prune_partition = 0;
+ part_sf->ml_early_term_after_part_split_level = 0;
+ for (int i = 0; i < PARTITION_BLOCK_SIZES; ++i) {
+ part_sf->ml_partition_search_breakout_thresh[i] =
+ -1; // -1 means not enabled.
+ }
+ part_sf->simple_motion_search_prune_agg = 0;
+ part_sf->simple_motion_search_split = 0;
+ part_sf->simple_motion_search_prune_rect = 0;
+ part_sf->simple_motion_search_early_term_none = 0;
+ part_sf->simple_motion_search_reduce_search_steps = 0;
+ part_sf->intra_cnn_split = 0;
+ part_sf->ext_partition_eval_thresh = BLOCK_8X8;
+ part_sf->prune_ext_part_using_split_info = 0;
+ part_sf->prune_rectangular_split_based_on_qidx = 0;
+ part_sf->early_term_after_none_split = 0;
+ part_sf->ml_predict_breakout_level = 0;
+ part_sf->prune_sub_8x8_partition_level = 0;
+ part_sf->simple_motion_search_rect_split = 0;
+ part_sf->reuse_prev_rd_results_for_part_ab = 0;
+ part_sf->reuse_best_prediction_for_part_ab = 0;
+ part_sf->use_best_rd_for_pruning = 0;
+}
+
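av1_reset_part_sf() above restores every partition speed feature to its default value. A hypothetical call site, shown only as a sketch (the wrapper name and the choice of a fixed 16x16 partitioning are illustrative, not part of this patch):

/* Hypothetical helper: reset partition speed features, then pin the encoder to
   a fixed 16x16 partitioning. */
static void use_fixed_16x16_partitioning(AV1_COMP *cpi) {
  av1_reset_part_sf(&cpi->sf.part_sf);
  cpi->sf.part_sf.partition_search_type = FIXED_PARTITION;
  cpi->sf.part_sf.fixed_partition_size = BLOCK_16X16;
}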
static void update_txfm_count(MACROBLOCK *x, MACROBLOCKD *xd,
FRAME_COUNTS *counts, TX_SIZE tx_size, int depth,
int blk_row, int blk_col,
@@ -151,11 +194,14 @@ static void set_txfm_context(MACROBLOCKD *xd, TX_SIZE tx_size, int blk_row,
const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
const int bsw = tx_size_wide_unit[sub_txs];
const int bsh = tx_size_high_unit[sub_txs];
- for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh) {
- for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
- const int offsetr = blk_row + row;
+ const int row_end =
+ AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
+ const int col_end =
+ AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
+ for (int row = 0; row < row_end; row += bsh) {
+ const int offsetr = blk_row + row;
+ for (int col = 0; col < col_end; col += bsw) {
const int offsetc = blk_col + col;
- if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
set_txfm_context(xd, sub_txs, offsetr, offsetc);
}
}
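The loop rewrite above clamps the loop bounds to the picture once instead of testing every sub-block position inside the loop. The same pattern in isolation (all names here are placeholders, not libaom symbols):

#define MIN2(a, b) ((a) < (b) ? (a) : (b))
/* Visit only the sub-block positions that lie inside the picture. */
static void visit_in_picture(int blk_row, int blk_col, int units_high,
                             int units_wide, int max_high, int max_wide,
                             int step_h, int step_w, void (*visit)(int, int)) {
  const int row_end = MIN2(units_high, max_high - blk_row);
  const int col_end = MIN2(units_wide, max_wide - blk_col);
  for (int r = 0; r < row_end; r += step_h)
    for (int c = 0; c < col_end; c += step_w) visit(blk_row + r, blk_col + c);
}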
@@ -281,7 +327,7 @@ static void encode_superblock(const AV1_COMP *const cpi, TileDataEnc *tile_data,
xd->block_ref_scale_factors[ref], num_planes);
}
const int start_plane = (cpi->sf.rt_sf.reuse_inter_pred_nonrd &&
- cm->seq_params.bit_depth == AOM_BITS_8)
+ cm->seq_params->bit_depth == AOM_BITS_8)
? 1
: 0;
av1_enc_build_inter_predictor(cm, xd, mi_row, mi_col, NULL, bsize,
@@ -395,8 +441,8 @@ static void encode_superblock(const AV1_COMP *const cpi, TileDataEnc *tile_data,
if (!dry_run) {
if (cpi->oxcf.pass == 0 && cpi->svc.temporal_layer_id == 0 &&
cpi->sf.rt_sf.use_temporal_noise_estimate &&
- (!cpi->use_svc ||
- (cpi->use_svc &&
+ (!cpi->ppi->use_svc ||
+ (cpi->ppi->use_svc &&
!cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame &&
cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1)))
update_zeromv_cnt(cpi, mbmi, mi_row, mi_col, bsize);
@@ -590,7 +636,7 @@ static void pick_sb_modes(AV1_COMP *const cpi, TileDataEnc *tile_data,
RD_STATS *rd_cost, PARTITION_TYPE partition,
BLOCK_SIZE bsize, PICK_MODE_CONTEXT *ctx,
RD_STATS best_rd) {
- if (best_rd.rdcost < 0) {
+ if (cpi->sf.part_sf.use_best_rd_for_pruning && best_rd.rdcost < 0) {
ctx->rd_stats.rdcost = INT64_MAX;
ctx->rd_stats.skip_txfm = 0;
av1_invalid_rd_stats(rd_cost);
@@ -599,7 +645,8 @@ static void pick_sb_modes(AV1_COMP *const cpi, TileDataEnc *tile_data,
av1_set_offsets(cpi, &tile_data->tile_info, x, mi_row, mi_col, bsize);
- if (ctx->rd_mode_is_ready) {
+ if (cpi->sf.part_sf.reuse_prev_rd_results_for_part_ab &&
+ ctx->rd_mode_is_ready) {
assert(ctx->mic.bsize == bsize);
assert(ctx->mic.partition == partition);
rd_cost->rate = ctx->rd_stats.rate;
@@ -672,6 +719,13 @@ static void pick_sb_modes(AV1_COMP *const cpi, TileDataEnc *tile_data,
av1_set_error_per_bit(&x->errorperbit, x->rdmult);
av1_rd_cost_update(x->rdmult, &best_rd);
+ // If best_rd.rdcost is set to INT64_MAX, the encoder will not use any previous
+ // rdcost information for the following mode search. Disabling this feature may
+ // give some coding gain at the cost of encoder slowdown.
+ if (!cpi->sf.part_sf.use_best_rd_for_pruning) {
+ av1_invalid_rd_stats(&best_rd);
+ }
+
// Find best coding mode & reconstruct the MB so it is available
// as a predictor for MBs that follow in the SB
if (frame_is_intra_only(cm)) {
@@ -750,11 +804,11 @@ static void update_stats(const AV1_COMMON *const cm, ThreadData *td) {
#if CONFIG_ENTROPY_STATS
// delta quant applies to both intra and inter
const int super_block_upper_left =
- ((xd->mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
- ((xd->mi_col & (cm->seq_params.mib_size - 1)) == 0);
+ ((xd->mi_row & (cm->seq_params->mib_size - 1)) == 0) &&
+ ((xd->mi_col & (cm->seq_params->mib_size - 1)) == 0);
const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
if (delta_q_info->delta_q_present_flag &&
- (bsize != cm->seq_params.sb_size || !mbmi->skip_txfm) &&
+ (bsize != cm->seq_params->sb_size || !mbmi->skip_txfm) &&
super_block_upper_left) {
const int dq = (mbmi->current_qindex - xd->current_base_qindex) /
delta_q_info->delta_q_res;
@@ -798,10 +852,16 @@ static void update_stats(const AV1_COMMON *const cm, ThreadData *td) {
}
if (av1_allow_intrabc(cm)) {
- update_cdf(fc->intrabc_cdf, is_intrabc_block(mbmi), 2);
+ const int is_intrabc = is_intrabc_block(mbmi);
+ update_cdf(fc->intrabc_cdf, is_intrabc, 2);
#if CONFIG_ENTROPY_STATS
- ++td->counts->intrabc[is_intrabc_block(mbmi)];
+ ++td->counts->intrabc[is_intrabc];
#endif // CONFIG_ENTROPY_STATS
+ if (is_intrabc) {
+ const int_mv dv_ref = x->mbmi_ext_frame->ref_mv_stack[0].this_mv;
+ av1_update_mv_stats(&mbmi->mv[0].as_mv, &dv_ref.as_mv, &fc->ndvc,
+ MV_SUBPEL_NONE);
+ }
}
if (frame_is_intra_only(cm) || mbmi->skip_mode) return;
@@ -947,7 +1007,7 @@ static void update_stats(const AV1_COMMON *const cm, ThreadData *td) {
}
}
- if (cm->seq_params.enable_interintra_compound &&
+ if (cm->seq_params->enable_interintra_compound &&
is_interintra_allowed(mbmi)) {
const int bsize_group = size_group_lookup[bsize];
if (mbmi->ref_frame[1] == INTRA_FRAME) {
@@ -1008,7 +1068,7 @@ static void update_stats(const AV1_COMMON *const cm, ThreadData *td) {
mbmi->motion_mode == SIMPLE_TRANSLATION);
const int masked_compound_used = is_any_masked_compound_used(bsize) &&
- cm->seq_params.enable_masked_compound;
+ cm->seq_params->enable_masked_compound;
if (masked_compound_used) {
const int comp_group_idx_ctx = get_comp_group_idx_context(xd);
#if CONFIG_ENTROPY_STATS
@@ -1053,7 +1113,7 @@ static void update_stats(const AV1_COMMON *const cm, ThreadData *td) {
if (inter_block && cm->features.interp_filter == SWITCHABLE &&
mbmi->motion_mode != WARPED_CAUSAL &&
!is_nontrans_global_motion(xd, mbmi)) {
- update_filter_type_cdf(xd, mbmi, cm->seq_params.enable_dual_filter);
+ update_filter_type_cdf(xd, mbmi, cm->seq_params->enable_dual_filter);
}
if (inter_block &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
@@ -1160,8 +1220,8 @@ static void encode_b(const AV1_COMP *const cpi, TileDataEnc *tile_data,
TileInfo *const tile = &tile_data->tile_info;
MACROBLOCK *const x = &td->mb;
MACROBLOCKD *xd = &x->e_mbd;
- const int subsampling_x = cm->seq_params.subsampling_x;
- const int subsampling_y = cm->seq_params.subsampling_y;
+ const int subsampling_x = cm->seq_params->subsampling_x;
+ const int subsampling_y = cm->seq_params->subsampling_y;
av1_set_offsets_without_segment_id(cpi, tile, x, mi_row, mi_col, bsize);
const int origin_mult = x->rdmult;
@@ -1174,9 +1234,9 @@ static void encode_b(const AV1_COMP *const cpi, TileDataEnc *tile_data,
set_cb_offsets(x->mbmi_ext_frame->cb_offset, x->cb_offset[PLANE_TYPE_Y],
x->cb_offset[PLANE_TYPE_UV]);
assert(x->cb_offset[PLANE_TYPE_Y] <
- (1 << num_pels_log2_lookup[cpi->common.seq_params.sb_size]));
+ (1 << num_pels_log2_lookup[cpi->common.seq_params->sb_size]));
assert(x->cb_offset[PLANE_TYPE_UV] <
- ((1 << num_pels_log2_lookup[cpi->common.seq_params.sb_size]) >>
+ ((1 << num_pels_log2_lookup[cpi->common.seq_params->sb_size]) >>
(subsampling_x + subsampling_y)));
}
@@ -1184,7 +1244,7 @@ static void encode_b(const AV1_COMP *const cpi, TileDataEnc *tile_data,
if (!dry_run) {
update_cb_offsets(x, bsize, subsampling_x, subsampling_y);
- if (bsize == cpi->common.seq_params.sb_size && mbmi->skip_txfm == 1 &&
+ if (bsize == cpi->common.seq_params->sb_size && mbmi->skip_txfm == 1 &&
cm->delta_q_info.delta_lf_present_flag) {
const int frame_lf_count =
av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
@@ -1202,11 +1262,11 @@ static void encode_b(const AV1_COMP *const cpi, TileDataEnc *tile_data,
// delta quant applies to both intra and inter
const int super_block_upper_left =
- ((mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
- ((mi_col & (cm->seq_params.mib_size - 1)) == 0);
+ ((mi_row & (cm->seq_params->mib_size - 1)) == 0) &&
+ ((mi_col & (cm->seq_params->mib_size - 1)) == 0);
const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
if (delta_q_info->delta_q_present_flag &&
- (bsize != cm->seq_params.sb_size || !mbmi->skip_txfm) &&
+ (bsize != cm->seq_params->sb_size || !mbmi->skip_txfm) &&
super_block_upper_left) {
xd->current_base_qindex = mbmi->current_qindex;
if (delta_q_info->delta_lf_present_flag) {
@@ -1753,11 +1813,11 @@ void av1_rd_use_partition(AV1_COMP *cpi, ThreadData *td, TileDataEnc *tile_data,
// We must have chosen a partitioning and encoding or we'll fail later on.
// No other opportunities for success.
- if (bsize == cm->seq_params.sb_size)
+ if (bsize == cm->seq_params->sb_size)
assert(chosen_rdc.rate < INT_MAX && chosen_rdc.dist < INT64_MAX);
if (do_recon) {
- if (bsize == cm->seq_params.sb_size) {
+ if (bsize == cm->seq_params->sb_size) {
// NOTE: To get estimate for rate due to the tokens, use:
// int rate_coeffs = 0;
// encode_sb(cpi, td, tile_data, tp, mi_row, mi_col, DRY_RUN_COSTCOEFFS,
@@ -1792,15 +1852,15 @@ static void encode_b_nonrd(const AV1_COMP *const cpi, TileDataEnc *tile_data,
// Nonrd pickmode does not currently support second/combined reference.
assert(!has_second_ref(mbmi));
av1_update_state(cpi, td, ctx, mi_row, mi_col, bsize, dry_run);
- const int subsampling_x = cpi->common.seq_params.subsampling_x;
- const int subsampling_y = cpi->common.seq_params.subsampling_y;
+ const int subsampling_x = cpi->common.seq_params->subsampling_x;
+ const int subsampling_y = cpi->common.seq_params->subsampling_y;
if (!dry_run) {
set_cb_offsets(x->mbmi_ext_frame->cb_offset, x->cb_offset[PLANE_TYPE_Y],
x->cb_offset[PLANE_TYPE_UV]);
assert(x->cb_offset[PLANE_TYPE_Y] <
- (1 << num_pels_log2_lookup[cpi->common.seq_params.sb_size]));
+ (1 << num_pels_log2_lookup[cpi->common.seq_params->sb_size]));
assert(x->cb_offset[PLANE_TYPE_UV] <
- ((1 << num_pels_log2_lookup[cpi->common.seq_params.sb_size]) >>
+ ((1 << num_pels_log2_lookup[cpi->common.seq_params->sb_size]) >>
(subsampling_x + subsampling_y)));
}
encode_superblock(cpi, tile_data, td, tp, dry_run, bsize, rate);
@@ -1808,6 +1868,8 @@ static void encode_b_nonrd(const AV1_COMP *const cpi, TileDataEnc *tile_data,
update_cb_offsets(x, bsize, subsampling_x, subsampling_y);
if (tile_data->allow_update_cdf) update_stats(&cpi->common, td);
}
+ if (cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ && mbmi->skip_txfm)
+ av1_cyclic_reset_segment_skip(cpi, x, mi_row, mi_col, bsize);
// TODO(Ravi/Remya): Move this copy function to a better logical place
// This function will copy the best mode information from block
// level (x->mbmi_ext) to frame level (cpi->mbmi_ext_info.frame_base). This
@@ -1889,8 +1951,8 @@ static void pick_sb_modes_nonrd(AV1_COMP *const cpi, TileDataEnc *tile_data,
int i;
wait_for_top_right_sb(&cpi->mt_info.enc_row_mt, &tile_data->row_mt_sync,
- &tile_data->tile_info, cm->seq_params.sb_size,
- cm->seq_params.mib_size_log2, bsize, mi_row, mi_col);
+ &tile_data->tile_info, cm->seq_params->sb_size,
+ cm->seq_params->mib_size_log2, bsize, mi_row, mi_col);
#if CONFIG_COLLECT_COMPONENT_TIMING
start_timing(cpi, rd_pick_sb_modes_time);
@@ -1947,6 +2009,30 @@ static void pick_sb_modes_nonrd(AV1_COMP *const cpi, TileDataEnc *tile_data,
end_timing(cpi, av1_rd_pick_inter_mode_sb_time);
#endif
}
+ if (cpi->sf.rt_sf.skip_cdef_sb) {
+ // Find the corresponding 64x64 block. It'll be the 128x128 block if that's
+ // the block size.
+ const int mi_row_sb = mi_row - mi_row % MI_SIZE_64X64;
+ const int mi_col_sb = mi_col - mi_col % MI_SIZE_64X64;
+ MB_MODE_INFO **mi_sb =
+ cm->mi_params.mi_grid_base +
+ get_mi_grid_idx(&cm->mi_params, mi_row_sb, mi_col_sb);
+ // Do not skip if intra or new mv is picked.
+ const int skip = mi_sb[0]->skip_cdef_curr_sb &&
+ !(mbmi->mode < INTRA_MODES || mbmi->mode == NEWMV);
+ // If 128x128 block is used, we need to set the flag for all 4 64x64 sub
+ // "blocks".
+ const int block64_in_sb = (bsize == BLOCK_128X128) ? 2 : 1;
+ for (int r = 0; r < block64_in_sb; ++r) {
+ for (int c = 0; c < block64_in_sb; ++c) {
+ const int idx_in_sb =
+ r * MI_SIZE_64X64 * cm->mi_params.mi_stride + c * MI_SIZE_64X64;
+ if (mi_sb[idx_in_sb]) mi_sb[idx_in_sb]->skip_cdef_curr_sb = skip;
+ }
+ }
+ // Store in the pickmode context.
+ ctx->mic.skip_cdef_curr_sb = mi_sb[0]->skip_cdef_curr_sb;
+ }
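The alignment above works in mode-info (4-sample) units; assuming MI_SIZE_64X64 equals 16, a quick worked example of the indexing:

/* mi_row = 22, mi_col = 37  ->  mi_row_sb = 22 - (22 % 16) = 16 and
   mi_col_sb = 37 - (37 % 16) = 32, so the flag lives at the 64x64-aligned grid
   position (16, 32). For a 128x128 superblock the loop then writes the flag to
   the 2x2 group of 64x64 entries starting at that position. */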
x->rdmult = orig_rdmult;
ctx->rd_stats.rate = rd_cost->rate;
ctx->rd_stats.dist = rd_cost->dist;
@@ -2301,15 +2387,15 @@ static bool rd_test_partition3(AV1_COMP *const cpi, ThreadData *td,
// Loop over sub-partitions in AB partition type.
for (int i = 0; i < SUB_PARTITIONS_AB; i++) {
if (mode_cache && mode_cache[i]) {
- x->use_intermode_cache = 1;
- x->intermode_cache = mode_cache[i];
+ x->use_mb_mode_cache = 1;
+ x->mb_mode_cache = mode_cache[i];
}
const int mode_search_success =
rd_try_subblock(cpi, td, tile_data, tp, i == SUB_PARTITIONS_AB - 1,
ab_mi_pos[i][0], ab_mi_pos[i][1], ab_subsize[i],
*best_rdc, &sum_rdc, partition, ctxs[i]);
- x->use_intermode_cache = 0;
- x->intermode_cache = NULL;
+ x->use_mb_mode_cache = 0;
+ x->mb_mode_cache = NULL;
if (!mode_search_success) {
return false;
}
@@ -2629,7 +2715,8 @@ static void rectangular_partition_search(
TokenExtra **tp, MACROBLOCK *x, PC_TREE *pc_tree,
RD_SEARCH_MACROBLOCK_CONTEXT *x_ctx,
PartitionSearchState *part_search_state, RD_STATS *best_rdc,
- RD_RECT_PART_WIN_INFO *rect_part_win_info) {
+ RD_RECT_PART_WIN_INFO *rect_part_win_info, const RECT_PART_TYPE start_type,
+ const RECT_PART_TYPE end_type) {
const AV1_COMMON *const cm = &cpi->common;
PartitionBlkParams blk_params = part_search_state->part_blk_params;
RD_STATS *sum_rdc = &part_search_state->sum_rdc;
@@ -2663,7 +2750,7 @@ static void rectangular_partition_search(
};
// Loop over rectangular partition types.
- for (RECT_PART_TYPE i = HORZ; i < NUM_RECT_PARTS; i++) {
+ for (RECT_PART_TYPE i = start_type; i <= end_type; i++) {
assert(IMPLIES(!cpi->oxcf.part_cfg.enable_rect_partitions,
!part_search_state->partition_rect_allowed[i]));
@@ -2879,7 +2966,8 @@ static void ab_partitions_search(
TokenExtra **tp, MACROBLOCK *x, RD_SEARCH_MACROBLOCK_CONTEXT *x_ctx,
PC_TREE *pc_tree, PartitionSearchState *part_search_state,
RD_STATS *best_rdc, RD_RECT_PART_WIN_INFO *rect_part_win_info,
- int pb_source_variance, int ext_partition_allowed) {
+ int pb_source_variance, int ext_partition_allowed,
+ const AB_PART_TYPE start_type, const AB_PART_TYPE end_type) {
PartitionBlkParams blk_params = part_search_state->part_blk_params;
const int mi_row = blk_params.mi_row;
const int mi_col = blk_params.mi_col;
@@ -2888,9 +2976,9 @@ static void ab_partitions_search(
int ab_partitions_allowed[NUM_AB_PARTS] = { 1, 1, 1, 1 };
// Prune AB partitions
av1_prune_ab_partitions(
- cpi, x, pc_tree, bsize, pb_source_variance, best_rdc->rdcost,
- part_search_state->rect_part_rd, part_search_state->split_rd,
- rect_part_win_info, ext_partition_allowed,
+ cpi, x, pc_tree, bsize, mi_row, mi_col, pb_source_variance,
+ best_rdc->rdcost, part_search_state->rect_part_rd,
+ part_search_state->split_rd, rect_part_win_info, ext_partition_allowed,
part_search_state->partition_rect_allowed[HORZ],
part_search_state->partition_rect_allowed[VERT],
&ab_partitions_allowed[HORZ_A], &ab_partitions_allowed[HORZ_B],
@@ -2946,7 +3034,7 @@ static void ab_partitions_search(
};
// Loop over AB partition types.
- for (AB_PART_TYPE ab_part_type = 0; ab_part_type < NUM_AB_PARTS;
+ for (AB_PART_TYPE ab_part_type = start_type; ab_part_type <= end_type;
ab_part_type++) {
const PARTITION_TYPE part_type = ab_part_type + PARTITION_HORZ_A;
@@ -2956,33 +3044,35 @@ static void ab_partitions_search(
continue;
blk_params.subsize = get_partition_subsize(bsize, part_type);
- for (int i = 0; i < SUB_PARTITIONS_AB; i++) {
- // Set AB partition context.
- cur_part_ctxs[ab_part_type][i] = av1_alloc_pmc(
- cpi, ab_subsize[ab_part_type][i], &td->shared_coeff_buf);
- // Set mode as not ready.
- cur_part_ctxs[ab_part_type][i]->rd_mode_is_ready = 0;
- }
+ if (cpi->sf.part_sf.reuse_prev_rd_results_for_part_ab) {
+ for (int i = 0; i < SUB_PARTITIONS_AB; i++) {
+ // Set AB partition context.
+ cur_part_ctxs[ab_part_type][i] = av1_alloc_pmc(
+ cpi, ab_subsize[ab_part_type][i], &td->shared_coeff_buf);
+ // Set mode as not ready.
+ cur_part_ctxs[ab_part_type][i]->rd_mode_is_ready = 0;
+ }
- // We can copy directly the mode search results if we have already searched
- // the current block and the contexts match.
- if (is_ctx_ready[ab_part_type][0]) {
- av1_copy_tree_context(cur_part_ctxs[ab_part_type][0],
- mode_srch_ctx[ab_part_type][0][0]);
- cur_part_ctxs[ab_part_type][0]->mic.partition = part_type;
- cur_part_ctxs[ab_part_type][0]->rd_mode_is_ready = 1;
- if (is_ctx_ready[ab_part_type][1]) {
- av1_copy_tree_context(cur_part_ctxs[ab_part_type][1],
- mode_srch_ctx[ab_part_type][1][0]);
- cur_part_ctxs[ab_part_type][1]->mic.partition = part_type;
- cur_part_ctxs[ab_part_type][1]->rd_mode_is_ready = 1;
+ // We can copy directly the mode search results if we have already
+ // searched the current block and the contexts match.
+ if (is_ctx_ready[ab_part_type][0]) {
+ av1_copy_tree_context(cur_part_ctxs[ab_part_type][0],
+ mode_srch_ctx[ab_part_type][0][0]);
+ cur_part_ctxs[ab_part_type][0]->mic.partition = part_type;
+ cur_part_ctxs[ab_part_type][0]->rd_mode_is_ready = 1;
+ if (is_ctx_ready[ab_part_type][1]) {
+ av1_copy_tree_context(cur_part_ctxs[ab_part_type][1],
+ mode_srch_ctx[ab_part_type][1][0]);
+ cur_part_ctxs[ab_part_type][1]->mic.partition = part_type;
+ cur_part_ctxs[ab_part_type][1]->rd_mode_is_ready = 1;
+ }
}
}
// Even if the contexts don't match, we can still speed up by reusing the
// previous prediction mode.
const MB_MODE_INFO *mode_cache[3] = { NULL, NULL, NULL };
- if (cpi->sf.inter_sf.reuse_best_prediction_for_part_ab) {
+ if (cpi->sf.part_sf.reuse_best_prediction_for_part_ab) {
set_mode_cache_for_partition_ab(mode_cache, pc_tree, ab_part_type);
}
@@ -3180,21 +3270,6 @@ static void prune_4_way_partition_search(
part4_search_allowed);
}
-// Set PARTITION_NONE allowed flag.
-static AOM_INLINE void set_part_none_allowed_flag(
- AV1_COMP *const cpi, PartitionSearchState *part_search_state) {
- PartitionBlkParams blk_params = part_search_state->part_blk_params;
- if ((blk_params.width <= blk_params.min_partition_size_1d) &&
- blk_params.has_rows && blk_params.has_cols)
- part_search_state->partition_none_allowed = 1;
- assert(part_search_state->terminate_partition_search == 0);
-
- // Set PARTITION_NONE for screen content.
- if (cpi->use_screen_content_tools)
- part_search_state->partition_none_allowed =
- blk_params.has_rows && blk_params.has_cols;
-}
-
// Set params needed for PARTITION_NONE search.
static void set_none_partition_params(const AV1_COMP *const cpi, ThreadData *td,
MACROBLOCK *x, PC_TREE *pc_tree,
@@ -3247,11 +3322,10 @@ static void prune_partitions_after_none(AV1_COMP *const cpi, MACROBLOCK *x,
bsize <= cpi->sf.part_sf.use_square_partition_only_threshold &&
bsize > BLOCK_4X4 && cpi->sf.part_sf.ml_predict_breakout_level >= 1;
if (use_ml_based_breakout) {
- if (av1_ml_predict_breakout(cpi, bsize, x, this_rdc, *pb_source_variance,
- xd->bd)) {
- part_search_state->do_square_split = 0;
- part_search_state->do_rectangular_split = 0;
- }
+ av1_ml_predict_breakout(cpi, bsize, x, this_rdc, blk_params,
+ *pb_source_variance, xd->bd,
+ &part_search_state->do_square_split,
+ &part_search_state->do_rectangular_split);
}
// Adjust dist breakout threshold according to the partition size.
@@ -3329,10 +3403,11 @@ static void prune_partitions_after_split(
!part_search_state->terminate_partition_search) {
av1_setup_src_planes(x, cpi->source, mi_row, mi_col, av1_num_planes(cm),
bsize);
- av1_ml_prune_rect_partition(
- cpi, x, bsize, best_rdc->rdcost, part_search_state->none_rd,
- part_search_state->split_rd, &part_search_state->prune_rect_part[HORZ],
- &part_search_state->prune_rect_part[VERT]);
+ av1_ml_prune_rect_partition(cpi, x, bsize, mi_row, mi_col, best_rdc->rdcost,
+ part_search_state->none_rd,
+ part_search_state->split_rd,
+ &part_search_state->prune_rect_part[HORZ],
+ &part_search_state->prune_rect_part[VERT]);
}
}
@@ -3351,12 +3426,11 @@ static void none_partition_search(
const BLOCK_SIZE bsize = blk_params.bsize;
assert(bsize < BLOCK_SIZES_ALL);
- // Set PARTITION_NONE allowed flag.
- set_part_none_allowed_flag(cpi, part_search_state);
if (!part_search_state->partition_none_allowed) return;
int pt_cost = 0;
RD_STATS best_remain_rdcost;
+ av1_invalid_rd_stats(&best_remain_rdcost);
// Set PARTITION_NONE context and cost.
set_none_partition_params(cpi, td, x, pc_tree, part_search_state,
@@ -3402,7 +3476,7 @@ static void none_partition_search(
if (cpi->sf.inter_sf.prune_ref_frame_for_rect_partitions) {
const int ref_type = av1_ref_frame_type(pc_tree->none->mic.ref_frame);
av1_update_picked_ref_frames_mask(
- x, ref_type, bsize, cm->seq_params.mib_size, mi_row, mi_col);
+ x, ref_type, bsize, cm->seq_params->mib_size, mi_row, mi_col);
}
// Calculate the total cost and update the best partition.
@@ -3553,6 +3627,376 @@ static void split_partition_search(
av1_restore_context(x, x_ctx, mi_row, mi_col, bsize, av1_num_planes(cm));
}
+// The max number of nodes in the partition tree.
+// The number of leaf nodes is (128x128) / (4x4) = 1024.
+// The number of all possible parent nodes is 1 + 2 + ... + 512 = 1023.
+#define NUM_NODES 2048
+
+static void write_partition_tree(AV1_COMP *const cpi,
+ const PC_TREE *const pc_tree,
+ const BLOCK_SIZE bsize, const int mi_row,
+ const int mi_col) {
+ (void)mi_row;
+ (void)mi_col;
+ const char *path = cpi->oxcf.partition_info_path;
+ char filename[256];
+ snprintf(filename, sizeof(filename), "%s/partition_tree_sb%d_c%d", path,
+ cpi->sb_counter, 0);
+ ++cpi->sb_counter;
+ FILE *pfile = fopen(filename, "w");
+ fprintf(pfile, "%d", bsize);
+
+ // Write partition type with BFS order.
+ const PC_TREE *tree_node_queue[NUM_NODES] = { NULL };
+ int q_idx = 0;
+ int depth = 0;
+ int last_idx = 1;
+ int num_nodes = 1;
+
+ // First traversal to get number of leaf nodes and depth.
+ tree_node_queue[q_idx] = pc_tree;
+ while (num_nodes > 0) {
+ const PC_TREE *node = tree_node_queue[q_idx];
+ if (node->partitioning == PARTITION_SPLIT) {
+ for (int i = 0; i < 4; ++i) {
+ tree_node_queue[last_idx] = node->split[i];
+ ++last_idx;
+ }
+ ++depth;
+ num_nodes += 4;
+ }
+ --num_nodes;
+ ++q_idx;
+ }
+ const int num_leafs = last_idx;
+ fprintf(pfile, ",%d,%d", num_leafs, /*num_configs=*/1);
+
+ // Write partitions for each node.
+ q_idx = 0;
+ depth = 0;
+ last_idx = 1;
+ num_nodes = 1;
+ tree_node_queue[q_idx] = pc_tree;
+ while (num_nodes > 0) {
+ const PC_TREE *node = tree_node_queue[q_idx];
+ fprintf(pfile, ",%d", node->partitioning);
+ if (node->partitioning == PARTITION_SPLIT) {
+ for (int i = 0; i < 4; ++i) {
+ tree_node_queue[last_idx] = node->split[i];
+ ++last_idx;
+ }
+ ++depth;
+ num_nodes += 4;
+ }
+ --num_nodes;
+ ++q_idx;
+ }
+ fprintf(pfile, "\n");
+
+ fclose(pfile);
+}
+
+static void verify_write_partition_tree(const AV1_COMP *const cpi,
+ const PC_TREE *const pc_tree,
+ const BLOCK_SIZE bsize,
+ const int config_id, const int mi_row,
+ const int mi_col) {
+ (void)mi_row;
+ (void)mi_col;
+ const char *path = cpi->oxcf.partition_info_path;
+ char filename[256];
+ snprintf(filename, sizeof(filename), "%s/verify_partition_tree_sb%d_c%d",
+ path, cpi->sb_counter, config_id);
+ FILE *pfile = fopen(filename, "w");
+ fprintf(pfile, "%d", bsize);
+
+ // Write partition type with BFS order.
+ const PC_TREE *tree_node_queue[NUM_NODES] = { NULL };
+ int q_idx = 0;
+ int depth = 0;
+ int last_idx = 1;
+ int num_nodes = 1;
+
+ // First traversal to get number of leaf nodes and depth.
+ tree_node_queue[q_idx] = pc_tree;
+ while (num_nodes > 0) {
+ const PC_TREE *node = tree_node_queue[q_idx];
+ if (node != NULL && node->partitioning == PARTITION_SPLIT) {
+ for (int i = 0; i < 4; ++i) {
+ tree_node_queue[last_idx] = node->split[i];
+ ++last_idx;
+ }
+ ++depth;
+ num_nodes += 4;
+ }
+ --num_nodes;
+ ++q_idx;
+ }
+ const int num_leafs = last_idx;
+ fprintf(pfile, ",%d,%d", num_leafs, /*num_configs=*/1);
+
+ // Write partitions for each node.
+ q_idx = 0;
+ depth = 0;
+ last_idx = 1;
+ num_nodes = 1;
+ tree_node_queue[q_idx] = pc_tree;
+ while (num_nodes > 0) {
+ const PC_TREE *node = tree_node_queue[q_idx];
+ if (node != NULL) { // suppress warning
+ fprintf(pfile, ",%d", node->partitioning);
+ if (node->partitioning == PARTITION_SPLIT) {
+ for (int i = 0; i < 4; ++i) {
+ tree_node_queue[last_idx] = node->split[i];
+ ++last_idx;
+ }
+ ++depth;
+ num_nodes += 4;
+ }
+ }
+ --num_nodes;
+ ++q_idx;
+ }
+ fprintf(pfile, "\n");
+
+ fclose(pfile);
+}
+
+static int read_partition_tree(AV1_COMP *const cpi, PC_TREE *const pc_tree,
+ const int config_id) {
+ const char *path = cpi->oxcf.partition_info_path;
+ char filename[256];
+ snprintf(filename, sizeof(filename), "%s/partition_tree_sb%d_c%d", path,
+ cpi->sb_counter, config_id);
+ FILE *pfile = fopen(filename, "r");
+ if (pfile == NULL) {
+ printf("Can't find the file: %s\n", filename);
+ exit(0);
+ }
+
+ int read_bsize;
+ int num_nodes;
+ int num_configs;
+ fscanf(pfile, "%d,%d,%d", &read_bsize, &num_nodes, &num_configs);
+ assert(read_bsize == cpi->common.seq_params->sb_size);
+ BLOCK_SIZE bsize = (BLOCK_SIZE)read_bsize;
+
+ PC_TREE *tree_node_queue[NUM_NODES] = { NULL };
+ int last_idx = 1;
+ int q_idx = 0;
+ tree_node_queue[q_idx] = pc_tree;
+ while (num_nodes > 0) {
+ int partitioning;
+ fscanf(pfile, ",%d", &partitioning);
+ assert(partitioning >= PARTITION_NONE &&
+ partitioning < EXT_PARTITION_TYPES);
+ PC_TREE *node = tree_node_queue[q_idx];
+ if (node != NULL) node->partitioning = partitioning;
+ if (partitioning == PARTITION_SPLIT) {
+ const BLOCK_SIZE subsize = get_partition_subsize(bsize, PARTITION_SPLIT);
+ for (int i = 0; i < 4; ++i) {
+ if (node != NULL) { // Suppress warning
+ node->split[i] = av1_alloc_pc_tree_node(subsize);
+ node->split[i]->index = i;
+ tree_node_queue[last_idx] = node->split[i];
+ ++last_idx;
+ }
+ }
+ bsize = subsize;
+ }
+ --num_nodes;
+ ++q_idx;
+ }
+ fclose(pfile);
+
+ return num_configs;
+}
+
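write_partition_tree() and read_partition_tree() above exchange one comma-separated record per superblock: the superblock BLOCK_SIZE value, the node count (written via the num_leafs variable), the number of configurations, then one PARTITION_TYPE per node in BFS order. As an illustration only, and assuming the libaom enum values BLOCK_128X128 == 15, PARTITION_SPLIT == 3 and PARTITION_NONE == 0, a 128x128 superblock split once into four PARTITION_NONE leaves would be stored as:

15,5,1,3,0,0,0,0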
+static RD_STATS rd_search_for_fixed_partition(
+ AV1_COMP *const cpi, ThreadData *td, TileDataEnc *tile_data,
+ TokenExtra **tp, SIMPLE_MOTION_DATA_TREE *sms_tree, int mi_row, int mi_col,
+ const BLOCK_SIZE bsize, PC_TREE *pc_tree) {
+ const PARTITION_TYPE partition = pc_tree->partitioning;
+ const AV1_COMMON *const cm = &cpi->common;
+ const int num_planes = av1_num_planes(cm);
+ MACROBLOCK *const x = &td->mb;
+ MACROBLOCKD *const xd = &x->e_mbd;
+ TileInfo *const tile_info = &tile_data->tile_info;
+ RD_STATS best_rdc;
+ av1_invalid_rd_stats(&best_rdc);
+ int sum_subblock_rate = 0;
+ int64_t sum_subblock_dist = 0;
+ PartitionSearchState part_search_state;
+ init_partition_search_state_params(x, cpi, &part_search_state, mi_row, mi_col,
+ bsize);
+ // Override partition costs at the edges of the frame in the same
+ // way as in read_partition (see decodeframe.c).
+ PartitionBlkParams blk_params = part_search_state.part_blk_params;
+ if (!(blk_params.has_rows && blk_params.has_cols))
+ set_partition_cost_for_edge_blk(cm, &part_search_state);
+
+ av1_set_offsets(cpi, tile_info, x, mi_row, mi_col, bsize);
+
+ // Save rdmult before it might be changed, so it can be restored later.
+ const int orig_rdmult = x->rdmult;
+ setup_block_rdmult(cpi, x, mi_row, mi_col, bsize, NO_AQ, NULL);
+ (void)orig_rdmult;
+
+ // Set the context.
+ RD_SEARCH_MACROBLOCK_CONTEXT x_ctx;
+ xd->above_txfm_context =
+ cm->above_contexts.txfm[tile_info->tile_row] + mi_col;
+ xd->left_txfm_context =
+ xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
+ av1_save_context(x, &x_ctx, mi_row, mi_col, bsize, num_planes);
+
+ assert(bsize < BLOCK_SIZES_ALL);
+ unsigned int pb_source_variance = UINT_MAX;
+ int64_t part_none_rd = INT64_MAX;
+ int64_t none_rd = INT64_MAX;
+ int inc_step[NUM_PART4_TYPES] = { 0 };
+ if (partition == PARTITION_HORZ_4) inc_step[HORZ4] = mi_size_high[bsize] / 4;
+ if (partition == PARTITION_VERT_4) inc_step[VERT4] = mi_size_wide[bsize] / 4;
+
+ switch (partition) {
+ case PARTITION_NONE:
+ none_partition_search(cpi, td, tile_data, x, pc_tree, sms_tree, &x_ctx,
+ &part_search_state, &best_rdc, &pb_source_variance,
+ &none_rd, &part_none_rd);
+ break;
+ case PARTITION_HORZ:
+ rectangular_partition_search(cpi, td, tile_data, tp, x, pc_tree, &x_ctx,
+ &part_search_state, &best_rdc, NULL, HORZ,
+ HORZ);
+ break;
+ case PARTITION_VERT:
+ rectangular_partition_search(cpi, td, tile_data, tp, x, pc_tree, &x_ctx,
+ &part_search_state, &best_rdc, NULL, VERT,
+ VERT);
+ break;
+ case PARTITION_HORZ_A:
+ ab_partitions_search(cpi, td, tile_data, tp, x, &x_ctx, pc_tree,
+ &part_search_state, &best_rdc, NULL,
+ pb_source_variance, 1, HORZ_A, HORZ_A);
+ break;
+ case PARTITION_HORZ_B:
+ ab_partitions_search(cpi, td, tile_data, tp, x, &x_ctx, pc_tree,
+ &part_search_state, &best_rdc, NULL,
+ pb_source_variance, 1, HORZ_B, HORZ_B);
+ break;
+ case PARTITION_VERT_A:
+ ab_partitions_search(cpi, td, tile_data, tp, x, &x_ctx, pc_tree,
+ &part_search_state, &best_rdc, NULL,
+ pb_source_variance, 1, VERT_A, VERT_A);
+ break;
+ case PARTITION_VERT_B:
+ ab_partitions_search(cpi, td, tile_data, tp, x, &x_ctx, pc_tree,
+ &part_search_state, &best_rdc, NULL,
+ pb_source_variance, 1, VERT_B, VERT_B);
+ break;
+ case PARTITION_HORZ_4:
+ rd_pick_4partition(cpi, td, tile_data, tp, x, &x_ctx, pc_tree,
+ pc_tree->horizontal4, &part_search_state, &best_rdc,
+ inc_step, PARTITION_HORZ_4);
+ break;
+ case PARTITION_VERT_4:
+ rd_pick_4partition(cpi, td, tile_data, tp, x, &x_ctx, pc_tree,
+ pc_tree->vertical4, &part_search_state, &best_rdc,
+ inc_step, PARTITION_VERT_4);
+ break;
+ case PARTITION_SPLIT:
+ for (int idx = 0; idx < SUB_PARTITIONS_SPLIT; ++idx) {
+ const BLOCK_SIZE subsize =
+ get_partition_subsize(bsize, PARTITION_SPLIT);
+ assert(subsize < BLOCK_SIZES_ALL);
+ const int next_mi_row =
+ idx < 2 ? mi_row : mi_row + mi_size_high[subsize];
+ const int next_mi_col =
+ idx % 2 == 0 ? mi_col : mi_col + mi_size_wide[subsize];
+ if (next_mi_row >= cm->mi_params.mi_rows ||
+ next_mi_col >= cm->mi_params.mi_cols) {
+ continue;
+ }
+ const RD_STATS subblock_rdc = rd_search_for_fixed_partition(
+ cpi, td, tile_data, tp, sms_tree->split[idx], next_mi_row,
+ next_mi_col, subsize, pc_tree->split[idx]);
+ sum_subblock_rate += subblock_rdc.rate;
+ sum_subblock_dist += subblock_rdc.dist;
+ }
+ best_rdc.rate = sum_subblock_rate;
+ best_rdc.rate += part_search_state.partition_cost[PARTITION_SPLIT];
+ best_rdc.dist = sum_subblock_dist;
+ best_rdc.rdcost = RDCOST(x->rdmult, best_rdc.rate, best_rdc.dist);
+ break;
+ default: assert(0 && "invalid partition type."); exit(0);
+ }
+ // Note: it is necessary to restore context information.
+ av1_restore_context(x, &x_ctx, mi_row, mi_col, bsize, num_planes);
+
+ if (bsize != cm->seq_params->sb_size) {
+ encode_sb(cpi, td, tile_data, tp, mi_row, mi_col, DRY_RUN_NORMAL, bsize,
+ pc_tree, NULL);
+ }
+ x->rdmult = orig_rdmult;
+
+ return best_rdc;
+}
+
+bool av1_rd_partition_search(AV1_COMP *const cpi, ThreadData *td,
+ TileDataEnc *tile_data, TokenExtra **tp,
+ SIMPLE_MOTION_DATA_TREE *sms_root, int mi_row,
+ int mi_col, const BLOCK_SIZE bsize,
+ RD_STATS *best_rd_cost) {
+ AV1_COMMON *const cm = &cpi->common;
+ MACROBLOCK *const x = &td->mb;
+ int best_idx = 0;
+ int64_t min_rdcost = INT64_MAX;
+ int num_configs;
+ RD_STATS *rdcost = NULL;
+ int i = 0;
+ do {
+ PC_TREE *const pc_tree = av1_alloc_pc_tree_node(bsize);
+ num_configs = read_partition_tree(cpi, pc_tree, i);
+ if (i == 0) {
+ rdcost = aom_calloc(num_configs, sizeof(*rdcost));
+ }
+ if (num_configs <= 0) {
+ av1_free_pc_tree_recursive(pc_tree, av1_num_planes(cm), 0, 0);
+ if (rdcost != NULL) aom_free(rdcost);
+ exit(0);
+ return false;
+ }
+ verify_write_partition_tree(cpi, pc_tree, bsize, i, mi_row, mi_col);
+ // Encode the block with the given partition tree. Get rdcost and encoding
+ // time.
+ rdcost[i] = rd_search_for_fixed_partition(cpi, td, tile_data, tp, sms_root,
+ mi_row, mi_col, bsize, pc_tree);
+
+ if (rdcost[i].rdcost < min_rdcost) {
+ min_rdcost = rdcost[i].rdcost;
+ best_idx = i;
+ *best_rd_cost = rdcost[i];
+ }
+ av1_free_pc_tree_recursive(pc_tree, av1_num_planes(cm), 0, 0);
+ ++i;
+ } while (i < num_configs);
+
+ // Encode with the partition configuration with the smallest rdcost.
+ PC_TREE *const pc_tree = av1_alloc_pc_tree_node(bsize);
+ read_partition_tree(cpi, pc_tree, best_idx);
+ rd_search_for_fixed_partition(cpi, td, tile_data, tp, sms_root, mi_row,
+ mi_col, bsize, pc_tree);
+ set_cb_offsets(x->cb_offset, 0, 0);
+ encode_sb(cpi, td, tile_data, tp, mi_row, mi_col, OUTPUT_ENABLED, bsize,
+ pc_tree, NULL);
+
+ av1_free_pc_tree_recursive(pc_tree, av1_num_planes(cm), 0, 0);
+ aom_free(rdcost);
+ ++cpi->sb_counter;
+
+ return true;
+}
+
/*!\brief AV1 block partition search (full search).
*
* \ingroup partition_search
@@ -3617,7 +4061,7 @@ bool av1_rd_pick_partition(AV1_COMP *const cpi, ThreadData *td,
av1_invalid_rd_stats(rd_cost);
return part_search_state.found_best_partition;
}
- if (bsize == cm->seq_params.sb_size) x->must_find_valid_partition = 0;
+ if (bsize == cm->seq_params->sb_size) x->must_find_valid_partition = 0;
// Override skipping rectangular partition operations for edge blocks.
if (none_rd) *none_rd = 0;
@@ -3742,7 +4186,7 @@ BEGIN_PARTITION_SEARCH:
// when NONE and SPLIT partition rd_costs are INT64_MAX.
if (cpi->sf.part_sf.early_term_after_none_split &&
part_none_rd == INT64_MAX && part_split_rd == INT64_MAX &&
- !x->must_find_valid_partition && (bsize != cm->seq_params.sb_size)) {
+ !x->must_find_valid_partition && (bsize != cm->seq_params->sb_size)) {
part_search_state.terminate_partition_search = 1;
}
@@ -3755,7 +4199,7 @@ BEGIN_PARTITION_SEARCH:
// Rectangular partitions search stage.
rectangular_partition_search(cpi, td, tile_data, tp, x, pc_tree, &x_ctx,
&part_search_state, &best_rdc,
- rect_part_win_info);
+ rect_part_win_info, HORZ, VERT);
#if CONFIG_COLLECT_COMPONENT_TIMING
end_timing(cpi, rectangular_partition_search_time);
#endif
@@ -3784,7 +4228,8 @@ BEGIN_PARTITION_SEARCH:
// AB partitions search stage.
ab_partitions_search(cpi, td, tile_data, tp, x, &x_ctx, pc_tree,
&part_search_state, &best_rdc, rect_part_win_info,
- pb_source_variance, ext_partition_allowed);
+ pb_source_variance, ext_partition_allowed, HORZ_A,
+ VERT_B);
#if CONFIG_COLLECT_COMPONENT_TIMING
end_timing(cpi, ab_partitions_search_time);
#endif
@@ -3832,7 +4277,7 @@ BEGIN_PARTITION_SEARCH:
end_timing(cpi, rd_pick_4partition_time);
#endif
- if (bsize == cm->seq_params.sb_size &&
+ if (bsize == cm->seq_params->sb_size &&
!part_search_state.found_best_partition) {
// Did not find a valid partition, go back and search again, with less
// constraint on which partition types to search.
@@ -3859,7 +4304,7 @@ BEGIN_PARTITION_SEARCH:
// prediction block.
print_partition_timing_stats_with_rdcost(
part_timing_stats, mi_row, mi_col, bsize,
- cpi->gf_group.update_type[cpi->gf_group.index],
+ cpi->ppi->gf_group.update_type[cpi->gf_frame_index],
cm->current_frame.frame_number, &best_rdc, "part_timing.csv");
/*
print_partition_timing_stats(part_timing_stats, cm->show_frame,
@@ -3881,11 +4326,14 @@ BEGIN_PARTITION_SEARCH:
// If a valid partition is found and reconstruction is required for future
// sub-blocks in the same group.
if (part_search_state.found_best_partition && pc_tree->index != 3) {
- if (bsize == cm->seq_params.sb_size) {
+ if (bsize == cm->seq_params->sb_size) {
// Encode the superblock.
const int emit_output = multi_pass_mode != SB_DRY_PASS;
const RUN_TYPE run_type = emit_output ? OUTPUT_ENABLED : DRY_RUN_NORMAL;
+ // Write partition tree to file. Not used by default.
+ if (0) write_partition_tree(cpi, pc_tree, bsize, mi_row, mi_col);
+
set_cb_offsets(x->cb_offset, 0, 0);
encode_sb(cpi, td, tile_data, tp, mi_row, mi_col, run_type, bsize,
pc_tree, NULL);
@@ -3907,7 +4355,7 @@ BEGIN_PARTITION_SEARCH:
if (pc_tree_dealloc == 0)
av1_free_pc_tree_recursive(pc_tree, num_planes, 1, 1);
- if (bsize == cm->seq_params.sb_size) {
+ if (bsize == cm->seq_params->sb_size) {
assert(best_rdc.rate < INT_MAX);
assert(best_rdc.dist < INT64_MAX);
} else {
@@ -3958,7 +4406,7 @@ static int ml_predict_var_paritioning(AV1_COMP *cpi, MACROBLOCK *x,
const float thresh = cpi->oxcf.speed <= 5 ? 1.25f : 0.0f;
float features[FEATURES] = { 0.0f };
const int dc_q = av1_dc_quant_QTX(cm->quant_params.base_qindex, 0,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
int feature_idx = 0;
float score[LABELS];
@@ -4038,7 +4486,7 @@ static int store_partition_data(AV1_COMP *cpi, MACROBLOCK *x, BLOCK_SIZE bsize,
{
const int dc_q = av1_dc_quant_QTX(cm->quant_params.base_qindex, 0,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
int feature_idx = 0;
features[feature_idx++] = logf((float)(dc_q * dc_q) / 256.0f + 1.0f);
@@ -4186,7 +4634,7 @@ void av1_nonrd_pick_partition(AV1_COMP *cpi, ThreadData *td,
int partition_none_allowed = !force_horz_split && !force_vert_split;
assert(mi_size_wide[bsize] == mi_size_high[bsize]); // Square partition only
- assert(cm->seq_params.sb_size == BLOCK_64X64); // Small SB so far
+ assert(cm->seq_params->sb_size == BLOCK_64X64); // Small SB so far
(void)*tp_orig;
@@ -4293,7 +4741,7 @@ void av1_nonrd_pick_partition(AV1_COMP *cpi, ThreadData *td,
fill_mode_info_sb(cpi, x, mi_row, mi_col, bsize, pc_tree);
if (do_recon) {
- if (bsize == cm->seq_params.sb_size) {
+ if (bsize == cm->seq_params->sb_size) {
// NOTE: To get estimate for rate due to the tokens, use:
// int rate_coeffs = 0;
// encode_sb(cpi, td, tile_data, tp, mi_row, mi_col, DRY_RUN_COSTCOEFFS,
diff --git a/third_party/libaom/source/libaom/av1/encoder/partition_search.h b/third_party/libaom/source/libaom/av1/encoder/partition_search.h
index 136548e3e6..8a6717690c 100644
--- a/third_party/libaom/source/libaom/av1/encoder/partition_search.h
+++ b/third_party/libaom/source/libaom/av1/encoder/partition_search.h
@@ -39,6 +39,13 @@ void av1_nonrd_pick_partition(AV1_COMP *cpi, ThreadData *td,
RD_STATS *rd_cost, int do_recon, int64_t best_rd,
PC_TREE *pc_tree);
#endif
+void av1_reset_part_sf(PARTITION_SPEED_FEATURES *part_sf);
+
+bool av1_rd_partition_search(AV1_COMP *const cpi, ThreadData *td,
+ TileDataEnc *tile_data, TokenExtra **tp,
+ SIMPLE_MOTION_DATA_TREE *sms_root, int mi_row,
+ int mi_col, BLOCK_SIZE bsize,
+ RD_STATS *best_rd_cost);
bool av1_rd_pick_partition(AV1_COMP *const cpi, ThreadData *td,
TileDataEnc *tile_data, TokenExtra **tp, int mi_row,
int mi_col, BLOCK_SIZE bsize, RD_STATS *rd_cost,
@@ -57,12 +64,14 @@ static AOM_INLINE void set_cb_offsets(uint16_t *cb_offset,
static AOM_INLINE void update_cb_offsets(MACROBLOCK *x, const BLOCK_SIZE bsize,
const int subsampling_x,
const int subsampling_y) {
- const BLOCK_SIZE plane_bsize =
- get_plane_block_size(bsize, subsampling_x, subsampling_y);
x->cb_offset[PLANE_TYPE_Y] += block_size_wide[bsize] * block_size_high[bsize];
- if (x->e_mbd.is_chroma_ref)
+ if (x->e_mbd.is_chroma_ref) {
+ const BLOCK_SIZE plane_bsize =
+ get_plane_block_size(bsize, subsampling_x, subsampling_y);
+ assert(plane_bsize != BLOCK_INVALID);
x->cb_offset[PLANE_TYPE_UV] +=
block_size_wide[plane_bsize] * block_size_high[plane_bsize];
+ }
}
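+// Illustrative example of the bookkeeping above (assuming 4:2:0 chroma
+// subsampling): for bsize == BLOCK_16X16 the luma offset advances by
+// 16 * 16 = 256 samples, and, when the block is a chroma reference, the
+// chroma offset advances by the 8x8 plane block size, i.e. 8 * 8 = 64.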
#endif // AOM_AV1_ENCODER_PARTITION_SEARCH_H_
diff --git a/third_party/libaom/source/libaom/av1/encoder/partition_strategy.c b/third_party/libaom/source/libaom/av1/encoder/partition_strategy.c
index f846d595bc..bf678a452f 100644
--- a/third_party/libaom/source/libaom/av1/encoder/partition_strategy.c
+++ b/third_party/libaom/source/libaom/av1/encoder/partition_strategy.c
@@ -35,6 +35,48 @@ static AOM_INLINE void simple_motion_search_prune_part_features(
int mi_row, int mi_col, BLOCK_SIZE bsize, float *features,
int features_to_get);
+static bool ext_ml_model_decision_before_none(
+ AV1_COMP *cpi, const float features_from_motion[FEATURE_SIZE_SMS_SPLIT],
+ int *partition_none_allowed, int *partition_horz_allowed,
+ int *partition_vert_allowed, int *do_rectangular_split,
+ int *do_square_split);
+
+static bool ext_ml_model_decision_before_none_part2(
+ AV1_COMP *cpi,
+ const float features_from_motion[FEATURE_SIZE_SMS_PRUNE_PART],
+ int *prune_horz, int *prune_vert);
+
+static bool ext_ml_model_decision_after_none(
+ ExtPartController *const ext_part_controller, const int is_intra_frame,
+ const float *const features_after_none, int *do_square_split,
+ int *do_rectangular_split);
+
+static bool ext_ml_model_decision_after_none_part2(
+ AV1_COMP *const cpi, const float *const features_terminate,
+ int *terminate_partition_search);
+
+static bool ext_ml_model_decision_after_split(
+ AV1_COMP *const cpi, const float *const features_terminate,
+ int *terminate_partition_search);
+
+static bool ext_ml_model_decision_after_split_part2(
+ ExtPartController *const ext_part_controller, const int is_intra_frame,
+ const float *const features_prune, int *prune_rect_part_horz,
+ int *prune_rect_part_vert);
+
+static bool ext_ml_model_decision_after_rect(
+ ExtPartController *const ext_part_controller, const int is_intra_frame,
+ const float *const features_after_rect, int *horza_partition_allowed,
+ int *horzb_partition_allowed, int *verta_partition_allowed,
+ int *vertb_partition_allowed);
+
+static bool ext_ml_model_decision_after_part_ab(
+ AV1_COMP *const cpi, MACROBLOCK *const x, BLOCK_SIZE bsize, int part_ctx,
+ int64_t best_rd, int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
+ int64_t split_rd[SUB_PARTITIONS_SPLIT], int *const partition_horz4_allowed,
+ int *const partition_vert4_allowed, unsigned int pb_source_variance,
+ int mi_row, int mi_col);
+
static INLINE int convert_bsize_to_idx(BLOCK_SIZE bsize) {
switch (bsize) {
case BLOCK_128X128: return 0;
@@ -45,9 +87,45 @@ static INLINE int convert_bsize_to_idx(BLOCK_SIZE bsize) {
default: assert(0 && "Invalid bsize"); return -1;
}
}
-#endif
-#if !CONFIG_REALTIME_ONLY
+static char *get_feature_file_name(int id) {
+ static char *feature_file_names[] = {
+ "feature_before_partition_none",
+ "feature_before_partition_none_prune_rect",
+ "feature_after_partition_none_prune",
+ "feature_after_partition_none_terminate",
+ "feature_after_partition_split_terminate",
+ "feature_after_partition_split_prune_rect",
+ "feature_after_partition_rect",
+ "feature_after_partition_ab",
+ };
+
+ return feature_file_names[id];
+}
+
+static void write_features_to_file(const char *const path,
+ const bool is_test_mode,
+ const float *features,
+ const int feature_size, const int id,
+ const int bsize, const int mi_row,
+ const int mi_col) {
+ if (!WRITE_FEATURE_TO_FILE && !is_test_mode) return;
+
+ char filename[256];
+ snprintf(filename, sizeof(filename), "%s/%s", path,
+ get_feature_file_name(id));
+ FILE *pfile = fopen(filename, "a");
+ if (!is_test_mode) {
+ fprintf(pfile, "%d,%d,%d,%d,%d\n", id, bsize, mi_row, mi_col, feature_size);
+ }
+ for (int i = 0; i < feature_size; ++i) {
+ fprintf(pfile, "%.6f", features[i]);
+ if (i < feature_size - 1) fprintf(pfile, ",");
+ }
+ fprintf(pfile, "\n");
+ fclose(pfile);
+}
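+// Illustrative output of the helper above (values made up): a call with
+// id = 0, bsize = 12 (BLOCK_64X64), mi_row = 0, mi_col = 0 and
+// feature_size = 3 appends to
+// "<partition_info_path>/feature_before_partition_none" the lines
+//   0,12,0,0,3
+//   0.812345,0.004567,1.000000
+// where the header row is skipped in external-model test mode. The helper
+// assumes the output directory exists; the fopen() result is not checked.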
+
// TODO(chiyotsai@google.com): This is very much a work in progress. We still
// need to do the following:
// -- add support for hdres
@@ -61,7 +139,7 @@ void av1_intra_mode_cnn_partition(const AV1_COMMON *const cm, MACROBLOCK *x,
int *partition_vert_allowed,
int *do_rectangular_split,
int *do_square_split) {
- assert(cm->seq_params.sb_size >= BLOCK_64X64 &&
+ assert(cm->seq_params->sb_size >= BLOCK_64X64 &&
"Invalid sb_size for intra_cnn!");
const int bsize_idx = convert_bsize_to_idx(bsize);
@@ -284,6 +362,20 @@ void av1_simple_motion_search_based_split(
simple_motion_search_prune_part_features(cpi, x, sms_tree, mi_row, mi_col,
bsize, features,
FEATURE_SMS_SPLIT_MODEL_FLAG);
+
+ // Write features to file
+ write_features_to_file(cpi->oxcf.partition_info_path,
+ cpi->ext_part_controller.test_mode, features,
+ FEATURE_SIZE_SMS_SPLIT, 0, bsize, mi_row, mi_col);
+
+ // Note: the features are intentionally left unnormalized here, to keep them
+ // consistent across all features collected and passed to the external model.
+ if (ext_ml_model_decision_before_none(
+ cpi, features, partition_none_allowed, partition_horz_allowed,
+ partition_vert_allowed, do_rectangular_split, do_square_split)) {
+ return;
+ }
+
for (int idx = 0; idx < FEATURE_SIZE_SMS_SPLIT; idx++) {
features[idx] = (features[idx] - ml_mean[idx]) / ml_std[idx];
}
@@ -308,7 +400,7 @@ void av1_simple_motion_search_based_split(
// If the score is very low, prune rectangular split since it is unlikely to
// occur.
if (cpi->sf.part_sf.simple_motion_search_rect_split) {
- const float scale = res_idx >= 2 ? 3 : 2;
+ const float scale = res_idx >= 2 ? 3.0f : 2.0f;
const float rect_split_thresh =
scale * av1_simple_motion_search_no_split_thresh
[cpi->sf.part_sf.simple_motion_search_rect_split][res_idx]
@@ -356,7 +448,7 @@ static int simple_motion_search_get_best_ref(
int_mv best_mv =
av1_simple_motion_search(cpi, x, mi_row, mi_col, bsize, ref,
start_mvs[ref], num_planes, use_subpixel);
- curr_var = cpi->fn_ptr[bsize].vf(
+ curr_var = cpi->ppi->fn_ptr[bsize].vf(
x->plane[0].src.buf, x->plane[0].src.stride, xd->plane[0].dst.buf,
xd->plane[0].dst.stride, &curr_sse);
if (curr_sse < *best_sse) {
@@ -543,6 +635,24 @@ void av1_simple_motion_search_prune_rect(
simple_motion_search_prune_part_features(cpi, x, sms_tree, mi_row, mi_col,
bsize, features,
FEATURE_SMS_PRUNE_PART_FLAG);
+
+ // Note: the features are intentionally left unnormalized here, to keep them
+ // consistent across all features collected and passed to the external model.
+ if (cpi->sf.part_sf.simple_motion_search_prune_rect &&
+ !frame_is_intra_only(cm) &&
+ (partition_horz_allowed || partition_vert_allowed) &&
+ bsize >= BLOCK_8X8 && !av1_superres_scaled(cm)) {
+ // Write features to file
+ write_features_to_file(
+ cpi->oxcf.partition_info_path, cpi->ext_part_controller.test_mode,
+ features, FEATURE_SIZE_SMS_PRUNE_PART, 1, bsize, mi_row, mi_col);
+
+ if (ext_ml_model_decision_before_none_part2(cpi, features, prune_horz,
+ prune_vert)) {
+ return;
+ }
+ }
+
for (int f_idx = 0; f_idx < FEATURE_SIZE_SMS_PRUNE_PART; f_idx++) {
features[f_idx] = (features[f_idx] - ml_mean[f_idx]) / ml_std[f_idx];
}
@@ -617,6 +727,15 @@ void av1_simple_motion_search_early_term_none(
assert(0 && "Unexpected block size in simple_motion_term_none");
}
+ // Write features to file
+ write_features_to_file(cpi->oxcf.partition_info_path,
+ cpi->ext_part_controller.test_mode, features,
+ FEATURE_SIZE_SMS_TERM_NONE, 3, bsize, mi_row, mi_col);
+
+ if (ext_ml_model_decision_after_none_part2(cpi, features, early_terminate)) {
+ return;
+ }
+
if (ml_model) {
float score = 0.0f;
for (f_idx = 0; f_idx < FEATURE_SIZE_SMS_TERM_NONE; f_idx++) {
@@ -636,8 +755,9 @@ void av1_get_max_min_partition_features(AV1_COMP *const cpi, MACROBLOCK *x,
float *features) {
AV1_COMMON *const cm = &cpi->common;
MACROBLOCKD *xd = &x->e_mbd;
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
+ // Currently this only allows 128X128 SB size. May extend it to 64X64 SB size.
assert(sb_size == BLOCK_128X128);
int f_idx = 0;
@@ -701,14 +821,18 @@ void av1_get_max_min_partition_features(AV1_COMP *const cpi, MACROBLOCK *x,
if (log_sse > max_log_sse) max_log_sse = log_sse;
}
aom_clear_system_state();
- const float avg_mv_row = sum_mv_row / 64.0f;
- const float var_mv_row = sum_mv_row_sq / 64.0f - avg_mv_row * avg_mv_row;
+ const int blks = mb_rows * mb_cols;
+ const float avg_mv_row = sum_mv_row / (float)blks;
+ const float var_mv_row =
+ sum_mv_row_sq / (float)blks - avg_mv_row * avg_mv_row;
- const float avg_mv_col = sum_mv_col / 64.0f;
- const float var_mv_col = sum_mv_col_sq / 64.0f - avg_mv_col * avg_mv_col;
+ const float avg_mv_col = sum_mv_col / (float)blks;
+ const float var_mv_col =
+ sum_mv_col_sq / (float)blks - avg_mv_col * avg_mv_col;
- const float avg_log_sse = sum_log_sse / 64.0f;
- const float var_log_sse = sum_log_sse_sq / 64.0f - avg_log_sse * avg_log_sse;
+ const float avg_log_sse = sum_log_sse / (float)blks;
+ const float var_log_sse =
+ sum_log_sse_sq / (float)blks - avg_log_sse * avg_log_sse;
features[f_idx++] = avg_log_sse;
features[f_idx++] = avg_mv_col;
@@ -727,11 +851,20 @@ void av1_get_max_min_partition_features(AV1_COMP *const cpi, MACROBLOCK *x,
assert(f_idx == FEATURE_SIZE_MAX_MIN_PART_PRED);
}
+// Convert result index to block size.
+// result idx block size
+// 0 BLOCK_16X16
+// 1 BLOCK_32X32
+// 2 BLOCK_64X64
+// 3 BLOCK_128X128
+static BLOCK_SIZE get_block_size(int idx) {
+ return (BLOCK_SIZE)((idx + 2) * 3);
+}
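+// Illustrative check of the mapping above, assuming the standard libaom
+// BLOCK_SIZE ordering (BLOCK_16X16 == 6, BLOCK_32X32 == 9, BLOCK_64X64 == 12,
+// BLOCK_128X128 == 15):
+//   idx 0 -> (0 + 2) * 3 = 6  == BLOCK_16X16
+//   idx 3 -> (3 + 2) * 3 = 15 == BLOCK_128X128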
+
BLOCK_SIZE av1_predict_max_partition(const AV1_COMP *const cpi,
const MACROBLOCK *const x,
const float *features) {
- float scores[MAX_NUM_CLASSES_MAX_MIN_PART_PRED] = { 0.0f },
- probs[MAX_NUM_CLASSES_MAX_MIN_PART_PRED] = { 0.0f };
+ float scores[MAX_NUM_CLASSES_MAX_MIN_PART_PRED] = { 0.0f };
const NN_CONFIG *nn_config = &av1_max_part_pred_nn_config;
assert(cpi->sf.part_sf.auto_max_partition_based_on_simple_motion !=
@@ -739,21 +872,26 @@ BLOCK_SIZE av1_predict_max_partition(const AV1_COMP *const cpi,
aom_clear_system_state();
av1_nn_predict(features, nn_config, 1, scores);
- av1_nn_softmax(scores, probs, MAX_NUM_CLASSES_MAX_MIN_PART_PRED);
int result = MAX_NUM_CLASSES_MAX_MIN_PART_PRED - 1;
if (cpi->sf.part_sf.auto_max_partition_based_on_simple_motion ==
DIRECT_PRED) {
result = 0;
- float max_prob = probs[0];
+ float max_score = scores[0];
for (int i = 1; i < MAX_NUM_CLASSES_MAX_MIN_PART_PRED; ++i) {
- if (probs[i] > max_prob) {
- max_prob = probs[i];
+ if (scores[i] > max_score) {
+ max_score = scores[i];
result = i;
}
}
- } else if (cpi->sf.part_sf.auto_max_partition_based_on_simple_motion ==
- RELAXED_PRED) {
+ return get_block_size(result);
+ }
+
+ float probs[MAX_NUM_CLASSES_MAX_MIN_PART_PRED] = { 0.0f };
+ av1_nn_softmax(scores, probs, MAX_NUM_CLASSES_MAX_MIN_PART_PRED);
+
+ if (cpi->sf.part_sf.auto_max_partition_based_on_simple_motion ==
+ RELAXED_PRED) {
for (result = MAX_NUM_CLASSES_MAX_MIN_PART_PRED - 1; result >= 0;
--result) {
if (result < MAX_NUM_CLASSES_MAX_MIN_PART_PRED - 1) {
@@ -763,7 +901,7 @@ BLOCK_SIZE av1_predict_max_partition(const AV1_COMP *const cpi,
}
} else if (cpi->sf.part_sf.auto_max_partition_based_on_simple_motion ==
ADAPT_PRED) {
- const BLOCK_SIZE sb_size = cpi->common.seq_params.sb_size;
+ const BLOCK_SIZE sb_size = cpi->common.seq_params->sb_size;
const MACROBLOCKD *const xd = &x->e_mbd;
// TODO(debargha): x->source_variance is unavailable at this point,
// so compute. The redundant recomputation later can be removed.
@@ -784,7 +922,7 @@ BLOCK_SIZE av1_predict_max_partition(const AV1_COMP *const cpi,
}
}
- return (BLOCK_SIZE)((result + 2) * 3);
+ return get_block_size(result);
}
// Get the minimum partition block width and height(in log scale) under a
@@ -911,6 +1049,16 @@ void av1_ml_early_term_after_split(AV1_COMP *const cpi, MACROBLOCK *const x,
assert(f_idx == FEATURES);
+ // Write features to file
+ write_features_to_file(cpi->oxcf.partition_info_path,
+ cpi->ext_part_controller.test_mode, features, FEATURES,
+ 4, bsize, mi_row, mi_col);
+
+ if (ext_ml_model_decision_after_split(cpi, features,
+ terminate_partition_search)) {
+ return;
+ }
+
float score = 0.0f;
av1_nn_predict(features, nn_config, 1, &score);
// Score is indicator of confidence that we should NOT terminate.
@@ -918,10 +1066,11 @@ void av1_ml_early_term_after_split(AV1_COMP *const cpi, MACROBLOCK *const x,
}
#undef FEATURES
-void av1_ml_prune_rect_partition(const AV1_COMP *const cpi,
- const MACROBLOCK *const x, BLOCK_SIZE bsize,
- int64_t best_rd, int64_t none_rd,
- int64_t *split_rd, int *const dst_prune_horz,
+void av1_ml_prune_rect_partition(AV1_COMP *const cpi, const MACROBLOCK *const x,
+ BLOCK_SIZE bsize, const int mi_row,
+ const int mi_col, int64_t best_rd,
+ int64_t none_rd, int64_t *split_rd,
+ int *const dst_prune_horz,
int *const dst_prune_vert) {
if (bsize < BLOCK_8X8 || best_rd >= 1000000000) return;
best_rd = AOMMAX(best_rd, 1);
@@ -998,6 +1147,17 @@ void av1_ml_prune_rect_partition(const AV1_COMP *const cpi,
for (int i = 0; i < SUB_PARTITIONS_SPLIT; i++)
features[5 + i] = (float)split_variance[i] / (float)whole_block_variance;
+ // Write features to file
+ write_features_to_file(cpi->oxcf.partition_info_path,
+ cpi->ext_part_controller.test_mode, features,
+ /*feature_size=*/9, 5, bsize, mi_row, mi_col);
+
+ if (ext_ml_model_decision_after_split_part2(
+ &cpi->ext_part_controller, frame_is_intra_only(&cpi->common),
+ features, dst_prune_horz, dst_prune_vert)) {
+ return;
+ }
+
// 2. Do the prediction and prune 0-2 partitions based on their probabilities
float raw_scores[3] = { 0.0f };
av1_nn_predict(features, nn_config, 1, raw_scores);
@@ -1014,7 +1174,8 @@ void av1_ml_prune_rect_partition(const AV1_COMP *const cpi,
// Use a ML model to predict if horz_a, horz_b, vert_a, and vert_b should be
// considered.
void av1_ml_prune_ab_partition(
- BLOCK_SIZE bsize, int part_ctx, int var_ctx, int64_t best_rd,
+ AV1_COMP *const cpi, BLOCK_SIZE bsize, const int mi_row, const int mi_col,
+ int part_ctx, int var_ctx, int64_t best_rd,
int64_t horz_rd[SUB_PARTITIONS_RECT], int64_t vert_rd[SUB_PARTITIONS_RECT],
int64_t split_rd[SUB_PARTITIONS_SPLIT], int *const horza_partition_allowed,
int *const horzb_partition_allowed, int *const verta_partition_allowed,
@@ -1065,6 +1226,20 @@ void av1_ml_prune_ab_partition(
}
assert(feature_index == 10);
+ // Write features to file
+ if (!frame_is_intra_only(&cpi->common)) {
+ write_features_to_file(cpi->oxcf.partition_info_path,
+ cpi->ext_part_controller.test_mode, features,
+ /*feature_size=*/10, 6, bsize, mi_row, mi_col);
+ }
+
+ if (ext_ml_model_decision_after_rect(
+ &cpi->ext_part_controller, frame_is_intra_only(&cpi->common),
+ features, horza_partition_allowed, horzb_partition_allowed,
+ verta_partition_allowed, vertb_partition_allowed)) {
+ return;
+ }
+
// Calculate scores using the NN model.
float score[16] = { 0.0f };
av1_nn_predict(features, nn_config, 1, score);
@@ -1101,12 +1276,17 @@ void av1_ml_prune_ab_partition(
#define LABELS 4
// Use a ML model to predict if horz4 and vert4 should be considered.
void av1_ml_prune_4_partition(
- const AV1_COMP *const cpi, MACROBLOCK *const x, BLOCK_SIZE bsize,
- int part_ctx, int64_t best_rd,
- int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
+ AV1_COMP *const cpi, MACROBLOCK *const x, BLOCK_SIZE bsize, int part_ctx,
+ int64_t best_rd, int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
int64_t split_rd[SUB_PARTITIONS_SPLIT], int *const partition_horz4_allowed,
int *const partition_vert4_allowed, unsigned int pb_source_variance,
int mi_row, int mi_col) {
+ if (ext_ml_model_decision_after_part_ab(
+ cpi, x, bsize, part_ctx, best_rd, rect_part_rd, split_rd,
+ partition_horz4_allowed, partition_vert4_allowed, pb_source_variance,
+ mi_row, mi_col))
+ return;
+
if (best_rd >= 1000000000) return;
int64_t *horz_rd = rect_part_rd[HORZ];
int64_t *vert_rd = rect_part_rd[VERT];
@@ -1206,6 +1386,13 @@ void av1_ml_prune_4_partition(
}
assert(feature_index == FEATURES);
+ // Write features to file
+ if (!frame_is_intra_only(&cpi->common)) {
+ write_features_to_file(cpi->oxcf.partition_info_path,
+ cpi->ext_part_controller.test_mode, features,
+ FEATURES, 7, bsize, mi_row, mi_col);
+ }
+
// Calculate scores using the NN model.
float score[LABELS] = { 0.0f };
av1_nn_predict(features, nn_config, 1, score);
@@ -1238,10 +1425,12 @@ void av1_ml_prune_4_partition(
#undef LABELS
#define FEATURES 4
-int av1_ml_predict_breakout(const AV1_COMP *const cpi, BLOCK_SIZE bsize,
- const MACROBLOCK *const x,
- const RD_STATS *const rd_stats,
- unsigned int pb_source_variance, int bit_depth) {
+void av1_ml_predict_breakout(AV1_COMP *const cpi, BLOCK_SIZE bsize,
+ const MACROBLOCK *const x,
+ const RD_STATS *const rd_stats,
+ const PartitionBlkParams blk_params,
+ unsigned int pb_source_variance, int bit_depth,
+ int *do_square_split, int *do_rectangular_split) {
const NN_CONFIG *nn_config = NULL;
int thresh = 0;
switch (bsize) {
@@ -1267,7 +1456,7 @@ int av1_ml_predict_breakout(const AV1_COMP *const cpi, BLOCK_SIZE bsize,
break;
default: assert(0 && "Unexpected bsize.");
}
- if (!nn_config || thresh < 0) return 0;
+ if (!nn_config || thresh < 0) return;
const float ml_predict_breakout_thresh_scale[3] = { 1.15f, 1.05f, 1.0f };
thresh = (int)((float)thresh *
@@ -1295,13 +1484,28 @@ int av1_ml_predict_breakout(const AV1_COMP *const cpi, BLOCK_SIZE bsize,
features[feature_index++] = (float)(dc_q * dc_q) / 256.0f;
assert(feature_index == FEATURES);
+ // Write features to file
+ write_features_to_file(cpi->oxcf.partition_info_path,
+ cpi->ext_part_controller.test_mode, features, FEATURES,
+ 2, blk_params.bsize, blk_params.mi_row,
+ blk_params.mi_col);
+
+ if (ext_ml_model_decision_after_none(
+ &cpi->ext_part_controller, frame_is_intra_only(&cpi->common),
+ features, do_square_split, do_rectangular_split)) {
+ return;
+ }
+
// Calculate score using the NN model.
float score = 0.0f;
av1_nn_predict(features, nn_config, 1, &score);
aom_clear_system_state();
// Make decision.
- return (int)(score * 100) >= thresh;
+ if ((int)(score * 100) >= thresh) {
+ *do_square_split = 0;
+ *do_rectangular_split = 0;
+ }
}
#undef FEATURES
@@ -1361,7 +1565,7 @@ void av1_prune_partitions_before_search(
const int try_intra_cnn_split =
!cpi->use_screen_content_tools && frame_is_intra_only(cm) &&
cpi->sf.part_sf.intra_cnn_split &&
- cm->seq_params.sb_size >= BLOCK_64X64 && bsize <= BLOCK_64X64 &&
+ cm->seq_params->sb_size >= BLOCK_64X64 && bsize <= BLOCK_64X64 &&
bsize >= BLOCK_8X8 &&
mi_row + mi_size_high[bsize] <= mi_params->mi_rows &&
mi_col + mi_size_wide[bsize] <= mi_params->mi_cols;
@@ -1483,8 +1687,9 @@ int evaluate_ab_partition_based_on_split(
}
void av1_prune_ab_partitions(
- const AV1_COMP *cpi, const MACROBLOCK *x, const PC_TREE *pc_tree,
- BLOCK_SIZE bsize, int pb_source_variance, int64_t best_rdcost,
+ AV1_COMP *cpi, const MACROBLOCK *x, const PC_TREE *pc_tree,
+ BLOCK_SIZE bsize, const int mi_row, const int mi_col,
+ int pb_source_variance, int64_t best_rdcost,
int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
int64_t split_rd[SUB_PARTITIONS_SPLIT],
const RD_RECT_PART_WIN_INFO *rect_part_win_info, int ext_partition_allowed,
@@ -1580,7 +1785,7 @@ void av1_prune_ab_partitions(
// TODO(huisu@google.com): x->source_variance may not be the current
// block's variance. The correct one to use is pb_source_variance. Need to
// re-train the model to fix it.
- av1_ml_prune_ab_partition(bsize, pc_tree->partitioning,
+ av1_ml_prune_ab_partition(cpi, bsize, mi_row, mi_col, pc_tree->partitioning,
get_unsigned_bits(x->source_variance),
best_rdcost, horz_rd, vert_rd, split_rd,
horza_partition_allowed, horzb_partition_allowed,
@@ -1617,4 +1822,390 @@ void av1_prune_ab_partitions(
}
}
+// Prepare features for the external model. Specifically, these are the
+// features collected after the AB partitions have been searched.
+static void prepare_features_after_part_ab(
+ const AV1_COMP *const cpi, MACROBLOCK *const x, BLOCK_SIZE bsize,
+ int part_ctx, int64_t best_rd,
+ int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
+ int64_t split_rd[SUB_PARTITIONS_SPLIT], unsigned int pb_source_variance,
+ int mi_row, int mi_col, aom_partition_features_t *const features) {
+ int64_t *horz_rd = rect_part_rd[HORZ];
+ int64_t *vert_rd = rect_part_rd[VERT];
+
+ aom_clear_system_state();
+
+ // Generate features.
+ int feature_index = 0;
+ features->after_part_ab.f[feature_index++] = (float)part_ctx;
+ features->after_part_ab.f[feature_index++] =
+ (float)get_unsigned_bits(pb_source_variance);
+
+ const int rdcost = (int)AOMMIN(INT_MAX, best_rd);
+ int sub_block_rdcost[8] = { 0 };
+ int rd_index = 0;
+ for (int i = 0; i < SUB_PARTITIONS_RECT; ++i) {
+ if (horz_rd[i] > 0 && horz_rd[i] < 1000000000)
+ sub_block_rdcost[rd_index] = (int)horz_rd[i];
+ ++rd_index;
+ }
+ for (int i = 0; i < SUB_PARTITIONS_RECT; ++i) {
+ if (vert_rd[i] > 0 && vert_rd[i] < 1000000000)
+ sub_block_rdcost[rd_index] = (int)vert_rd[i];
+ ++rd_index;
+ }
+ for (int i = 0; i < SUB_PARTITIONS_SPLIT; ++i) {
+ if (split_rd[i] > 0 && split_rd[i] < 1000000000)
+ sub_block_rdcost[rd_index] = (int)split_rd[i];
+ ++rd_index;
+ }
+ for (int i = 0; i < 8; ++i) {
+ // Ratio between the sub-block RD and the whole-block RD.
+ float rd_ratio = 1.0f;
+ if (sub_block_rdcost[i] > 0 && sub_block_rdcost[i] < rdcost)
+ rd_ratio = (float)sub_block_rdcost[i] / (float)rdcost;
+ features->after_part_ab.f[feature_index++] = rd_ratio;
+ }
+
+ // Get variance of the 1:4 and 4:1 sub-blocks.
+ unsigned int horz_4_source_var[SUB_PARTITIONS_PART4] = { 0 };
+ unsigned int vert_4_source_var[SUB_PARTITIONS_PART4] = { 0 };
+ {
+ BLOCK_SIZE horz_4_bs = get_partition_subsize(bsize, PARTITION_HORZ_4);
+ BLOCK_SIZE vert_4_bs = get_partition_subsize(bsize, PARTITION_VERT_4);
+ av1_setup_src_planes(x, cpi->source, mi_row, mi_col,
+ av1_num_planes(&cpi->common), bsize);
+ const int src_stride = x->plane[0].src.stride;
+ uint8_t *src = x->plane[0].src.buf;
+ const MACROBLOCKD *const xd = &x->e_mbd;
+
+ struct buf_2d horz_4_src, vert_4_src;
+ horz_4_src.stride = src_stride;
+ vert_4_src.stride = src_stride;
+
+ for (int i = 0; i < SUB_PARTITIONS_PART4; ++i) {
+ horz_4_src.buf = src + i * block_size_high[horz_4_bs] * src_stride;
+ vert_4_src.buf = src + i * block_size_wide[vert_4_bs];
+
+ if (is_cur_buf_hbd(xd)) {
+ horz_4_source_var[i] = av1_high_get_sby_perpixel_variance(
+ cpi, &horz_4_src, horz_4_bs, xd->bd);
+ vert_4_source_var[i] = av1_high_get_sby_perpixel_variance(
+ cpi, &vert_4_src, vert_4_bs, xd->bd);
+ } else {
+ horz_4_source_var[i] =
+ av1_get_sby_perpixel_variance(cpi, &horz_4_src, horz_4_bs);
+ vert_4_source_var[i] =
+ av1_get_sby_perpixel_variance(cpi, &vert_4_src, vert_4_bs);
+ }
+ }
+ }
+
+ const float denom = (float)(pb_source_variance + 1);
+ const float low_b = 0.1f;
+ const float high_b = 10.0f;
+ for (int i = 0; i < SUB_PARTITIONS_PART4; ++i) {
+ // Ratio between the 4:1 sub-block variance and the whole-block variance.
+ float var_ratio = (float)(horz_4_source_var[i] + 1) / denom;
+ if (var_ratio < low_b) var_ratio = low_b;
+ if (var_ratio > high_b) var_ratio = high_b;
+ features->after_part_ab.f[feature_index++] = var_ratio;
+ }
+ for (int i = 0; i < SUB_PARTITIONS_PART4; ++i) {
+ // Ratio between the 1:4 sub-block variance and the whole-block variance.
+ float var_ratio = (float)(vert_4_source_var[i] + 1) / denom;
+ if (var_ratio < low_b) var_ratio = low_b;
+ if (var_ratio > high_b) var_ratio = high_b;
+ features->after_part_ab.f[feature_index++] = var_ratio;
+ }
+ assert(feature_index == 18);
+}
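+// Readability note on the 18 features packed above: [0] part_ctx,
+// [1] unsigned bits of pb_source_variance, [2..9] sub-block/whole-block RD
+// ratios (2 HORZ, 2 VERT, 4 SPLIT), [10..13] HORZ_4 variance ratios,
+// [14..17] VERT_4 variance ratios.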
+
+// If the external partition model is used, we let it determine partition
+// decisions before partition none. Specifically, these parameters:
+// partition_none_allowed
+// partition_horz_allowed
+// partition_vert_allowed
+// do_rectangular_split
+// do_square_split
+static bool ext_ml_model_decision_before_none(
+ AV1_COMP *cpi, const float features_from_motion[FEATURE_SIZE_SMS_SPLIT],
+ int *partition_none_allowed, int *partition_horz_allowed,
+ int *partition_vert_allowed, int *do_rectangular_split,
+ int *do_square_split) {
+ ExtPartController *const ext_part_controller = &cpi->ext_part_controller;
+ if (!ext_part_controller->ready) return false;
+
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_BEFORE_PART_NONE;
+ for (int i = 0; i < FEATURE_SIZE_SMS_SPLIT; ++i) {
+ features.before_part_none.f[i] = features_from_motion[i];
+ }
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *partition_none_allowed = decision.partition_none_allowed;
+ *partition_horz_allowed = decision.partition_rect_allowed[HORZ];
+ *partition_vert_allowed = decision.partition_rect_allowed[VERT];
+ *do_rectangular_split = decision.do_rectangular_split;
+ *do_square_split = decision.do_square_split;
+
+ return true;
+}
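+// The ext_ml_model_decision_* helpers below follow the same protocol as the
+// function above: return false (keep the built-in heuristics) when the
+// external controller is not ready, the frame type rules the query out, or no
+// valid decision comes back; otherwise pack the features into an
+// aom_partition_features_t with the matching id, send them via
+// av1_ext_part_send_features(), read back an aom_partition_decision_t, copy
+// the relevant fields into the output arguments, and return true.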
+
+// If the external partition model is used, we let it determine partition
+// decisions before partition none. Specifically, these parameters:
+// prune_horz
+// prune_vert
+static bool ext_ml_model_decision_before_none_part2(
+ AV1_COMP *cpi,
+ const float features_from_motion[FEATURE_SIZE_SMS_PRUNE_PART],
+ int *prune_horz, int *prune_vert) {
+ ExtPartController *const ext_part_controller = &cpi->ext_part_controller;
+ if (!ext_part_controller->ready) return false;
+
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_BEFORE_PART_NONE_PART2;
+ for (int i = 0; i < FEATURE_SIZE_SMS_PRUNE_PART; ++i) {
+ features.before_part_none.f_part2[i] = features_from_motion[i];
+ }
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *prune_horz = decision.prune_rect_part[HORZ];
+ *prune_vert = decision.prune_rect_part[VERT];
+
+ return true;
+}
+
+// If the external partition model is used, we let it determine partition
+// decisions after none partition. Specifically, these parameters:
+// do_square_split
+// do_rectangular_split
+bool ext_ml_model_decision_after_none(
+ ExtPartController *const ext_part_controller, const int is_intra_frame,
+ const float *const features_after_none, int *do_square_split,
+ int *do_rectangular_split) {
+ if (!ext_part_controller->ready || is_intra_frame) return false;
+
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_AFTER_PART_NONE;
+ for (int i = 0; i < 4; ++i) {
+ features.after_part_none.f[i] = features_after_none[i];
+ }
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *do_square_split = decision.do_square_split;
+ *do_rectangular_split = decision.do_rectangular_split;
+
+ return true;
+}
+
+// If the external partition model is used, we let it determine partition
+// decisions after none partition. Specifically, these parameters:
+// terminate_partition_search
+bool ext_ml_model_decision_after_none_part2(
+ AV1_COMP *const cpi, const float *const features_terminate,
+ int *terminate_partition_search) {
+ AV1_COMMON *const cm = &cpi->common;
+ ExtPartController *const ext_part_controller = &cpi->ext_part_controller;
+ if (!ext_part_controller->ready || frame_is_intra_only(cm)) return false;
+
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_AFTER_PART_NONE_PART2;
+ for (int i = 0; i < FEATURE_SIZE_SMS_TERM_NONE; ++i) {
+ features.after_part_none.f_terminate[i] = features_terminate[i];
+ }
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *terminate_partition_search = decision.terminate_partition_search;
+
+ return true;
+}
+
+// If the external partition model is used, we let it determine partition
+// decisions after split partition. Specifically, these parameters:
+// terminate_partition_search
+bool ext_ml_model_decision_after_split(AV1_COMP *const cpi,
+ const float *const features_terminate,
+ int *terminate_partition_search) {
+ const AV1_COMMON *const cm = &cpi->common;
+ ExtPartController *const ext_part_controller = &cpi->ext_part_controller;
+ if (frame_is_intra_only(cm) || !cpi->ext_part_controller.ready) {
+ return false;
+ }
+
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_AFTER_PART_SPLIT;
+ for (int i = 0; i < 31; ++i) {
+ features.after_part_split.f_terminate[i] = features_terminate[i];
+ }
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *terminate_partition_search = decision.terminate_partition_search;
+
+ return true;
+}
+
+// If the external partition model is used, we let it determine partition
+// decisions after split partition. Specifically, these parameters:
+// prune_rect_part[HORZ]
+// prune_rect_part[VERT]
+bool ext_ml_model_decision_after_split_part2(
+ ExtPartController *const ext_part_controller, const int is_intra_frame,
+ const float *const features_prune, int *prune_rect_part_horz,
+ int *prune_rect_part_vert) {
+ if (is_intra_frame || !ext_part_controller->ready) {
+ return false;
+ }
+
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_AFTER_PART_SPLIT_PART2;
+ for (int i = 0; i < 9; ++i) {
+ features.after_part_split.f_prune_rect[i] = features_prune[i];
+ }
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *prune_rect_part_horz = decision.prune_rect_part[0];
+ *prune_rect_part_vert = decision.prune_rect_part[1];
+
+ return true;
+}
+
+// If the external partition model is used, we let it determine partition
+// decisions after rectangular partition. Specifically, these parameters:
+// horza_partition_allowed
+// horzb_partition_allowed
+// verta_partition_allowed
+// vertb_partition_allowed
+static bool ext_ml_model_decision_after_rect(
+ ExtPartController *const ext_part_controller, const int is_intra_frame,
+ const float *const features_after_rect, int *horza_partition_allowed,
+ int *horzb_partition_allowed, int *verta_partition_allowed,
+ int *vertb_partition_allowed) {
+ if (is_intra_frame || !ext_part_controller->ready) return false;
+
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_AFTER_PART_RECT;
+ for (int i = 0; i < 10; ++i) {
+ features.after_part_rect.f[i] = features_after_rect[i];
+ }
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *horza_partition_allowed = decision.horza_partition_allowed;
+ *horzb_partition_allowed = decision.horzb_partition_allowed;
+ *verta_partition_allowed = decision.verta_partition_allowed;
+ *vertb_partition_allowed = decision.vertb_partition_allowed;
+
+ return true;
+}
+
+// If the external partition model is used, we let it determine partition
+// decisions after AB partition. Specifically, these parameters:
+// partition_vert4_allowed
+// partition_horz4_allowed
+static bool ext_ml_model_decision_after_part_ab(
+ AV1_COMP *const cpi, MACROBLOCK *const x, BLOCK_SIZE bsize, int part_ctx,
+ int64_t best_rd, int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
+ int64_t split_rd[SUB_PARTITIONS_SPLIT], int *const partition_horz4_allowed,
+ int *const partition_vert4_allowed, unsigned int pb_source_variance,
+ int mi_row, int mi_col) {
+ const AV1_COMMON *const cm = &cpi->common;
+ ExtPartController *const ext_part_controller = &cpi->ext_part_controller;
+
+ if (!frame_is_intra_only(cm) && ext_part_controller->ready) {
+ // Setup features.
+ aom_partition_features_t features;
+ features.id = FEATURE_AFTER_PART_AB;
+ prepare_features_after_part_ab(cpi, x, bsize, part_ctx, best_rd,
+ rect_part_rd, split_rd, pb_source_variance,
+ mi_row, mi_col, &features);
+
+ // Send necessary features to the external model.
+ av1_ext_part_send_features(ext_part_controller, &features);
+
+ // Get partition decisions from the external model.
+ aom_partition_decision_t decision;
+ const bool valid_decision =
+ av1_ext_part_get_partition_decision(ext_part_controller, &decision);
+ if (!valid_decision) return false;
+
+ // Populate decisions
+ *partition_horz4_allowed = decision.partition_horz4_allowed;
+ *partition_vert4_allowed = decision.partition_vert4_allowed;
+
+ return true;
+ }
+
+ return false;
+}
+
#endif // !CONFIG_REALTIME_ONLY
diff --git a/third_party/libaom/source/libaom/av1/encoder/partition_strategy.h b/third_party/libaom/source/libaom/av1/encoder/partition_strategy.h
index 0527a944cd..ed66a364d9 100644
--- a/third_party/libaom/source/libaom/av1/encoder/partition_strategy.h
+++ b/third_party/libaom/source/libaom/av1/encoder/partition_strategy.h
@@ -13,58 +13,10 @@
#define AOM_AV1_ENCODER_PARTITION_STRATEGY_H_
#include "av1/encoder/encodeframe.h"
+#include "av1/encoder/encodeframe_utils.h"
#include "av1/encoder/encodemb.h"
#include "av1/encoder/encoder.h"
-#define FEATURE_SIZE_SMS_SPLIT_FAST 6
-#define FEATURE_SIZE_SMS_SPLIT 17
-#define FEATURE_SIZE_SMS_PRUNE_PART 25
-#define FEATURE_SIZE_SMS_TERM_NONE 28
-#define FEATURE_SIZE_FP_SMS_TERM_NONE 20
-#define FEATURE_SIZE_MAX_MIN_PART_PRED 13
-#define MAX_NUM_CLASSES_MAX_MIN_PART_PRED 4
-
-#define FEATURE_SMS_NONE_FLAG 1
-#define FEATURE_SMS_SPLIT_FLAG (1 << 1)
-#define FEATURE_SMS_RECT_FLAG (1 << 2)
-
-#define FEATURE_SMS_PRUNE_PART_FLAG \
- (FEATURE_SMS_NONE_FLAG | FEATURE_SMS_SPLIT_FLAG | FEATURE_SMS_RECT_FLAG)
-#define FEATURE_SMS_SPLIT_MODEL_FLAG \
- (FEATURE_SMS_NONE_FLAG | FEATURE_SMS_SPLIT_FLAG)
-
-// Number of sub-partitions in rectangular partition types.
-#define SUB_PARTITIONS_RECT 2
-
-// Number of sub-partitions in split partition type.
-#define SUB_PARTITIONS_SPLIT 4
-
-// Number of sub-partitions in AB partition types.
-#define SUB_PARTITIONS_AB 3
-
-// Number of sub-partitions in 4-way partition types.
-#define SUB_PARTITIONS_PART4 4
-
-// 4part parition types.
-enum { HORZ4 = 0, VERT4, NUM_PART4_TYPES } UENUM1BYTE(PART4_TYPES);
-
-// AB parition types.
-enum {
- HORZ_A = 0,
- HORZ_B,
- VERT_A,
- VERT_B,
- NUM_AB_PARTS
-} UENUM1BYTE(AB_PART_TYPE);
-
-// Rectangular parition types.
-enum { HORZ = 0, VERT, NUM_RECT_PARTS } UENUM1BYTE(RECT_PART_TYPE);
-
-// Structure to keep win flags for HORZ and VERT partition evaluations.
-typedef struct {
- int rect_part_win[NUM_RECT_PARTS];
-} RD_RECT_PART_WIN_INFO;
-
void av1_intra_mode_cnn_partition(const AV1_COMMON *const cm, MACROBLOCK *x,
int bsize, int label_idx,
int *partition_none_allowed,
@@ -129,16 +81,18 @@ void av1_ml_early_term_after_split(AV1_COMP *const cpi, MACROBLOCK *const x,
// no information about rectangular partitions. Preliminary experiments suggest
// that we can get better performance by adding in q_index and rectangular
// sse/var from SMS. We should retrain and tune this model later.
-void av1_ml_prune_rect_partition(const AV1_COMP *const cpi,
- const MACROBLOCK *const x, BLOCK_SIZE bsize,
- int64_t best_rd, int64_t none_rd,
- int64_t *split_rd, int *const dst_prune_horz,
+void av1_ml_prune_rect_partition(AV1_COMP *const cpi, const MACROBLOCK *const x,
+ BLOCK_SIZE bsize, const int mi_row,
+ const int mi_col, int64_t best_rd,
+ int64_t none_rd, int64_t *split_rd,
+ int *const dst_prune_horz,
int *const dst_prune_vert);
// Use a ML model to predict if horz_a, horz_b, vert_a, and vert_b should be
// considered.
void av1_ml_prune_ab_partition(
- BLOCK_SIZE bsize, int part_ctx, int var_ctx, int64_t best_rd,
+ AV1_COMP *const cpi, BLOCK_SIZE bsize, const int mi_row, const int mi_col,
+ int part_ctx, int var_ctx, int64_t best_rd,
int64_t horz_rd[SUB_PARTITIONS_RECT], int64_t vert_rd[SUB_PARTITIONS_RECT],
int64_t split_rd[SUB_PARTITIONS_SPLIT], int *const horza_partition_allowed,
int *const horzb_partition_allowed, int *const verta_partition_allowed,
@@ -146,18 +100,19 @@ void av1_ml_prune_ab_partition(
// Use a ML model to predict if horz4 and vert4 should be considered.
void av1_ml_prune_4_partition(
- const AV1_COMP *const cpi, MACROBLOCK *const x, BLOCK_SIZE bsize,
- int part_ctx, int64_t best_rd,
- int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
+ AV1_COMP *const cpi, MACROBLOCK *const x, BLOCK_SIZE bsize, int part_ctx,
+ int64_t best_rd, int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
int64_t split_rd[SUB_PARTITIONS_SPLIT], int *const partition_horz4_allowed,
int *const partition_vert4_allowed, unsigned int pb_source_variance,
int mi_row, int mi_col);
// ML-based partition search breakout after PARTITION_NONE.
-int av1_ml_predict_breakout(const AV1_COMP *const cpi, BLOCK_SIZE bsize,
- const MACROBLOCK *const x,
- const RD_STATS *const rd_stats,
- unsigned int pb_source_variance, int bit_depth);
+void av1_ml_predict_breakout(AV1_COMP *const cpi, BLOCK_SIZE bsize,
+ const MACROBLOCK *const x,
+ const RD_STATS *const rd_stats,
+ const PartitionBlkParams blk_params,
+ unsigned int pb_source_variance, int bit_depth,
+ int *do_square_split, int *do_rectangular_split);
// The first round of partition pruning determined before any partition
// has been tested. The decisions will be updated and passed back
@@ -183,8 +138,9 @@ void av1_prune_partitions_by_max_min_bsize(
// Prune out AB partitions based on rd decisions made from testing the
// basic partitions.
void av1_prune_ab_partitions(
- const AV1_COMP *cpi, const MACROBLOCK *x, const PC_TREE *pc_tree,
- BLOCK_SIZE bsize, int pb_source_variance, int64_t best_rdcost,
+ AV1_COMP *cpi, const MACROBLOCK *x, const PC_TREE *pc_tree,
+ BLOCK_SIZE bsize, const int mi_row, const int mi_col,
+ int pb_source_variance, int64_t best_rdcost,
int64_t rect_part_rd[NUM_RECT_PARTS][SUB_PARTITIONS_RECT],
int64_t split_rd[SUB_PARTITIONS_SPLIT],
const RD_RECT_PART_WIN_INFO *rect_part_win_info, int ext_partition_allowed,
@@ -261,22 +217,66 @@ static INLINE int is_full_sb(const CommonModeInfoParams *const mi_params,
(mi_col + sb_mi_wide) <= mi_params->mi_cols;
}
+#if !CONFIG_REALTIME_ONLY
// Do not use this criterion for screen content videos, since screen content
// videos can often find good predictors and the largest block size is likely
// to be used.
static INLINE int use_auto_max_partition(const AV1_COMP *const cpi,
BLOCK_SIZE sb_size, int mi_row,
int mi_col) {
- assert(IMPLIES(cpi->gf_group.size > 0,
- cpi->gf_group.index < cpi->gf_group.size));
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
const AV1_COMMON *const cm = &cpi->common;
return !frame_is_intra_only(cm) && !cpi->use_screen_content_tools &&
cpi->sf.part_sf.auto_max_partition_based_on_simple_motion !=
NOT_IN_USE &&
sb_size == BLOCK_128X128 &&
is_full_sb(&cm->mi_params, mi_row, mi_col, sb_size) &&
- cpi->gf_group.update_type[cpi->gf_group.index] != OVERLAY_UPDATE &&
- cpi->gf_group.update_type[cpi->gf_group.index] != INTNL_OVERLAY_UPDATE;
+ cpi->ppi->gf_group.update_type[cpi->gf_frame_index] !=
+ OVERLAY_UPDATE &&
+ cpi->ppi->gf_group.update_type[cpi->gf_frame_index] !=
+ INTNL_OVERLAY_UPDATE;
}
+static BLOCK_SIZE dim_to_size(int dim) {
+ switch (dim) {
+ case 4: return BLOCK_4X4;
+ case 8: return BLOCK_8X8;
+ case 16: return BLOCK_16X16;
+ case 32: return BLOCK_32X32;
+ case 64: return BLOCK_64X64;
+ case 128: return BLOCK_128X128;
+ default: assert(0); return 0;
+ }
+}
+
+static AOM_INLINE void set_max_min_partition_size(SuperBlockEnc *sb_enc,
+ AV1_COMP *cpi, MACROBLOCK *x,
+ const SPEED_FEATURES *sf,
+ BLOCK_SIZE sb_size,
+ int mi_row, int mi_col) {
+ const AV1_COMMON *cm = &cpi->common;
+
+ sb_enc->max_partition_size =
+ AOMMIN(sf->part_sf.default_max_partition_size,
+ dim_to_size(cpi->oxcf.part_cfg.max_partition_size));
+ sb_enc->min_partition_size =
+ AOMMAX(sf->part_sf.default_min_partition_size,
+ dim_to_size(cpi->oxcf.part_cfg.min_partition_size));
+ sb_enc->max_partition_size =
+ AOMMIN(sb_enc->max_partition_size, cm->seq_params->sb_size);
+ sb_enc->min_partition_size =
+ AOMMIN(sb_enc->min_partition_size, cm->seq_params->sb_size);
+
+ if (use_auto_max_partition(cpi, sb_size, mi_row, mi_col)) {
+ float features[FEATURE_SIZE_MAX_MIN_PART_PRED] = { 0.0f };
+
+ av1_get_max_min_partition_features(cpi, x, mi_row, mi_col, features);
+ sb_enc->max_partition_size =
+ AOMMAX(AOMMIN(av1_predict_max_partition(cpi, x, features),
+ sb_enc->max_partition_size),
+ sb_enc->min_partition_size);
+ }
+}
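+// Example of the clamping above (assuming the user-facing partition sizes are
+// pixel dimensions, as dim_to_size() implies): min_partition_size = 8 and
+// max_partition_size = 64 on a 128x128-superblock stream bound the search to
+// [BLOCK_8X8, BLOCK_64X64], unless the speed-feature defaults or the
+// auto-max-partition prediction narrow the range further.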
+#endif // !CONFIG_REALTIME_ONLY
#endif // AOM_AV1_ENCODER_PARTITION_STRATEGY_H_
diff --git a/third_party/libaom/source/libaom/av1/encoder/pass2_strategy.c b/third_party/libaom/source/libaom/av1/encoder/pass2_strategy.c
index 804fb3a510..e3639f7784 100644
--- a/third_party/libaom/source/libaom/av1/encoder/pass2_strategy.c
+++ b/third_party/libaom/source/libaom/av1/encoder/pass2_strategy.c
@@ -43,6 +43,13 @@
#define DEFAULT_KF_BOOST 2300
#define DEFAULT_GF_BOOST 2000
#define GROUP_ADAPTIVE_MAXQ 1
+
+static INLINE int is_fp_stats_to_predict_flat_gop_invalid(
+ const FIRSTPASS_STATS *fp_stats) {
+ return ((fp_stats->tr_coded_error < 0) || (fp_stats->pcnt_third_ref < 0) ||
+ (fp_stats->frame_avg_wavelet_energy < 0));
+}
+
static void init_gf_stats(GF_GROUP_STATS *gf_stats);
// Calculate an active area of the image that discounts formatting
@@ -182,7 +189,7 @@ static double calc_correction_factor(double err_per_mb, int q) {
// Based on history adjust expectations of bits per macroblock.
static void twopass_update_bpm_factor(AV1_COMP *cpi, int rate_err_tol) {
- TWO_PASS *twopass = &cpi->twopass;
+ TWO_PASS *twopass = &cpi->ppi->twopass;
const RATE_CONTROL *const rc = &cpi->rc;
int err_estimate = rc->rate_error_estimate;
@@ -194,14 +201,14 @@ static void twopass_update_bpm_factor(AV1_COMP *cpi, int rate_err_tol) {
const double max_fac = 1.0 + adj_limit;
if (rc->vbr_bits_off_target && rc->total_actual_bits > 0) {
- if (cpi->lap_enabled) {
+ if (cpi->ppi->lap_enabled) {
rate_err_factor =
(double)twopass->rolling_arf_group_actual_bits /
DOUBLE_DIVIDE_CHECK((double)twopass->rolling_arf_group_target_bits);
} else {
rate_err_factor =
1.0 - ((double)(rc->vbr_bits_off_target) /
- AOMMAX(rc->total_actual_bits, cpi->twopass.bits_left));
+ AOMMAX(rc->total_actual_bits, cpi->ppi->twopass.bits_left));
}
rate_err_factor = AOMMAX(min_fac, AOMMIN(max_fac, rate_err_factor));
@@ -209,7 +216,7 @@ static void twopass_update_bpm_factor(AV1_COMP *cpi, int rate_err_tol) {
// Adjustment is damped if this is 1 pass with look ahead processing
// (as there are only ever a few frames of data) and for all but the first
// GOP in normal two pass.
- if ((twopass->bpm_factor != 1.0) || cpi->lap_enabled) {
+ if ((twopass->bpm_factor != 1.0) || cpi->ppi->lap_enabled) {
rate_err_factor = 1.0 + ((rate_err_factor - 1.0) / damp_fac);
}
}
@@ -302,9 +309,9 @@ static int get_twopass_worst_quality(AV1_COMP *cpi, const double av_frame_err,
// Try and pick a max Q that will be high enough to encode the
// content at the given rate.
int q = find_qindex_by_rate_with_correction(
- target_norm_bits_per_mb, cpi->common.seq_params.bit_depth,
- av_err_per_mb, cpi->twopass.bpm_factor, rate_err_tol, rc->best_quality,
- rc->worst_quality);
+ target_norm_bits_per_mb, cpi->common.seq_params->bit_depth,
+ av_err_per_mb, cpi->ppi->twopass.bpm_factor, rate_err_tol,
+ rc->best_quality, rc->worst_quality);
// Restriction on active max q for constrained quality mode.
if (rc_cfg->mode == AOM_CQ) q = AOMMAX(q, rc_cfg->cq_level);
@@ -312,57 +319,63 @@ static int get_twopass_worst_quality(AV1_COMP *cpi, const double av_frame_err,
}
}
-#define SR_DIFF_PART 0.0015
-#define MOTION_AMP_PART 0.003
#define INTRA_PART 0.005
#define DEFAULT_DECAY_LIMIT 0.75
#define LOW_SR_DIFF_TRHESH 0.1
-#define SR_DIFF_MAX 128.0
#define NCOUNT_FRAME_II_THRESH 5.0
+#define LOW_CODED_ERR_PER_MB 10.0
-static double get_sr_decay_rate(const FRAME_INFO *frame_info,
- const FIRSTPASS_STATS *frame) {
- const int num_mbs = frame_info->num_mbs;
- double sr_diff = (frame->sr_coded_error - frame->coded_error) / num_mbs;
+/* This function considers how the quality of prediction may be deteriorating
+ * with distance. It compares the coded error for the last frame and the
+ * second reference frame (usually two frames old) and also applies a factor
+ * based on the extent of INTRA coding.
+ *
+ * The decay factor is then used to reduce the contribution of frames further
+ * from the alt-ref or golden frame, to the boost calculation for that
+ * alt-ref or golden frame.
+ */
+static double get_sr_decay_rate(const FIRSTPASS_STATS *frame) {
+ double sr_diff = (frame->sr_coded_error - frame->coded_error);
double sr_decay = 1.0;
double modified_pct_inter;
double modified_pcnt_intra;
- const double motion_amplitude_factor =
- frame->pcnt_motion * ((frame->mvc_abs + frame->mvr_abs) / 2);
modified_pct_inter = frame->pcnt_inter;
- if ((frame->intra_error / DOUBLE_DIVIDE_CHECK(frame->coded_error)) <
- (double)NCOUNT_FRAME_II_THRESH) {
+ if ((frame->coded_error > LOW_CODED_ERR_PER_MB) &&
+ ((frame->intra_error / DOUBLE_DIVIDE_CHECK(frame->coded_error)) <
+ (double)NCOUNT_FRAME_II_THRESH)) {
modified_pct_inter = frame->pcnt_inter - frame->pcnt_neutral;
}
modified_pcnt_intra = 100 * (1.0 - modified_pct_inter);
if ((sr_diff > LOW_SR_DIFF_TRHESH)) {
- sr_diff = AOMMIN(sr_diff, SR_DIFF_MAX);
- sr_decay = 1.0 - (SR_DIFF_PART * sr_diff) -
- (MOTION_AMP_PART * motion_amplitude_factor) -
- (INTRA_PART * modified_pcnt_intra);
+ double sr_diff_part = ((sr_diff * 0.25) / frame->intra_error);
+ sr_decay = 1.0 - sr_diff_part - (INTRA_PART * modified_pcnt_intra);
}
- return AOMMAX(sr_decay, AOMMIN(DEFAULT_DECAY_LIMIT, modified_pct_inter));
+ return AOMMAX(sr_decay, DEFAULT_DECAY_LIMIT);
}
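+/* Illustrative numbers for the simplified decay above (made up; note the
+ * errors are no longer divided by num_mbs): sr_coded_error - coded_error =
+ * 40.0, intra_error = 400.0 and modified_pcnt_intra = 20.0 give
+ * sr_diff_part = (40.0 * 0.25) / 400.0 = 0.025 and
+ * sr_decay = 1.0 - 0.025 - 0.005 * 20.0 = 0.875, which stays above the
+ * DEFAULT_DECAY_LIMIT floor of 0.75.
+ */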
// This function gives an estimate of how badly we believe the prediction
// quality is decaying from frame to frame.
-static double get_zero_motion_factor(const FRAME_INFO *frame_info,
- const FIRSTPASS_STATS *frame) {
+static double get_zero_motion_factor(const FIRSTPASS_STATS *frame) {
const double zero_motion_pct = frame->pcnt_inter - frame->pcnt_motion;
- double sr_decay = get_sr_decay_rate(frame_info, frame);
+ double sr_decay = get_sr_decay_rate(frame);
return AOMMIN(sr_decay, zero_motion_pct);
}
-#define ZM_POWER_FACTOR 0.75
+#define DEFAULT_ZM_FACTOR 0.5
+static double get_prediction_decay_rate(const FIRSTPASS_STATS *frame_stats) {
+ const double sr_decay_rate = get_sr_decay_rate(frame_stats);
+ double zero_motion_factor =
+ DEFAULT_ZM_FACTOR * (frame_stats->pcnt_inter - frame_stats->pcnt_motion);
-static double get_prediction_decay_rate(const FRAME_INFO *frame_info,
- const FIRSTPASS_STATS *next_frame) {
- const double sr_decay_rate = get_sr_decay_rate(frame_info, next_frame);
- const double zero_motion_factor =
- (0.95 * pow((next_frame->pcnt_inter - next_frame->pcnt_motion),
- ZM_POWER_FACTOR));
+ // Clamp value to range 0.0 to 1.0
+ // This should happen anyway if input values are sensibly clamped but checked
+ // here just in case.
+ if (zero_motion_factor > 1.0)
+ zero_motion_factor = 1.0;
+ else if (zero_motion_factor < 0.0)
+ zero_motion_factor = 0.0;
return AOMMAX(zero_motion_factor,
(sr_decay_rate + ((1.0 - sr_decay_rate) * zero_motion_factor)));
@@ -449,7 +462,6 @@ static void accumulate_this_frame_stats(const FIRSTPASS_STATS *stats,
}
static void accumulate_next_frame_stats(const FIRSTPASS_STATS *stats,
- const FRAME_INFO *frame_info,
const int flash_detected,
const int frames_since_key,
const int cur_idx,
@@ -470,16 +482,15 @@ static void accumulate_next_frame_stats(const FIRSTPASS_STATS *stats,
// Accumulate the effect of prediction quality decay
if (!flash_detected) {
gf_stats->last_loop_decay_rate = gf_stats->loop_decay_rate;
- gf_stats->loop_decay_rate = get_prediction_decay_rate(frame_info, stats);
+ gf_stats->loop_decay_rate = get_prediction_decay_rate(stats);
gf_stats->decay_accumulator =
gf_stats->decay_accumulator * gf_stats->loop_decay_rate;
// Monitor for static sections.
if ((frames_since_key + cur_idx - 1) > 1) {
- gf_stats->zero_motion_accumulator =
- AOMMIN(gf_stats->zero_motion_accumulator,
- get_zero_motion_factor(frame_info, stats));
+ gf_stats->zero_motion_accumulator = AOMMIN(
+ gf_stats->zero_motion_accumulator, get_zero_motion_factor(stats));
}
}
}
@@ -618,8 +629,8 @@ static double calc_kf_frame_boost(const RATE_CONTROL *rc,
return AOMMIN(frame_boost, max_boost * boost_q_correction);
}
-static int get_projected_gfu_boost(const RATE_CONTROL *rc, int gfu_boost,
- int frames_to_project,
+static int get_projected_gfu_boost(const PRIMARY_RATE_CONTROL *p_rc,
+ int gfu_boost, int frames_to_project,
int num_stats_used_for_gfu_boost) {
/*
* If frames_to_project is equal to num_stats_used_for_gfu_boost,
@@ -629,7 +640,7 @@ static int get_projected_gfu_boost(const RATE_CONTROL *rc, int gfu_boost,
*/
if (num_stats_used_for_gfu_boost >= frames_to_project) return gfu_boost;
- double min_boost_factor = sqrt(rc->baseline_gf_interval);
+ double min_boost_factor = sqrt(p_rc->baseline_gf_interval);
// Get the current tpl factor (number of frames = frames_to_project).
double tpl_factor = av1_get_gfu_boost_projection_factor(
min_boost_factor, MAX_GFUBOOST_FACTOR, frames_to_project);
@@ -642,11 +653,13 @@ static int get_projected_gfu_boost(const RATE_CONTROL *rc, int gfu_boost,
}
#define GF_MAX_BOOST 90.0
+#define GF_MIN_BOOST 50
#define MIN_DECAY_FACTOR 0.01
-int av1_calc_arf_boost(const TWO_PASS *twopass, const RATE_CONTROL *rc,
+int av1_calc_arf_boost(const TWO_PASS *twopass,
+ const PRIMARY_RATE_CONTROL *p_rc, const RATE_CONTROL *rc,
FRAME_INFO *frame_info, int offset, int f_frames,
int b_frames, int *num_fpstats_used,
- int *num_fpstats_required) {
+ int *num_fpstats_required, int project_gfu_boost) {
int i;
GF_GROUP_STATS gf_stats;
init_gf_stats(&gf_stats);
@@ -670,8 +683,7 @@ int av1_calc_arf_boost(const TWO_PASS *twopass, const RATE_CONTROL *rc,
// Accumulate the effect of prediction quality decay.
if (!flash_detected) {
- gf_stats.decay_accumulator *=
- get_prediction_decay_rate(frame_info, this_frame);
+ gf_stats.decay_accumulator *= get_prediction_decay_rate(this_frame);
gf_stats.decay_accumulator = gf_stats.decay_accumulator < MIN_DECAY_FACTOR
? MIN_DECAY_FACTOR
: gf_stats.decay_accumulator;
@@ -704,8 +716,7 @@ int av1_calc_arf_boost(const TWO_PASS *twopass, const RATE_CONTROL *rc,
// Cumulative effect of prediction quality decay.
if (!flash_detected) {
- gf_stats.decay_accumulator *=
- get_prediction_decay_rate(frame_info, this_frame);
+ gf_stats.decay_accumulator *= get_prediction_decay_rate(this_frame);
gf_stats.decay_accumulator = gf_stats.decay_accumulator < MIN_DECAY_FACTOR
? MIN_DECAY_FACTOR
: gf_stats.decay_accumulator;
@@ -719,16 +730,16 @@ int av1_calc_arf_boost(const TWO_PASS *twopass, const RATE_CONTROL *rc,
}
arf_boost += (int)boost_score;
- if (num_fpstats_required) {
+ if (project_gfu_boost) {
+ assert(num_fpstats_required != NULL);
+ assert(num_fpstats_used != NULL);
*num_fpstats_required = f_frames + b_frames;
- if (num_fpstats_used) {
- arf_boost = get_projected_gfu_boost(rc, arf_boost, *num_fpstats_required,
- *num_fpstats_used);
- }
+ arf_boost = get_projected_gfu_boost(p_rc, arf_boost, *num_fpstats_required,
+ *num_fpstats_used);
}
- if (arf_boost < ((b_frames + f_frames) * 50))
- arf_boost = ((b_frames + f_frames) * 50);
+ if (arf_boost < ((b_frames + f_frames) * GF_MIN_BOOST))
+ arf_boost = ((b_frames + f_frames) * GF_MIN_BOOST);
return arf_boost;
}
@@ -767,7 +778,8 @@ static int calculate_section_intra_ratio(const FIRSTPASS_STATS *begin,
static int64_t calculate_total_gf_group_bits(AV1_COMP *cpi,
double gf_group_err) {
const RATE_CONTROL *const rc = &cpi->rc;
- const TWO_PASS *const twopass = &cpi->twopass;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ const TWO_PASS *const twopass = &cpi->ppi->twopass;
const int max_bits = frame_max_bits(rc, &cpi->oxcf);
int64_t total_group_bits;
@@ -787,8 +799,8 @@ static int64_t calculate_total_gf_group_bits(AV1_COMP *cpi,
: total_group_bits;
// Clip based on user supplied data rate variability limit.
- if (total_group_bits > (int64_t)max_bits * rc->baseline_gf_interval)
- total_group_bits = (int64_t)max_bits * rc->baseline_gf_interval;
+ if (total_group_bits > (int64_t)max_bits * p_rc->baseline_gf_interval)
+ total_group_bits = (int64_t)max_bits * p_rc->baseline_gf_interval;
return total_group_bits;
}
@@ -834,7 +846,8 @@ static int adjust_boost_bits_for_target_level(const AV1_COMP *const cpi,
int64_t group_bits,
int frame_type) {
const AV1_COMMON *const cm = &cpi->common;
- const SequenceHeader *const seq_params = &cm->seq_params;
+ const SequenceHeader *const seq_params = cm->seq_params;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const int temporal_layer_id = cm->temporal_layer_id;
const int spatial_layer_id = cm->spatial_layer_id;
for (int index = 0; index < seq_params->operating_points_cnt_minus_1 + 1;
@@ -845,7 +858,7 @@ static int adjust_boost_bits_for_target_level(const AV1_COMP *const cpi,
}
const AV1_LEVEL target_level =
- cpi->level_params.target_seq_level_idx[index];
+ cpi->ppi->level_params.target_seq_level_idx[index];
if (target_level >= SEQ_LEVELS) continue;
assert(is_valid_seq_level_idx(target_level));
@@ -859,18 +872,20 @@ static int adjust_boost_bits_for_target_level(const AV1_COMP *const cpi,
const int level_enforced_max_kf_bits = target_bits_per_frame * 8;
if (bits_assigned > level_enforced_max_kf_bits) {
const int frames = rc->frames_to_key - 1;
- rc->kf_boost = calculate_boost_factor(
+ p_rc->kf_boost = calculate_boost_factor(
frames, level_enforced_max_kf_bits, group_bits);
- bits_assigned = calculate_boost_bits(frames, rc->kf_boost, group_bits);
+ bits_assigned =
+ calculate_boost_bits(frames, p_rc->kf_boost, group_bits);
}
} else if (frame_type == 1) {
// Maximum bits for arf is 4 times the target_bits_per_frame.
const int level_enforced_max_arf_bits = target_bits_per_frame * 4;
if (bits_assigned > level_enforced_max_arf_bits) {
- rc->gfu_boost = calculate_boost_factor(
- rc->baseline_gf_interval, level_enforced_max_arf_bits, group_bits);
- bits_assigned = calculate_boost_bits(rc->baseline_gf_interval,
- rc->gfu_boost, group_bits);
+ p_rc->gfu_boost =
+ calculate_boost_factor(p_rc->baseline_gf_interval,
+ level_enforced_max_arf_bits, group_bits);
+ bits_assigned = calculate_boost_bits(p_rc->baseline_gf_interval,
+ p_rc->gfu_boost, group_bits);
}
} else {
assert(0);
@@ -883,7 +898,9 @@ static int adjust_boost_bits_for_target_level(const AV1_COMP *const cpi,
// Allocate bits to each frame in a GF / ARF group
double layer_fraction[MAX_ARF_LAYERS + 1] = { 1.0, 0.70, 0.55, 0.60,
0.60, 1.0, 1.0 };
-static void allocate_gf_group_bits(GF_GROUP *gf_group, RATE_CONTROL *const rc,
+static void allocate_gf_group_bits(GF_GROUP *gf_group,
+ PRIMARY_RATE_CONTROL *const p_rc,
+ RATE_CONTROL *const rc,
int64_t gf_group_bits, int gf_arf_bits,
int key_frame, int use_arf) {
int64_t total_group_bits = gf_group_bits;
@@ -900,7 +917,7 @@ static void allocate_gf_group_bits(GF_GROUP *gf_group, RATE_CONTROL *const rc,
if (use_arf) total_group_bits -= gf_arf_bits;
int num_frames =
- AOMMAX(1, rc->baseline_gf_interval - (rc->frames_since_key == 0));
+ AOMMAX(1, p_rc->baseline_gf_interval - (rc->frames_since_key == 0));
base_frame_bits = (int)(total_group_bits / num_frames);
// Check the number of frames in each layer in case we have a
@@ -943,7 +960,8 @@ static void allocate_gf_group_bits(GF_GROUP *gf_group, RATE_CONTROL *const rc,
// in the next GOP. For a GF group, the next GOP will overwrite the rate allocation.
// Setting this frame to use 0 bits (out of the current GOP budget) will
// simplify the logic in reference frame management.
- gf_group->bit_allocation[gf_group_size] = 0;
+ if (gf_group_size < MAX_STATIC_GF_GROUP_LENGTH)
+ gf_group->bit_allocation[gf_group_size] = 0;
}
// Returns true if KF group and GF group both are almost completely static.
@@ -967,7 +985,7 @@ static INLINE int detect_gf_cut(AV1_COMP *cpi, int frame_index, int cur_start,
int active_min_gf_interval,
GF_GROUP_STATS *gf_stats) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
InitialDimensions *const initial_dimensions = &cpi->initial_dimensions;
// Motion breakout threshold for loop below depends on image size.
const double mv_ratio_accumulator_thresh =
@@ -997,12 +1015,71 @@ static INLINE int detect_gf_cut(AV1_COMP *cpi, int frame_index, int cur_start,
// so we can continue for more frames.
if (((frame_index - cur_start) >= active_max_gf_interval + 1) &&
!is_almost_static(gf_stats->zero_motion_accumulator,
- twopass->kf_zeromotion_pct, cpi->lap_enabled)) {
+ twopass->kf_zeromotion_pct, cpi->ppi->lap_enabled)) {
return 1;
}
return 0;
}
+static int is_shorter_gf_interval_better(AV1_COMP *cpi,
+ EncodeFrameParams *frame_params,
+ const EncodeFrameInput *frame_input) {
+ RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ int gop_length_decision_method = cpi->sf.tpl_sf.gop_length_decision_method;
+ int shorten_gf_interval;
+
+ av1_tpl_preload_rc_estimate(cpi, frame_params);
+
+ if (gop_length_decision_method == 2) {
+ // GF group length is decided based on GF boost and tpl stats of ARFs from
+ // base layer, (base+1) layer.
+ shorten_gf_interval =
+ (p_rc->gfu_boost <
+ p_rc->num_stats_used_for_gfu_boost * GF_MIN_BOOST * 1.4) &&
+ !av1_tpl_setup_stats(cpi, 3, frame_params, frame_input);
+ } else {
+ int do_complete_tpl = 1;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ int is_temporal_filter_enabled =
+ (rc->frames_since_key > 0 && gf_group->arf_index > -1);
+
+ if (is_temporal_filter_enabled) {
+ int arf_src_index = gf_group->arf_src_offset[gf_group->arf_index];
+ FRAME_UPDATE_TYPE arf_update_type =
+ gf_group->update_type[gf_group->arf_index];
+ int is_forward_keyframe = 0;
+ av1_temporal_filter(cpi, arf_src_index, arf_update_type,
+ is_forward_keyframe, NULL);
+ aom_extend_frame_borders(&cpi->ppi->alt_ref_buffer,
+ av1_num_planes(&cpi->common));
+ }
+
+ if (gop_length_decision_method == 1) {
+ // Check if tpl stats of ARFs from base layer, (base+1) layer,
+ // (base+2) layer can decide the GF group length.
+ int gop_length_eval =
+ av1_tpl_setup_stats(cpi, 2, frame_params, frame_input);
+
+ if (gop_length_eval != 2) {
+ do_complete_tpl = 0;
+ shorten_gf_interval = !gop_length_eval;
+ }
+ }
+
+ if (do_complete_tpl) {
+ // Decide GF group length based on complete tpl stats.
+ shorten_gf_interval =
+ !av1_tpl_setup_stats(cpi, 1, frame_params, frame_input);
+ // Tpl stats are reused when the ARF is temporally filtered and the GF
+ // interval is not shortened.
+ if (is_temporal_filter_enabled && !shorten_gf_interval)
+ cpi->skip_tpl_setup_stats = 1;
+ }
+ }
+ return shorten_gf_interval;
+}
+
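The new is_shorter_gf_interval_better() helper above routes the GOP-length decision through three strategies selected by cpi->sf.tpl_sf.gop_length_decision_method. As a rough, self-contained sketch of that control flow only (hypothetical stand-in types and a stubbed tpl evaluator, not the encoder API):

#include <stdio.h>

/* Hypothetical stand-ins for the encoder state the real helper reads. */
typedef struct {
  int method;         /* gop_length_decision_method: 0, 1 or 2 */
  int gfu_boost;      /* projected boost for the candidate ARF  */
  int num_stats_used; /* stats behind that boost projection     */
} GopDecisionInput;

/* Stub for av1_tpl_setup_stats(): pretend the (partial) tpl pass says
 * the full-length GOP is worth keeping. */
static int tpl_says_keep_full_gop(int depth) { (void)depth; return 1; }

static int shorten_gf_interval(const GopDecisionInput *in) {
  const int kMinBoost = 90; /* assumed stand-in for GF_MIN_BOOST */
  if (in->method == 2) {
    /* Cheapest path: boost heuristic plus a shallow tpl pass. */
    return (in->gfu_boost < in->num_stats_used * kMinBoost * 1.4) &&
           !tpl_says_keep_full_gop(3);
  }
  if (in->method == 1) {
    /* Medium path: a deeper partial tpl pass may already decide. */
    const int eval = tpl_says_keep_full_gop(2);
    if (eval != 2) return !eval;
  }
  /* Fall through: decide from the complete tpl stats. */
  return !tpl_says_keep_full_gop(1);
}

int main(void) {
  GopDecisionInput in = { 2, 500, 8 };
  printf("shorten: %d\n", shorten_gf_interval(&in));
  return 0;
}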
#define MIN_FWD_KF_INTERVAL 8
#define MIN_SHRINK_LEN 6 // the minimum length of gf if we are shrinking
#define SMOOTH_FILT_LEN 7
@@ -1014,17 +1091,16 @@ const double smooth_filt[SMOOTH_FILT_LEN] = { 0.006, 0.061, 0.242, 0.383,
0.242, 0.061, 0.006 };
// Smooth filter intra_error and coded_error in firstpass stats.
-// If ignore[i]==1, the ith element should not be used in the filtering.
-static void smooth_filter_stats(const FIRSTPASS_STATS *stats, const int *ignore,
- int start_idx, int last_idx,
- double *filt_intra_err,
+// If stats[i].is_flash==1, the ith element should not be used in the filtering.
+static void smooth_filter_stats(const FIRSTPASS_STATS *stats, int start_idx,
+ int last_idx, double *filt_intra_err,
double *filt_coded_err) {
int i, j;
for (i = start_idx; i <= last_idx; i++) {
double total_wt = 0;
for (j = -HALF_FILT_LEN; j <= HALF_FILT_LEN; j++) {
int idx = AOMMIN(AOMMAX(i + j, start_idx), last_idx);
- if (ignore[idx]) continue;
+ if (stats[idx].is_flash) continue;
filt_intra_err[i] +=
smooth_filt[j + HALF_FILT_LEN] * stats[idx].intra_error;
@@ -1041,7 +1117,7 @@ static void smooth_filter_stats(const FIRSTPASS_STATS *stats, const int *ignore,
for (j = -HALF_FILT_LEN; j <= HALF_FILT_LEN; j++) {
int idx = AOMMIN(AOMMAX(i + j, start_idx), last_idx);
// Coded error involves idx and idx - 1.
- if (ignore[idx] || (idx > 0 && ignore[idx - 1])) continue;
+ if (stats[idx].is_flash || (idx > 0 && stats[idx - 1].is_flash)) continue;
filt_coded_err[i] +=
smooth_filt[j + HALF_FILT_LEN] * stats[idx].coded_error;
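The smoothing above is a 7-tap weighted average over the first-pass error arrays that clamps indices at the ends of the analysed range and skips frames flagged as flashes. A minimal standalone sketch of a filter that skips flagged frames and renormalizes by the weights it actually used (illustrative names, not the encoder's data structures):

#include <stdio.h>

#define FILT_LEN 7
#define HALF_LEN (FILT_LEN / 2)

/* Same shape as the smooth_filt[] table in the patch. */
static const double kFilt[FILT_LEN] = { 0.006, 0.061, 0.242, 0.383,
                                        0.242, 0.061, 0.006 };

/* Weighted smoothing that ignores entries marked as flashes. */
static void smooth_skip_flashes(const double *err, const int *is_flash,
                                int start, int last, double *out) {
  for (int i = start; i <= last; i++) {
    double acc = 0.0, total_wt = 0.0;
    for (int j = -HALF_LEN; j <= HALF_LEN; j++) {
      int idx = i + j;
      if (idx < start) idx = start; /* clamp at the range edges */
      if (idx > last) idx = last;
      if (is_flash[idx]) continue;  /* flash frames are unreliable */
      acc += kFilt[j + HALF_LEN] * err[idx];
      total_wt += kFilt[j + HALF_LEN];
    }
    out[i] = total_wt > 0.0 ? acc / total_wt : err[i];
  }
}

int main(void) {
  double err[10] = { 1, 1, 1, 9, 1, 1, 1, 1, 1, 1 };
  int flash[10] = { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 };
  double out[10];
  smooth_skip_flashes(err, flash, 0, 9, out);
  for (int i = 0; i < 10; i++) printf("%.3f ", out[i]);
  printf("\n");
  return 0;
}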
@@ -1070,7 +1146,7 @@ static void get_gradient(const double *values, int start, int last,
}
static int find_next_scenecut(const FIRSTPASS_STATS *const stats_start,
- int first, int last, int *ignore) {
+ int first, int last) {
// Identify unstable areas caused by scenecuts.
// Find the max and 2nd max coded error, and the average of the rest frames.
// If there is only one frame that yields a huge coded error, it is likely a
@@ -1081,14 +1157,16 @@ static int find_next_scenecut(const FIRSTPASS_STATS *const stats_start,
if (last - first == 0) return -1;
for (int i = first; i <= last; i++) {
- if (ignore[i] || (i > 0 && ignore[i - 1])) continue;
+ if (stats_start[i].is_flash || (i > 0 && stats_start[i - 1].is_flash))
+ continue;
double temp_intra = AOMMAX(stats_start[i].intra_error, 0.01);
this_ratio = stats_start[i].coded_error / temp_intra;
// find the avg ratio in the preceding neighborhood
max_prev_ratio = 0;
max_prev_coded = 0;
for (int j = AOMMAX(first, i - HALF_WIN); j < i; j++) {
- if (ignore[j] || (j > 0 && ignore[j - 1])) continue;
+ if (stats_start[j].is_flash || (j > 0 && stats_start[j - 1].is_flash))
+ continue;
temp_intra = AOMMAX(stats_start[j].intra_error, 0.01);
double temp_ratio = stats_start[j].coded_error / temp_intra;
if (temp_ratio > max_prev_ratio) {
@@ -1102,7 +1180,8 @@ static int find_next_scenecut(const FIRSTPASS_STATS *const stats_start,
max_next_ratio = 0;
max_next_coded = 0;
for (int j = i + 1; j <= AOMMIN(i + HALF_WIN, last); j++) {
- if (ignore[j] || (j > 0 && ignore[j - 1])) continue;
+ if (stats_start[j].is_flash || (j > 0 && stats_start[j - 1].is_flash))
+ continue;
temp_intra = AOMMAX(stats_start[j].intra_error, 0.01);
double temp_ratio = stats_start[j].coded_error / temp_intra;
if (temp_ratio > max_next_ratio) {
@@ -1135,19 +1214,6 @@ static int find_next_scenecut(const FIRSTPASS_STATS *const stats_start,
return -1;
}
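The detector above works on the per-frame ratio of coded error to intra error: a single frame whose ratio towers over both its preceding and following neighborhoods, with flash frames excluded from the comparison, is treated as a likely scenecut. A tiny sketch of just that ratio signal (made-up values, not encoder data):

#include <math.h>
#include <stdio.h>

/* Per-frame "instability" signal used when hunting for scenecuts:
 * how much of the frame's energy inter prediction failed to explain. */
static double coded_to_intra_ratio(double coded_error, double intra_error) {
  const double safe_intra = fmax(intra_error, 0.01); /* avoid divide-by-zero */
  return coded_error / safe_intra;
}

int main(void) {
  /* A lone spike in this ratio, with quiet neighborhoods on both sides,
   * is the pattern treated as a likely scenecut. */
  printf("steady: %.2f  cut: %.2f\n",
         coded_to_intra_ratio(5.0, 100.0), coded_to_intra_ratio(95.0, 100.0));
  return 0;
}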
-static void mark_flashes(const FIRSTPASS_STATS *stats, int start_idx,
- int last_idx, int *is_flash) {
- int i;
- for (i = start_idx; i < last_idx; i++) {
- if (stats[i + 1].pcnt_second_ref > stats[i + 1].pcnt_inter &&
- stats[i + 1].pcnt_second_ref >= 0.5) {
- // this is a new flash frame
- is_flash[i] = 1;
- continue;
- }
- }
-}
-
// Remove the region with index next_region.
// parameter merge: 0: merge with previous; 1: merge with next; 2:
// merge with both, take type from previous if possible
@@ -1220,46 +1286,10 @@ static void insert_region(int start, int last, REGION_TYPES type,
*cur_region_idx = k;
}
-// Estimate the noise variance of each frame from the first pass stats
-static void estimate_region_noise(const FIRSTPASS_STATS *stats,
- const int *is_flash, REGIONS *region) {
- double C1, C2, C3, noise;
- int count = 0;
- region->avg_noise_var = -1;
- for (int i = region->start + 2; i <= region->last; i++) {
- if (is_flash[i] || is_flash[i - 1] || is_flash[i - 2]) continue;
-
- C1 = stats[i - 1].intra_error *
- (stats[i].intra_error - stats[i].coded_error);
- C2 = stats[i - 2].intra_error *
- (stats[i - 1].intra_error - stats[i - 1].coded_error);
- C3 = stats[i - 2].intra_error *
- (stats[i].intra_error - stats[i].sr_coded_error);
- if (C1 <= 0 || C2 <= 0 || C3 <= 0) continue;
- C1 = sqrt(C1);
- C2 = sqrt(C2);
- C3 = sqrt(C3);
-
- noise = stats[i - 1].intra_error - C1 * C2 / C3;
- noise = AOMMAX(noise, 0.01);
- region->avg_noise_var = (region->avg_noise_var == -1)
- ? noise
- : AOMMIN(noise, region->avg_noise_var);
- count++;
- }
- if (count == 0) {
- region->avg_noise_var = 0;
- }
-}
-
-// Analyze the correlation coefficient of each frame with its previous frame in
-// a region. Also get the average of stats inside a region.
-// Before calling this function, the region's noise variance is needed.
-static void analyze_region(const FIRSTPASS_STATS *stats, int region_idx,
- REGIONS *regions, double *coeff) {
- double cor_coeff;
-
- int i, k = region_idx;
+// Get the average of stats inside a region.
+static void analyze_region(const FIRSTPASS_STATS *stats, int k,
+ REGIONS *regions) {
+ int i;
regions[k].avg_cor_coeff = 0;
regions[k].avg_sr_fr_ratio = 0;
regions[k].avg_intra_err = 0;
@@ -1268,12 +1298,6 @@ static void analyze_region(const FIRSTPASS_STATS *stats, int region_idx,
int check_first_sr = (k != 0);
for (i = regions[k].start; i <= regions[k].last; i++) {
- double C = sqrt(AOMMAX(stats[i - 1].intra_error *
- (stats[i].intra_error - stats[i].coded_error),
- 0.001));
- cor_coeff =
- C / AOMMAX(stats[i - 1].intra_error - regions[k].avg_noise_var, 0.001);
-
if (i > regions[k].start || check_first_sr) {
double num_frames =
(double)(regions[k].last - regions[k].start + check_first_sr);
@@ -1289,85 +1313,27 @@ static void analyze_region(const FIRSTPASS_STATS *stats, int region_idx,
regions[k].avg_coded_err +=
stats[i].coded_error / (double)(regions[k].last - regions[k].start + 1);
- coeff[i] =
- cor_coeff *
- sqrt(
- AOMMAX(stats[i - 1].intra_error - regions[k].avg_noise_var, 0.001) /
- AOMMAX(stats[i].intra_error - regions[k].avg_noise_var, 0.001));
- // clip correlation coefficient.
- coeff[i] = AOMMIN(AOMMAX(coeff[i], 0), 1);
-
regions[k].avg_cor_coeff +=
- coeff[i] / (double)(regions[k].last - regions[k].start + 1);
+ AOMMAX(stats[i].cor_coeff, 0.001) /
+ (double)(regions[k].last - regions[k].start + 1);
+ regions[k].avg_noise_var +=
+ AOMMAX(stats[i].noise_var, 0.001) /
+ (double)(regions[k].last - regions[k].start + 1);
}
}
-// Calculate the regions stats of every region. Uses the stable regions to
-// estimate noise variance of other regions. Then call analyze_region for each.
-static void get_region_stats(const FIRSTPASS_STATS *stats, const int *is_flash,
- REGIONS *regions, double *coeff, int num_regions) {
- int k, count_stable = 0;
- // Analyze stable regions.
- for (k = 0; k < num_regions; k++) {
- if (regions[k].type == STABLE_REGION) {
- estimate_region_noise(stats, is_flash, regions + k);
- analyze_region(stats, k, regions, coeff);
- count_stable++;
- }
- }
-
- if (count_stable == 0) {
- // no stable region, just use the lowest noise variance estimated.
- double lowest_noise = -1;
- for (k = 0; k < num_regions; k++) {
- if (regions[k].type == SCENECUT_REGION) continue;
- estimate_region_noise(stats, is_flash, regions + k);
- if (regions[k].avg_noise_var < 0.01) continue;
- if (lowest_noise < 0 || lowest_noise > regions[k].avg_noise_var) {
- lowest_noise = regions[k].avg_noise_var;
- }
- }
- lowest_noise = AOMMAX(lowest_noise, 0);
- for (k = 0; k < num_regions; k++) {
- regions[k].avg_noise_var = lowest_noise;
- analyze_region(stats, k, regions, coeff);
- }
- return;
- }
-
- // Analyze other regions
- for (k = 0; k < num_regions; k++) {
- if (regions[k].type != STABLE_REGION) {
- // use the average of the nearest previous and next stable regions
- int count = 0;
- regions[k].avg_noise_var = 0;
- for (int r = k - 1; r >= 0; r--) {
- if (regions[r].type == STABLE_REGION) {
- count++;
- regions[k].avg_noise_var += regions[r].avg_noise_var;
- break;
- }
- }
- for (int r = k + 1; r < num_regions; r++) {
- if (regions[r].type == STABLE_REGION) {
- count++;
- regions[k].avg_noise_var += regions[r].avg_noise_var;
- break;
- }
- }
- if (count) {
- regions[k].avg_noise_var /= (double)count;
- }
- analyze_region(stats, k, regions, coeff);
- }
+// Calculate the regions stats of every region.
+static void get_region_stats(const FIRSTPASS_STATS *stats, REGIONS *regions,
+ int num_regions) {
+ for (int k = 0; k < num_regions; k++) {
+ analyze_region(stats, k, regions);
}
}
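With cor_coeff and noise_var now carried per frame in FIRSTPASS_STATS, the region statistics reduce to clamped per-frame averages, which is what allows the old estimate_region_noise() pass to be dropped. A standalone sketch of that averaging under an assumed minimal frame record:

#include <stdio.h>

/* Illustrative per-frame record; the real FIRSTPASS_STATS now carries
 * cor_coeff and noise_var directly. */
typedef struct {
  double cor_coeff;
  double noise_var;
} FrameStat;

/* Average the clamped per-frame values over [start, last]. */
static void region_averages(const FrameStat *s, int start, int last,
                            double *avg_coeff, double *avg_noise) {
  const double n = (double)(last - start + 1);
  *avg_coeff = 0.0;
  *avg_noise = 0.0;
  for (int i = start; i <= last; i++) {
    *avg_coeff += (s[i].cor_coeff > 0.001 ? s[i].cor_coeff : 0.001) / n;
    *avg_noise += (s[i].noise_var > 0.001 ? s[i].noise_var : 0.001) / n;
  }
}

int main(void) {
  FrameStat s[4] = { { 0.99, 2.0 }, { 0.98, 2.5 }, { 0.0, 3.0 }, { 0.97, 2.0 } };
  double c, nv;
  region_averages(s, 0, 3, &c, &nv);
  printf("avg_cor_coeff=%.4f avg_noise_var=%.4f\n", c, nv);
  return 0;
}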
// Find tentative stable regions
static int find_stable_regions(const FIRSTPASS_STATS *stats,
- const double *grad_coded, const int *ignore,
- int this_start, int this_last,
- REGIONS *regions) {
+ const double *grad_coded, int this_start,
+ int this_last, REGIONS *regions) {
int i, j, k = 0;
regions[k].start = this_start;
for (i = this_start; i <= this_last; i++) {
@@ -1377,7 +1343,7 @@ static int find_stable_regions(const FIRSTPASS_STATS *stats,
int count = 0;
for (j = -HALF_WIN; j <= HALF_WIN; j++) {
int idx = AOMMIN(AOMMAX(i + j, this_start), this_last);
- if (ignore[idx] || (idx > 0 && ignore[idx - 1])) continue;
+ if (stats[idx].is_flash || (idx > 0 && stats[idx - 1].is_flash)) continue;
mean_intra += stats[idx].intra_error;
var_intra += stats[idx].intra_error * stats[idx].intra_error;
mean_coded += stats[idx].coded_error;
@@ -1451,15 +1417,13 @@ static void remove_short_regions(REGIONS *regions, int *num_regions,
}
static void adjust_unstable_region_bounds(const FIRSTPASS_STATS *stats,
- const int *is_flash,
- const double *grad, REGIONS *regions,
- double *coeff, int *num_regions) {
+ REGIONS *regions, int *num_regions) {
int i, j, k;
// Remove regions that are too short. Likely noise.
remove_short_regions(regions, num_regions, STABLE_REGION, HALF_WIN);
remove_short_regions(regions, num_regions, HIGH_VAR_REGION, HALF_WIN);
- get_region_stats(stats, is_flash, regions, coeff, *num_regions);
+ get_region_stats(stats, regions, *num_regions);
// Adjust region boundaries. The thresholds are empirically obtained, but
// overall the performance is not very sensitive to small changes to them.
@@ -1469,34 +1433,24 @@ static void adjust_unstable_region_bounds(const FIRSTPASS_STATS *stats,
// Adjust previous boundary.
// First find the average intra/coded error in the previous
// neighborhood.
- double avg_intra_err = 0, avg_coded_err = 0, avg_coeff = 0;
- int starti = AOMMAX(regions[k - 1].last - WINDOW_SIZE + 1,
- regions[k - 1].start + 1);
- int lasti = regions[k - 1].last;
+ double avg_intra_err = 0;
+ const int starti = AOMMAX(regions[k - 1].last - WINDOW_SIZE + 1,
+ regions[k - 1].start + 1);
+ const int lasti = regions[k - 1].last;
int counti = 0;
for (i = starti; i <= lasti; i++) {
avg_intra_err += stats[i].intra_error;
- avg_coded_err += stats[i].coded_error;
- avg_coeff += coeff[i];
counti++;
}
if (counti > 0) {
avg_intra_err = AOMMAX(avg_intra_err / (double)counti, 0.001);
- avg_coded_err /= AOMMAX(avg_coded_err / (double)counti, 0.001);
- avg_coeff /= AOMMIN(avg_intra_err / (double)counti, 0.99999);
int count_coded = 0, count_grad = 0;
for (j = lasti + 1; j <= regions[k].last; j++) {
- int intra_close =
+ const int intra_close =
fabs(stats[j].intra_error - avg_intra_err) / avg_intra_err < 0.1;
- int coded_close =
- fabs(stats[j].coded_error - avg_coded_err) / avg_coded_err < 0.15;
- int grad_small = fabs(grad[j]) / avg_coded_err < 0.05;
- int coded_small = stats[j].coded_error / avg_intra_err < 0.03;
- int coeff_close =
- (1 - coeff[j]) / (1 - avg_coeff) < 1.5 || coeff[j] > 0.995;
- if (!coeff_close || (!coded_close && !coded_small)) count_coded--;
- if (!grad_small && !coded_small) count_grad--;
-
+ const int coded_small = stats[j].coded_error / avg_intra_err < 0.1;
+ const int coeff_close = stats[j].cor_coeff > 0.995;
+ if (!coeff_close || !coded_small) count_coded--;
if (intra_close && count_coded >= 0 && count_grad >= 0) {
// this frame probably belongs to the previous stable region
regions[k - 1].last = j;
@@ -1510,35 +1464,26 @@ static void adjust_unstable_region_bounds(const FIRSTPASS_STATS *stats,
if (k < *num_regions - 1) {
// Adjust next boundary.
// First find the average intra/coded error in the next neighborhood.
- double avg_intra_err = 0, avg_coded_err = 0, avg_coeff = 0;
- int starti = regions[k + 1].start;
- int lasti = AOMMIN(regions[k + 1].last - 1,
- regions[k + 1].start + WINDOW_SIZE - 1);
+ double avg_intra_err = 0;
+ const int starti = regions[k + 1].start;
+ const int lasti = AOMMIN(regions[k + 1].last - 1,
+ regions[k + 1].start + WINDOW_SIZE - 1);
int counti = 0;
for (i = starti; i <= lasti; i++) {
avg_intra_err += stats[i].intra_error;
- avg_coded_err += stats[i + 1].coded_error;
- avg_coeff += coeff[i];
counti++;
}
if (counti > 0) {
avg_intra_err = AOMMAX(avg_intra_err / (double)counti, 0.001);
- avg_coded_err /= AOMMAX(avg_coded_err / (double)counti, 0.001);
- avg_coeff /= AOMMIN(avg_intra_err / (double)counti, 0.99999);
// At the boundary, the coded error is large, but the frame is still stable
int count_coded = 1, count_grad = 1;
for (j = starti - 1; j >= regions[k].start; j--) {
- int intra_close =
+ const int intra_close =
fabs(stats[j].intra_error - avg_intra_err) / avg_intra_err < 0.1;
- int coded_close =
- fabs(stats[j + 1].coded_error - avg_coded_err) / avg_coded_err <
- 0.15;
- int grad_small = fabs(grad[j + 1]) / avg_coded_err < 0.05;
- int coded_small = stats[j + 1].coded_error / avg_intra_err < 0.03;
- int coeff_close =
- (1 - coeff[j + 1]) / (1 - avg_coeff) < 1.5 || coeff[j] > 0.995;
- if (!coeff_close || (!coded_close && !coded_small)) count_coded--;
- if (!grad_small && !coded_small) count_grad--;
+ const int coded_small =
+ stats[j + 1].coded_error / avg_intra_err < 0.1;
+ const int coeff_close = stats[j].cor_coeff > 0.995;
+ if (!coeff_close || !coded_small) count_coded--;
if (intra_close && count_coded >= 0 && count_grad >= 0) {
// this frame probably belongs to the next stable region
regions[k + 1].start = j;
@@ -1553,7 +1498,7 @@ static void adjust_unstable_region_bounds(const FIRSTPASS_STATS *stats,
cleanup_regions(regions, num_regions);
remove_short_regions(regions, num_regions, HIGH_VAR_REGION, HALF_WIN);
- get_region_stats(stats, is_flash, regions, coeff, *num_regions);
+ get_region_stats(stats, regions, *num_regions);
// If a stable region has higher error than neighboring high var regions,
// or if the stable region has a lower average correlation,
@@ -1561,25 +1506,31 @@ static void adjust_unstable_region_bounds(const FIRSTPASS_STATS *stats,
k = 0;
while (k < *num_regions && (*num_regions) > 1) {
if (regions[k].type == STABLE_REGION &&
+ (regions[k].last - regions[k].start + 1) < 2 * WINDOW_SIZE &&
((k > 0 && // previous regions
- (regions[k].avg_coded_err > regions[k - 1].avg_coded_err ||
- regions[k].avg_cor_coeff < regions[k - 1].avg_cor_coeff)) &&
+ (regions[k].avg_coded_err > regions[k - 1].avg_coded_err * 1.01 ||
+ regions[k].avg_cor_coeff < regions[k - 1].avg_cor_coeff * 0.999)) &&
(k < *num_regions - 1 && // next region
- (regions[k].avg_coded_err > regions[k + 1].avg_coded_err ||
- regions[k].avg_cor_coeff < regions[k + 1].avg_cor_coeff)))) {
+ (regions[k].avg_coded_err > regions[k + 1].avg_coded_err * 1.01 ||
+ regions[k].avg_cor_coeff < regions[k + 1].avg_cor_coeff * 0.999)))) {
// merge current region with the previous and next regions
remove_region(2, regions, num_regions, &k);
- analyze_region(stats, k - 1, regions, coeff);
+ analyze_region(stats, k - 1, regions);
} else if (regions[k].type == HIGH_VAR_REGION &&
+ (regions[k].last - regions[k].start + 1) < 2 * WINDOW_SIZE &&
((k > 0 && // previous regions
- (regions[k].avg_coded_err < regions[k - 1].avg_coded_err ||
- regions[k].avg_cor_coeff > regions[k - 1].avg_cor_coeff)) &&
+ (regions[k].avg_coded_err <
+ regions[k - 1].avg_coded_err * 0.99 ||
+ regions[k].avg_cor_coeff >
+ regions[k - 1].avg_cor_coeff * 1.001)) &&
(k < *num_regions - 1 && // next region
- (regions[k].avg_coded_err < regions[k + 1].avg_coded_err ||
- regions[k].avg_cor_coeff > regions[k + 1].avg_cor_coeff)))) {
+ (regions[k].avg_coded_err <
+ regions[k + 1].avg_coded_err * 0.99 ||
+ regions[k].avg_cor_coeff >
+ regions[k + 1].avg_cor_coeff * 1.001)))) {
// merge current region with the previous and next regions
remove_region(2, regions, num_regions, &k);
- analyze_region(stats, k - 1, regions, coeff);
+ analyze_region(stats, k - 1, regions);
} else {
k++;
}
@@ -1591,8 +1542,7 @@ static void adjust_unstable_region_bounds(const FIRSTPASS_STATS *stats,
// Identify blending regions.
static void find_blending_regions(const FIRSTPASS_STATS *stats,
- const int *is_flash, REGIONS *regions,
- int *num_regions, double *coeff) {
+ REGIONS *regions, int *num_regions) {
int i, k = 0;
// Blending regions will have large content change, therefore will have a
// large consistent change in intra error.
@@ -1607,7 +1557,8 @@ static void find_blending_regions(const FIRSTPASS_STATS *stats,
int start = 0, last;
for (i = regions[k].start; i <= regions[k].last; i++) {
// First mark the regions that have a consistently large change of intra error.
- if (is_flash[i] || (i > 0 && is_flash[i - 1])) continue;
+ if (k == 0 && i == regions[k].start) continue;
+ if (stats[i].is_flash || (i > 0 && stats[i - 1].is_flash)) continue;
double grad = stats[i].intra_error - stats[i - 1].intra_error;
int large_change = fabs(grad) / AOMMAX(stats[i].intra_error, 0.01) > 0.05;
int this_dir = 0;
@@ -1622,7 +1573,11 @@ static void find_blending_regions(const FIRSTPASS_STATS *stats,
insert_region(start, last, BLENDING_REGION, regions, num_regions, &k);
}
dir = this_dir;
- start = i;
+ if (k == 0 && i == regions[k].start + 1) {
+ start = i - 1;
+ } else {
+ start = i;
+ }
}
if (dir != 0) {
last = regions[k].last;
@@ -1633,14 +1588,14 @@ static void find_blending_regions(const FIRSTPASS_STATS *stats,
// If the blending region has very low correlation, mark it as high variance
// since we probably cannot benefit from it anyways.
- get_region_stats(stats, is_flash, regions, coeff, *num_regions);
+ get_region_stats(stats, regions, *num_regions);
for (k = 0; k < *num_regions; k++) {
if (regions[k].type != BLENDING_REGION) continue;
if (regions[k].last == regions[k].start || regions[k].avg_cor_coeff < 0.6 ||
count_stable == 0)
regions[k].type = HIGH_VAR_REGION;
}
- get_region_stats(stats, is_flash, regions, coeff, *num_regions);
+ get_region_stats(stats, regions, *num_regions);
// It is possible for blending to result in a "dip" in intra error (first
// decrease then increase). Therefore we need to find the dip and combine the
@@ -1669,7 +1624,7 @@ static void find_blending_regions(const FIRSTPASS_STATS *stats,
if (regions[k].avg_sr_fr_ratio > ratio_thres) {
regions[k].type = BLENDING_REGION;
remove_region(2, regions, num_regions, &k);
- analyze_region(stats, k - 1, regions, coeff);
+ analyze_region(stats, k - 1, regions);
continue;
}
}
@@ -1727,7 +1682,7 @@ static void find_blending_regions(const FIRSTPASS_STATS *stats,
if (to_merge) {
remove_region(0, regions, num_regions, &k);
- analyze_region(stats, k - 1, regions, coeff);
+ analyze_region(stats, k - 1, regions);
continue;
} else {
// These are possibly two separate blending regions. Mark the boundary
@@ -1735,9 +1690,9 @@ static void find_blending_regions(const FIRSTPASS_STATS *stats,
int prev_k = k - 1;
insert_region(regions[prev_k].last, regions[prev_k].last,
HIGH_VAR_REGION, regions, num_regions, &prev_k);
- analyze_region(stats, prev_k, regions, coeff);
+ analyze_region(stats, prev_k, regions);
k = prev_k + 1;
- analyze_region(stats, k, regions, coeff);
+ analyze_region(stats, k, regions);
}
}
k++;
@@ -1793,16 +1748,13 @@ static void cleanup_blendings(REGIONS *regions, int *num_regions) {
// pointing to.
static void identify_regions(const FIRSTPASS_STATS *const stats_start,
int total_frames, int offset, REGIONS *regions,
- int *total_regions, double *cor_coeff) {
+ int *total_regions) {
int k;
if (total_frames <= 1) return;
- double *coeff = cor_coeff + offset;
-
// store the initial decisions
REGIONS temp_regions[MAX_FIRSTPASS_ANALYSIS_FRAMES];
av1_zero_array(temp_regions, MAX_FIRSTPASS_ANALYSIS_FRAMES);
- int is_flash[MAX_FIRSTPASS_ANALYSIS_FRAMES] = { 0 };
// buffers for filtered stats
double filt_intra_err[MAX_FIRSTPASS_ANALYSIS_FRAMES] = { 0 };
double filt_coded_err[MAX_FIRSTPASS_ANALYSIS_FRAMES] = { 0 };
@@ -1810,32 +1762,28 @@ static void identify_regions(const FIRSTPASS_STATS *const stats_start,
int cur_region = 0, this_start = 0, this_last;
- // find possible flash frames
- mark_flashes(stats_start, 0, total_frames - 1, is_flash);
-
- // first get the obvious scenecuts
int next_scenecut = -1;
-
do {
+ // first get the obvious scenecuts
next_scenecut =
- find_next_scenecut(stats_start, this_start, total_frames - 1, is_flash);
+ find_next_scenecut(stats_start, this_start, total_frames - 1);
this_last = (next_scenecut >= 0) ? (next_scenecut - 1) : total_frames - 1;
+
// low-pass filter the needed stats
- smooth_filter_stats(stats_start, is_flash, this_start, this_last,
- filt_intra_err, filt_coded_err);
+ smooth_filter_stats(stats_start, this_start, this_last, filt_intra_err,
+ filt_coded_err);
get_gradient(filt_coded_err, this_start, this_last, grad_coded);
// find tentative stable regions and unstable regions
- int num_regions = find_stable_regions(stats_start, grad_coded, is_flash,
- this_start, this_last, temp_regions);
- adjust_unstable_region_bounds(stats_start, is_flash, grad_coded,
- temp_regions, coeff, &num_regions);
+ int num_regions = find_stable_regions(stats_start, grad_coded, this_start,
+ this_last, temp_regions);
- get_region_stats(stats_start, is_flash, temp_regions, coeff, num_regions);
+ adjust_unstable_region_bounds(stats_start, temp_regions, &num_regions);
+
+ get_region_stats(stats_start, temp_regions, num_regions);
// Try to identify blending regions in the unstable regions
- find_blending_regions(stats_start, is_flash, temp_regions, &num_regions,
- coeff);
+ find_blending_regions(stats_start, temp_regions, &num_regions);
cleanup_blendings(temp_regions, &num_regions);
// The flash points should all be considered high variance points
@@ -1848,7 +1796,7 @@ static void identify_regions(const FIRSTPASS_STATS *const stats_start,
int start = temp_regions[k].start;
int last = temp_regions[k].last;
for (int i = start; i <= last; i++) {
- if (is_flash[i]) {
+ if (stats_start[i].is_flash) {
insert_region(i, i, HIGH_VAR_REGION, temp_regions, &num_regions, &k);
}
}
@@ -1858,6 +1806,11 @@ static void identify_regions(const FIRSTPASS_STATS *const stats_start,
// copy the regions in the scenecut group
for (k = 0; k < num_regions; k++) {
+ if (temp_regions[k].last < temp_regions[k].start &&
+ k == num_regions - 1) {
+ num_regions--;
+ break;
+ }
regions[k + cur_region] = temp_regions[k];
}
cur_region += num_regions;
@@ -1874,17 +1827,21 @@ static void identify_regions(const FIRSTPASS_STATS *const stats_start,
} while (next_scenecut >= 0);
*total_regions = cur_region;
- get_region_stats(stats_start, is_flash, regions, coeff, *total_regions);
+ get_region_stats(stats_start, regions, *total_regions);
for (k = 0; k < *total_regions; k++) {
// If scenecuts are very minor, mark them as high variance.
- if (regions[k].type != SCENECUT_REGION || regions[k].avg_cor_coeff < 0.8) {
+ if (regions[k].type != SCENECUT_REGION ||
+ regions[k].avg_cor_coeff *
+ (1 - stats_start[regions[k].start].noise_var /
+ regions[k].avg_intra_err) <
+ 0.8) {
continue;
}
regions[k].type = HIGH_VAR_REGION;
}
cleanup_regions(regions, total_regions);
- get_region_stats(stats_start, is_flash, regions, coeff, *total_regions);
+ get_region_stats(stats_start, regions, *total_regions);
for (k = 0; k < *total_regions; k++) {
regions[k].start += offset;
@@ -1911,16 +1868,17 @@ static int find_regions_index(const REGIONS *regions, int num_regions,
* \param[in] max_gop_length Maximum length of the GF group
* \param[in] max_intervals Maximum number of intervals to decide
*
- * \return Nothing is returned. Instead, cpi->rc.gf_intervals is
+ * \return Nothing is returned. Instead, cpi->ppi->p_rc.gf_intervals is
* changed to store the decided GF group lengths.
*/
static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
int max_intervals) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
FIRSTPASS_STATS next_frame;
const FIRSTPASS_STATS *const start_pos = twopass->stats_in;
- FRAME_INFO *frame_info = &cpi->frame_info;
+ const FIRSTPASS_STATS *const stats = start_pos - (rc->frames_since_key == 0);
int i;
int flash_detected;
@@ -1930,9 +1888,9 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
if (has_no_stats_stage(cpi)) {
for (i = 0; i < MAX_NUM_GF_INTERVALS; i++) {
- rc->gf_intervals[i] = AOMMIN(rc->max_gf_interval, max_gop_length);
+ p_rc->gf_intervals[i] = AOMMIN(rc->max_gf_interval, max_gop_length);
}
- rc->cur_gf_index = 0;
+ p_rc->cur_gf_index = 0;
rc->intervals_till_gf_calculate_due = MAX_NUM_GF_INTERVALS;
return;
}
@@ -1944,17 +1902,17 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
const int min_shrink_int = AOMMAX(MIN_SHRINK_LEN, active_min_gf_interval);
i = (rc->frames_since_key == 0);
- max_intervals = cpi->lap_enabled ? 1 : max_intervals;
+ max_intervals = cpi->ppi->lap_enabled ? 1 : max_intervals;
int count_cuts = 1;
// If cpi->gf_state.arf_gf_boost_lst is 0, we are starting with a KF or GF.
- int cur_start = -1 + !cpi->gf_state.arf_gf_boost_lst, cur_last;
+ int cur_start = -1 + !cpi->ppi->gf_state.arf_gf_boost_lst, cur_last;
int cut_pos[MAX_NUM_GF_INTERVALS + 1] = { -1 };
int cut_here;
GF_GROUP_STATS gf_stats;
init_gf_stats(&gf_stats);
while (count_cuts < max_intervals + 1) {
// reaches next key frame, break here
- if (i >= rc->frames_to_key + rc->next_is_fwd_key) {
+ if (i >= rc->frames_to_key + p_rc->next_is_fwd_key) {
cut_here = 2;
} else if (i - cur_start >= rc->static_scene_max_gf_interval) {
// reached maximum len, but nothing special yet (almost static)
@@ -1969,7 +1927,7 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
flash_detected = detect_flash(twopass, 0);
// TODO(bohanli): remove redundant accumulations here, or unify
// this and the ones in define_gf_group
- accumulate_next_frame_stats(&next_frame, frame_info, flash_detected,
+ accumulate_next_frame_stats(&next_frame, flash_detected,
rc->frames_since_key, i, &gf_stats);
cut_here = detect_gf_cut(cpi, i, cur_start, flash_detected,
@@ -1981,10 +1939,10 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
int ori_last = cur_last;
// The region frame idx does not start from the same frame as cur_start
// and cur_last. Need to offset them.
- int offset = rc->frames_since_key - rc->regions_offset;
- REGIONS *regions = rc->regions;
- int num_regions = rc->num_regions;
- if (cpi->oxcf.kf_cfg.fwd_kf_enabled && rc->next_is_fwd_key) {
+ int offset = rc->frames_since_key - p_rc->regions_offset;
+ REGIONS *regions = p_rc->regions;
+ int num_regions = p_rc->num_regions;
+ if (cpi->oxcf.kf_cfg.fwd_kf_enabled && p_rc->next_is_fwd_key) {
const int frames_left = rc->frames_to_key - i;
const int min_int = AOMMIN(MIN_FWD_KF_INTERVAL, active_min_gf_interval);
if (frames_left < min_int && frames_left > 0) {
@@ -2021,7 +1979,11 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
// If we have a scenecut, then stop at it.
// TODO(bohanli): add logic here to stop before the scenecut and to start
// the next gop from the scenecut with a GF
- int is_minor_sc = (regions[scenecut_idx].avg_cor_coeff > 0.6);
+ int is_minor_sc =
+ (regions[scenecut_idx].avg_cor_coeff *
+ (1 - stats[regions[scenecut_idx].start - offset].noise_var /
+ regions[scenecut_idx].avg_intra_err) >
+ 0.6);
cur_last = regions[scenecut_idx].last - offset - !is_minor_sc;
} else {
int is_last_analysed = (k_last == num_regions - 1) &&
@@ -2032,45 +1994,91 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
// if we are very close to the end, then do not shrink since it may
// introduce intervals that are too short
if (!(is_last_analysed && not_enough_regions)) {
- int found = 0;
- // first try to end at a stable area
- for (int j = cur_last; j >= cur_start + min_shrink_int; j--) {
- if (regions[find_regions_index(regions, num_regions, j + offset)]
- .type == STABLE_REGION) {
- cur_last = j;
- found = 1;
- break;
- }
+ const double arf_length_factor = 0.1;
+ double best_score = 0;
+ int best_j = -1;
+ const int first_frame = regions[0].start - offset;
+ const int last_frame = regions[num_regions - 1].last - offset;
+ // score of how much the arf helps the whole GOP
+ double base_score = 0.0;
+ // Accumulate base_score over the frames before the earliest allowed cut.
+ for (int j = cur_start + 1; j < cur_start + min_shrink_int; j++) {
+ if (stats + j >= twopass->stats_buf_ctx->stats_in_end) break;
+ base_score = (base_score + 1.0) * stats[j].cor_coeff;
}
- if (!found) {
- // Could not find stable point,
- // try to find an OK point (high correlation, not blending)
- for (int j = cur_last; j >= cur_start + min_shrink_int; j--) {
- REGIONS *cur_region =
- regions +
- find_regions_index(regions, num_regions, j + offset);
- double avg_coeff = cur_region->avg_cor_coeff;
- if (rc->cor_coeff[j + offset] > avg_coeff &&
- cur_region->type != BLENDING_REGION) {
- cur_last = j;
- found = 1;
+ int met_blending = 0; // Whether we have met blending areas before
+ int last_blending = 0; // Whether the previous frame is blending
+ for (int j = cur_start + min_shrink_int; j <= cur_last; j++) {
+ if (stats + j >= twopass->stats_buf_ctx->stats_in_end) break;
+ base_score = (base_score + 1.0) * stats[j].cor_coeff;
+ int this_reg =
+ find_regions_index(regions, num_regions, j + offset);
+ if (this_reg < 0) continue;
+ // A GOP should include at most 1 blending region.
+ if (regions[this_reg].type == BLENDING_REGION) {
+ last_blending = 1;
+ if (met_blending) {
break;
+ } else {
+ base_score = 0;
+ continue;
}
+ } else {
+ if (last_blending) met_blending = 1;
+ last_blending = 0;
+ }
+
+ // Add the factor of how good the neighborhood is for this
+ // candidate arf.
+ double this_score = arf_length_factor * base_score;
+ double temp_accu_coeff = 1.0;
+ // following frames
+ int count_f = 0;
+ for (int n = j + 1; n <= j + 3 && n <= last_frame; n++) {
+ if (stats + n >= twopass->stats_buf_ctx->stats_in_end) break;
+ temp_accu_coeff *= stats[n].cor_coeff;
+ this_score +=
+ temp_accu_coeff *
+ (1 - stats[n].noise_var /
+ AOMMAX(regions[this_reg].avg_intra_err, 0.001));
+ count_f++;
+ }
+ // preceding frames
+ temp_accu_coeff = 1.0;
+ for (int n = j; n > j - 3 * 2 + count_f && n > first_frame; n--) {
+ if (stats + n < twopass->stats_buf_ctx->stats_in_start) break;
+ temp_accu_coeff *= stats[n].cor_coeff;
+ this_score +=
+ temp_accu_coeff *
+ (1 - stats[n].noise_var /
+ AOMMAX(regions[this_reg].avg_intra_err, 0.001));
+ }
+
+ if (this_score > best_score) {
+ best_score = this_score;
+ best_j = j;
}
}
- if (!found) {
- // Could not find a better point,
- // try not to cut in blending areas
- for (int j = cur_last; j >= cur_start + min_shrink_int; j--) {
- REGIONS *cur_region =
- regions +
- find_regions_index(regions, num_regions, j + offset);
- if (cur_region->type != BLENDING_REGION) {
- cur_last = j;
- break;
+
+ // For blending areas, move one more frame in case we missed the
+ // first blending frame.
+ int best_reg =
+ find_regions_index(regions, num_regions, best_j + offset);
+ if (best_reg < num_regions - 1 && best_reg > 0) {
+ if (regions[best_reg - 1].type == BLENDING_REGION &&
+ regions[best_reg + 1].type == BLENDING_REGION) {
+ if (best_j + offset == regions[best_reg].start &&
+ best_j + offset < regions[best_reg].last) {
+ best_j += 1;
+ } else if (best_j + offset == regions[best_reg].last &&
+ best_j + offset > regions[best_reg].start) {
+ best_j -= 1;
}
}
}
+
+ if (cur_last - best_j < 2) best_j = cur_last;
+ if (best_j > 0 && best_score > 0.1) cur_last = best_j;
// if we cannot find anything, just cut at the original place.
}
}
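The candidate-ARF search above replaces the old "find a stable region to end on" fallbacks with a score driven by the recursion base_score = (base_score + 1.0) * cor_coeff: long runs of well-predicted frames keep raising the score, while one poorly predicted frame knocks it back down. A small standalone illustration of that recursion (made-up correlation values):

#include <stdio.h>

int main(void) {
  /* Two GOPs of six frames: one steady, one with a single bad frame. */
  const double steady[6] = { 0.99, 0.99, 0.99, 0.99, 0.99, 0.99 };
  const double broken[6] = { 0.99, 0.99, 0.10, 0.99, 0.99, 0.99 };
  double s1 = 0.0, s2 = 0.0;
  for (int j = 0; j < 6; j++) {
    /* Same accumulation rule as the patch's base_score. */
    s1 = (s1 + 1.0) * steady[j];
    s2 = (s2 + 1.0) * broken[j];
  }
  printf("steady run: %.3f   run with one bad frame: %.3f\n", s1, s2);
  return 0;
}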
@@ -2081,11 +2089,11 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
// reset pointers to the shrunk location
twopass->stats_in = start_pos + cur_last;
cur_start = cur_last;
- if (regions[find_regions_index(regions, num_regions,
- cur_start + 1 + offset)]
- .type == SCENECUT_REGION) {
- cur_start++;
- }
+ int cur_region_idx =
+ find_regions_index(regions, num_regions, cur_start + 1 + offset);
+ if (cur_region_idx >= 0)
+ if (regions[cur_region_idx].type == SCENECUT_REGION) cur_start++;
+
i = cur_last;
if (cut_here > 1 && cur_last == ori_last) break;
@@ -2099,9 +2107,9 @@ static void calculate_gf_length(AV1_COMP *cpi, int max_gop_length,
// save intervals
rc->intervals_till_gf_calculate_due = count_cuts - 1;
for (int n = 1; n < count_cuts; n++) {
- rc->gf_intervals[n - 1] = cut_pos[n] - cut_pos[n - 1];
+ p_rc->gf_intervals[n - 1] = cut_pos[n] - cut_pos[n - 1];
}
- rc->cur_gf_index = 0;
+ p_rc->cur_gf_index = 0;
twopass->stats_in = start_pos;
}
@@ -2110,12 +2118,13 @@ static void correct_frames_to_key(AV1_COMP *cpi) {
(int)av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage);
if (lookahead_size <
av1_lookahead_pop_sz(cpi->ppi->lookahead, cpi->compressor_stage)) {
- assert(IMPLIES(cpi->oxcf.pass != 0 && cpi->frames_left > 0,
- lookahead_size == cpi->frames_left));
+ assert(IMPLIES(cpi->oxcf.pass != 0 && cpi->ppi->frames_left > 0,
+ lookahead_size == cpi->ppi->frames_left));
cpi->rc.frames_to_key = AOMMIN(cpi->rc.frames_to_key, lookahead_size);
- } else if (cpi->frames_left > 0) {
+ } else if (cpi->ppi->frames_left > 0) {
// Correct frames to key based on limit
- cpi->rc.frames_to_key = AOMMIN(cpi->rc.frames_to_key, cpi->frames_left);
+ cpi->rc.frames_to_key =
+ AOMMIN(cpi->rc.frames_to_key, cpi->ppi->frames_left);
}
}
@@ -2129,11 +2138,12 @@ static void correct_frames_to_key(AV1_COMP *cpi) {
*
* \param[in] cpi Top-level encoder structure
*
- * \return Nothing is returned. Instead, cpi->gf_group is changed.
+ * \return Nothing is returned. Instead, cpi->ppi->gf_group is changed.
*/
static void define_gf_group_pass0(AV1_COMP *cpi) {
RATE_CONTROL *const rc = &cpi->rc;
- GF_GROUP *const gf_group = &cpi->gf_group;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
const GFConfig *const gf_cfg = &oxcf->gf_cfg;
int target;
@@ -2141,28 +2151,28 @@ static void define_gf_group_pass0(AV1_COMP *cpi) {
if (oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
av1_cyclic_refresh_set_golden_update(cpi);
} else {
- rc->baseline_gf_interval = rc->gf_intervals[rc->cur_gf_index];
+ p_rc->baseline_gf_interval = p_rc->gf_intervals[p_rc->cur_gf_index];
rc->intervals_till_gf_calculate_due--;
- rc->cur_gf_index++;
+ p_rc->cur_gf_index++;
}
// correct frames_to_key when lookahead queue is flushing
correct_frames_to_key(cpi);
- if (rc->baseline_gf_interval > rc->frames_to_key)
- rc->baseline_gf_interval = rc->frames_to_key;
+ if (p_rc->baseline_gf_interval > rc->frames_to_key)
+ p_rc->baseline_gf_interval = rc->frames_to_key;
- rc->gfu_boost = DEFAULT_GF_BOOST;
- rc->constrained_gf_group =
- (rc->baseline_gf_interval >= rc->frames_to_key) ? 1 : 0;
+ p_rc->gfu_boost = DEFAULT_GF_BOOST;
+ p_rc->constrained_gf_group =
+ (p_rc->baseline_gf_interval >= rc->frames_to_key) ? 1 : 0;
gf_group->max_layer_depth_allowed = oxcf->gf_cfg.gf_max_pyr_height;
// Rare case where the look-ahead is shorter than the target GOP length, so an
// ARF frame cannot be generated.
- if (rc->baseline_gf_interval > gf_cfg->lag_in_frames ||
+ if (p_rc->baseline_gf_interval > gf_cfg->lag_in_frames ||
!is_altref_enabled(gf_cfg->lag_in_frames, gf_cfg->enable_auto_arf) ||
- rc->baseline_gf_interval < rc->min_gf_interval)
+ p_rc->baseline_gf_interval < rc->min_gf_interval)
gf_group->max_layer_depth_allowed = 0;
// Set up the structure of this Group-Of-Pictures (same as GF_GROUP)
@@ -2194,7 +2204,8 @@ static INLINE void set_baseline_gf_interval(AV1_COMP *cpi, int arf_position,
int use_alt_ref,
int is_final_pass) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
// Set the interval until the next gf.
// If forward keyframes are enabled, ensure the final gf group obeys the
// MIN_FWD_KF_INTERVAL.
@@ -2203,27 +2214,28 @@ static INLINE void set_baseline_gf_interval(AV1_COMP *cpi, int arf_position,
twopass->stats_buf_ctx->stats_in_end;
if (cpi->oxcf.kf_cfg.fwd_kf_enabled && use_alt_ref && !is_last_kf &&
- cpi->rc.next_is_fwd_key) {
+ cpi->ppi->p_rc.next_is_fwd_key) {
if (arf_position == rc->frames_to_key + 1) {
- rc->baseline_gf_interval = arf_position;
+ p_rc->baseline_gf_interval = arf_position;
// if the last gf group will be smaller than MIN_FWD_KF_INTERVAL
} else if (rc->frames_to_key + 1 - arf_position <
AOMMAX(MIN_FWD_KF_INTERVAL, rc->min_gf_interval)) {
// if possible, merge the last two gf groups
if (rc->frames_to_key + 1 <= active_max_gf_interval) {
- rc->baseline_gf_interval = rc->frames_to_key + 1;
+ p_rc->baseline_gf_interval = rc->frames_to_key + 1;
if (is_final_pass) rc->intervals_till_gf_calculate_due = 0;
// if merging the last two gf groups creates a group that is too long,
// split them and force the last gf group to be the MIN_FWD_KF_INTERVAL
} else {
- rc->baseline_gf_interval = rc->frames_to_key + 1 - MIN_FWD_KF_INTERVAL;
+ p_rc->baseline_gf_interval =
+ rc->frames_to_key + 1 - MIN_FWD_KF_INTERVAL;
if (is_final_pass) rc->intervals_till_gf_calculate_due = 0;
}
} else {
- rc->baseline_gf_interval = arf_position;
+ p_rc->baseline_gf_interval = arf_position;
}
} else {
- rc->baseline_gf_interval = arf_position;
+ p_rc->baseline_gf_interval = arf_position;
}
}
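The forward-keyframe path above can be read as a small decision rule on the interval length: keep arf_position unless the trailing group would fall below MIN_FWD_KF_INTERVAL, in which case either merge the last two groups (if that still fits the active maximum) or carve off exactly MIN_FWD_KF_INTERVAL frames. A standalone sketch of that rule with stand-in constants, not the encoder's configuration:

#include <stdio.h>

#define MIN_FWD_KF_INTERVAL 8 /* same name as the patch, value assumed */

static int baseline_gf_interval(int arf_position, int frames_to_key,
                                int active_max_gf_interval,
                                int min_gf_interval) {
  const int tail = frames_to_key + 1 - arf_position; /* frames after the ARF */
  const int min_int = MIN_FWD_KF_INTERVAL > min_gf_interval
                          ? MIN_FWD_KF_INTERVAL
                          : min_gf_interval;
  if (arf_position == frames_to_key + 1) return arf_position;
  if (tail < min_int) {
    /* Last group would be too short: merge it in if that stays legal,
     * otherwise carve off exactly MIN_FWD_KF_INTERVAL frames. */
    if (frames_to_key + 1 <= active_max_gf_interval) return frames_to_key + 1;
    return frames_to_key + 1 - MIN_FWD_KF_INTERVAL;
  }
  return arf_position;
}

int main(void) {
  printf("%d\n", baseline_gf_interval(28, 30, 32, 4)); /* merge: prints 31 */
  printf("%d\n", baseline_gf_interval(28, 34, 32, 4)); /* split: prints 27 */
  return 0;
}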
@@ -2269,18 +2281,19 @@ static void init_gf_stats(GF_GROUP_STATS *gf_stats) {
* \param[in] is_final_pass Whether this is the final pass for the
* GF group, or a trial (non-zero)
*
- * \return Nothing is returned. Instead, cpi->gf_group is changed.
+ * \return Nothing is returned. Instead, cpi->ppi->gf_group is changed.
*/
static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
EncodeFrameParams *frame_params, int max_gop_length,
int is_final_pass) {
AV1_COMMON *const cm = &cpi->common;
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
FIRSTPASS_STATS next_frame;
const FIRSTPASS_STATS *const start_pos = twopass->stats_in;
- GF_GROUP *gf_group = &cpi->gf_group;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
FRAME_INFO *frame_info = &cpi->frame_info;
const GFConfig *const gf_cfg = &oxcf->gf_cfg;
const RateControlCfg *const rc_cfg = &oxcf->rc_cfg;
@@ -2289,12 +2302,13 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
int64_t gf_group_bits;
const int is_intra_only = rc->frames_since_key == 0;
- cpi->internal_altref_allowed = (gf_cfg->gf_max_pyr_height > 1);
+ cpi->ppi->internal_altref_allowed = (gf_cfg->gf_max_pyr_height > 1);
// Reset the GF group data structures unless this is a key
// frame in which case it will already have been done.
if (!is_intra_only) {
- av1_zero(cpi->gf_group);
+ av1_zero(cpi->ppi->gf_group);
+ cpi->gf_frame_index = 0;
}
aom_clear_system_state();
@@ -2306,7 +2320,7 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
}
// correct frames_to_key when lookahead queue is emptying
- if (cpi->lap_enabled) {
+ if (cpi->ppi->lap_enabled) {
correct_frames_to_key(cpi);
}
@@ -2336,8 +2350,8 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
AOMMIN(rc->max_gf_interval, max_gop_length);
i = is_intra_only;
- // get the determined gf group length from rc->gf_intervals
- while (i < rc->gf_intervals[rc->cur_gf_index]) {
+ // get the determined gf group length from p_rc->gf_intervals
+ while (i < p_rc->gf_intervals[p_rc->cur_gf_index]) {
// read in the next frame
if (EOF == input_stats(twopass, &next_frame)) break;
// Accumulate error score of frames in this gf group.
@@ -2360,7 +2374,7 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
i = is_intra_only;
input_stats(twopass, &next_frame);
- while (i < rc->gf_intervals[rc->cur_gf_index]) {
+ while (i < p_rc->gf_intervals[p_rc->cur_gf_index]) {
// read in the next frame
if (EOF == input_stats(twopass, &next_frame)) break;
@@ -2369,13 +2383,13 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
flash_detected = detect_flash(twopass, 0);
// accumulate stats for next frame
- accumulate_next_frame_stats(&next_frame, frame_info, flash_detected,
+ accumulate_next_frame_stats(&next_frame, flash_detected,
rc->frames_since_key, i, &gf_stats);
++i;
}
- i = rc->gf_intervals[rc->cur_gf_index];
+ i = p_rc->gf_intervals[p_rc->cur_gf_index];
// save the errs for the last frame
last_frame_stats.frame_coded_error = next_frame.coded_error;
@@ -2384,11 +2398,11 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
if (is_final_pass) {
rc->intervals_till_gf_calculate_due--;
- rc->cur_gf_index++;
+ p_rc->cur_gf_index++;
}
// Was the group length constrained by the requirement for a new KF?
- rc->constrained_gf_group = (i >= rc->frames_to_key) ? 1 : 0;
+ p_rc->constrained_gf_group = (i >= rc->frames_to_key) ? 1 : 0;
const int num_mbs = (oxcf->resize_cfg.resize_mode != RESIZE_NONE)
? cpi->initial_mbs
@@ -2407,32 +2421,34 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
gf_stats.zero_motion_accumulator > MIN_ZERO_MOTION &&
gf_stats.avg_sr_coded_error / num_mbs < MAX_SR_CODED_ERROR &&
gf_stats.avg_raw_err_stdev < MAX_RAW_ERR_VAR) {
- cpi->internal_altref_allowed = 0;
+ cpi->ppi->internal_altref_allowed = 0;
}
int use_alt_ref;
if (can_disable_arf) {
use_alt_ref =
!is_almost_static(gf_stats.zero_motion_accumulator,
- twopass->kf_zeromotion_pct, cpi->lap_enabled) &&
- rc->use_arf_in_this_kf_group && (i < gf_cfg->lag_in_frames) &&
+ twopass->kf_zeromotion_pct, cpi->ppi->lap_enabled) &&
+ p_rc->use_arf_in_this_kf_group && (i < gf_cfg->lag_in_frames) &&
(i >= MIN_GF_INTERVAL);
+ FIRSTPASS_STATS *total_stats = twopass->stats_buf_ctx->total_stats;
// TODO(urvang): Improve and use model for VBR, CQ etc as well.
- if (use_alt_ref && rc_cfg->mode == AOM_Q && rc_cfg->cq_level <= 200) {
+ if (use_alt_ref && use_ml_model_to_decide_flat_gop(rc_cfg) &&
+ !is_fp_stats_to_predict_flat_gop_invalid(total_stats)) {
aom_clear_system_state();
float features[21];
get_features_from_gf_stats(
&gf_stats, &first_frame_stats, &last_frame_stats, num_mbs,
- rc->constrained_gf_group, twopass->kf_zeromotion_pct, i, features);
+ p_rc->constrained_gf_group, twopass->kf_zeromotion_pct, i, features);
// Infer using ML model.
float score;
av1_nn_predict(features, &av1_use_flat_gop_nn_config, 1, &score);
use_alt_ref = (score <= 0.0);
}
} else {
- use_alt_ref =
- rc->use_arf_in_this_kf_group && (i < gf_cfg->lag_in_frames) && (i > 2);
+ use_alt_ref = p_rc->use_arf_in_this_kf_group &&
+ (i < gf_cfg->lag_in_frames) && (i > 2);
}
#define REDUCE_GF_LENGTH_THRESH 4
@@ -2443,7 +2459,7 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
// work well for certain other cases.
const int allow_gf_length_reduction =
((rc_cfg->mode == AOM_Q && rc_cfg->cq_level <= 128) ||
- !cpi->internal_altref_allowed) &&
+ !cpi->ppi->internal_altref_allowed) &&
!is_lossless_requested(rc_cfg);
if (allow_gf_length_reduction && use_alt_ref) {
@@ -2485,48 +2501,48 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
: AOMMAX(0, rc->frames_to_key - i);
// Calculate the boost for alt ref.
- rc->gfu_boost = av1_calc_arf_boost(
- twopass, rc, frame_info, alt_offset, forward_frames, ext_len,
- cpi->lap_enabled ? &rc->num_stats_used_for_gfu_boost : NULL,
- cpi->lap_enabled ? &rc->num_stats_required_for_gfu_boost : NULL);
+ p_rc->gfu_boost = av1_calc_arf_boost(
+ twopass, p_rc, rc, frame_info, alt_offset, forward_frames, ext_len,
+ &p_rc->num_stats_used_for_gfu_boost,
+ &p_rc->num_stats_required_for_gfu_boost, cpi->ppi->lap_enabled);
} else {
reset_fpf_position(twopass, start_pos);
gf_group->max_layer_depth_allowed = 0;
set_baseline_gf_interval(cpi, i, active_max_gf_interval, use_alt_ref,
is_final_pass);
- rc->gfu_boost = AOMMIN(
+ p_rc->gfu_boost = AOMMIN(
MAX_GF_BOOST,
- av1_calc_arf_boost(
- twopass, rc, frame_info, alt_offset, ext_len, 0,
- cpi->lap_enabled ? &rc->num_stats_used_for_gfu_boost : NULL,
- cpi->lap_enabled ? &rc->num_stats_required_for_gfu_boost : NULL));
+ av1_calc_arf_boost(twopass, p_rc, rc, frame_info, alt_offset, ext_len,
+ 0, &p_rc->num_stats_used_for_gfu_boost,
+ &p_rc->num_stats_required_for_gfu_boost,
+ cpi->ppi->lap_enabled));
}
#define LAST_ALR_BOOST_FACTOR 0.2f
- rc->arf_boost_factor = 1.0;
+ p_rc->arf_boost_factor = 1.0;
if (use_alt_ref && !is_lossless_requested(rc_cfg)) {
// Reduce the boost of altref in the last gf group
if (rc->frames_to_key - ext_len == REDUCE_GF_LENGTH_BY ||
rc->frames_to_key - ext_len == 0) {
- rc->arf_boost_factor = LAST_ALR_BOOST_FACTOR;
+ p_rc->arf_boost_factor = LAST_ALR_BOOST_FACTOR;
}
}
- rc->frames_till_gf_update_due = rc->baseline_gf_interval;
+ rc->frames_till_gf_update_due = p_rc->baseline_gf_interval;
// Reset the file position.
reset_fpf_position(twopass, start_pos);
- if (cpi->lap_enabled) {
+ if (cpi->ppi->lap_enabled) {
// Since we don't have enough stats to know the actual error of the
// gf group, we assume the error of each frame to be 1 and set
// the error of the group to baseline_gf_interval.
- gf_stats.gf_group_err = rc->baseline_gf_interval;
+ gf_stats.gf_group_err = p_rc->baseline_gf_interval;
}
// Calculate the bits to be allocated to the gf/arf group as a whole
gf_group_bits = calculate_total_gf_group_bits(cpi, gf_stats.gf_group_err);
- rc->gf_group_bits = gf_group_bits;
+ p_rc->gf_group_bits = gf_group_bits;
#if GROUP_ADAPTIVE_MAXQ
// Calculate an estimate of the maxq needed for the group.
@@ -2534,17 +2550,17 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
// where there could be significant overshoot than for easier
// sections where we do not wish to risk creating an overshoot
// of the allocated bit budget.
- if ((rc_cfg->mode != AOM_Q) && (rc->baseline_gf_interval > 1) &&
+ if ((rc_cfg->mode != AOM_Q) && (p_rc->baseline_gf_interval > 1) &&
is_final_pass) {
const int vbr_group_bits_per_frame =
- (int)(gf_group_bits / rc->baseline_gf_interval);
+ (int)(gf_group_bits / p_rc->baseline_gf_interval);
const double group_av_err =
- gf_stats.gf_group_raw_error / rc->baseline_gf_interval;
+ gf_stats.gf_group_raw_error / p_rc->baseline_gf_interval;
const double group_av_skip_pct =
- gf_stats.gf_group_skip_pct / rc->baseline_gf_interval;
+ gf_stats.gf_group_skip_pct / p_rc->baseline_gf_interval;
const double group_av_inactive_zone =
((gf_stats.gf_group_inactive_zone_rows * 2) /
- (rc->baseline_gf_interval * (double)cm->mi_params.mb_rows));
+ (p_rc->baseline_gf_interval * (double)cm->mi_params.mb_rows));
int tmp_q;
tmp_q = get_twopass_worst_quality(
@@ -2568,7 +2584,7 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
if (rc->frames_since_key != 0) {
twopass->section_intra_rating = calculate_section_intra_ratio(
start_pos, twopass->stats_buf_ctx->stats_in_end,
- rc->baseline_gf_interval);
+ p_rc->baseline_gf_interval);
}
av1_gop_bit_allocation(cpi, rc, gf_group, rc->frames_since_key == 0,
@@ -2577,12 +2593,12 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
frame_params->frame_type =
rc->frames_since_key == 0 ? KEY_FRAME : INTER_FRAME;
frame_params->show_frame =
- !(gf_group->update_type[gf_group->index] == ARF_UPDATE ||
- gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE);
+ !(gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE ||
+ gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE);
// TODO(jingning): Generalize this condition.
if (is_final_pass) {
- cpi->gf_state.arf_gf_boost_lst = use_alt_ref;
+ cpi->ppi->gf_state.arf_gf_boost_lst = use_alt_ref;
// Reset rolling actual and target bits counters for ARF groups.
twopass->rolling_arf_group_target_bits = 1;
@@ -2597,12 +2613,13 @@ static void define_gf_group(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
void av1_gop_bit_allocation(const AV1_COMP *cpi, RATE_CONTROL *const rc,
GF_GROUP *gf_group, int is_key_frame, int use_arf,
int64_t gf_group_bits) {
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
// Calculate the extra bits to be used for boosted frame(s)
#ifdef FIXED_ARF_BITS
int gf_arf_bits = (int)(ARF_BITS_FRACTION * gf_group_bits);
#else
int gf_arf_bits = calculate_boost_bits(
- rc->baseline_gf_interval - (rc->frames_since_key == 0), rc->gfu_boost,
+ p_rc->baseline_gf_interval - (rc->frames_since_key == 0), p_rc->gfu_boost,
gf_group_bits);
#endif
@@ -2610,8 +2627,8 @@ void av1_gop_bit_allocation(const AV1_COMP *cpi, RATE_CONTROL *const rc,
gf_group_bits, 1);
// Allocate bits to each of the frames in the GF group.
- allocate_gf_group_bits(gf_group, rc, gf_group_bits, gf_arf_bits, is_key_frame,
- use_arf);
+ allocate_gf_group_bits(gf_group, p_rc, rc, gf_group_bits, gf_arf_bits,
+ is_key_frame, use_arf);
}
// Minimum % intra coding observed in first pass (1.0 = 100%)
@@ -2786,10 +2803,10 @@ static int test_candidate_kf(TWO_PASS *twopass,
#define MIN_STATIC_KF_BOOST 5400 // Minimum boost for static KF interval
static int detect_app_forced_key(AV1_COMP *cpi) {
- if (cpi->oxcf.kf_cfg.fwd_kf_enabled) cpi->rc.next_is_fwd_key = 1;
+ if (cpi->oxcf.kf_cfg.fwd_kf_enabled) cpi->ppi->p_rc.next_is_fwd_key = 1;
int num_frames_to_app_forced_key = is_forced_keyframe_pending(
cpi->ppi->lookahead, cpi->ppi->lookahead->max_sz, cpi->compressor_stage);
- if (num_frames_to_app_forced_key != -1) cpi->rc.next_is_fwd_key = 0;
+ if (num_frames_to_app_forced_key != -1) cpi->ppi->p_rc.next_is_fwd_key = 0;
return num_frames_to_app_forced_key;
}
@@ -2799,16 +2816,16 @@ static int get_projected_kf_boost(AV1_COMP *cpi) {
* all stats needed for prior boost calculation are available.
* Hence projecting the prior boost is not needed in this cases.
*/
- if (cpi->rc.num_stats_used_for_kf_boost >= cpi->rc.frames_to_key)
- return cpi->rc.kf_boost;
+ if (cpi->ppi->p_rc.num_stats_used_for_kf_boost >= cpi->rc.frames_to_key)
+ return cpi->ppi->p_rc.kf_boost;
// Get the current tpl factor (number of frames = frames_to_key).
double tpl_factor = av1_get_kf_boost_projection_factor(cpi->rc.frames_to_key);
// Get the tpl factor when number of frames = num_stats_used_for_kf_boost.
- double tpl_factor_num_stats =
- av1_get_kf_boost_projection_factor(cpi->rc.num_stats_used_for_kf_boost);
+ double tpl_factor_num_stats = av1_get_kf_boost_projection_factor(
+ cpi->ppi->p_rc.num_stats_used_for_kf_boost);
int projected_kf_boost =
- (int)rint((tpl_factor * cpi->rc.kf_boost) / tpl_factor_num_stats);
+ (int)rint((tpl_factor * cpi->ppi->p_rc.kf_boost) / tpl_factor_num_stats);
return projected_kf_boost;
}
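get_projected_kf_boost() scales a boost measured over a partial stats window up to the full keyframe interval via the ratio of the two projection factors. A standalone sketch with a made-up projection curve standing in for av1_get_kf_boost_projection_factor():

#include <math.h>
#include <stdio.h>

/* Stand-in for av1_get_kf_boost_projection_factor(); the real curve is
 * fitted, this stub merely grows with the number of frames. */
static double projection_factor(int frames) { return sqrt((double)frames); }

/* Scale a boost computed over stats_used frames to frames_to_key frames. */
static int project_kf_boost(int kf_boost, int stats_used, int frames_to_key) {
  const double full = projection_factor(frames_to_key);
  const double partial = projection_factor(stats_used);
  return (int)rint(kf_boost * full / partial);
}

int main(void) {
  /* Boost measured over 30 of the 120 frames to the next keyframe. */
  printf("projected boost: %d\n", project_kf_boost(2000, 30, 120));
  return 0;
}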
@@ -2828,8 +2845,9 @@ static int get_projected_kf_boost(AV1_COMP *cpi) {
static int define_kf_interval(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
double *kf_group_err,
int num_frames_to_detect_scenecut) {
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
const KeyFrameCfg *const kf_cfg = &oxcf->kf_cfg;
double recent_loop_decay[FRAMES_TO_CHECK_DECAY];
@@ -2874,7 +2892,7 @@ static int define_kf_interval(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
input_stats(twopass, this_frame);
// Provided that we are not at the end of the file...
- if ((cpi->rc.enable_scenecut_detection > 0) && kf_cfg->auto_key &&
+ if ((cpi->ppi->p_rc.enable_scenecut_detection > 0) && kf_cfg->auto_key &&
twopass->stats_in < twopass->stats_buf_ctx->stats_in_end) {
double loop_decay_rate;
@@ -2882,14 +2900,13 @@ static int define_kf_interval(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
if (frames_since_key >= kf_cfg->key_freq_min &&
test_candidate_kf(twopass, &last_frame, this_frame, twopass->stats_in,
frames_since_key, oxcf->rc_cfg.mode,
- cpi->rc.enable_scenecut_detection)) {
+ cpi->ppi->p_rc.enable_scenecut_detection)) {
scenecut_detected = 1;
break;
}
// How fast is the prediction quality decaying?
- loop_decay_rate =
- get_prediction_decay_rate(frame_info, twopass->stats_in);
+ loop_decay_rate = get_prediction_decay_rate(twopass->stats_in);
// We want to know something about the recent past... rather than
// as used elsewhere where we are concerned with decay in prediction
@@ -2909,7 +2926,7 @@ static int define_kf_interval(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
// In the case of transition followed by a static scene, the key frame
// could be a good predictor for the following frames, therefore we
// do not use an arf.
- rc->use_arf_in_this_kf_group = 0;
+ p_rc->use_arf_in_this_kf_group = 0;
break;
}
@@ -2928,14 +2945,14 @@ static int define_kf_interval(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame,
}
if (kf_group_err != NULL)
- rc->num_stats_used_for_kf_boost = num_stats_used_for_kf_boost;
+ p_rc->num_stats_used_for_kf_boost = num_stats_used_for_kf_boost;
- if (cpi->lap_enabled && !scenecut_detected)
+ if (cpi->ppi->lap_enabled && !scenecut_detected)
frames_to_key = num_frames_to_next_key;
if (!kf_cfg->fwd_kf_enabled || scenecut_detected ||
twopass->stats_in >= twopass->stats_buf_ctx->stats_in_end)
- rc->next_is_fwd_key = 0;
+ p_rc->next_is_fwd_key = 0;
return frames_to_key;
}
@@ -2964,9 +2981,9 @@ static double get_kf_group_avg_error(TWO_PASS *twopass,
static int64_t get_kf_group_bits(AV1_COMP *cpi, double kf_group_err,
double kf_group_avg_error) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
int64_t kf_group_bits;
- if (cpi->lap_enabled) {
+ if (cpi->ppi->lap_enabled) {
kf_group_bits = (int64_t)rc->frames_to_key * rc->avg_frame_bandwidth;
if (cpi->oxcf.rc_cfg.vbr_corpus_complexity_lap) {
const int num_mbs = (cpi->oxcf.resize_cfg.resize_mode != RESIZE_NONE)
@@ -2990,7 +3007,7 @@ static int64_t get_kf_group_bits(AV1_COMP *cpi, double kf_group_err,
static int calc_avg_stats(AV1_COMP *cpi, FIRSTPASS_STATS *avg_frame_stat) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
FIRSTPASS_STATS cur_frame;
av1_zero(cur_frame);
int num_frames = 0;
@@ -3039,7 +3056,7 @@ static double get_kf_boost_score(AV1_COMP *cpi, double kf_raw_err,
double *zero_motion_accumulator,
double *sr_accumulator, int use_avg_stat) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
FRAME_INFO *const frame_info = &cpi->frame_info;
FIRSTPASS_STATS frame_stat;
av1_zero(frame_stat);
@@ -3061,8 +3078,7 @@ static double get_kf_boost_score(AV1_COMP *cpi, double kf_raw_err,
// For the first frame in kf group, the second ref indicator is invalid.
if (i > 0) {
*zero_motion_accumulator =
- AOMMIN(*zero_motion_accumulator,
- get_zero_motion_factor(frame_info, &frame_stat));
+ AOMMIN(*zero_motion_accumulator, get_zero_motion_factor(&frame_stat));
} else {
*zero_motion_accumulator = frame_stat.pcnt_inter - frame_stat.pcnt_motion;
}
@@ -3102,8 +3118,9 @@ static double get_kf_boost_score(AV1_COMP *cpi, double kf_raw_err,
*/
static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
- GF_GROUP *const gf_group = &cpi->gf_group;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
FRAME_INFO *const frame_info = &cpi->frame_info;
AV1_COMMON *const cm = &cpi->common;
CurrentFrame *const current_frame = &cm->current_frame;
@@ -3115,27 +3132,26 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
rc->frames_since_key = 0;
// Use arfs if possible.
- rc->use_arf_in_this_kf_group = is_altref_enabled(
+ p_rc->use_arf_in_this_kf_group = is_altref_enabled(
oxcf->gf_cfg.lag_in_frames, oxcf->gf_cfg.enable_auto_arf);
// Reset the GF group data structures.
av1_zero(*gf_group);
+ cpi->gf_frame_index = 0;
// KF is always a GF so clear frames till next gf counter.
rc->frames_till_gf_update_due = 0;
- rc->frames_to_key = 1;
-
if (has_no_stats_stage(cpi)) {
int num_frames_to_app_forced_key = detect_app_forced_key(cpi);
- rc->this_key_frame_forced =
+ p_rc->this_key_frame_forced =
current_frame->frame_number != 0 && rc->frames_to_key == 0;
if (num_frames_to_app_forced_key != -1)
rc->frames_to_key = num_frames_to_app_forced_key;
else
rc->frames_to_key = AOMMAX(1, kf_cfg->key_freq_max);
correct_frames_to_key(cpi);
- rc->kf_boost = DEFAULT_KF_BOOST;
+ p_rc->kf_boost = DEFAULT_KF_BOOST;
gf_group->update_type[0] = KF_UPDATE;
return;
}
@@ -3153,7 +3169,7 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
int64_t kf_group_bits_clipped = INT64_MAX;
// Is this a forced key frame by interval.
- rc->this_key_frame_forced = rc->next_key_frame_forced;
+ p_rc->this_key_frame_forced = p_rc->next_key_frame_forced;
twopass->kf_group_bits = 0; // Total bits available to kf group
twopass->kf_group_error_left = 0; // Group modified error score.
@@ -3169,7 +3185,7 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
else
rc->frames_to_key = kf_cfg->key_freq_max;
- if (cpi->lap_enabled) correct_frames_to_key(cpi);
+ if (cpi->ppi->lap_enabled) correct_frames_to_key(cpi);
// If there is a max kf interval set by the user we must obey it.
// We already breakout of the loop above at 2x max.
@@ -3191,28 +3207,29 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
calculate_modified_err(frame_info, twopass, oxcf, &tmp_frame);
if (EOF == input_stats(twopass, &tmp_frame)) break;
}
- rc->next_key_frame_forced = 1;
+ p_rc->next_key_frame_forced = 1;
} else if ((twopass->stats_in == twopass->stats_buf_ctx->stats_in_end &&
is_stat_consumption_stage_twopass(cpi)) ||
rc->frames_to_key >= kf_cfg->key_freq_max) {
- rc->next_key_frame_forced = 1;
+ p_rc->next_key_frame_forced = 1;
} else {
- rc->next_key_frame_forced = 0;
+ p_rc->next_key_frame_forced = 0;
}
- if (kf_cfg->fwd_kf_enabled) rc->next_is_fwd_key |= rc->next_key_frame_forced;
+ if (kf_cfg->fwd_kf_enabled)
+ p_rc->next_is_fwd_key |= p_rc->next_key_frame_forced;
// Special case for the last key frame of the file.
if (twopass->stats_in >= twopass->stats_buf_ctx->stats_in_end) {
// Accumulate kf group error.
kf_group_err +=
calculate_modified_err(frame_info, twopass, oxcf, this_frame);
- rc->next_is_fwd_key = 0;
+ p_rc->next_is_fwd_key = 0;
}
// Calculate the number of bits that should be assigned to the kf group.
if ((twopass->bits_left > 0 && twopass->modified_error_left > 0.0) ||
- (cpi->lap_enabled && oxcf->rc_cfg.mode != AOM_Q)) {
+ (cpi->ppi->lap_enabled && oxcf->rc_cfg.mode != AOM_Q)) {
// Maximum number of bits for a single normal frame (not key frame).
const int max_bits = frame_max_bits(rc, oxcf);
@@ -3237,7 +3254,7 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
}
twopass->kf_group_bits = AOMMAX(0, twopass->kf_group_bits);
- if (cpi->lap_enabled) {
+ if (cpi->ppi->lap_enabled) {
// In the case of single pass based on LAP, frames to key may have an
// inaccurate value, and hence should be clipped to an appropriate
// interval.
@@ -3268,17 +3285,17 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
twopass->section_intra_rating = calculate_section_intra_ratio(
start_position, twopass->stats_buf_ctx->stats_in_end, rc->frames_to_key);
- rc->kf_boost = (int)boost_score;
+ p_rc->kf_boost = (int)boost_score;
- if (cpi->lap_enabled) {
+ if (cpi->ppi->lap_enabled) {
if (oxcf->rc_cfg.mode == AOM_Q) {
- rc->kf_boost = get_projected_kf_boost(cpi);
+ p_rc->kf_boost = get_projected_kf_boost(cpi);
} else {
// TODO(any): Explore using average frame stats for AOM_Q as well.
boost_score = get_kf_boost_score(
cpi, kf_raw_err, &zero_motion_accumulator, &sr_accumulator, 1);
reset_fpf_position(twopass, start_position);
- rc->kf_boost += (int)boost_score;
+ p_rc->kf_boost += (int)boost_score;
}
}
@@ -3286,13 +3303,13 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
// if the kf group is very short.
if ((zero_motion_accumulator > STATIC_KF_GROUP_FLOAT_THRESH) &&
(rc->frames_to_key > 8)) {
- rc->kf_boost = AOMMAX(rc->kf_boost, MIN_STATIC_KF_BOOST);
+ p_rc->kf_boost = AOMMAX(p_rc->kf_boost, MIN_STATIC_KF_BOOST);
} else {
// Apply various clamps for min and max boost
- rc->kf_boost = AOMMAX(rc->kf_boost, (rc->frames_to_key * 3));
- rc->kf_boost = AOMMAX(rc->kf_boost, MIN_KF_BOOST);
+ p_rc->kf_boost = AOMMAX(p_rc->kf_boost, (rc->frames_to_key * 3));
+ p_rc->kf_boost = AOMMAX(p_rc->kf_boost, MIN_KF_BOOST);
#ifdef STRICT_RC
- rc->kf_boost = AOMMIN(rc->kf_boost, MAX_KF_BOOST);
+ p_rc->kf_boost = AOMMIN(p_rc->kf_boost, MAX_KF_BOOST);
#endif
}
@@ -3301,9 +3318,10 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
// very high, we calculate the bits based on a clipped value of
// frames_to_key.
kf_bits = calculate_boost_bits(
- AOMMIN(rc->frames_to_key, frames_to_key_clipped) - 1, rc->kf_boost,
+ AOMMIN(rc->frames_to_key, frames_to_key_clipped) - 1, p_rc->kf_boost,
AOMMIN(twopass->kf_group_bits, kf_group_bits_clipped));
- // printf("kf boost = %d kf_bits = %d kf_zeromotion_pct = %d\n", rc->kf_boost,
+ // printf("kf boost = %d kf_bits = %d kf_zeromotion_pct = %d\n",
+ // p_rc->kf_boost,
// kf_bits, twopass->kf_zeromotion_pct);
kf_bits = adjust_boost_bits_for_target_level(cpi, rc, kf_bits,
twopass->kf_group_bits, 0);
@@ -3315,7 +3333,7 @@ static void find_next_key_frame(AV1_COMP *cpi, FIRSTPASS_STATS *this_frame) {
gf_group->update_type[0] = KF_UPDATE;
// Note the total error score of the kf group minus the key frame itself.
- if (cpi->lap_enabled)
+ if (cpi->ppi->lap_enabled)
// As we don't have enough stats to know the actual error of the group,
// we assume the complexity of each frame to be equal to 1, and set the
// error as the number of frames in the group (minus the keyframe).
@@ -3335,7 +3353,7 @@ static int is_skippable_frame(const AV1_COMP *cpi) {
// first pass, and so do its previous and forward frames, then this frame
// can be skipped for partition check, and the partition size is assigned
// according to the variance
- const TWO_PASS *const twopass = &cpi->twopass;
+ const TWO_PASS *const twopass = &cpi->ppi->twopass;
return (!frame_is_intra_only(&cpi->common) &&
twopass->stats_in - 2 > twopass->stats_buf_ctx->stats_in_start &&
@@ -3358,34 +3376,78 @@ static int get_section_target_bandwidth(AV1_COMP *cpi) {
AV1_COMMON *const cm = &cpi->common;
CurrentFrame *const current_frame = &cm->current_frame;
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
int section_target_bandwidth;
const int frames_left = (int)(twopass->stats_buf_ctx->total_stats->count -
current_frame->frame_number);
- if (cpi->lap_enabled)
+ if (cpi->ppi->lap_enabled)
section_target_bandwidth = (int)rc->avg_frame_bandwidth;
else
section_target_bandwidth = (int)(twopass->bits_left / frames_left);
return section_target_bandwidth;
}
+static INLINE void set_twopass_params_based_on_fp_stats(
+ const AV1_COMP *cpi, const FIRSTPASS_STATS *this_frame_ptr) {
+ if (this_frame_ptr == NULL) return;
+
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
+ const int num_mbs = (cpi->oxcf.resize_cfg.resize_mode != RESIZE_NONE)
+ ? cpi->initial_mbs
+ : cpi->common.mi_params.MBs;
+ // The multiplication by 256 reverses a scaling factor of (>> 8)
+ // applied when combining MB error values for the frame.
+ twopass->mb_av_energy = log((this_frame_ptr->intra_error / num_mbs) + 1.0);
+
+ const FIRSTPASS_STATS *const total_stats =
+ twopass->stats_buf_ctx->total_stats;
+ if (is_fp_wavelet_energy_invalid(total_stats) == 0) {
+ twopass->frame_avg_haar_energy =
+ log((this_frame_ptr->frame_avg_wavelet_energy / num_mbs) + 1.0);
+ }
+
+ // Set the frame content type flag.
+ if (this_frame_ptr->intra_skip_pct >= FC_ANIMATION_THRESH)
+ twopass->fr_content_type = FC_GRAPHICS_ANIMATION;
+ else
+ twopass->fr_content_type = FC_NORMAL;
+}
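
The two energy terms set by this helper are log-compressed per-macroblock averages of the first-pass errors, and the intra_skip_pct threshold then labels the clip as animation/graphics or normal content. A rough, purely illustrative arithmetic check (numbers invented, assuming roughly 8160 16x16 macroblocks for a 1080p frame):

$$\text{mb\_av\_energy}=\ln\!\left(\frac{\text{intra\_error}}{\text{num\_mbs}}+1\right)\approx\ln\!\left(\frac{2\times10^{6}}{8160}+1\right)\approx 5.5$$

frame_avg_haar_energy is computed the same way from frame_avg_wavelet_energy whenever the first-pass wavelet stats are valid.
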
+
static void process_first_pass_stats(AV1_COMP *cpi,
FIRSTPASS_STATS *this_frame) {
AV1_COMMON *const cm = &cpi->common;
CurrentFrame *const current_frame = &cm->current_frame;
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
+ FIRSTPASS_STATS *total_stats = twopass->stats_buf_ctx->total_stats;
+
+ if (current_frame->frame_number == 0) {
+ const GFConfig *const gf_cfg = &cpi->oxcf.gf_cfg;
+ const RateControlCfg *const rc_cfg = &cpi->oxcf.rc_cfg;
+ if (use_ml_model_to_decide_flat_gop(rc_cfg) && can_disable_altref(gf_cfg) &&
+ is_fp_stats_to_predict_flat_gop_invalid(total_stats)) {
+ // warn(
+ // "First pass stats required in the ML model to predict a flat GOP "
+ // "structure is invalid. Continuing encoding by disabling the ML "
+ // "model.\n");
+ // The first pass statistics like tr_coded_error, pcnt_third_ref,
+ // frame_avg_wavelet_energy are invalid as their calculations were
+ // skipped in the first pass of encoding. As these stats are required
+ // in the ML model to predict a flat GOP structure, the ML model would be
+ // disabled. This case arises when the encode configuration used in the
+ // first pass differs from the one used in the second pass.
+ }
+ }
if (cpi->oxcf.rc_cfg.mode != AOM_Q && current_frame->frame_number == 0 &&
- cpi->gf_group.index == 0 && cpi->twopass.stats_buf_ctx->total_stats &&
- cpi->twopass.stats_buf_ctx->total_left_stats) {
- if (cpi->lap_enabled) {
+ cpi->gf_frame_index == 0 && total_stats &&
+ cpi->ppi->twopass.stats_buf_ctx->total_left_stats) {
+ if (cpi->ppi->lap_enabled) {
/*
* Accumulate total_stats using available limited number of stats,
* and assign it to total_left_stats.
*/
- *cpi->twopass.stats_buf_ctx->total_left_stats =
- *cpi->twopass.stats_buf_ctx->total_stats;
+ *cpi->ppi->twopass.stats_buf_ctx->total_left_stats = *total_stats;
}
// Special case code for first frame.
const int section_target_bandwidth = get_section_target_bandwidth(cpi);
@@ -3406,43 +3468,25 @@ static void process_first_pass_stats(AV1_COMP *cpi,
rc->active_worst_quality = tmp_q;
rc->ni_av_qi = tmp_q;
rc->last_q[INTER_FRAME] = tmp_q;
- rc->avg_q = av1_convert_qindex_to_q(tmp_q, cm->seq_params.bit_depth);
+ rc->avg_q = av1_convert_qindex_to_q(tmp_q, cm->seq_params->bit_depth);
rc->avg_frame_qindex[INTER_FRAME] = tmp_q;
rc->last_q[KEY_FRAME] = (tmp_q + cpi->oxcf.rc_cfg.best_allowed_q) / 2;
rc->avg_frame_qindex[KEY_FRAME] = rc->last_q[KEY_FRAME];
}
- int err = 0;
- if (cpi->lap_enabled) {
- err = input_stats_lap(twopass, this_frame);
+ if (cpi->ppi->lap_enabled) {
+ input_stats_lap(twopass, this_frame);
} else {
- err = input_stats(twopass, this_frame);
- }
- if (err == EOF) return;
-
- {
- const int num_mbs = (cpi->oxcf.resize_cfg.resize_mode != RESIZE_NONE)
- ? cpi->initial_mbs
- : cm->mi_params.MBs;
- // The multiplication by 256 reverses a scaling factor of (>> 8)
- // applied when combining MB error values for the frame.
- twopass->mb_av_energy = log((this_frame->intra_error / num_mbs) + 1.0);
- twopass->frame_avg_haar_energy =
- log((this_frame->frame_avg_wavelet_energy / num_mbs) + 1.0);
+ input_stats(twopass, this_frame);
}
-
- // Set the frame content type flag.
- if (this_frame->intra_skip_pct >= FC_ANIMATION_THRESH)
- twopass->fr_content_type = FC_GRAPHICS_ANIMATION;
- else
- twopass->fr_content_type = FC_NORMAL;
+ set_twopass_params_based_on_fp_stats(cpi, this_frame);
}
static void setup_target_rate(AV1_COMP *cpi) {
RATE_CONTROL *const rc = &cpi->rc;
- GF_GROUP *const gf_group = &cpi->gf_group;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
- int target_rate = gf_group->bit_allocation[gf_group->index];
+ int target_rate = gf_group->bit_allocation[cpi->gf_frame_index];
if (has_no_stats_stage(cpi)) {
av1_rc_set_frame_target(cpi, target_rate, cpi->common.width,
@@ -3452,24 +3496,160 @@ static void setup_target_rate(AV1_COMP *cpi) {
rc->base_frame_target = target_rate;
}
+static void mark_flashes(FIRSTPASS_STATS *first_stats,
+ FIRSTPASS_STATS *last_stats) {
+ FIRSTPASS_STATS *this_stats = first_stats, *next_stats;
+ while (this_stats < last_stats - 1) {
+ next_stats = this_stats + 1;
+ if (next_stats->pcnt_second_ref > next_stats->pcnt_inter &&
+ next_stats->pcnt_second_ref >= 0.5) {
+ this_stats->is_flash = 1;
+ } else {
+ this_stats->is_flash = 0;
+ }
+ this_stats = next_stats;
+ }
+ // We always treat the last frame as a non-flash.
+ if (last_stats - 1 >= first_stats) {
+ (last_stats - 1)->is_flash = 0;
+ }
+}
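
A minimal, self-contained sketch of the flash heuristic above, assuming nothing beyond the two first-pass fields it reads (the struct and numbers are illustrative stand-ins, not the real FIRSTPASS_STATS layout): frame t is flagged as a flash when frame t+1 is predicted noticeably better from its two-frames-back reference than from frame t itself.

#include <stdio.h>

typedef struct {
  double pcnt_inter;      /* portion predicted well from the previous frame */
  double pcnt_second_ref; /* portion predicted well from the older, second reference */
} toy_stats;

/* Mirrors the condition in mark_flashes(): the next frame's stats decide
 * whether the current frame was a flash. */
static int previous_frame_is_flash(const toy_stats *next_frame) {
  return next_frame->pcnt_second_ref > next_frame->pcnt_inter &&
         next_frame->pcnt_second_ref >= 0.5;
}

int main(void) {
  const toy_stats next = { 0.30, 0.65 };
  printf("flash = %d\n", previous_frame_is_flash(&next)); /* prints "flash = 1" */
  return 0;
}
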
+
+// Estimate the noise variance of each frame from the first pass stats
+static void estimate_noise(FIRSTPASS_STATS *first_stats,
+ FIRSTPASS_STATS *last_stats) {
+ FIRSTPASS_STATS *this_stats, *next_stats;
+ double C1, C2, C3, noise;
+ int count = 0;
+ for (this_stats = first_stats + 2; this_stats < last_stats; this_stats++) {
+ this_stats->noise_var = 0.0;
+ // flashes tend to have high correlation of innovations, so ignore them.
+ if (this_stats->is_flash || (this_stats - 1)->is_flash ||
+ (this_stats - 2)->is_flash)
+ continue;
+
+ C1 = (this_stats - 1)->intra_error *
+ (this_stats->intra_error - this_stats->coded_error);
+ C2 = (this_stats - 2)->intra_error *
+ ((this_stats - 1)->intra_error - (this_stats - 1)->coded_error);
+ C3 = (this_stats - 2)->intra_error *
+ (this_stats->intra_error - this_stats->sr_coded_error);
+ if (C1 <= 0 || C2 <= 0 || C3 <= 0) continue;
+ C1 = sqrt(C1);
+ C2 = sqrt(C2);
+ C3 = sqrt(C3);
+
+ noise = (this_stats - 1)->intra_error - C1 * C2 / C3;
+ noise = AOMMAX(noise, 0.01);
+ this_stats->noise_var = noise;
+ count++;
+ }
+
+ // Copy noise from the neighbor if the noise value is not trustworthy
+ for (this_stats = first_stats + 2; this_stats < last_stats; this_stats++) {
+ if (this_stats->is_flash || (this_stats - 1)->is_flash ||
+ (this_stats - 2)->is_flash)
+ continue;
+ if (this_stats->noise_var < 1.0) {
+ int found = 0;
+ // TODO(bohanli): consider expanding to two directions at the same time
+ for (next_stats = this_stats + 1; next_stats < last_stats; next_stats++) {
+ if (next_stats->is_flash || (next_stats - 1)->is_flash ||
+ (next_stats - 2)->is_flash || next_stats->noise_var < 1.0)
+ continue;
+ found = 1;
+ this_stats->noise_var = next_stats->noise_var;
+ break;
+ }
+ if (found) continue;
+ for (next_stats = this_stats - 1; next_stats >= first_stats + 2;
+ next_stats--) {
+ if (next_stats->is_flash || (next_stats - 1)->is_flash ||
+ (next_stats - 2)->is_flash || next_stats->noise_var < 1.0)
+ continue;
+ this_stats->noise_var = next_stats->noise_var;
+ break;
+ }
+ }
+ }
+
+ // Copy the noise estimate from a neighbor if this frame is a flash or follows one.
+ for (this_stats = first_stats + 2; this_stats < last_stats; this_stats++) {
+ if (this_stats->is_flash || (this_stats - 1)->is_flash ||
+ (this_stats - 2)->is_flash) {
+ int found = 0;
+ for (next_stats = this_stats + 1; next_stats < last_stats; next_stats++) {
+ if (next_stats->is_flash || (next_stats - 1)->is_flash ||
+ (next_stats - 2)->is_flash)
+ continue;
+ found = 1;
+ this_stats->noise_var = next_stats->noise_var;
+ break;
+ }
+ if (found) continue;
+ for (next_stats = this_stats - 1; next_stats >= first_stats + 2;
+ next_stats--) {
+ if (next_stats->is_flash || (next_stats - 1)->is_flash ||
+ (next_stats - 2)->is_flash)
+ continue;
+ this_stats->noise_var = next_stats->noise_var;
+ break;
+ }
+ }
+ }
+
+ // For the first two frames, copy the noise estimate from the third frame.
+ for (this_stats = first_stats;
+ this_stats < first_stats + 2 && (first_stats + 2) < last_stats;
+ this_stats++) {
+ this_stats->noise_var = (first_stats + 2)->noise_var;
+ }
+}
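
Written out, the estimator above is (with I_t, C_t and S_t standing for intra_error, coded_error and sr_coded_error of frame t; this is a transcription of the code, not an independent derivation):

$$C_1=\sqrt{I_{t-1}\,(I_t-C_t)},\qquad C_2=\sqrt{I_{t-2}\,(I_{t-1}-C_{t-1})},\qquad C_3=\sqrt{I_{t-2}\,(I_t-S_t)}$$

$$\hat{n}_t=\max\!\left(I_{t-1}-\frac{C_1\,C_2}{C_3},\;0.01\right)$$

Frames within two positions of a detected flash are skipped, and estimates below 1.0 are back-filled from the nearest usable neighbor (searching forward first, then backward).
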
+
+// Estimate correlation coefficient of each frame with its previous frame.
+static void estimate_coeff(FIRSTPASS_STATS *first_stats,
+ FIRSTPASS_STATS *last_stats) {
+ FIRSTPASS_STATS *this_stats;
+ for (this_stats = first_stats + 1; this_stats < last_stats; this_stats++) {
+ const double C =
+ sqrt(AOMMAX((this_stats - 1)->intra_error *
+ (this_stats->intra_error - this_stats->coded_error),
+ 0.001));
+ const double cor_coeff =
+ C /
+ AOMMAX((this_stats - 1)->intra_error - this_stats->noise_var, 0.001);
+
+ this_stats->cor_coeff =
+ cor_coeff *
+ sqrt(AOMMAX((this_stats - 1)->intra_error - this_stats->noise_var,
+ 0.001) /
+ AOMMAX(this_stats->intra_error - this_stats->noise_var, 0.001));
+ // clip correlation coefficient.
+ this_stats->cor_coeff = AOMMIN(AOMMAX(this_stats->cor_coeff, 0), 1);
+ }
+ first_stats->cor_coeff = 1.0;
+}
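
In the same notation, and writing n_t for the noise_var estimated above, the coefficient reduces (up to the 0.001 clamps and the final clip to [0, 1]) to a noise-compensated correlation between consecutive frames, with the first frame pinned to 1:

$$\rho_t=\frac{\sqrt{I_{t-1}\,(I_t-C_t)}}{\sqrt{(I_{t-1}-n_t)\,(I_t-n_t)}},\qquad \rho_0=1$$
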
+
void av1_get_second_pass_params(AV1_COMP *cpi,
EncodeFrameParams *const frame_params,
const EncodeFrameInput *const frame_input,
unsigned int frame_flags) {
RATE_CONTROL *const rc = &cpi->rc;
- TWO_PASS *const twopass = &cpi->twopass;
- GF_GROUP *const gf_group = &cpi->gf_group;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
const FIRSTPASS_STATS *const start_pos = twopass->stats_in;
if (is_stat_consumption_stage(cpi) && !twopass->stats_in) return;
- const int update_type = gf_group->update_type[gf_group->index];
- frame_params->frame_type = gf_group->frame_type[gf_group->index];
+ assert(twopass->stats_in != NULL);
+ const int update_type = gf_group->update_type[cpi->gf_frame_index];
+ frame_params->frame_type = gf_group->frame_type[cpi->gf_frame_index];
- if (gf_group->index < gf_group->size && !(frame_flags & FRAMEFLAGS_KEY)) {
- assert(gf_group->index < gf_group->size);
+ if (cpi->gf_frame_index < gf_group->size && !(frame_flags & FRAMEFLAGS_KEY)) {
+ assert(cpi->gf_frame_index < gf_group->size);
setup_target_rate(cpi);
@@ -3481,6 +3661,9 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
if (cpi->sf.part_sf.allow_partition_search_skip && oxcf->pass == 2) {
cpi->partition_search_skippable_frame = is_skippable_frame(cpi);
}
+ const FIRSTPASS_STATS *const this_frame_ptr = read_frame_stats(
+ twopass, gf_group->arf_src_offset[cpi->gf_frame_index]);
+ set_twopass_params_based_on_fp_stats(cpi, this_frame_ptr);
return;
}
}
@@ -3493,7 +3676,7 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
av1_zero(this_frame);
// Call process_first_pass_stats() defined above.
if (is_stat_consumption_stage(cpi)) {
- if (gf_group->index < gf_group->size || rc->frames_to_key == 0)
+ if (cpi->gf_frame_index < gf_group->size || rc->frames_to_key == 0)
process_first_pass_stats(cpi, &this_frame);
} else {
rc->active_worst_quality = oxcf->rc_cfg.cq_level;
@@ -3504,7 +3687,7 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
this_frame_copy = this_frame;
int is_overlay_forward_kf =
rc->frames_to_key == 0 &&
- gf_group->update_type[gf_group->index] == OVERLAY_UPDATE;
+ gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE;
if (rc->frames_to_key <= 0 && !is_overlay_forward_kf) {
assert(rc->frames_to_key >= -1);
// Define next KF group and assign bits to it.
@@ -3554,12 +3737,12 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
}
// Define a new GF/ARF group. (Should always enter here for key frames).
- if (gf_group->index == gf_group->size) {
+ if (cpi->gf_frame_index == gf_group->size) {
assert(cpi->common.current_frame.frame_number == 0 ||
- gf_group->index == gf_group->size);
+ cpi->gf_frame_index == gf_group->size);
const FIRSTPASS_STATS *const start_position = twopass->stats_in;
- if (cpi->lap_enabled && cpi->rc.enable_scenecut_detection) {
+ if (cpi->ppi->lap_enabled && cpi->ppi->p_rc.enable_scenecut_detection) {
int num_frames_to_detect_scenecut, frames_to_key;
num_frames_to_detect_scenecut = MAX_GF_LENGTH_LAP + 1;
frames_to_key = define_kf_interval(cpi, &this_frame, NULL,
@@ -3578,41 +3761,45 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
: MAX_GF_LENGTH_LAP;
// Identify regions if needed.
+ // TODO(bohanli): identify regions for all stats available.
if (rc->frames_since_key == 0 || rc->frames_since_key == 1 ||
- (rc->frames_till_regions_update - rc->frames_since_key <
+ (p_rc->frames_till_regions_update - rc->frames_since_key <
rc->frames_to_key &&
- rc->frames_till_regions_update - rc->frames_since_key <
+ p_rc->frames_till_regions_update - rc->frames_since_key <
max_gop_length + 1)) {
- int is_first_stat =
- twopass->stats_in == twopass->stats_buf_ctx->stats_in_start;
- const FIRSTPASS_STATS *stats_start = twopass->stats_in + is_first_stat;
- // offset of stats_start from the current frame
- int offset = is_first_stat || (rc->frames_since_key == 0);
- // offset of the region indices from the previous key frame
- rc->regions_offset = rc->frames_since_key;
// how many frames we can analyze from this frame
- int rest_frames = AOMMIN(rc->frames_to_key + rc->next_is_fwd_key,
+ int rest_frames = AOMMIN(rc->frames_to_key + p_rc->next_is_fwd_key,
MAX_FIRSTPASS_ANALYSIS_FRAMES);
- rest_frames =
- AOMMIN(rest_frames,
- (int)(twopass->stats_buf_ctx->stats_in_end - stats_start + 1) +
- offset);
-
- rc->frames_till_regions_update = rest_frames;
-
- identify_regions(stats_start, rest_frames - offset, offset, rc->regions,
- &rc->num_regions, rc->cor_coeff);
+ rest_frames = AOMMIN(
+ rest_frames, (int)(twopass->stats_buf_ctx->stats_in_end -
+ twopass->stats_in + (rc->frames_since_key == 0)));
+ p_rc->frames_till_regions_update = rest_frames;
+
+ if (cpi->ppi->lap_enabled) {
+ mark_flashes(twopass->stats_buf_ctx->stats_in_start,
+ twopass->stats_buf_ctx->stats_in_end);
+ estimate_noise(twopass->stats_buf_ctx->stats_in_start,
+ twopass->stats_buf_ctx->stats_in_end);
+ estimate_coeff(twopass->stats_buf_ctx->stats_in_start,
+ twopass->stats_buf_ctx->stats_in_end);
+ identify_regions(twopass->stats_in, rest_frames,
+ (rc->frames_since_key == 0), p_rc->regions,
+ &p_rc->num_regions);
+ } else {
+ identify_regions(twopass->stats_in - (rc->frames_since_key == 0),
+ rest_frames, 0, p_rc->regions, &p_rc->num_regions);
+ }
}
int cur_region_idx =
- find_regions_index(rc->regions, rc->num_regions,
- rc->frames_since_key - rc->regions_offset);
+ find_regions_index(p_rc->regions, p_rc->num_regions,
+ rc->frames_since_key - p_rc->regions_offset);
if ((cur_region_idx >= 0 &&
- rc->regions[cur_region_idx].type == SCENECUT_REGION) ||
+ p_rc->regions[cur_region_idx].type == SCENECUT_REGION) ||
rc->frames_since_key == 0) {
// If we start from a scenecut, then the last GOP's arf boost is not
// needed for this GOP.
- cpi->gf_state.arf_gf_boost_lst = 0;
+ cpi->ppi->gf_state.arf_gf_boost_lst = 0;
}
// TODO(jingning): Resolve the redundant calls here.
@@ -3621,62 +3808,49 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
}
if (max_gop_length > 16 && oxcf->algo_cfg.enable_tpl_model &&
- !cpi->sf.tpl_sf.disable_gop_length_decision) {
- int this_idx = rc->frames_since_key + rc->gf_intervals[rc->cur_gf_index] -
- rc->regions_offset - 1;
+ cpi->sf.tpl_sf.gop_length_decision_method != 3) {
+ int this_idx = rc->frames_since_key +
+ p_rc->gf_intervals[p_rc->cur_gf_index] -
+ p_rc->regions_offset - 1;
int this_region =
- find_regions_index(rc->regions, rc->num_regions, this_idx);
+ find_regions_index(p_rc->regions, p_rc->num_regions, this_idx);
int next_region =
- find_regions_index(rc->regions, rc->num_regions, this_idx + 1);
+ find_regions_index(p_rc->regions, p_rc->num_regions, this_idx + 1);
int is_last_scenecut =
- (rc->gf_intervals[rc->cur_gf_index] >= rc->frames_to_key ||
- rc->regions[this_region].type == SCENECUT_REGION ||
- rc->regions[next_region].type == SCENECUT_REGION);
- int ori_gf_int = rc->gf_intervals[rc->cur_gf_index];
+ (p_rc->gf_intervals[p_rc->cur_gf_index] >= rc->frames_to_key ||
+ p_rc->regions[this_region].type == SCENECUT_REGION ||
+ p_rc->regions[next_region].type == SCENECUT_REGION);
+ int ori_gf_int = p_rc->gf_intervals[p_rc->cur_gf_index];
- if (rc->gf_intervals[rc->cur_gf_index] > 16) {
+ if (p_rc->gf_intervals[p_rc->cur_gf_index] > 16 &&
+ rc->min_gf_interval <= 16) {
// The calculate_gf_length function was previously called with
// max_gop_length = 32 using look-ahead gf intervals.
define_gf_group(cpi, &this_frame, frame_params, max_gop_length, 0);
this_frame = this_frame_copy;
- int is_temporal_filter_enabled =
- (rc->frames_since_key > 0 && gf_group->arf_index > -1);
- if (is_temporal_filter_enabled) {
- int arf_src_index = gf_group->arf_src_offset[gf_group->arf_index];
- FRAME_UPDATE_TYPE arf_update_type =
- gf_group->update_type[gf_group->arf_index];
- int is_forward_keyframe = 0;
- av1_temporal_filter(cpi, arf_src_index, arf_update_type,
- is_forward_keyframe, NULL);
- aom_extend_frame_borders(&cpi->alt_ref_buffer,
- av1_num_planes(&cpi->common));
- }
- if (!av1_tpl_setup_stats(cpi, 1, frame_params, frame_input)) {
- // Tpl decides that a shorter gf interval is better.
+
+ if (is_shorter_gf_interval_better(cpi, frame_params, frame_input)) {
+ // A shorter gf interval is better.
// TODO(jingning): Remove redundant computations here.
max_gop_length = 16;
calculate_gf_length(cpi, max_gop_length, 1);
if (is_last_scenecut &&
- (ori_gf_int - rc->gf_intervals[rc->cur_gf_index] < 4)) {
- rc->gf_intervals[rc->cur_gf_index] = ori_gf_int;
+ (ori_gf_int - p_rc->gf_intervals[p_rc->cur_gf_index] < 4)) {
+ p_rc->gf_intervals[p_rc->cur_gf_index] = ori_gf_int;
}
- } else {
- // Tpl stats is reused only when the ARF frame is temporally filtered
- if (is_temporal_filter_enabled)
- cpi->tpl_data.skip_tpl_setup_stats = 1;
}
}
}
define_gf_group(cpi, &this_frame, frame_params, max_gop_length, 0);
- if (gf_group->update_type[gf_group->index] != ARF_UPDATE &&
+ if (gf_group->update_type[cpi->gf_frame_index] != ARF_UPDATE &&
rc->frames_since_key > 0)
process_first_pass_stats(cpi, &this_frame);
define_gf_group(cpi, &this_frame, frame_params, max_gop_length, 1);
- rc->frames_till_gf_update_due = rc->baseline_gf_interval;
- assert(gf_group->index == 0);
+ rc->frames_till_gf_update_due = p_rc->baseline_gf_interval;
+ assert(cpi->gf_frame_index == 0);
#if ARF_STATS_OUTPUT
{
FILE *fpfile;
@@ -3684,18 +3858,22 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
++arf_count;
fprintf(fpfile, "%10d %10d %10d %10d %10d\n",
cpi->common.current_frame.frame_number,
- rc->frames_till_gf_update_due, rc->kf_boost, arf_count,
- rc->gfu_boost);
+ rc->frames_till_gf_update_due, cpi->ppi->p_rc.kf_boost, arf_count,
+ p_rc->gfu_boost);
fclose(fpfile);
}
#endif
}
- assert(gf_group->index < gf_group->size);
+ assert(cpi->gf_frame_index < gf_group->size);
- if (gf_group->update_type[gf_group->index] == ARF_UPDATE ||
- gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE) {
+ if (gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE ||
+ gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE) {
reset_fpf_position(twopass, start_pos);
+
+ const FIRSTPASS_STATS *const this_frame_ptr = read_frame_stats(
+ twopass, gf_group->arf_src_offset[cpi->gf_frame_index]);
+ set_twopass_params_based_on_fp_stats(cpi, this_frame_ptr);
} else {
// Update the total stats remaining structure.
if (twopass->stats_buf_ctx->total_left_stats)
@@ -3703,7 +3881,7 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
&this_frame_copy);
}
- frame_params->frame_type = gf_group->frame_type[gf_group->index];
+ frame_params->frame_type = gf_group->frame_type[cpi->gf_frame_index];
// Do the firstpass stats indicate that this frame is skippable for the
// partition search?
@@ -3716,13 +3894,20 @@ void av1_get_second_pass_params(AV1_COMP *cpi,
void av1_init_second_pass(AV1_COMP *cpi) {
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
FRAME_INFO *const frame_info = &cpi->frame_info;
double frame_rate;
FIRSTPASS_STATS *stats;
if (!twopass->stats_buf_ctx->stats_in_end) return;
+ mark_flashes(twopass->stats_buf_ctx->stats_in_start,
+ twopass->stats_buf_ctx->stats_in_end);
+ estimate_noise(twopass->stats_buf_ctx->stats_in_start,
+ twopass->stats_buf_ctx->stats_in_end);
+ estimate_coeff(twopass->stats_buf_ctx->stats_in_start,
+ twopass->stats_buf_ctx->stats_in_end);
+
stats = twopass->stats_buf_ctx->total_stats;
*stats = *twopass->stats_buf_ctx->stats_in_end;
@@ -3779,7 +3964,7 @@ void av1_init_second_pass(AV1_COMP *cpi) {
}
void av1_init_single_pass_lap(AV1_COMP *cpi) {
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
if (!twopass->stats_buf_ctx->stats_in_end) return;
@@ -3813,7 +3998,7 @@ void av1_init_single_pass_lap(AV1_COMP *cpi) {
#define MINQ_ADJ_LIMIT_CQ 20
#define HIGH_UNDERSHOOT_RATIO 2
void av1_twopass_postencode_update(AV1_COMP *cpi) {
- TWO_PASS *const twopass = &cpi->twopass;
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
RATE_CONTROL *const rc = &cpi->rc;
const RateControlCfg *const rc_cfg = &cpi->oxcf.rc_cfg;
@@ -3840,7 +4025,8 @@ void av1_twopass_postencode_update(AV1_COMP *cpi) {
// Update the active best quality pyramid.
if (!rc->is_src_frame_alt_ref) {
- const int pyramid_level = cpi->gf_group.layer_depth[cpi->gf_group.index];
+ const int pyramid_level =
+ cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index];
int i;
for (i = pyramid_level; i <= MAX_ARF_LAYERS; ++i) {
rc->active_best_quality[i] = cpi->common.quant_params.base_qindex;
@@ -3871,9 +4057,9 @@ void av1_twopass_postencode_update(AV1_COMP *cpi) {
(double)twopass->rolling_arf_group_target_bits,
twopass->bpm_factor,
av1_convert_qindex_to_q(cpi->common.quant_params.base_qindex,
- cm->seq_params.bit_depth),
+ cm->seq_params->bit_depth),
av1_convert_qindex_to_q(rc->active_worst_quality,
- cm->seq_params.bit_depth));
+ cm->seq_params->bit_depth));
fclose(fpfile);
}
#endif
diff --git a/third_party/libaom/source/libaom/av1/encoder/pickcdef.c b/third_party/libaom/source/libaom/av1/encoder/pickcdef.c
index 55e466d601..f9758343dc 100644
--- a/third_party/libaom/source/libaom/av1/encoder/pickcdef.c
+++ b/third_party/libaom/source/libaom/av1/encoder/pickcdef.c
@@ -454,13 +454,13 @@ static AOM_INLINE void cdef_params_init(const YV12_BUFFER_CONFIG *frame,
(mi_params->mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
cdef_search_ctx->nhfb =
(mi_params->mi_cols + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
- cdef_search_ctx->coeff_shift = AOMMAX(cm->seq_params.bit_depth - 8, 0);
+ cdef_search_ctx->coeff_shift = AOMMAX(cm->seq_params->bit_depth - 8, 0);
cdef_search_ctx->damping = 3 + (cm->quant_params.base_qindex >> 6);
cdef_search_ctx->total_strengths = nb_cdef_strengths[pick_method];
cdef_search_ctx->num_planes = num_planes;
cdef_search_ctx->pick_method = pick_method;
cdef_search_ctx->sb_count = 0;
- av1_setup_dst_planes(xd->plane, cm->seq_params.sb_size, frame, 0, 0, 0,
+ av1_setup_dst_planes(xd->plane, cm->seq_params->sb_size, frame, 0, 0, 0,
num_planes);
// Initialize plane wise information.
for (int pli = 0; pli < num_planes; pli++) {
@@ -478,7 +478,7 @@ static AOM_INLINE void cdef_params_init(const YV12_BUFFER_CONFIG *frame,
}
// Function pointer initialization.
#if CONFIG_AV1_HIGHBITDEPTH
- if (cm->seq_params.use_highbitdepth) {
+ if (cm->seq_params->use_highbitdepth) {
cdef_search_ctx->copy_fn = copy_sb16_16_highbd;
cdef_search_ctx->compute_cdef_dist_fn = compute_cdef_dist_highbd;
} else {
@@ -491,13 +491,20 @@ static AOM_INLINE void cdef_params_init(const YV12_BUFFER_CONFIG *frame,
#endif
}
-static void pick_cdef_from_qp(AV1_COMMON *const cm) {
- const int bd = cm->seq_params.bit_depth;
+static void pick_cdef_from_qp(AV1_COMMON *const cm, int skip_cdef,
+ int frames_since_key) {
+ const int bd = cm->seq_params->bit_depth;
const int q =
av1_ac_quant_QTX(cm->quant_params.base_qindex, 0, bd) >> (bd - 8);
CdefInfo *const cdef_info = &cm->cdef_info;
- cdef_info->cdef_bits = 0;
- cdef_info->nb_cdef_strengths = 1;
+ // Check the speed feature to avoid extra signaling.
+ if (skip_cdef) {
+ cdef_info->cdef_bits = 1;
+ cdef_info->nb_cdef_strengths = 2;
+ } else {
+ cdef_info->cdef_bits = 0;
+ cdef_info->nb_cdef_strengths = 1;
+ }
cdef_info->cdef_damping = 3 + (cm->quant_params.base_qindex >> 6);
int predicted_y_f1 = 0;
@@ -537,13 +544,22 @@ static void pick_cdef_from_qp(AV1_COMMON *const cm) {
cdef_info->cdef_uv_strengths[0] =
predicted_uv_f1 * CDEF_SEC_STRENGTHS + predicted_uv_f2;
+ if (skip_cdef) {
+ cdef_info->cdef_strengths[1] = 0;
+ cdef_info->cdef_uv_strengths[1] = 0;
+ }
const CommonModeInfoParams *const mi_params = &cm->mi_params;
const int nvfb = (mi_params->mi_rows + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
const int nhfb = (mi_params->mi_cols + MI_SIZE_64X64 - 1) / MI_SIZE_64X64;
MB_MODE_INFO **mbmi = mi_params->mi_grid_base;
for (int r = 0; r < nvfb; ++r) {
for (int c = 0; c < nhfb; ++c) {
- mbmi[MI_SIZE_64X64 * c]->cdef_strength = 0;
+ MB_MODE_INFO *current_mbmi = mbmi[MI_SIZE_64X64 * c];
+ current_mbmi->cdef_strength = 0;
+ if (skip_cdef && current_mbmi->skip_cdef_curr_sb &&
+ frames_since_key > 10) {
+ current_mbmi->cdef_strength = 1;
+ }
}
mbmi += MI_SIZE_64X64 * mi_params->mi_stride;
}
@@ -551,10 +567,10 @@ static void pick_cdef_from_qp(AV1_COMMON *const cm) {
void av1_cdef_search(MultiThreadInfo *mt_info, const YV12_BUFFER_CONFIG *frame,
const YV12_BUFFER_CONFIG *ref, AV1_COMMON *cm,
- MACROBLOCKD *xd, CDEF_PICK_METHOD pick_method,
- int rdmult) {
+ MACROBLOCKD *xd, CDEF_PICK_METHOD pick_method, int rdmult,
+ int skip_cdef_feature, int frames_since_key) {
if (pick_method == CDEF_PICK_FROM_Q) {
- pick_cdef_from_qp(cm);
+ pick_cdef_from_qp(cm, skip_cdef_feature, frames_since_key);
return;
}
const CommonModeInfoParams *const mi_params = &cm->mi_params;
diff --git a/third_party/libaom/source/libaom/av1/encoder/pickcdef.h b/third_party/libaom/source/libaom/av1/encoder/pickcdef.h
index 7fe1edb695..6bea1b0945 100644
--- a/third_party/libaom/source/libaom/av1/encoder/pickcdef.h
+++ b/third_party/libaom/source/libaom/av1/encoder/pickcdef.h
@@ -58,20 +58,6 @@ typedef uint64_t (*compute_cdef_dist_t)(void *dst, int dstride, uint16_t *src,
BLOCK_SIZE bsize, int coeff_shift,
int row, int col);
-// Data related to CDEF search multi-thread synchronization.
-typedef struct AV1CdefSyncData {
-#if CONFIG_MULTITHREAD
- // Mutex lock used while dispatching jobs.
- pthread_mutex_t *mutex_;
-#endif // CONFIG_MULTITHREAD
- // Flag to indicate all blocks are processed and end of frame is reached
- int end_of_frame;
- // Row index in units of 64x64 block
- int fbr;
- // Column index in units of 64x64 block
- int fbc;
-} AV1CdefSync;
-
/*! \brief CDEF search context.
*/
typedef struct {
@@ -224,6 +210,8 @@ void av1_cdef_mse_calc_block(CdefSearchCtx *cdef_search_ctx, int fbr, int fbc,
* \param[in] xd Pointer to common current coding block structure
* \param[in] pick_method The method used to select params
* \param[in] rdmult rd multiplier to use in making param choices
+ * \param[in] skip_cdef_feature Speed feature to skip cdef
+ * \param[in] frames_since_key Number of frames since key frame
*
* \return Nothing is returned. Instead, optimal CDEF parameters are stored
* in the \c cdef_info structure of type \ref CdefInfo inside \c cm:
@@ -239,7 +227,8 @@ void av1_cdef_mse_calc_block(CdefSearchCtx *cdef_search_ctx, int fbr, int fbc,
void av1_cdef_search(struct MultiThreadInfo *mt_info,
const YV12_BUFFER_CONFIG *frame,
const YV12_BUFFER_CONFIG *ref, AV1_COMMON *cm,
- MACROBLOCKD *xd, CDEF_PICK_METHOD pick_method, int rdmult);
+ MACROBLOCKD *xd, CDEF_PICK_METHOD pick_method, int rdmult,
+ int skip_cdef_feature, int frames_since_key);
#ifdef __cplusplus
} // extern "C"
diff --git a/third_party/libaom/source/libaom/av1/encoder/picklpf.c b/third_party/libaom/source/libaom/av1/encoder/picklpf.c
index 9b3924f5ce..44030767b5 100644
--- a/third_party/libaom/source/libaom/av1/encoder/picklpf.c
+++ b/third_party/libaom/source/libaom/av1/encoder/picklpf.c
@@ -39,8 +39,8 @@ static void yv12_copy_plane(const YV12_BUFFER_CONFIG *src_bc,
int av1_get_max_filter_level(const AV1_COMP *cpi) {
if (is_stat_consumption_stage_twopass(cpi)) {
- return cpi->twopass.section_intra_rating > 8 ? MAX_LOOP_FILTER * 3 / 4
- : MAX_LOOP_FILTER;
+ return cpi->ppi->twopass.section_intra_rating > 8 ? MAX_LOOP_FILTER * 3 / 4
+ : MAX_LOOP_FILTER;
} else {
return MAX_LOOP_FILTER;
}
@@ -78,16 +78,16 @@ static int64_t try_filter_frame(const YV12_BUFFER_CONFIG *sd,
0,
#endif
mt_info->workers, num_workers,
- &mt_info->lf_row_sync);
+ &mt_info->lf_row_sync, 0);
else
av1_loop_filter_frame(&cm->cur_frame->buf, cm, &cpi->td.mb.e_mbd,
#if CONFIG_LPF_MASK
0,
#endif
- plane, plane + 1, partial_frame);
+ plane, plane + 1, partial_frame, 0);
filt_err = aom_get_sse_plane(sd, &cm->cur_frame->buf, plane,
- cm->seq_params.use_highbitdepth);
+ cm->seq_params->use_highbitdepth);
// Re-instate the unfiltered frame
yv12_copy_plane(&cpi->last_frame_uf, &cm->cur_frame->buf, plane);
@@ -153,8 +153,8 @@ static int search_filter_level(const YV12_BUFFER_CONFIG *sd, AV1_COMP *cpi,
int64_t bias = (best_err >> (15 - (filt_mid / 8))) * filter_step;
if ((is_stat_consumption_stage_twopass(cpi)) &&
- (cpi->twopass.section_intra_rating < 20))
- bias = (bias * cpi->twopass.section_intra_rating) / 20;
+ (cpi->ppi->twopass.section_intra_rating < 20))
+ bias = (bias * cpi->ppi->twopass.section_intra_rating) / 20;
// yx, bias less for large block size
if (cm->features.tx_mode != ONLY_4X4) bias >>= 1;
@@ -205,7 +205,7 @@ static int search_filter_level(const YV12_BUFFER_CONFIG *sd, AV1_COMP *cpi,
if (best_cost_ret)
*best_cost_ret = RDCOST_DBL_WITH_NATIVE_BD_DIST(
- x->rdmult, 0, (best_err << 4), cm->seq_params.bit_depth);
+ x->rdmult, 0, (best_err << 4), cm->seq_params->bit_depth);
return filt_best;
}
@@ -226,7 +226,7 @@ void av1_pick_filter_level(const YV12_BUFFER_CONFIG *sd, AV1_COMP *cpi,
const int min_filter_level = 0;
const int max_filter_level = av1_get_max_filter_level(cpi);
const int q = av1_ac_quant_QTX(cm->quant_params.base_qindex, 0,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
// based on test results for the rtc test set
// 0.04590 boosted or 0.02295 non-boosted in 18-bit fixed point
const int strength_boost_q_treshold = 0;
@@ -244,7 +244,7 @@ void av1_pick_filter_level(const YV12_BUFFER_CONFIG *sd, AV1_COMP *cpi,
// And high bit depth separately:
// filt_guess = q * 0.316206 + 3.87252
int filt_guess;
- switch (cm->seq_params.bit_depth) {
+ switch (cm->seq_params->bit_depth) {
case AOM_BITS_8:
filt_guess =
(cm->current_frame.frame_type == KEY_FRAME)
@@ -263,7 +263,7 @@ void av1_pick_filter_level(const YV12_BUFFER_CONFIG *sd, AV1_COMP *cpi,
"or AOM_BITS_12");
return;
}
- if (cm->seq_params.bit_depth != AOM_BITS_8 &&
+ if (cm->seq_params->bit_depth != AOM_BITS_8 &&
cm->current_frame.frame_type == KEY_FRAME)
filt_guess -= 4;
// TODO(chengchen): retrain the model for Y, U, V filter levels
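
For the high-bit-depth branches referred to in the comment above, the guess is the linear model

$$\text{filt\_guess}=0.316206\,q+3.87252$$

so, as a purely illustrative check, q = 120 gives filt_guess ≈ 41.8; key frames at bit depths above 8 then get the extra -4, and the result is clamped to [min_filter_level, max_filter_level] before being assigned to the Y, U and V filter levels.
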
@@ -272,10 +272,20 @@ void av1_pick_filter_level(const YV12_BUFFER_CONFIG *sd, AV1_COMP *cpi,
lf->filter_level_u = clamp(filt_guess, min_filter_level, max_filter_level);
lf->filter_level_v = clamp(filt_guess, min_filter_level, max_filter_level);
} else {
- const int last_frame_filter_level[4] = { lf->filter_level[0],
- lf->filter_level[1],
- lf->filter_level_u,
- lf->filter_level_v };
+ int last_frame_filter_level[4] = { 0 };
+ if (!frame_is_intra_only(cm)) {
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ last_frame_filter_level[0] = cpi->ppi->filter_level[0];
+ last_frame_filter_level[1] = cpi->ppi->filter_level[1];
+ last_frame_filter_level[2] = cpi->ppi->filter_level_u;
+ last_frame_filter_level[3] = cpi->ppi->filter_level_v;
+#else
+ last_frame_filter_level[0] = lf->filter_level[0];
+ last_frame_filter_level[1] = lf->filter_level[1];
+ last_frame_filter_level[2] = lf->filter_level_u;
+ last_frame_filter_level[3] = lf->filter_level_v;
+#endif
+ }
lf->filter_level[0] = lf->filter_level[1] =
search_filter_level(sd, cpi, method == LPF_PICK_FROM_SUBIMAGE,
@@ -297,5 +307,14 @@ void av1_pick_filter_level(const YV12_BUFFER_CONFIG *sd, AV1_COMP *cpi,
search_filter_level(sd, cpi, method == LPF_PICK_FROM_SUBIMAGE,
last_frame_filter_level, NULL, 2, 0);
}
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // Store current frame loopfilter levels if update flag is set.
+ if (cpi->do_frame_data_update) {
+ cpi->ppi->filter_level[0] = lf->filter_level[0];
+ cpi->ppi->filter_level[1] = lf->filter_level[1];
+ cpi->ppi->filter_level_u = lf->filter_level_u;
+ cpi->ppi->filter_level_v = lf->filter_level_v;
+ }
+#endif
}
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/pickrst.c b/third_party/libaom/source/libaom/av1/encoder/pickrst.c
index 21965138be..2c12cb014f 100644
--- a/third_party/libaom/source/libaom/av1/encoder/pickrst.c
+++ b/third_party/libaom/source/libaom/av1/encoder/pickrst.c
@@ -199,8 +199,8 @@ static int64_t try_restoration_unit(const RestSearchCtxt *rsc,
const int is_uv = plane > 0;
const RestorationInfo *rsi = &cm->rst_info[plane];
RestorationLineBuffers rlbs;
- const int bit_depth = cm->seq_params.bit_depth;
- const int highbd = cm->seq_params.use_highbitdepth;
+ const int bit_depth = cm->seq_params->bit_depth;
+ const int highbd = cm->seq_params->use_highbitdepth;
const YV12_BUFFER_CONFIG *fts = &cm->cur_frame->buf;
// TODO(yunqing): For now, only use optimized LR filter in decoder. Can be
@@ -209,8 +209,8 @@ static int64_t try_restoration_unit(const RestSearchCtxt *rsc,
av1_loop_restoration_filter_unit(
limits, rui, &rsi->boundaries, &rlbs, tile_rect, rsc->tile_stripe0,
- is_uv && cm->seq_params.subsampling_x,
- is_uv && cm->seq_params.subsampling_y, highbd, bit_depth,
+ is_uv && cm->seq_params->subsampling_x,
+ is_uv && cm->seq_params->subsampling_y, highbd, bit_depth,
fts->buffers[plane], fts->strides[is_uv], rsc->dst->buffers[plane],
rsc->dst->strides[is_uv], cm->rst_tmpbuf, optimized_lr);
@@ -886,8 +886,8 @@ static AOM_INLINE void search_sgrproj(const RestorationTileLimits *limits,
const MACROBLOCK *const x = rsc->x;
const AV1_COMMON *const cm = rsc->cm;
- const int highbd = cm->seq_params.use_highbitdepth;
- const int bit_depth = cm->seq_params.bit_depth;
+ const int highbd = cm->seq_params->use_highbitdepth;
+ const int bit_depth = cm->seq_params->bit_depth;
const int64_t bits_none = x->mode_costs.sgrproj_restore_cost[0];
// Prune evaluation of RESTORE_SGRPROJ if 'skip_sgr_eval' is set
@@ -905,8 +905,8 @@ static AOM_INLINE void search_sgrproj(const RestorationTileLimits *limits,
rsc->src_buffer + limits->v_start * rsc->src_stride + limits->h_start;
const int is_uv = rsc->plane > 0;
- const int ss_x = is_uv && cm->seq_params.subsampling_x;
- const int ss_y = is_uv && cm->seq_params.subsampling_y;
+ const int ss_x = is_uv && cm->seq_params->subsampling_x;
+ const int ss_y = is_uv && cm->seq_params->subsampling_y;
const int procunit_width = RESTORATION_PROC_UNIT_SIZE >> ss_x;
const int procunit_height = RESTORATION_PROC_UNIT_SIZE >> ss_y;
@@ -1474,12 +1474,12 @@ static AOM_INLINE void search_wiener(const RestorationTileLimits *limits,
const int scale[3] = { 0, 1, 2 };
// Obtain the normalized Qscale
const int qs = av1_dc_quant_QTX(rsc->cm->quant_params.base_qindex, 0,
- rsc->cm->seq_params.bit_depth) >>
+ rsc->cm->seq_params->bit_depth) >>
3;
// Derive threshold as sqr(normalized Qscale) * scale / 16,
const uint64_t thresh =
(qs * qs * scale[rsc->lpf_sf->prune_wiener_based_on_src_var]) >> 4;
- const int highbd = rsc->cm->seq_params.use_highbitdepth;
+ const int highbd = rsc->cm->seq_params->use_highbitdepth;
const uint64_t src_var =
var_restoration_unit(limits, rsc->src, rsc->plane, highbd);
// Do not perform Wiener search if source variance is lower than threshold
@@ -1510,11 +1510,11 @@ static AOM_INLINE void search_wiener(const RestorationTileLimits *limits,
#if CONFIG_AV1_HIGHBITDEPTH
const AV1_COMMON *const cm = rsc->cm;
- if (cm->seq_params.use_highbitdepth) {
+ if (cm->seq_params->use_highbitdepth) {
av1_compute_stats_highbd(reduced_wiener_win, rsc->dgd_buffer,
rsc->src_buffer, limits->h_start, limits->h_end,
limits->v_start, limits->v_end, rsc->dgd_stride,
- rsc->src_stride, M, H, cm->seq_params.bit_depth);
+ rsc->src_stride, M, H, cm->seq_params->bit_depth);
} else {
av1_compute_stats(reduced_wiener_win, rsc->dgd_buffer, rsc->src_buffer,
limits->h_start, limits->h_end, limits->v_start,
@@ -1567,10 +1567,10 @@ static AOM_INLINE void search_wiener(const RestorationTileLimits *limits,
double cost_none = RDCOST_DBL_WITH_NATIVE_BD_DIST(
x->rdmult, bits_none >> 4, rusi->sse[RESTORE_NONE],
- rsc->cm->seq_params.bit_depth);
+ rsc->cm->seq_params->bit_depth);
double cost_wiener = RDCOST_DBL_WITH_NATIVE_BD_DIST(
x->rdmult, bits_wiener >> 4, rusi->sse[RESTORE_WIENER],
- rsc->cm->seq_params.bit_depth);
+ rsc->cm->seq_params->bit_depth);
RestorationType rtype =
(cost_wiener < cost_none) ? RESTORE_WIENER : RESTORE_NONE;
@@ -1601,7 +1601,7 @@ static AOM_INLINE void search_norestore(const RestorationTileLimits *limits,
RestSearchCtxt *rsc = (RestSearchCtxt *)priv;
RestUnitSearchInfo *rusi = &rsc->rusi[rest_unit_idx];
- const int highbd = rsc->cm->seq_params.use_highbitdepth;
+ const int highbd = rsc->cm->seq_params->use_highbitdepth;
rusi->sse[RESTORE_NONE] = sse_restoration_unit(
limits, rsc->src, &rsc->cm->cur_frame->buf, rsc->plane, highbd);
@@ -1653,8 +1653,8 @@ static AOM_INLINE void search_switchable(const RestorationTileLimits *limits,
}
const int64_t coeff_bits = coeff_pcost << AV1_PROB_COST_SHIFT;
const int64_t bits = x->mode_costs.switchable_restore_cost[r] + coeff_bits;
- double cost = RDCOST_DBL_WITH_NATIVE_BD_DIST(x->rdmult, bits >> 4, sse,
- rsc->cm->seq_params.bit_depth);
+ double cost = RDCOST_DBL_WITH_NATIVE_BD_DIST(
+ x->rdmult, bits >> 4, sse, rsc->cm->seq_params->bit_depth);
if (r == RESTORE_SGRPROJ && rusi->sgrproj.ep < 10)
cost *= (1 + DUAL_SGR_PENALTY_MULT * rsc->lpf_sf->dual_sgr_penalty_level);
if (r == 0 || cost < best_cost) {
@@ -1694,7 +1694,7 @@ static double search_rest_type(RestSearchCtxt *rsc, RestorationType rtype) {
av1_foreach_rest_unit_in_plane(rsc->cm, rsc->plane, funs[rtype], rsc,
&rsc->tile_rect, rsc->cm->rst_tmpbuf, NULL);
return RDCOST_DBL_WITH_NATIVE_BD_DIST(
- rsc->x->rdmult, rsc->bits >> 4, rsc->sse, rsc->cm->seq_params.bit_depth);
+ rsc->x->rdmult, rsc->bits >> 4, rsc->sse, rsc->cm->seq_params->bit_depth);
}
static int rest_tiles_in_plane(const AV1_COMMON *cm, int plane) {
@@ -1740,7 +1740,7 @@ void av1_pick_filter_restoration(const YV12_BUFFER_CONFIG *src, AV1_COMP *cpi) {
double best_cost = 0;
RestorationType best_rtype = RESTORE_NONE;
- const int highbd = rsc.cm->seq_params.use_highbitdepth;
+ const int highbd = rsc.cm->seq_params->use_highbitdepth;
if (!cpi->sf.lpf_sf.disable_loop_restoration_chroma || !plane) {
av1_extend_frame(rsc.dgd_buffer, rsc.plane_width, rsc.plane_height,
rsc.dgd_stride, RESTORATION_BORDER, RESTORATION_BORDER,
diff --git a/third_party/libaom/source/libaom/av1/encoder/ratectrl.c b/third_party/libaom/source/libaom/av1/encoder/ratectrl.c
index 33befa6147..c24c822b9b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/ratectrl.c
+++ b/third_party/libaom/source/libaom/av1/encoder/ratectrl.c
@@ -233,11 +233,12 @@ static void update_layer_buffer_level(SVC *svc, int encoded_frame_size) {
LAYER_IDS_TO_IDX(svc->spatial_layer_id, i, svc->number_temporal_layers);
LAYER_CONTEXT *lc = &svc->layer_context[layer];
RATE_CONTROL *lrc = &lc->rc;
+ PRIMARY_RATE_CONTROL *lp_rc = &lc->p_rc;
lrc->bits_off_target +=
(int)(lc->target_bandwidth / lc->framerate) - encoded_frame_size;
// Clip buffer level to maximum buffer size for the layer.
lrc->bits_off_target =
- AOMMIN(lrc->bits_off_target, lrc->maximum_buffer_size);
+ AOMMIN(lrc->bits_off_target, lp_rc->maximum_buffer_size);
lrc->buffer_level = lrc->bits_off_target;
}
}
@@ -245,6 +246,7 @@ static void update_layer_buffer_level(SVC *svc, int encoded_frame_size) {
static void update_buffer_level(AV1_COMP *cpi, int encoded_frame_size) {
const AV1_COMMON *const cm = &cpi->common;
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
// Non-viewable frames are a special case and are treated as pure overhead.
if (!cm->show_frame)
@@ -253,10 +255,11 @@ static void update_buffer_level(AV1_COMP *cpi, int encoded_frame_size) {
rc->bits_off_target += rc->avg_frame_bandwidth - encoded_frame_size;
// Clip the buffer level to the maximum specified buffer size.
- rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
+ rc->bits_off_target = AOMMIN(rc->bits_off_target, p_rc->maximum_buffer_size);
rc->buffer_level = rc->bits_off_target;
- if (cpi->use_svc) update_layer_buffer_level(&cpi->svc, encoded_frame_size);
+ if (cpi->ppi->use_svc)
+ update_layer_buffer_level(&cpi->svc, encoded_frame_size);
}
int av1_rc_get_default_min_gf_interval(int width, int height,
@@ -285,7 +288,24 @@ int av1_rc_get_default_max_gf_interval(double framerate, int min_gf_interval) {
return AOMMAX(interval, min_gf_interval);
}
-void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
+void av1_primary_rc_init(const AV1EncoderConfig *oxcf,
+ PRIMARY_RATE_CONTROL *p_rc) {
+ int min_gf_interval = oxcf->gf_cfg.min_gf_interval;
+ int max_gf_interval = oxcf->gf_cfg.max_gf_interval;
+ if (min_gf_interval == 0)
+ min_gf_interval = av1_rc_get_default_min_gf_interval(
+ oxcf->frm_dim_cfg.width, oxcf->frm_dim_cfg.height,
+ oxcf->input_cfg.init_framerate);
+ if (max_gf_interval == 0)
+ max_gf_interval = av1_rc_get_default_max_gf_interval(
+ oxcf->input_cfg.init_framerate, min_gf_interval);
+ p_rc->baseline_gf_interval = (min_gf_interval + max_gf_interval) / 2;
+ p_rc->this_key_frame_forced = 0;
+ p_rc->next_key_frame_forced = 0;
+}
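
When the config leaves both GF intervals at 0, the helper above derives defaults from the frame size and init_framerate and seeds the primary rate control at their midpoint; with hypothetical defaults of min_gf_interval = 4 and max_gf_interval = 16 this gives baseline_gf_interval = (4 + 16) / 2 = 10, with both forced-key-frame flags cleared.
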
+
+void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc,
+ const PRIMARY_RATE_CONTROL *const p_rc) {
const RateControlCfg *const rc_cfg = &oxcf->rc_cfg;
int i;
@@ -302,8 +322,8 @@ void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
rc->last_q[KEY_FRAME] = rc_cfg->best_allowed_q;
rc->last_q[INTER_FRAME] = rc_cfg->worst_allowed_q;
- rc->buffer_level = rc->starting_buffer_level;
- rc->bits_off_target = rc->starting_buffer_level;
+ rc->buffer_level = p_rc->starting_buffer_level;
+ rc->bits_off_target = p_rc->starting_buffer_level;
rc->rolling_target_bits = rc->avg_frame_bandwidth;
rc->rolling_actual_bits = rc->avg_frame_bandwidth;
@@ -312,8 +332,6 @@ void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
rc->total_target_bits = 0;
rc->frames_since_key = 8; // Sensible default for first frame.
- rc->this_key_frame_forced = 0;
- rc->next_key_frame_forced = 0;
rc->frames_till_gf_update_due = 0;
rc->ni_av_qi = rc_cfg->worst_allowed_q;
@@ -337,7 +355,6 @@ void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
if (rc->max_gf_interval == 0)
rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
oxcf->input_cfg.init_framerate, rc->min_gf_interval);
- rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
rc->avg_frame_low_motion = 0;
rc->resize_state = ORIG;
@@ -349,6 +366,7 @@ void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
int av1_rc_drop_frame(AV1_COMP *cpi) {
const AV1EncoderConfig *oxcf = &cpi->oxcf;
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
if (!oxcf->rc_cfg.drop_frames_water_mark) {
return 0;
@@ -360,7 +378,7 @@ int av1_rc_drop_frame(AV1_COMP *cpi) {
// If buffer is below drop_mark, for now just drop every other frame
// (starting with the next frame) until it increases back over drop_mark.
int drop_mark = (int)(oxcf->rc_cfg.drop_frames_water_mark *
- rc->optimal_buffer_level / 100);
+ p_rc->optimal_buffer_level / 100);
if ((rc->buffer_level > drop_mark) && (rc->decimation_factor > 0)) {
--rc->decimation_factor;
} else if (rc->buffer_level <= drop_mark && rc->decimation_factor == 0) {
@@ -384,6 +402,7 @@ int av1_rc_drop_frame(AV1_COMP *cpi) {
static int adjust_q_cbr(const AV1_COMP *cpi, int q, int active_worst_quality) {
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const AV1_COMMON *const cm = &cpi->common;
const RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
const int max_delta = 16;
@@ -397,7 +416,7 @@ static int adjust_q_cbr(const AV1_COMP *cpi, int q, int active_worst_quality) {
(cm->width != cm->prev_frame->width ||
cm->height != cm->prev_frame->height || change_avg_frame_bandwidth);
// Apply some control/clamp to QP under certain conditions.
- if (cm->current_frame.frame_type != KEY_FRAME && !cpi->use_svc &&
+ if (cm->current_frame.frame_type != KEY_FRAME && !cpi->ppi->use_svc &&
rc->frames_since_key > 1 && !change_target_bits_mb &&
(!cpi->oxcf.rc_cfg.gf_cbr_boost_pct ||
!(refresh_frame_flags->alt_ref_frame ||
@@ -411,7 +430,7 @@ static int adjust_q_cbr(const AV1_COMP *cpi, int q, int active_worst_quality) {
// Adjust Q base on source content change from scene detection.
if (cpi->sf.rt_sf.check_scene_detection && rc->prev_avg_source_sad > 0 &&
rc->frames_since_key > 10) {
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
double delta =
(double)rc->avg_source_sad / (double)rc->prev_avg_source_sad - 1.0;
// Push Q downwards if content change is decreasing and buffer level
@@ -419,14 +438,14 @@ static int adjust_q_cbr(const AV1_COMP *cpi, int q, int active_worst_quality) {
// only for high Q to avoid excess overshoot.
// Else reduce decrease in Q from previous frame if content change is
// increasing and buffer is below max (so not undershooting).
- if (delta < 0.0 && rc->buffer_level > (rc->optimal_buffer_level >> 2) &&
+ if (delta < 0.0 && rc->buffer_level > (p_rc->optimal_buffer_level >> 2) &&
q > (rc->worst_quality >> 1)) {
double q_adj_factor = 1.0 + 0.5 * tanh(4.0 * delta);
double q_val = av1_convert_qindex_to_q(q, bit_depth);
q += av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
} else if (rc->q_1_frame - q > 0 && delta > 0.1 &&
- rc->buffer_level < AOMMIN(rc->maximum_buffer_size,
- rc->optimal_buffer_level << 1)) {
+ rc->buffer_level < AOMMIN(p_rc->maximum_buffer_size,
+ p_rc->optimal_buffer_level << 1)) {
q = (3 * q + rc->q_1_frame) >> 2;
}
}
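A small worked sketch of the tanh scaling above: delta is the relative change in average source SAD, and the resulting factor stays between 0.5 and 1.5, pulling the target q down when the content change shrinks. The conversion back into q-index units goes through av1_compute_qdelta in the real code and is not reproduced here; the sample deltas are made up.

#include <math.h>
#include <stdio.h>

/* Sketch of the scene-change scaling above: for shrinking content change
 * (delta < 0) the factor drops below 1.0, which lowers the target q value. */
static double toy_q_adj_factor(double delta) {
  return 1.0 + 0.5 * tanh(4.0 * delta);
}

int main(void) {
  const double deltas[] = { -0.5, -0.1, 0.0, 0.2 };
  for (int i = 0; i < 4; ++i)
    printf("delta=%+.2f -> q_adj_factor=%.3f\n", deltas[i],
           toy_q_adj_factor(deltas[i]));
  return 0;
}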
@@ -452,8 +471,9 @@ static const RATE_FACTOR_LEVEL rate_factor_levels[FRAME_UPDATE_TYPES] = {
GF_ARF_LOW, // INTNL_ARF_UPDATE
};
-static RATE_FACTOR_LEVEL get_rate_factor_level(const GF_GROUP *const gf_group) {
- const FRAME_UPDATE_TYPE update_type = gf_group->update_type[gf_group->index];
+static RATE_FACTOR_LEVEL get_rate_factor_level(const GF_GROUP *const gf_group,
+ int gf_frame_index) {
+ const FRAME_UPDATE_TYPE update_type = gf_group->update_type[gf_frame_index];
assert(update_type < FRAME_UPDATE_TYPES);
return rate_factor_levels[update_type];
}
@@ -480,12 +500,13 @@ static double get_rate_correction_factor(const AV1_COMP *cpi, int width,
if (cpi->common.current_frame.frame_type == KEY_FRAME) {
rcf = rc->rate_correction_factors[KF_STD];
} else if (is_stat_consumption_stage(cpi)) {
- const RATE_FACTOR_LEVEL rf_lvl = get_rate_factor_level(&cpi->gf_group);
+ const RATE_FACTOR_LEVEL rf_lvl =
+ get_rate_factor_level(&cpi->ppi->gf_group, cpi->gf_frame_index);
rcf = rc->rate_correction_factors[rf_lvl];
} else {
if ((refresh_frame_flags->alt_ref_frame ||
refresh_frame_flags->golden_frame) &&
- !rc->is_src_frame_alt_ref && !cpi->use_svc &&
+ !rc->is_src_frame_alt_ref && !cpi->ppi->use_svc &&
(cpi->oxcf.rc_cfg.mode != AOM_CBR ||
cpi->oxcf.rc_cfg.gf_cbr_boost_pct > 20))
rcf = rc->rate_correction_factors[GF_ARF_STD];
@@ -524,12 +545,13 @@ static void set_rate_correction_factor(AV1_COMP *cpi, double factor, int width,
if (cpi->common.current_frame.frame_type == KEY_FRAME) {
rc->rate_correction_factors[KF_STD] = factor;
} else if (is_stat_consumption_stage(cpi)) {
- const RATE_FACTOR_LEVEL rf_lvl = get_rate_factor_level(&cpi->gf_group);
+ const RATE_FACTOR_LEVEL rf_lvl =
+ get_rate_factor_level(&cpi->ppi->gf_group, cpi->gf_frame_index);
rc->rate_correction_factors[rf_lvl] = factor;
} else {
if ((refresh_frame_flags->alt_ref_frame ||
refresh_frame_flags->golden_frame) &&
- !rc->is_src_frame_alt_ref && !cpi->use_svc &&
+ !rc->is_src_frame_alt_ref && !cpi->ppi->use_svc &&
(cpi->oxcf.rc_cfg.mode != AOM_CBR ||
cpi->oxcf.rc_cfg.gf_cbr_boost_pct > 20))
rc->rate_correction_factors[GF_ARF_STD] = factor;
@@ -564,7 +586,7 @@ void av1_rc_update_rate_correction_factors(AV1_COMP *cpi, int width,
} else {
projected_size_based_on_q = av1_estimate_bits_at_q(
cm->current_frame.frame_type, cm->quant_params.base_qindex, MBs,
- rate_correction_factor, cm->seq_params.bit_depth,
+ rate_correction_factor, cm->seq_params->bit_depth,
cpi->is_screen_content_type);
}
// Work out a size correction factor.
@@ -620,7 +642,7 @@ static int get_bits_per_mb(const AV1_COMP *cpi, int use_cyclic_refresh,
return use_cyclic_refresh
? av1_cyclic_refresh_rc_bits_per_mb(cpi, q, correction_factor)
: av1_rc_bits_per_mb(cm->current_frame.frame_type, q,
- correction_factor, cm->seq_params.bit_depth,
+ correction_factor, cm->seq_params->bit_depth,
cpi->is_screen_content_type);
}
@@ -724,26 +746,31 @@ static int get_active_quality(int q, int gfu_boost, int low, int high,
}
}
-static int get_kf_active_quality(const RATE_CONTROL *const rc, int q,
+static int get_kf_active_quality(const PRIMARY_RATE_CONTROL *const p_rc, int q,
aom_bit_depth_t bit_depth) {
int *kf_low_motion_minq;
int *kf_high_motion_minq;
ASSIGN_MINQ_TABLE(bit_depth, kf_low_motion_minq);
ASSIGN_MINQ_TABLE(bit_depth, kf_high_motion_minq);
- return get_active_quality(q, rc->kf_boost, kf_low, kf_high,
+ return get_active_quality(q, p_rc->kf_boost, kf_low, kf_high,
kf_low_motion_minq, kf_high_motion_minq);
}
-static int get_gf_active_quality(const RATE_CONTROL *const rc, int q,
- aom_bit_depth_t bit_depth) {
+static int get_gf_active_quality_no_rc(int gfu_boost, int q,
+ aom_bit_depth_t bit_depth) {
int *arfgf_low_motion_minq;
int *arfgf_high_motion_minq;
ASSIGN_MINQ_TABLE(bit_depth, arfgf_low_motion_minq);
ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
- return get_active_quality(q, rc->gfu_boost, gf_low, gf_high,
+ return get_active_quality(q, gfu_boost, gf_low, gf_high,
arfgf_low_motion_minq, arfgf_high_motion_minq);
}
+static int get_gf_active_quality(const PRIMARY_RATE_CONTROL *const p_rc, int q,
+ aom_bit_depth_t bit_depth) {
+ return get_gf_active_quality_no_rc(p_rc->gfu_boost, q, bit_depth);
+}
+
static int get_gf_high_motion_quality(int q, aom_bit_depth_t bit_depth) {
int *arfgf_high_motion_minq;
ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
@@ -782,8 +809,9 @@ static int calc_active_worst_quality_no_stats_cbr(const AV1_COMP *cpi) {
// (at buffer = critical level).
const AV1_COMMON *const cm = &cpi->common;
const RATE_CONTROL *rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *p_rc = &cpi->ppi->p_rc;
// Buffer level below which we push active_worst to worst_quality.
- int64_t critical_level = rc->optimal_buffer_level >> 3;
+ int64_t critical_level = p_rc->optimal_buffer_level >> 3;
int64_t buff_lvl_step = 0;
int adjustment = 0;
int active_worst_quality;
@@ -799,25 +827,26 @@ static int calc_active_worst_quality_no_stats_cbr(const AV1_COMP *cpi) {
rc->avg_frame_qindex[KEY_FRAME])
: rc->avg_frame_qindex[INTER_FRAME];
active_worst_quality = AOMMIN(rc->worst_quality, ambient_qp * 5 / 4);
- if (rc->buffer_level > rc->optimal_buffer_level) {
+ if (rc->buffer_level > p_rc->optimal_buffer_level) {
// Adjust down.
// Maximum limit for down adjustment, ~30%.
int max_adjustment_down = active_worst_quality / 3;
if (max_adjustment_down) {
- buff_lvl_step = ((rc->maximum_buffer_size - rc->optimal_buffer_level) /
- max_adjustment_down);
+ buff_lvl_step =
+ ((p_rc->maximum_buffer_size - p_rc->optimal_buffer_level) /
+ max_adjustment_down);
if (buff_lvl_step)
- adjustment = (int)((rc->buffer_level - rc->optimal_buffer_level) /
+ adjustment = (int)((rc->buffer_level - p_rc->optimal_buffer_level) /
buff_lvl_step);
active_worst_quality -= adjustment;
}
} else if (rc->buffer_level > critical_level) {
// Adjust up from ambient Q.
if (critical_level) {
- buff_lvl_step = (rc->optimal_buffer_level - critical_level);
+ buff_lvl_step = (p_rc->optimal_buffer_level - critical_level);
if (buff_lvl_step) {
adjustment = (int)((rc->worst_quality - ambient_qp) *
- (rc->optimal_buffer_level - rc->buffer_level) /
+ (p_rc->optimal_buffer_level - rc->buffer_level) /
buff_lvl_step);
}
active_worst_quality = ambient_qp + adjustment;
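A standalone sketch of the buffer-driven active-worst adjustment above, using toy buffer sizes. The clamp against best_quality and the below-critical fallback are outside this hunk and omitted.

#include <stdint.h>
#include <stdio.h>

#define TOY_MIN(a, b) ((a) < (b) ? (a) : (b))

/* Standalone sketch of the CBR active-worst adjustment shown above.  All
 * inputs are toy values. */
static int toy_active_worst_cbr(int worst_quality, int ambient_qp,
                                int64_t buffer_level, int64_t optimal_level,
                                int64_t maximum_size) {
  const int64_t critical_level = optimal_level >> 3;
  int active_worst = TOY_MIN(worst_quality, ambient_qp * 5 / 4);
  if (buffer_level > optimal_level) {
    /* Healthy buffer: relax quality by at most ~30%. */
    const int max_adjustment_down = active_worst / 3;
    if (max_adjustment_down) {
      const int64_t step = (maximum_size - optimal_level) / max_adjustment_down;
      if (step) active_worst -= (int)((buffer_level - optimal_level) / step);
    }
  } else if (buffer_level > critical_level) {
    /* Draining buffer: push up from the ambient QP towards worst_quality. */
    const int64_t step = optimal_level - critical_level;
    const int adjustment = (int)((worst_quality - ambient_qp) *
                                 (optimal_level - buffer_level) / step);
    active_worst = ambient_qp + adjustment;
  }
  return active_worst;
}

int main(void) {
  printf("full buffer:     %d\n",
         toy_active_worst_cbr(255, 120, 900000, 600000, 1200000));
  printf("draining buffer: %d\n",
         toy_active_worst_cbr(255, 120, 300000, 600000, 1200000));
  return 0;
}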
@@ -835,10 +864,11 @@ static int calc_active_best_quality_no_stats_cbr(const AV1_COMP *cpi,
int width, int height) {
const AV1_COMMON *const cm = &cpi->common;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
const CurrentFrame *const current_frame = &cm->current_frame;
int *rtc_minq;
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
int active_best_quality = rc->best_quality;
ASSIGN_MINQ_TABLE(bit_depth, rtc_minq);
@@ -846,7 +876,7 @@ static int calc_active_best_quality_no_stats_cbr(const AV1_COMP *cpi,
// Handle the special case for key frames forced when we have reached
// the maximum key frame interval. Here force the Q to a range
// based on the ambient Q to reduce the risk of popping.
- if (rc->this_key_frame_forced) {
+ if (p_rc->this_key_frame_forced) {
int qindex = rc->last_boosted_qindex;
double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
int delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
@@ -856,8 +886,8 @@ static int calc_active_best_quality_no_stats_cbr(const AV1_COMP *cpi,
// not first frame of one pass and kf_boost is set
double q_adj_factor = 1.0;
double q_val;
- active_best_quality =
- get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);
+ active_best_quality = get_kf_active_quality(
+ p_rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);
// Allow somewhat lower kf minq with small image formats.
if ((width * height) <= (352 * 288)) {
q_adj_factor -= 0.25;
@@ -868,7 +898,7 @@ static int calc_active_best_quality_no_stats_cbr(const AV1_COMP *cpi,
active_best_quality +=
av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
}
- } else if (!rc->is_src_frame_alt_ref && !cpi->use_svc &&
+ } else if (!rc->is_src_frame_alt_ref && !cpi->ppi->use_svc &&
cpi->oxcf.rc_cfg.gf_cbr_boost_pct &&
(refresh_frame_flags->golden_frame ||
refresh_frame_flags->alt_ref_frame)) {
@@ -880,7 +910,7 @@ static int calc_active_best_quality_no_stats_cbr(const AV1_COMP *cpi,
rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
q = rc->avg_frame_qindex[INTER_FRAME];
}
- active_best_quality = get_gf_active_quality(rc, q, bit_depth);
+ active_best_quality = get_gf_active_quality(p_rc, q, bit_depth);
} else {
// Use the lower of active_worst_quality and recent/average Q.
FRAME_TYPE frame_type =
@@ -913,9 +943,10 @@ static int rc_pick_q_and_bounds_no_stats_cbr(const AV1_COMP *cpi, int width,
int *top_index) {
const AV1_COMMON *const cm = &cpi->common;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const CurrentFrame *const current_frame = &cm->current_frame;
int q;
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
int active_worst_quality = calc_active_worst_quality_no_stats_cbr(cpi);
int active_best_quality = calc_active_best_quality_no_stats_cbr(
cpi, active_worst_quality, width, height);
@@ -932,7 +963,7 @@ static int rc_pick_q_and_bounds_no_stats_cbr(const AV1_COMP *cpi, int width,
*bottom_index = active_best_quality;
// Limit Q range for the adaptive loop.
- if (current_frame->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
+ if (current_frame->frame_type == KEY_FRAME && !p_rc->this_key_frame_forced &&
current_frame->frame_number != 0) {
int qdelta = 0;
aom_clear_system_state();
@@ -944,7 +975,7 @@ static int rc_pick_q_and_bounds_no_stats_cbr(const AV1_COMP *cpi, int width,
}
// Special case code to try and match quality with forced key frames
- if (current_frame->frame_type == KEY_FRAME && rc->this_key_frame_forced) {
+ if (current_frame->frame_type == KEY_FRAME && p_rc->this_key_frame_forced) {
q = rc->last_boosted_qindex;
} else {
q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
@@ -1018,7 +1049,7 @@ static int get_active_cq_level(const RATE_CONTROL *rc,
* \c oxcf->cq_level, or slightly modified for some
* special cases)
* \param[in] bit_depth Bit depth of the codec (same as
- * \c cm->seq_params.bit_depth)
+ * \c cm->seq_params->bit_depth)
* \return Returns selected q index to be used for encoding this frame.
*/
static int get_q_using_fixed_offsets(const AV1EncoderConfig *const oxcf,
@@ -1037,13 +1068,16 @@ static int get_q_using_fixed_offsets(const AV1EncoderConfig *const oxcf,
return cq_level;
}
offset_idx = 0;
- } else if (update_type == ARF_UPDATE || update_type == GF_UPDATE) {
- offset_idx = 1;
- } else if (update_type == INTNL_ARF_UPDATE) {
- offset_idx =
- AOMMIN(gf_group->layer_depth[gf_index], FIXED_QP_OFFSET_COUNT - 1);
- } else { // Leaf level / overlay frame.
- assert(update_type == LF_UPDATE || update_type == OVERLAY_UPDATE ||
+ } else if (update_type == ARF_UPDATE || update_type == GF_UPDATE ||
+ update_type == INTNL_ARF_UPDATE || update_type == LF_UPDATE) {
+ if (gf_group->layer_depth[gf_index] >=
+ gf_group->max_layer_depth_allowed + 1) { // Leaf.
+ return cq_level; // Directly Return worst quality allowed.
+ }
+ offset_idx = AOMMIN(gf_group->layer_depth[gf_index],
+ gf_group->max_layer_depth_allowed);
+ } else { // Overlay frame.
+ assert(update_type == OVERLAY_UPDATE ||
update_type == INTNL_OVERLAY_UPDATE);
return cq_level; // Directly Return worst quality allowed.
}
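A small sketch of the new leaf/offset-index selection above, assuming a hypothetical max_layer_depth_allowed of 3. The fixed QP offset table lookup itself is outside this hunk, so only the index choice is shown.

#include <stdio.h>

#define TOY_MIN(a, b) ((a) < (b) ? (a) : (b))

/* Sketch of the offset-index selection above.  Frames deeper than the
 * allowed pyramid depth are treated as leaves and simply get the configured
 * cq_level; everything else indexes a per-depth QP offset table. */
static int toy_offset_idx(int layer_depth, int max_layer_depth_allowed,
                          int *is_leaf) {
  *is_leaf = layer_depth >= max_layer_depth_allowed + 1;
  return *is_leaf ? -1 : TOY_MIN(layer_depth, max_layer_depth_allowed);
}

int main(void) {
  for (int depth = 1; depth <= 5; ++depth) {
    int leaf;
    const int idx = toy_offset_idx(depth, 3, &leaf);
    printf("depth %d -> %s (offset_idx %d)\n", depth,
           leaf ? "leaf, use cq_level" : "pyramid frame", idx);
  }
  return 0;
}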
@@ -1081,10 +1115,11 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
int *bottom_index, int *top_index) {
const AV1_COMMON *const cm = &cpi->common;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const CurrentFrame *const current_frame = &cm->current_frame;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
const RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const enum aom_rc_mode rc_mode = oxcf->rc_cfg.mode;
assert(has_no_stats_stage(cpi));
@@ -1097,7 +1132,7 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
const int cq_level =
get_active_cq_level(rc, oxcf, frame_is_intra_only(cm), cpi->superres_mode,
cm->superres_scale_denominator);
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
if (oxcf->q_cfg.use_fixed_qp_offsets) {
return get_q_using_fixed_offsets(oxcf, rc, gf_group, gf_index, cq_level,
@@ -1117,7 +1152,7 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
const int delta_qindex =
av1_compute_qdelta(rc, q_val, q_val * 0.25, bit_depth);
active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
- } else if (rc->this_key_frame_forced) {
+ } else if (p_rc->this_key_frame_forced) {
const int qindex = rc->last_boosted_qindex;
const double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
const int delta_qindex = av1_compute_qdelta(
@@ -1126,8 +1161,8 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
} else { // not first frame of one pass and kf_boost is set
double q_adj_factor = 1.0;
- active_best_quality =
- get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);
+ active_best_quality = get_kf_active_quality(
+ p_rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);
// Allow somewhat lower kf minq with small image formats.
if ((width * height) <= (352 * 288)) {
@@ -1148,14 +1183,29 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
// Use the lower of active_worst_quality and recent
// average Q as basis for GF/ARF best Q limit unless last frame was
// a key frame.
+ int avg_frame_qindex_inter_frame;
+ int avg_frame_qindex_key_frame;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ avg_frame_qindex_inter_frame =
+ (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
+ ? cpi->ppi->temp_avg_frame_qindex[INTER_FRAME]
+ : cpi->rc.avg_frame_qindex[INTER_FRAME];
+ avg_frame_qindex_key_frame =
+ (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
+ ? cpi->ppi->temp_avg_frame_qindex[KEY_FRAME]
+ : cpi->rc.avg_frame_qindex[KEY_FRAME];
+#else
+ avg_frame_qindex_inter_frame = rc->avg_frame_qindex[INTER_FRAME];
+ avg_frame_qindex_key_frame = rc->avg_frame_qindex[KEY_FRAME];
+#endif
q = (rc->frames_since_key > 1 &&
- rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
- ? rc->avg_frame_qindex[INTER_FRAME]
- : rc->avg_frame_qindex[KEY_FRAME];
+ avg_frame_qindex_inter_frame < active_worst_quality)
+ ? avg_frame_qindex_inter_frame
+ : avg_frame_qindex_key_frame;
// For constrained quality don't allow Q less than the cq level
if (rc_mode == AOM_CQ) {
if (q < cq_level) q = cq_level;
- active_best_quality = get_gf_active_quality(rc, q, bit_depth);
+ active_best_quality = get_gf_active_quality(p_rc, q, bit_depth);
// Constrained quality use slightly lower active best.
active_best_quality = active_best_quality * 15 / 16;
} else if (rc_mode == AOM_Q) {
@@ -1167,7 +1217,7 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
: av1_compute_qdelta(rc, q_val, q_val * 0.50, bit_depth);
active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
} else {
- active_best_quality = get_gf_active_quality(rc, q, bit_depth);
+ active_best_quality = get_gf_active_quality(p_rc, q, bit_depth);
}
} else {
if (rc_mode == AOM_Q) {
@@ -1206,8 +1256,8 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
{
int qdelta = 0;
aom_clear_system_state();
- if (current_frame->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
- current_frame->frame_number != 0) {
+ if (current_frame->frame_type == KEY_FRAME &&
+ !p_rc->this_key_frame_forced && current_frame->frame_number != 0) {
qdelta = av1_compute_qdelta_by_rate(
&cpi->rc, current_frame->frame_type, active_worst_quality, 2.0,
cpi->is_screen_content_type, bit_depth);
@@ -1226,7 +1276,7 @@ static int rc_pick_q_and_bounds_no_stats(const AV1_COMP *cpi, int width,
q = active_best_quality;
// Special case code to try and match quality with forced key frames
} else if ((current_frame->frame_type == KEY_FRAME) &&
- rc->this_key_frame_forced) {
+ p_rc->this_key_frame_forced) {
q = rc->last_boosted_qindex;
} else {
q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
@@ -1251,16 +1301,17 @@ static const double arf_layer_deltas[MAX_ARF_LAYERS + 1] = { 2.50, 2.00, 1.75,
1.50, 1.25, 1.15,
1.0 };
int av1_frame_type_qdelta(const AV1_COMP *cpi, int q) {
- const GF_GROUP *const gf_group = &cpi->gf_group;
- const RATE_FACTOR_LEVEL rf_lvl = get_rate_factor_level(gf_group);
- const FRAME_TYPE frame_type = gf_group->frame_type[gf_group->index];
- const int arf_layer = AOMMIN(gf_group->layer_depth[gf_group->index], 6);
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ const RATE_FACTOR_LEVEL rf_lvl =
+ get_rate_factor_level(gf_group, cpi->gf_frame_index);
+ const FRAME_TYPE frame_type = gf_group->frame_type[cpi->gf_frame_index];
+ const int arf_layer = AOMMIN(gf_group->layer_depth[cpi->gf_frame_index], 6);
const double rate_factor =
(rf_lvl == INTER_NORMAL) ? 1.0 : arf_layer_deltas[arf_layer];
return av1_compute_qdelta_by_rate(&cpi->rc, frame_type, q, rate_factor,
cpi->is_screen_content_type,
- cpi->common.seq_params.bit_depth);
+ cpi->common.seq_params->bit_depth);
}
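A standalone sketch of the rate-factor selection that feeds av1_frame_type_qdelta above. The delta table values are copied from the hunk; the rate-factor-level handling is reduced to a flag purely for illustration.

#include <stdio.h>

/* arf_layer_deltas[] as listed in the hunk (MAX_ARF_LAYERS == 6). */
static const double toy_arf_layer_deltas[7] = { 2.50, 2.00, 1.75, 1.50,
                                                1.25, 1.15, 1.0 };

/* INTER_NORMAL frames keep a factor of 1.0; ARF/GF frames pick a per-depth
 * delta, with the layer capped at 6 as in AOMMIN(layer_depth, 6). */
static double toy_rate_factor(int is_inter_normal, int arf_layer) {
  if (is_inter_normal) return 1.0;
  if (arf_layer > 6) arf_layer = 6;
  return toy_arf_layer_deltas[arf_layer];
}

int main(void) {
  printf("normal inter: %.2f\n", toy_rate_factor(1, 3));
  printf("ARF layer 0:  %.2f\n", toy_rate_factor(0, 0));
  printf("ARF layer 2:  %.2f\n", toy_rate_factor(0, 2));
  return 0;
}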
// This unrestricted Q selection on CQ mode is useful when testing new features,
@@ -1275,7 +1326,7 @@ static int rc_pick_q_and_bounds_no_stats_cq(const AV1_COMP *cpi, int width,
const int cq_level =
get_active_cq_level(rc, oxcf, frame_is_intra_only(cm), cpi->superres_mode,
cm->superres_scale_denominator);
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
const int q = (int)av1_convert_qindex_to_q(cq_level, bit_depth);
(void)width;
(void)height;
@@ -1295,10 +1346,11 @@ static void get_intra_q_and_bounds(const AV1_COMP *cpi, int width, int height,
int cq_level, int is_fwd_kf) {
const AV1_COMMON *const cm = &cpi->common;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
int active_best_quality;
int active_worst_quality = *active_worst;
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
if (rc->frames_to_key <= 1 && oxcf->rc_cfg.mode == AOM_Q) {
// If the next frame is also a key frame or the current frame is the
@@ -1315,7 +1367,7 @@ static void get_intra_q_and_bounds(const AV1_COMP *cpi, int width, int height,
const int delta_qindex = av1_compute_qdelta(
rc, last_boosted_q, last_boosted_q * 0.25, bit_depth);
active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
- } else if (rc->this_key_frame_forced) {
+ } else if (p_rc->this_key_frame_forced) {
// Handle the special case for key frames forced when we have reached
// the maximum key frame interval. Here force the Q to a range
// based on the ambient Q to reduce the risk of popping.
@@ -1324,8 +1376,8 @@ static void get_intra_q_and_bounds(const AV1_COMP *cpi, int width, int height,
int qindex;
if (is_stat_consumption_stage_twopass(cpi) &&
- cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
- qindex = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
+ cpi->ppi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
+ qindex = AOMMIN(p_rc->last_kf_qindex, rc->last_boosted_qindex);
active_best_quality = qindex;
last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
@@ -1346,13 +1398,13 @@ static void get_intra_q_and_bounds(const AV1_COMP *cpi, int width, int height,
// Baseline value derived from cpi->active_worst_quality and kf boost.
active_best_quality =
- get_kf_active_quality(rc, active_worst_quality, bit_depth);
+ get_kf_active_quality(p_rc, active_worst_quality, bit_depth);
if (cpi->is_screen_content_type) {
active_best_quality /= 2;
}
if (is_stat_consumption_stage_twopass(cpi) &&
- cpi->twopass.kf_zeromotion_pct >= STATIC_KF_GROUP_THRESH) {
+ cpi->ppi->twopass.kf_zeromotion_pct >= STATIC_KF_GROUP_THRESH) {
active_best_quality /= 3;
}
@@ -1363,7 +1415,8 @@ static void get_intra_q_and_bounds(const AV1_COMP *cpi, int width, int height,
// Make a further adjustment based on the kf zero motion measure.
if (is_stat_consumption_stage_twopass(cpi))
- q_adj_factor += 0.05 - (0.001 * (double)cpi->twopass.kf_zeromotion_pct);
+ q_adj_factor +=
+ 0.05 - (0.001 * (double)cpi->ppi->twopass.kf_zeromotion_pct);
// Convert the adjustment factor to a qindex delta
// on active_best_quality.
@@ -1394,8 +1447,9 @@ static void adjust_active_best_and_worst_quality(const AV1_COMP *cpi,
int *active_best) {
const AV1_COMMON *const cm = &cpi->common;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
- const int bit_depth = cpi->common.seq_params.bit_depth;
+ const int bit_depth = cpi->common.seq_params->bit_depth;
int active_best_quality = *active_best;
int active_worst_quality = *active_worst;
// Extension to max or min Q if undershoot or overshoot is outside
@@ -1406,20 +1460,21 @@ static void adjust_active_best_and_worst_quality(const AV1_COMP *cpi,
(refresh_frame_flags->golden_frame || is_intrl_arf_boost ||
refresh_frame_flags->alt_ref_frame))) {
active_best_quality -=
- (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast);
- active_worst_quality += (cpi->twopass.extend_maxq / 2);
+ (cpi->ppi->twopass.extend_minq + cpi->ppi->twopass.extend_minq_fast);
+ active_worst_quality += (cpi->ppi->twopass.extend_maxq / 2);
} else {
active_best_quality -=
- (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast) / 2;
- active_worst_quality += cpi->twopass.extend_maxq;
+ (cpi->ppi->twopass.extend_minq + cpi->ppi->twopass.extend_minq_fast) /
+ 2;
+ active_worst_quality += cpi->ppi->twopass.extend_maxq;
}
}
aom_clear_system_state();
#ifndef STRICT_RC
// Static forced key frames Q restrictions dealt with elsewhere.
- if (!(frame_is_intra_only(cm)) || !rc->this_key_frame_forced ||
- (cpi->twopass.last_kfgroup_zeromotion_pct < STATIC_MOTION_THRESH)) {
+ if (!(frame_is_intra_only(cm)) || !p_rc->this_key_frame_forced ||
+ (cpi->ppi->twopass.last_kfgroup_zeromotion_pct < STATIC_MOTION_THRESH)) {
const int qdelta = av1_frame_type_qdelta(cpi, active_worst_quality);
active_worst_quality =
AOMMAX(active_worst_quality + qdelta, active_best_quality);
@@ -1464,18 +1519,19 @@ static int get_q(const AV1_COMP *cpi, const int width, const int height,
const int active_best_quality) {
const AV1_COMMON *const cm = &cpi->common;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
int q;
if (cpi->oxcf.rc_cfg.mode == AOM_Q ||
- (frame_is_intra_only(cm) && !rc->this_key_frame_forced &&
- cpi->twopass.kf_zeromotion_pct >= STATIC_KF_GROUP_THRESH &&
+ (frame_is_intra_only(cm) && !p_rc->this_key_frame_forced &&
+ cpi->ppi->twopass.kf_zeromotion_pct >= STATIC_KF_GROUP_THRESH &&
rc->frames_to_key > 1)) {
q = active_best_quality;
// Special case code to try and match quality with forced key frames.
- } else if (frame_is_intra_only(cm) && rc->this_key_frame_forced) {
+ } else if (frame_is_intra_only(cm) && p_rc->this_key_frame_forced) {
// If static since last kf use better of last boosted and last kf q.
- if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
- q = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
+ if (cpi->ppi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
+ q = AOMMIN(p_rc->last_kf_qindex, rc->last_boosted_qindex);
} else {
q = AOMMIN(rc->last_boosted_qindex,
(active_best_quality + active_worst_quality) / 2);
@@ -1504,20 +1560,29 @@ static int get_active_best_quality(const AV1_COMP *const cpi,
const int active_worst_quality,
const int cq_level, const int gf_index) {
const AV1_COMMON *const cm = &cpi->common;
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
const RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
- const GF_GROUP *gf_group = &cpi->gf_group;
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
const enum aom_rc_mode rc_mode = oxcf->rc_cfg.mode;
int *inter_minq;
+ int avg_frame_qindex_inter_frame;
ASSIGN_MINQ_TABLE(bit_depth, inter_minq);
int active_best_quality = 0;
const int is_intrl_arf_boost =
gf_group->update_type[gf_index] == INTNL_ARF_UPDATE;
- const int is_leaf_frame =
- !(refresh_frame_flags->golden_frame ||
- refresh_frame_flags->alt_ref_frame || is_intrl_arf_boost);
+ int is_leaf_frame =
+ !(gf_group->update_type[gf_index] == ARF_UPDATE ||
+ gf_group->update_type[gf_index] == GF_UPDATE || is_intrl_arf_boost);
+
+ // TODO(jingning): Consider reworking this hack that covers issues incurred
+ // in the lightfield setting.
+ if (cm->tiles.large_scale) {
+ is_leaf_frame = !(refresh_frame_flags->golden_frame ||
+ refresh_frame_flags->alt_ref_frame || is_intrl_arf_boost);
+ }
const int is_overlay_frame = rc->is_src_frame_alt_ref;
if (is_leaf_frame || is_overlay_frame) {
@@ -1532,31 +1597,35 @@ static int get_active_best_quality(const AV1_COMP *const cpi,
return active_best_quality;
}
- // TODO(chengchen): can we remove this condition?
- if (rc_mode == AOM_Q && !refresh_frame_flags->alt_ref_frame &&
- !refresh_frame_flags->golden_frame && !is_intrl_arf_boost) {
- return cq_level;
- }
-
// Determine active_best_quality for frames that are not leaf or overlay.
int q = active_worst_quality;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ // For quality simulation purposes, parallel frames use the previous
+ // avg_frame_qindex.
+ avg_frame_qindex_inter_frame =
+ (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
+ ? cpi->ppi->temp_avg_frame_qindex[INTER_FRAME]
+ : rc->avg_frame_qindex[INTER_FRAME];
+#else
+ avg_frame_qindex_inter_frame = rc->avg_frame_qindex[INTER_FRAME];
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
// Use the lower of active_worst_quality and recent
// average Q as basis for GF/ARF best Q limit unless last frame was
// a key frame.
if (rc->frames_since_key > 1 &&
- rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
- q = rc->avg_frame_qindex[INTER_FRAME];
+ avg_frame_qindex_inter_frame < active_worst_quality) {
+ q = avg_frame_qindex_inter_frame;
}
if (rc_mode == AOM_CQ && q < cq_level) q = cq_level;
- active_best_quality = get_gf_active_quality(rc, q, bit_depth);
+ active_best_quality = get_gf_active_quality(p_rc, q, bit_depth);
// Constrained quality use slightly lower active best.
if (rc_mode == AOM_CQ) active_best_quality = active_best_quality * 15 / 16;
const int min_boost = get_gf_high_motion_quality(q, bit_depth);
const int boost = min_boost - active_best_quality;
- active_best_quality = min_boost - (int)(boost * rc->arf_boost_factor);
+ active_best_quality = min_boost - (int)(boost * p_rc->arf_boost_factor);
if (!is_intrl_arf_boost) return active_best_quality;
- if (rc_mode == AOM_Q || rc_mode == AOM_CQ) active_best_quality = rc->arf_q;
+ if (rc_mode == AOM_Q || rc_mode == AOM_CQ) active_best_quality = p_rc->arf_q;
int this_height = gf_group_pyramid_level(gf_group, gf_index);
while (this_height > 1) {
active_best_quality = (active_best_quality + active_worst_quality + 1) / 2;
@@ -1565,6 +1634,87 @@ static int get_active_best_quality(const AV1_COMP *const cpi,
return active_best_quality;
}
+// Returns the q_index for a single frame in the GOP.
+// This function assumes that rc_mode == AOM_Q mode.
+int av1_q_mode_get_q_index(int base_q_index, const GF_GROUP *gf_group,
+ const int gf_index, int arf_q) {
+ const int is_intrl_arf_boost =
+ gf_group->update_type[gf_index] == INTNL_ARF_UPDATE;
+ int is_leaf_or_overlay_frame =
+ gf_group->update_type[gf_index] == LF_UPDATE ||
+ gf_group->update_type[gf_index] == OVERLAY_UPDATE ||
+ gf_group->update_type[gf_index] == INTNL_OVERLAY_UPDATE;
+
+ if (is_leaf_or_overlay_frame) return base_q_index;
+
+ if (!is_intrl_arf_boost) return arf_q;
+
+ int active_best_quality = arf_q;
+ int active_worst_quality = base_q_index;
+ int this_height = gf_group_pyramid_level(gf_group, gf_index);
+ while (this_height > 1) {
+ active_best_quality = (active_best_quality + active_worst_quality + 1) / 2;
+ --this_height;
+ }
+ return active_best_quality;
+}
+
+// Returns the q_index for the ARF in the GOP.
+int av1_get_arf_q_index(int base_q_index, int gfu_boost, int bit_depth,
+ int arf_boost_factor) {
+ int active_best_quality =
+ get_gf_active_quality_no_rc(gfu_boost, base_q_index, bit_depth);
+ const int min_boost = get_gf_high_motion_quality(base_q_index, bit_depth);
+ const int boost = min_boost - active_best_quality;
+ return min_boost - (int)(boost * arf_boost_factor);
+}
+
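A worked sketch of the pyramid bisection used by the new av1_q_mode_get_q_index helper above: each pyramid level beyond 1 averages the running q toward the base q index, so deeper internal ARFs land progressively closer to base_q_index. The q values below are illustrative only.

#include <stdio.h>

/* Standalone sketch of the bisection loop above.  Internal ARF frames start
 * from the ARF q and repeatedly average towards the base q index, once per
 * pyramid level above 1. */
static int toy_q_for_level(int base_q_index, int arf_q, int pyramid_level) {
  int active_best = arf_q;
  const int active_worst = base_q_index;
  while (pyramid_level > 1) {
    active_best = (active_best + active_worst + 1) / 2;
    --pyramid_level;
  }
  return active_best;
}

int main(void) {
  const int base_q = 160, arf_q = 96;
  for (int level = 1; level <= 4; ++level)
    printf("pyramid level %d -> q_index %d\n", level,
           toy_q_for_level(base_q, arf_q, level)); /* 96, 128, 144, 152 */
  return 0;
}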
+static int rc_pick_q_and_bounds_q_mode(const AV1_COMP *cpi, int width,
+ int height, int gf_index,
+ int *bottom_index, int *top_index) {
+ const AV1_COMMON *const cm = &cpi->common;
+ const RATE_CONTROL *const rc = &cpi->rc;
+ const AV1EncoderConfig *const oxcf = &cpi->oxcf;
+ const int cq_level =
+ get_active_cq_level(rc, oxcf, frame_is_intra_only(cm), cpi->superres_mode,
+ cm->superres_scale_denominator);
+ int active_best_quality = 0;
+ int active_worst_quality = rc->active_worst_quality;
+ int q;
+
+ if (frame_is_intra_only(cm)) {
+ const int is_fwd_kf = cm->current_frame.frame_type == KEY_FRAME &&
+ cm->show_frame == 0 && cpi->no_show_fwd_kf;
+ get_intra_q_and_bounds(cpi, width, height, &active_best_quality,
+ &active_worst_quality, cq_level, is_fwd_kf);
+ } else {
+ // Active best quality limited by previous layer.
+ active_best_quality =
+ get_active_best_quality(cpi, active_worst_quality, cq_level, gf_index);
+ }
+
+ *top_index = active_worst_quality;
+ *bottom_index = active_best_quality;
+
+ *top_index = AOMMAX(*top_index, rc->best_quality);
+ *top_index = AOMMIN(*top_index, rc->worst_quality);
+
+ *bottom_index = AOMMAX(*bottom_index, rc->best_quality);
+ *bottom_index = AOMMIN(*bottom_index, rc->worst_quality);
+
+ q = active_best_quality;
+
+ q = AOMMAX(q, rc->best_quality);
+ q = AOMMIN(q, rc->worst_quality);
+
+ assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
+ assert(*bottom_index <= rc->worst_quality &&
+ *bottom_index >= rc->best_quality);
+ assert(q <= rc->worst_quality && q >= rc->best_quality);
+
+ return q;
+}
+
/*!\brief Picks q and q bounds given rate control parameters in \c cpi->rc.
*
* Handles the the general cases not covered by
@@ -1587,20 +1737,25 @@ static int rc_pick_q_and_bounds(const AV1_COMP *cpi, int width, int height,
const RATE_CONTROL *const rc = &cpi->rc;
const AV1EncoderConfig *const oxcf = &cpi->oxcf;
const RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
- const GF_GROUP *gf_group = &cpi->gf_group;
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
assert(IMPLIES(has_no_stats_stage(cpi),
cpi->oxcf.rc_cfg.mode == AOM_Q &&
gf_group->update_type[gf_index] != ARF_UPDATE));
const int cq_level =
get_active_cq_level(rc, oxcf, frame_is_intra_only(cm), cpi->superres_mode,
cm->superres_scale_denominator);
- const int bit_depth = cm->seq_params.bit_depth;
+ const int bit_depth = cm->seq_params->bit_depth;
if (oxcf->q_cfg.use_fixed_qp_offsets) {
- return get_q_using_fixed_offsets(oxcf, rc, gf_group, gf_group->index,
+ return get_q_using_fixed_offsets(oxcf, rc, gf_group, cpi->gf_frame_index,
cq_level, bit_depth);
}
+ if (oxcf->rc_cfg.mode == AOM_Q) {
+ return rc_pick_q_and_bounds_q_mode(cpi, width, height, gf_index,
+ bottom_index, top_index);
+ }
+
int active_best_quality = 0;
int active_worst_quality = rc->active_worst_quality;
int q;
@@ -1620,8 +1775,7 @@ static int rc_pick_q_and_bounds(const AV1_COMP *cpi, int width, int height,
// Active best quality limited by previous layer.
const int pyramid_level = gf_group_pyramid_level(gf_group, gf_index);
- if ((pyramid_level <= 1) || (pyramid_level > MAX_ARF_LAYERS) ||
- (oxcf->rc_cfg.mode == AOM_Q)) {
+ if ((pyramid_level <= 1) || (pyramid_level > MAX_ARF_LAYERS)) {
active_best_quality = get_active_best_quality(cpi, active_worst_quality,
cq_level, gf_index);
} else {
@@ -1668,13 +1822,13 @@ static int rc_pick_q_and_bounds(const AV1_COMP *cpi, int width, int height,
return q;
}
-int av1_rc_pick_q_and_bounds(const AV1_COMP *cpi, RATE_CONTROL *rc, int width,
- int height, int gf_index, int *bottom_index,
- int *top_index) {
+int av1_rc_pick_q_and_bounds(const AV1_COMP *cpi, int width, int height,
+ int gf_index, int *bottom_index, int *top_index) {
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
int q;
// TODO(sarahparker) merge no-stats vbr and altref q computation
// with rc_pick_q_and_bounds().
- const GF_GROUP *gf_group = &cpi->gf_group;
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
if ((cpi->oxcf.rc_cfg.mode != AOM_Q ||
gf_group->update_type[gf_index] == ARF_UPDATE) &&
has_no_stats_stage(cpi)) {
@@ -1694,7 +1848,7 @@ int av1_rc_pick_q_and_bounds(const AV1_COMP *cpi, RATE_CONTROL *rc, int width,
q = rc_pick_q_and_bounds(cpi, width, height, gf_index, bottom_index,
top_index);
}
- if (gf_group->update_type[gf_index] == ARF_UPDATE) rc->arf_q = q;
+ if (gf_group->update_type[gf_index] == ARF_UPDATE) p_rc->arf_q = q;
return q;
}
@@ -1756,11 +1910,12 @@ void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
const AV1_COMMON *const cm = &cpi->common;
const CurrentFrame *const current_frame = &cm->current_frame;
RATE_CONTROL *const rc = &cpi->rc;
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const RefreshFrameFlagsInfo *const refresh_frame_flags = &cpi->refresh_frame;
const int is_intrnl_arf =
- gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE;
+ gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE;
const int qindex = cm->quant_params.base_qindex;
@@ -1776,7 +1931,7 @@ void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
rc->avg_frame_qindex[KEY_FRAME] =
ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[KEY_FRAME] + qindex, 2);
} else {
- if ((cpi->use_svc && cpi->oxcf.rc_cfg.mode == AOM_CBR) ||
+ if ((cpi->ppi->use_svc && cpi->oxcf.rc_cfg.mode == AOM_CBR) ||
(!rc->is_src_frame_alt_ref &&
!(refresh_frame_flags->golden_frame || is_intrnl_arf ||
refresh_frame_flags->alt_ref_frame))) {
@@ -1784,7 +1939,7 @@ void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
rc->avg_frame_qindex[INTER_FRAME] =
ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[INTER_FRAME] + qindex, 2);
rc->ni_frames++;
- rc->tot_q += av1_convert_qindex_to_q(qindex, cm->seq_params.bit_depth);
+ rc->tot_q += av1_convert_qindex_to_q(qindex, cm->seq_params->bit_depth);
rc->avg_q = rc->tot_q / rc->ni_frames;
// Calculate the average Q for normal inter frames (not key or GFU
// frames).
@@ -1792,7 +1947,23 @@ void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
rc->ni_av_qi = rc->ni_tot_qi / rc->ni_frames;
}
}
-
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ /* TODO(FPMT): The current update happens in cpi->rc.avg_frame_qindex,
+ * which needs to be handled appropriately in the final FPMT implementation
+ * so that these values are carried over to subsequent frames. The
+ * avg_frame_qindex update is accumulated across frames, so the values from
+ * all individual parallel frames need to be taken into account after all
+ * the parallel frames are encoded.
+ *
+ * The variable temp_avg_frame_qindex is introduced only for quality
+ * simulation purposes; it retains the value from before the parallel encode
+ * frames and is updated based on the update flag.
+ */
+ if (cpi->do_frame_data_update && !rc->is_src_frame_alt_ref) {
+ for (int index = 0; index < FRAME_TYPES; index++)
+ cpi->ppi->temp_avg_frame_qindex[index] = rc->avg_frame_qindex[index];
+ }
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
// Keep record of last boosted (KF/GF/ARF) Q value.
// If the current frame is coded at a lower Q then we also update it.
// If all mbs in this group are skipped only update if the Q value is
@@ -1800,12 +1971,12 @@ void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
// This is used to help set quality in forced key frames to reduce popping
if ((qindex < rc->last_boosted_qindex) ||
(current_frame->frame_type == KEY_FRAME) ||
- (!rc->constrained_gf_group &&
+ (!p_rc->constrained_gf_group &&
(refresh_frame_flags->alt_ref_frame || is_intrnl_arf ||
(refresh_frame_flags->golden_frame && !rc->is_src_frame_alt_ref)))) {
rc->last_boosted_qindex = qindex;
}
- if (current_frame->frame_type == KEY_FRAME) rc->last_kf_qindex = qindex;
+ if (current_frame->frame_type == KEY_FRAME) p_rc->last_kf_qindex = qindex;
update_buffer_level(cpi, rc->projected_frame_size);
rc->prev_avg_frame_bandwidth = rc->avg_frame_bandwidth;
@@ -1853,6 +2024,7 @@ void av1_rc_postencode_update_drop_frame(AV1_COMP *cpi) {
cpi->rc.frames_to_key--;
cpi->rc.rc_2_frame = 0;
cpi->rc.rc_1_frame = 0;
+ cpi->rc.prev_avg_frame_bandwidth = cpi->rc.avg_frame_bandwidth;
}
int av1_find_qindex(double desired_q, aom_bit_depth_t bit_depth,
@@ -1954,7 +2126,7 @@ void av1_rc_set_gf_interval_range(const AV1_COMP *const cpi,
* The number of stats available in the case of LAP is limited,
* hence setting to max_gf_interval.
*/
- if (cpi->lap_enabled)
+ if (cpi->ppi->lap_enabled)
rc->static_scene_max_gf_interval = rc->max_gf_interval + 1;
else
rc->static_scene_max_gf_interval = MAX_STATIC_GF_GROUP_LENGTH;
@@ -2003,8 +2175,8 @@ static void vbr_rate_correction(AV1_COMP *cpi, int *this_frame_target) {
RATE_CONTROL *const rc = &cpi->rc;
int64_t vbr_bits_off_target = rc->vbr_bits_off_target;
const int stats_count =
- cpi->twopass.stats_buf_ctx->total_stats != NULL
- ? (int)cpi->twopass.stats_buf_ctx->total_stats->count
+ cpi->ppi->twopass.stats_buf_ctx->total_stats != NULL
+ ? (int)cpi->ppi->twopass.stats_buf_ctx->total_stats->count
: 0;
const int frame_window = AOMMIN(
16, (int)(stats_count - (int)cpi->common.current_frame.frame_number));
@@ -2048,16 +2220,17 @@ int av1_calc_pframe_target_size_one_pass_vbr(
const AV1_COMP *const cpi, FRAME_UPDATE_TYPE frame_update_type) {
static const int af_ratio = 10;
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
int64_t target;
#if USE_ALTREF_FOR_ONE_PASS
if (frame_update_type == KF_UPDATE || frame_update_type == GF_UPDATE ||
frame_update_type == ARF_UPDATE) {
- target = ((int64_t)rc->avg_frame_bandwidth * rc->baseline_gf_interval *
+ target = ((int64_t)rc->avg_frame_bandwidth * p_rc->baseline_gf_interval *
af_ratio) /
- (rc->baseline_gf_interval + af_ratio - 1);
+ (p_rc->baseline_gf_interval + af_ratio - 1);
} else {
- target = ((int64_t)rc->avg_frame_bandwidth * rc->baseline_gf_interval) /
- (rc->baseline_gf_interval + af_ratio - 1);
+ target = ((int64_t)rc->avg_frame_bandwidth * p_rc->baseline_gf_interval) /
+ (p_rc->baseline_gf_interval + af_ratio - 1);
}
if (target > INT_MAX) target = INT_MAX;
#else
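A worked example of the one-pass VBR split above with af_ratio = 10 and hypothetical bandwidth numbers: the boosted frame (KF/GF/ARF) takes roughly ten times a regular frame's share, while the GF group as a whole still sums to gf_interval times the average frame bandwidth.

#include <stdint.h>
#include <stdio.h>

int main(void) {
  const int af_ratio = 10;
  const int64_t avg_frame_bandwidth = 80000; /* bits per frame, hypothetical */
  const int gf_interval = 16;

  /* Same arithmetic as the hunk above, with p_rc->baseline_gf_interval and
   * rc->avg_frame_bandwidth replaced by toy locals. */
  const int64_t boosted = avg_frame_bandwidth * gf_interval * af_ratio /
                          (gf_interval + af_ratio - 1);
  const int64_t regular =
      avg_frame_bandwidth * gf_interval / (gf_interval + af_ratio - 1);

  printf("boosted frame target: %lld bits\n", (long long)boosted);
  printf("regular frame target: %lld bits\n", (long long)regular);
  /* 1 boosted + (gf_interval - 1) regular frames equals gf_interval * avg. */
  printf("group total:          %lld bits (vs %lld)\n",
         (long long)(boosted + (gf_interval - 1) * regular),
         (long long)(avg_frame_bandwidth * gf_interval));
  return 0;
}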
@@ -2077,9 +2250,10 @@ int av1_calc_pframe_target_size_one_pass_cbr(
const AV1_COMP *cpi, FRAME_UPDATE_TYPE frame_update_type) {
const AV1EncoderConfig *oxcf = &cpi->oxcf;
const RATE_CONTROL *rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *p_rc = &cpi->ppi->p_rc;
const RateControlCfg *rc_cfg = &oxcf->rc_cfg;
- const int64_t diff = rc->optimal_buffer_level - rc->buffer_level;
- const int64_t one_pct_bits = 1 + rc->optimal_buffer_level / 100;
+ const int64_t diff = p_rc->optimal_buffer_level - rc->buffer_level;
+ const int64_t one_pct_bits = 1 + p_rc->optimal_buffer_level / 100;
int min_frame_target =
AOMMAX(rc->avg_frame_bandwidth >> 4, FRAME_OVERHEAD_BITS);
int target;
@@ -2087,17 +2261,17 @@ int av1_calc_pframe_target_size_one_pass_cbr(
if (rc_cfg->gf_cbr_boost_pct) {
const int af_ratio_pct = rc_cfg->gf_cbr_boost_pct + 100;
if (frame_update_type == GF_UPDATE || frame_update_type == OVERLAY_UPDATE) {
- target =
- (rc->avg_frame_bandwidth * rc->baseline_gf_interval * af_ratio_pct) /
- (rc->baseline_gf_interval * 100 + af_ratio_pct - 100);
+ target = (rc->avg_frame_bandwidth * p_rc->baseline_gf_interval *
+ af_ratio_pct) /
+ (p_rc->baseline_gf_interval * 100 + af_ratio_pct - 100);
} else {
- target = (rc->avg_frame_bandwidth * rc->baseline_gf_interval * 100) /
- (rc->baseline_gf_interval * 100 + af_ratio_pct - 100);
+ target = (rc->avg_frame_bandwidth * p_rc->baseline_gf_interval * 100) /
+ (p_rc->baseline_gf_interval * 100 + af_ratio_pct - 100);
}
} else {
target = rc->avg_frame_bandwidth;
}
- if (cpi->use_svc) {
+ if (cpi->ppi->use_svc) {
// Note that for layers, avg_frame_bandwidth is the cumulative
// per-frame-bandwidth. For the target size of this frame, use the
// layer average frame size (i.e., non-cumulative per-frame-bw).
@@ -2129,11 +2303,12 @@ int av1_calc_pframe_target_size_one_pass_cbr(
int av1_calc_iframe_target_size_one_pass_cbr(const AV1_COMP *cpi) {
const RATE_CONTROL *rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *p_rc = &cpi->ppi->p_rc;
int target;
if (cpi->common.current_frame.frame_number == 0) {
- target = ((rc->starting_buffer_level / 2) > INT_MAX)
+ target = ((p_rc->starting_buffer_level / 2) > INT_MAX)
? INT_MAX
- : (int)(rc->starting_buffer_level / 2);
+ : (int)(p_rc->starting_buffer_level / 2);
} else {
int kf_boost = 32;
double framerate = cpi->framerate;
@@ -2177,7 +2352,7 @@ void av1_set_reference_structure_one_pass_rt(AV1_COMP *cpi, int gf_update) {
int gld_idx = 0;
int alt_ref_idx = 0;
ext_refresh_frame_flags->update_pending = 1;
- svc->external_ref_frame_config = 1;
+ svc->set_ref_frame_config = 1;
ext_flags->ref_frame_flags = 0;
ext_refresh_frame_flags->last_frame = 1;
ext_refresh_frame_flags->golden_frame = 0;
@@ -2268,9 +2443,9 @@ static void rc_scene_detection_onepass_rt(AV1_COMP *cpi) {
int num_samples = 0;
const int thresh = 6;
// SAD is computed on 64x64 blocks
- const int sb_size_by_mb = (cm->seq_params.sb_size == BLOCK_128X128)
- ? (cm->seq_params.mib_size >> 1)
- : cm->seq_params.mib_size;
+ const int sb_size_by_mb = (cm->seq_params->sb_size == BLOCK_128X128)
+ ? (cm->seq_params->mib_size >> 1)
+ : cm->seq_params->mib_size;
const int sb_cols = (num_mi_cols + sb_size_by_mb - 1) / sb_size_by_mb;
const int sb_rows = (num_mi_rows + sb_size_by_mb - 1) / sb_size_by_mb;
uint64_t sum_sq_thresh = 10000; // sum = sqrt(thresh / 64*64)) ~1.5
@@ -2286,12 +2461,12 @@ static void rc_scene_detection_onepass_rt(AV1_COMP *cpi) {
(sbi_row < sb_rows - 1 && sbi_col < sb_cols - 1) &&
((sbi_row % 2 == 0 && sbi_col % 2 == 0) ||
(sbi_row % 2 != 0 && sbi_col % 2 != 0)))) {
- tmp_sad = cpi->fn_ptr[bsize].sdf(src_y, src_ystride, last_src_y,
- last_src_ystride);
+ tmp_sad = cpi->ppi->fn_ptr[bsize].sdf(src_y, src_ystride, last_src_y,
+ last_src_ystride);
if (check_light_change) {
unsigned int sse, variance;
- variance = cpi->fn_ptr[bsize].vf(src_y, src_ystride, last_src_y,
- last_src_ystride, &sse);
+ variance = cpi->ppi->fn_ptr[bsize].vf(
+ src_y, src_ystride, last_src_y, last_src_ystride, &sse);
// Note: sse - variance = ((sum * sum) >> 12)
// Detect large lighting change.
if (variance < (sse >> 1) && (sse - variance) > sum_sq_thresh) {
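A standalone sketch of the lighting-change test above. Per the note in the hunk, sse - variance equals (sum * sum) >> 12, so the mean-difference term dominates when a block's error comes from a DC shift rather than texture or motion. The thresholds are copied from the hunk; the sample sse/variance pairs are made up.

#include <stdint.h>
#include <stdio.h>

/* A 64x64 block whose error is mostly a DC (brightness) shift has low
 * variance but a large mean-difference component, and is flagged as a
 * lighting change. */
static int toy_is_light_change(uint32_t sse, uint32_t variance) {
  const uint64_t sum_sq_thresh = 10000; /* mean diff of ~1.5 per pixel */
  return variance < (sse >> 1) && (sse - variance) > sum_sq_thresh;
}

int main(void) {
  /* DC shift: small variance, big mean component. */
  printf("dc shift:   %d\n", toy_is_light_change(60000, 12000));
  /* Texture/motion: error mostly in the variance term. */
  printf("motion-ish: %d\n", toy_is_light_change(60000, 45000));
  return 0;
}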
@@ -2344,7 +2519,8 @@ static void rc_scene_detection_onepass_rt(AV1_COMP *cpi) {
static int set_gf_interval_update_onepass_rt(AV1_COMP *cpi,
FRAME_TYPE frame_type) {
RATE_CONTROL *const rc = &cpi->rc;
- GF_GROUP *const gf_group = &cpi->gf_group;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
ResizePendingParams *const resize_pending_params =
&cpi->resize_pending_params;
int gf_update = 0;
@@ -2360,34 +2536,34 @@ static int set_gf_interval_update_onepass_rt(AV1_COMP *cpi,
if (cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ)
av1_cyclic_refresh_set_golden_update(cpi);
else
- rc->baseline_gf_interval = MAX_GF_INTERVAL;
- if (rc->baseline_gf_interval > rc->frames_to_key)
- rc->baseline_gf_interval = rc->frames_to_key;
- rc->gfu_boost = DEFAULT_GF_BOOST_RT;
- rc->constrained_gf_group =
- (rc->baseline_gf_interval >= rc->frames_to_key) ? 1 : 0;
- rc->frames_till_gf_update_due = rc->baseline_gf_interval;
- gf_group->index = 0;
+ p_rc->baseline_gf_interval = MAX_GF_INTERVAL;
+ if (p_rc->baseline_gf_interval > rc->frames_to_key)
+ p_rc->baseline_gf_interval = rc->frames_to_key;
+ p_rc->gfu_boost = DEFAULT_GF_BOOST_RT;
+ p_rc->constrained_gf_group =
+ (p_rc->baseline_gf_interval >= rc->frames_to_key) ? 1 : 0;
+ rc->frames_till_gf_update_due = p_rc->baseline_gf_interval;
+ cpi->gf_frame_index = 0;
// SVC does not use GF as periodic boost.
// TODO(marpan): Find better way to disable this for SVC.
- if (cpi->use_svc) {
+ if (cpi->ppi->use_svc) {
SVC *const svc = &cpi->svc;
- rc->baseline_gf_interval = MAX_STATIC_GF_GROUP_LENGTH - 1;
- rc->gfu_boost = 1;
- rc->constrained_gf_group = 0;
- rc->frames_till_gf_update_due = rc->baseline_gf_interval;
+ p_rc->baseline_gf_interval = MAX_STATIC_GF_GROUP_LENGTH - 1;
+ p_rc->gfu_boost = 1;
+ p_rc->constrained_gf_group = 0;
+ rc->frames_till_gf_update_due = p_rc->baseline_gf_interval;
for (int layer = 0;
layer < svc->number_spatial_layers * svc->number_temporal_layers;
++layer) {
LAYER_CONTEXT *const lc = &svc->layer_context[layer];
- lc->rc.baseline_gf_interval = rc->baseline_gf_interval;
- lc->rc.gfu_boost = rc->gfu_boost;
- lc->rc.constrained_gf_group = rc->constrained_gf_group;
+ lc->p_rc.baseline_gf_interval = p_rc->baseline_gf_interval;
+ lc->p_rc.gfu_boost = p_rc->gfu_boost;
+ lc->p_rc.constrained_gf_group = p_rc->constrained_gf_group;
lc->rc.frames_till_gf_update_due = rc->frames_till_gf_update_due;
lc->group_index = 0;
}
}
- gf_group->size = rc->baseline_gf_interval;
+ gf_group->size = p_rc->baseline_gf_interval;
gf_group->update_type[0] =
(frame_type == KEY_FRAME) ? KF_UPDATE : GF_UPDATE;
gf_update = 1;
@@ -2398,6 +2574,7 @@ static int set_gf_interval_update_onepass_rt(AV1_COMP *cpi,
static void resize_reset_rc(AV1_COMP *cpi, int resize_width, int resize_height,
int prev_width, int prev_height) {
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
SVC *const svc = &cpi->svc;
double tot_scale_change = 1.0;
int target_bits_per_frame;
@@ -2406,8 +2583,8 @@ static void resize_reset_rc(AV1_COMP *cpi, int resize_width, int resize_height,
tot_scale_change = (double)(resize_width * resize_height) /
(double)(prev_width * prev_height);
// Reset buffer level to optimal, update target size.
- rc->buffer_level = rc->optimal_buffer_level;
- rc->bits_off_target = rc->optimal_buffer_level;
+ rc->buffer_level = p_rc->optimal_buffer_level;
+ rc->bits_off_target = p_rc->optimal_buffer_level;
rc->this_frame_target =
av1_calc_pframe_target_size_one_pass_cbr(cpi, INTER_FRAME);
target_bits_per_frame = rc->this_frame_target;
@@ -2431,8 +2608,8 @@ static void resize_reset_rc(AV1_COMP *cpi, int resize_width, int resize_height,
svc->number_temporal_layers +
tl];
lc->rc.resize_state = rc->resize_state;
- lc->rc.buffer_level = lc->rc.optimal_buffer_level;
- lc->rc.bits_off_target = lc->rc.optimal_buffer_level;
+ lc->rc.buffer_level = lc->p_rc.optimal_buffer_level;
+ lc->rc.bits_off_target = lc->p_rc.optimal_buffer_level;
lc->rc.rate_correction_factors[INTER_FRAME] =
rc->rate_correction_factors[INTER_FRAME];
}
@@ -2464,6 +2641,7 @@ static void resize_reset_rc(AV1_COMP *cpi, int resize_width, int resize_height,
static void dynamic_resize_one_pass_cbr(AV1_COMP *cpi) {
const AV1_COMMON *const cm = &cpi->common;
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
RESIZE_ACTION resize_action = NO_RESIZE;
const int avg_qp_thr1 = 70;
const int avg_qp_thr2 = 50;
@@ -2486,7 +2664,7 @@ static void dynamic_resize_one_pass_cbr(AV1_COMP *cpi) {
if (cpi->rc.frames_since_key > cpi->framerate) {
const int window = AOMMIN(30, (int)(2 * cpi->framerate));
rc->resize_avg_qp += rc->last_q[INTER_FRAME];
- if (cpi->rc.buffer_level < (int)(30 * rc->optimal_buffer_level / 100))
+ if (cpi->rc.buffer_level < (int)(30 * p_rc->optimal_buffer_level / 100))
++rc->resize_buffer_underflow;
++rc->resize_count;
// Check for resize action every "window" frames.
@@ -2548,8 +2726,9 @@ void av1_get_one_pass_rt_params(AV1_COMP *cpi,
EncodeFrameParams *const frame_params,
unsigned int frame_flags) {
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
AV1_COMMON *const cm = &cpi->common;
- GF_GROUP *const gf_group = &cpi->gf_group;
+ GF_GROUP *const gf_group = &cpi->ppi->gf_group;
SVC *const svc = &cpi->svc;
ResizePendingParams *const resize_pending_params =
&cpi->resize_pending_params;
@@ -2559,35 +2738,35 @@ void av1_get_one_pass_rt_params(AV1_COMP *cpi,
svc->number_temporal_layers);
// Turn this on to explicitly set the reference structure rather than
// relying on internal/default structure.
- if (cpi->use_svc) {
+ if (cpi->ppi->use_svc) {
av1_update_temporal_layer_framerate(cpi);
av1_restore_layer_context(cpi);
}
// Set frame type.
- if ((!cpi->use_svc && rc->frames_to_key == 0) ||
- (cpi->use_svc && svc->spatial_layer_id == 0 &&
+ if ((!cpi->ppi->use_svc && rc->frames_to_key == 0) ||
+ (cpi->ppi->use_svc && svc->spatial_layer_id == 0 &&
(cpi->oxcf.kf_cfg.key_freq_max == 0 ||
svc->current_superframe % cpi->oxcf.kf_cfg.key_freq_max == 0)) ||
(frame_flags & FRAMEFLAGS_KEY)) {
frame_params->frame_type = KEY_FRAME;
- rc->this_key_frame_forced =
+ p_rc->this_key_frame_forced =
cm->current_frame.frame_number != 0 && rc->frames_to_key == 0;
rc->frames_to_key = cpi->oxcf.kf_cfg.key_freq_max;
- rc->kf_boost = DEFAULT_KF_BOOST_RT;
- gf_group->update_type[gf_group->index] = KF_UPDATE;
- gf_group->frame_type[gf_group->index] = KEY_FRAME;
- gf_group->refbuf_state[gf_group->index] = REFBUF_RESET;
- if (cpi->use_svc) {
+ p_rc->kf_boost = DEFAULT_KF_BOOST_RT;
+ gf_group->update_type[cpi->gf_frame_index] = KF_UPDATE;
+ gf_group->frame_type[cpi->gf_frame_index] = KEY_FRAME;
+ gf_group->refbuf_state[cpi->gf_frame_index] = REFBUF_RESET;
+ if (cpi->ppi->use_svc) {
if (cm->current_frame.frame_number > 0)
av1_svc_reset_temporal_layers(cpi, 1);
svc->layer_context[layer].is_key_frame = 1;
}
} else {
frame_params->frame_type = INTER_FRAME;
- gf_group->update_type[gf_group->index] = LF_UPDATE;
- gf_group->frame_type[gf_group->index] = INTER_FRAME;
- gf_group->refbuf_state[gf_group->index] = REFBUF_UPDATE;
- if (cpi->use_svc) {
+ gf_group->update_type[cpi->gf_frame_index] = LF_UPDATE;
+ gf_group->frame_type[cpi->gf_frame_index] = INTER_FRAME;
+ gf_group->refbuf_state[cpi->gf_frame_index] = REFBUF_UPDATE;
+ if (cpi->ppi->use_svc) {
LAYER_CONTEXT *lc = &svc->layer_context[layer];
lc->is_key_frame =
svc->spatial_layer_id == 0
@@ -2596,7 +2775,7 @@ void av1_get_one_pass_rt_params(AV1_COMP *cpi,
}
}
// Check for scene change, for non-SVC for now.
- if (!cpi->use_svc && cpi->sf.rt_sf.check_scene_detection)
+ if (!cpi->ppi->use_svc && cpi->sf.rt_sf.check_scene_detection)
rc_scene_detection_onepass_rt(cpi);
// Check for dynamic resize, for single spatial layer for now.
// For temporal layers only check on base temporal layer.
@@ -2628,14 +2807,14 @@ void av1_get_one_pass_rt_params(AV1_COMP *cpi,
target = av1_calc_iframe_target_size_one_pass_cbr(cpi);
} else {
target = av1_calc_pframe_target_size_one_pass_cbr(
- cpi, gf_group->update_type[gf_group->index]);
+ cpi, gf_group->update_type[cpi->gf_frame_index]);
}
} else {
if (frame_params->frame_type == KEY_FRAME) {
target = av1_calc_iframe_target_size_one_pass_vbr(cpi);
} else {
target = av1_calc_pframe_target_size_one_pass_vbr(
- cpi, gf_group->update_type[gf_group->index]);
+ cpi, gf_group->update_type[cpi->gf_frame_index]);
}
}
if (cpi->oxcf.rc_cfg.mode == AOM_Q)
@@ -2644,11 +2823,21 @@ void av1_get_one_pass_rt_params(AV1_COMP *cpi,
av1_rc_set_frame_target(cpi, target, cm->width, cm->height);
rc->base_frame_target = target;
cm->current_frame.frame_type = frame_params->frame_type;
+ // For fixed-mode SVC: if KSVC is enabled, remove inter-layer prediction
+ // on spatial enhancement layer frames whose base layer frame is not a
+ // KEY frame.
+ if (cpi->ppi->use_svc && !svc->use_flexible_mode && svc->ksvc_fixed_mode &&
+ svc->number_spatial_layers > 1 &&
+ !svc->layer_context[layer].is_key_frame) {
+ ExternalFlags *const ext_flags = &cpi->ext_flags;
+ ext_flags->ref_frame_flags ^= AOM_GOLD_FLAG;
+ }
}
int av1_encodedframe_overshoot_cbr(AV1_COMP *cpi, int *q) {
AV1_COMMON *const cm = &cpi->common;
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
SPEED_FEATURES *const sf = &cpi->sf;
int thresh_qp = 7 * (rc->worst_quality >> 3);
// Lower thresh_qp for video (more overshoot at lower Q) to be
@@ -2670,8 +2859,8 @@ int av1_encodedframe_overshoot_cbr(AV1_COMP *cpi, int *q) {
// have settled down to a very different (low QP) state, then not adjusting
// them may cause next frame to select low QP and overshoot again.
cpi->rc.avg_frame_qindex[INTER_FRAME] = *q;
- rc->buffer_level = rc->optimal_buffer_level;
- rc->bits_off_target = rc->optimal_buffer_level;
+ rc->buffer_level = p_rc->optimal_buffer_level;
+ rc->bits_off_target = p_rc->optimal_buffer_level;
// Reset rate under/over-shoot flags.
cpi->rc.rc_1_frame = 0;
cpi->rc.rc_2_frame = 0;
@@ -2680,7 +2869,7 @@ int av1_encodedframe_overshoot_cbr(AV1_COMP *cpi, int *q) {
(int)(((uint64_t)target_size << BPER_MB_NORMBITS) / cm->mi_params.MBs);
// Rate correction factor based on target_bits_per_mb and qp (==max_QP).
// This comes from the inverse computation of vp9_rc_bits_per_mb().
- q2 = av1_convert_qindex_to_q(*q, cm->seq_params.bit_depth);
+ q2 = av1_convert_qindex_to_q(*q, cm->seq_params->bit_depth);
enumerator = 1800000; // Factor for inter frame.
enumerator += (int)(enumerator * q2) >> 12;
new_correction_factor = (double)target_bits_per_mb * q2 / enumerator;
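A standalone sketch of the inverse bits-per-mb computation above, which reseeds the rate correction factor after a large overshoot. The 1800000 inter-frame factor is copied from the hunk; the bits-per-mb and q inputs are made up, and av1_convert_qindex_to_q is not reproduced.

#include <stdio.h>

/* Given the target bits per macroblock and the real q value for the clamp QP,
 * recover the correction factor that would make the bits-per-mb model hit
 * that target (the inverse of the forward rate model). */
static double toy_new_correction_factor(int target_bits_per_mb, double q2) {
  int enumerator = 1800000; /* factor for inter frames, from the hunk */
  enumerator += (int)(enumerator * q2) >> 12;
  return (double)target_bits_per_mb * q2 / enumerator;
}

int main(void) {
  /* Hypothetical inputs: 12000 bits/mb target at a real q of 40. */
  printf("correction factor: %.3f\n", toy_new_correction_factor(12000, 40.0));
  return 0;
}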
diff --git a/third_party/libaom/source/libaom/av1/encoder/ratectrl.h b/third_party/libaom/source/libaom/av1/encoder/ratectrl.h
index 3f1756f5ca..a1567f038c 100644
--- a/third_party/libaom/source/libaom/av1/encoder/ratectrl.h
+++ b/third_party/libaom/source/libaom/av1/encoder/ratectrl.h
@@ -129,11 +129,6 @@ typedef struct {
int this_frame_target; // Actual frame target after rc adjustment.
/*!
- * Target bit budget for the current GF / ARF group of frame.
- */
- int64_t gf_group_bits;
-
- /*!
* Projected size for current frame
*/
int projected_frame_size;
@@ -159,20 +154,6 @@ typedef struct {
int last_boosted_qindex;
/*!
- * Q used for last boosted (non leaf) frame
- */
- int last_kf_qindex;
-
- /*!
- * Boost factor used to calculate the extra bits allocated to ARFs and GFs
- */
- int gfu_boost;
- /*!
- * Boost factor used to calculate the extra bits allocated to the key frame
- */
- int kf_boost;
-
- /*!
* Correction factors used to adjust the q estimate for a given target rate
* in the encode loop.
*/
@@ -193,28 +174,10 @@ typedef struct {
*/
int intervals_till_gf_calculate_due;
- /*!
- * Stores the determined gf group lengths for a set of gf groups
- */
- int gf_intervals[MAX_NUM_GF_INTERVALS];
-
- /*!
- * The current group's index into gf_intervals[]
- */
- int cur_gf_index;
-
/*!\cond */
- int num_regions;
- REGIONS regions[MAX_FIRSTPASS_ANALYSIS_FRAMES];
- double cor_coeff[MAX_FIRSTPASS_ANALYSIS_FRAMES];
- int regions_offset; // offset of regions from the last keyframe
- int frames_till_regions_update;
-
int min_gf_interval;
int max_gf_interval;
int static_scene_max_gf_interval;
- int baseline_gf_interval;
- int constrained_gf_group;
/*!\endcond */
/*!
* Frames before the next key frame
@@ -222,8 +185,6 @@ typedef struct {
int frames_to_key;
/*!\cond */
int frames_since_key;
- int this_key_frame_forced;
- int next_key_frame_forced;
int is_src_frame_alt_ref;
int sframe_due;
@@ -269,18 +230,6 @@ typedef struct {
*/
int best_quality;
- /*!
- * Initial buffer level in ms for CBR / low delay encoding
- */
- int64_t starting_buffer_level;
- /*!
- * Optimum / target buffer level in ms for CBR / low delay encoding
- */
- int64_t optimal_buffer_level;
- /*!
- * Maximum target buffer level in ms for CBR / low delay encoding
- */
- int64_t maximum_buffer_size;
/*!\cond */
// rate control history for last frame(1) and the frame before(2).
@@ -292,14 +241,8 @@ typedef struct {
int q_1_frame;
int q_2_frame;
- float_t arf_boost_factor;
-
/*!\endcond */
/*!
- * Q index used for ALT frame
- */
- int arf_q;
- /*!
* Proposed maximum allowed Q for current frame
*/
int active_worst_quality;
@@ -309,35 +252,119 @@ typedef struct {
int active_best_quality[MAX_ARF_LAYERS + 1];
/*!\cond */
+ // Track amount of low motion in scene
+ int avg_frame_low_motion;
+
+ // For dynamic resize, 1 pass cbr.
+ RESIZE_STATE resize_state;
+ int resize_avg_qp;
+ int resize_buffer_underflow;
+ int resize_count;
+ /*!\endcond */
+} RATE_CONTROL;
+
+/*!
+ * \brief Primary Rate Control parameters and status
+ */
+typedef struct {
+ // Sub-gop level rate targeting variables
+
+ /*!
+ * Target bit budget for the current GF / ARF group of frame.
+ */
+ int64_t gf_group_bits;
+
+ /*!
+ * Boost factor used to calculate the extra bits allocated to the key frame
+ */
+ int kf_boost;
+
+ /*!
+ * Boost factor used to calculate the extra bits allocated to ARFs and GFs
+ */
+ int gfu_boost;
+
+ /*!
+ * Stores the determined gf group lengths for a set of gf groups
+ */
+ int gf_intervals[MAX_NUM_GF_INTERVALS];
+
+ /*!
+ * The current group's index into gf_intervals[]
+ */
+ int cur_gf_index;
+
+ /*!\cond */
+ int num_regions;
+
+ REGIONS regions[MAX_FIRSTPASS_ANALYSIS_FRAMES];
+ int regions_offset; // offset of regions from the last keyframe
+ int frames_till_regions_update;
+
+ int baseline_gf_interval;
+
+ int constrained_gf_group;
+
+ int this_key_frame_forced;
+
+ int next_key_frame_forced;
+ /*!\endcond */
+
+ /*!
+ * Initial buffer level in ms for CBR / low delay encoding
+ */
+ int64_t starting_buffer_level;
+
+ /*!
+ * Optimum / target buffer level in ms for CBR / low delay encoding
+ */
+ int64_t optimal_buffer_level;
+
+ /*!
+ * Maximum target buffer level in ms for CBR / low delay encoding
+ */
+ int64_t maximum_buffer_size;
+
+ /*!
+ * Q index used for ALT frame
+ */
+ int arf_q;
+
+ /*!\cond */
+ float_t arf_boost_factor;
+
int base_layer_qp;
// Total number of stats used only for kf_boost calculation.
int num_stats_used_for_kf_boost;
+
// Total number of stats used only for gfu_boost calculation.
int num_stats_used_for_gfu_boost;
+
// Total number of stats required by gfu_boost calculation.
int num_stats_required_for_gfu_boost;
+
int next_is_fwd_key;
+
int enable_scenecut_detection;
- int use_arf_in_this_kf_group;
- // Track amount of low motion in scene
- int avg_frame_low_motion;
- // For dynamic resize, 1 pass cbr.
- RESIZE_STATE resize_state;
- int resize_avg_qp;
- int resize_buffer_underflow;
- int resize_count;
+ int use_arf_in_this_kf_group;
/*!\endcond */
-} RATE_CONTROL;
-/*!\cond */
+ /*!
+ * Q used for last boosted (non leaf) frame
+ */
+ int last_kf_qindex;
+} PRIMARY_RATE_CONTROL;
struct AV1_COMP;
struct AV1EncoderConfig;
+void av1_primary_rc_init(const struct AV1EncoderConfig *oxcf,
+ PRIMARY_RATE_CONTROL *p_rc);
+
void av1_rc_init(const struct AV1EncoderConfig *oxcf, int pass,
- RATE_CONTROL *rc);
+ RATE_CONTROL *rc, const PRIMARY_RATE_CONTROL *const p_rc);
int av1_estimate_bits_at_q(FRAME_TYPE frame_kind, int q, int mbs,
double correction_factor, aom_bit_depth_t bit_depth,
@@ -415,7 +442,6 @@ void av1_rc_compute_frame_size_bounds(const struct AV1_COMP *cpi,
*
* \ingroup rate_control
* \param[in] cpi Top level encoder structure
- * \param[in,out] rc Top level rate control structure
* \param[in] width Coded frame width
* \param[in] height Coded frame height
* \param[in] gf_index Index of this frame in the golden frame group
@@ -424,9 +450,8 @@ void av1_rc_compute_frame_size_bounds(const struct AV1_COMP *cpi,
* \return Returns selected q index to be used for encoding this frame.
* Also, updates \c rc->arf_q.
*/
-int av1_rc_pick_q_and_bounds(const struct AV1_COMP *cpi, RATE_CONTROL *rc,
- int width, int height, int gf_index,
- int *bottom_index, int *top_index);
+int av1_rc_pick_q_and_bounds(const struct AV1_COMP *cpi, int width, int height,
+ int gf_index, int *bottom_index, int *top_index);
/*!\brief Estimates q to achieve a target bits per frame
*
diff --git a/third_party/libaom/source/libaom/av1/encoder/rc_utils.h b/third_party/libaom/source/libaom/av1/encoder/rc_utils.h
index 98cec2e003..0a9d02d17b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/rc_utils.h
+++ b/third_party/libaom/source/libaom/av1/encoder/rc_utils.h
@@ -19,18 +19,45 @@
extern "C" {
#endif
-static AOM_INLINE void set_rc_buffer_sizes(RATE_CONTROL *rc,
- const RateControlCfg *rc_cfg) {
+static AOM_INLINE void check_reset_rc_flag(AV1_COMP *cpi) {
+ RATE_CONTROL *rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ if (cpi->common.current_frame.frame_number >
+ (unsigned int)cpi->svc.number_spatial_layers) {
+ if (cpi->ppi->use_svc) {
+ av1_svc_check_reset_layer_rc_flag(cpi);
+ } else {
+ if (rc->avg_frame_bandwidth > (3 * rc->prev_avg_frame_bandwidth >> 1) ||
+ rc->avg_frame_bandwidth < (rc->prev_avg_frame_bandwidth >> 1)) {
+ rc->rc_1_frame = 0;
+ rc->rc_2_frame = 0;
+ rc->bits_off_target = p_rc->optimal_buffer_level;
+ rc->buffer_level = p_rc->optimal_buffer_level;
+ }
+ }
+ }
+}
+
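The bandwidth test above (non-SVC branch) resets the rate-correction state whenever the average frame bandwidth leaves roughly the [0.5x, 1.5x] band around the previous value; a minimal sketch of that check with illustrative numbers:

    /* Sketch: with prev_avg_frame_bandwidth = 8000 bits/frame, a reset is
     * triggered when the new average exceeds 12000 (3 * 8000 >> 1) or drops
     * below 4000 (8000 >> 1). */
    static int needs_rc_reset(int avg_bw, int prev_avg_bw) {
      return avg_bw > (3 * prev_avg_bw >> 1) || avg_bw < (prev_avg_bw >> 1);
    }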
+static AOM_INLINE void set_rc_buffer_sizes(AV1_COMP *cpi) {
+ RATE_CONTROL *rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
+ const RateControlCfg *const rc_cfg = &cpi->oxcf.rc_cfg;
+
const int64_t bandwidth = rc_cfg->target_bandwidth;
const int64_t starting = rc_cfg->starting_buffer_level_ms;
const int64_t optimal = rc_cfg->optimal_buffer_level_ms;
const int64_t maximum = rc_cfg->maximum_buffer_size_ms;
- rc->starting_buffer_level = starting * bandwidth / 1000;
- rc->optimal_buffer_level =
+ p_rc->starting_buffer_level = starting * bandwidth / 1000;
+ p_rc->optimal_buffer_level =
(optimal == 0) ? bandwidth / 8 : optimal * bandwidth / 1000;
- rc->maximum_buffer_size =
+ p_rc->maximum_buffer_size =
(maximum == 0) ? bandwidth / 8 : maximum * bandwidth / 1000;
+
+ // Under a configuration change, where maximum_buffer_size may change,
+ // keep buffer level clipped to the maximum allowed buffer size.
+ rc->bits_off_target = AOMMIN(rc->bits_off_target, p_rc->maximum_buffer_size);
+ rc->buffer_level = AOMMIN(rc->buffer_level, p_rc->maximum_buffer_size);
}
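set_rc_buffer_sizes() converts the configured millisecond levels into bits at the target bandwidth, falls back to an eighth of a second's worth of data when a level is left at zero, and then clamps the live buffer state to the (possibly smaller) new maximum. A worked sketch with illustrative values:

    #include <stdint.h>

    /* Sketch: target_bandwidth = 1,000,000 bps, starting = 600 ms,
     * optimal = 0 (use default), maximum = 1000 ms.
     *   starting_buffer_level = 600  * 1000000 / 1000 = 600000 bits
     *   optimal_buffer_level  = 1000000 / 8           = 125000 bits
     *   maximum_buffer_size   = 1000 * 1000000 / 1000 = 1000000 bits
     * bits_off_target and buffer_level are then clamped to
     * maximum_buffer_size. */
    static int64_t level_ms_to_bits(int64_t level_ms, int64_t bandwidth_bps) {
      return level_ms * bandwidth_bps / 1000;
    }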
static AOM_INLINE void config_target_level(AV1_COMP *const cpi,
@@ -38,7 +65,7 @@ static AOM_INLINE void config_target_level(AV1_COMP *const cpi,
aom_clear_system_state();
AV1EncoderConfig *const oxcf = &cpi->oxcf;
- SequenceHeader *const seq_params = &cpi->common.seq_params;
+ SequenceHeader *const seq_params = cpi->common.seq_params;
TileConfig *const tile_cfg = &oxcf->tile_cfg;
RateControlCfg *const rc_cfg = &oxcf->rc_cfg;
@@ -48,11 +75,11 @@ static AOM_INLINE void config_target_level(AV1_COMP *const cpi,
av1_get_max_bitrate_for_level(target_level, tier, profile);
const int64_t max_bitrate = (int64_t)(level_bitrate_limit * 0.70);
rc_cfg->target_bandwidth = AOMMIN(rc_cfg->target_bandwidth, max_bitrate);
- // Also need to update cpi->twopass.bits_left.
- TWO_PASS *const twopass = &cpi->twopass;
+ // Also need to update cpi->ppi->twopass.bits_left.
+ TWO_PASS *const twopass = &cpi->ppi->twopass;
FIRSTPASS_STATS *stats = twopass->stats_buf_ctx->total_stats;
if (stats != NULL)
- cpi->twopass.bits_left =
+ cpi->ppi->twopass.bits_left =
(int64_t)(stats->duration * rc_cfg->target_bandwidth / 10000000.0);
// Adjust max over-shoot percentage.
@@ -226,6 +253,7 @@ static AOM_INLINE void recode_loop_update_q(
int *const low_cr_seen, const int loop_count) {
AV1_COMMON *const cm = &cpi->common;
RATE_CONTROL *const rc = &cpi->rc;
+ PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
const RateControlCfg *const rc_cfg = &cpi->oxcf.rc_cfg;
*loop = 0;
@@ -263,14 +291,15 @@ static AOM_INLINE void recode_loop_update_q(
&frame_over_shoot_limit);
if (frame_over_shoot_limit == 0) frame_over_shoot_limit = 1;
- if (cm->current_frame.frame_type == KEY_FRAME && rc->this_key_frame_forced &&
+ if (cm->current_frame.frame_type == KEY_FRAME &&
+ p_rc->this_key_frame_forced &&
rc->projected_frame_size < rc->max_frame_bandwidth) {
int64_t kf_err;
const int64_t high_err_target = cpi->ambient_err;
const int64_t low_err_target = cpi->ambient_err >> 1;
#if CONFIG_AV1_HIGHBITDEPTH
- if (cm->seq_params.use_highbitdepth) {
+ if (cm->seq_params->use_highbitdepth) {
kf_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
} else {
kf_err = aom_get_y_sse(cpi->source, &cm->cur_frame->buf);
@@ -323,11 +352,11 @@ static AOM_INLINE void recode_loop_update_q(
if (*q == *q_high &&
rc->projected_frame_size >= rc->max_frame_bandwidth) {
const double q_val_high_current =
- av1_convert_qindex_to_q(*q_high, cm->seq_params.bit_depth);
+ av1_convert_qindex_to_q(*q_high, cm->seq_params->bit_depth);
const double q_val_high_new =
q_val_high_current *
((double)rc->projected_frame_size / rc->max_frame_bandwidth);
- *q_high = av1_find_qindex(q_val_high_new, cm->seq_params.bit_depth,
+ *q_high = av1_find_qindex(q_val_high_new, cm->seq_params->bit_depth,
rc->best_quality, rc->worst_quality);
}
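When the frame still overshoots max_frame_bandwidth at the top of the allowed Q range, the hunk above rescales the upper bound in the real-quantizer domain in proportion to the overshoot and maps it back to a q index. A small worked example with assumed numbers:

    /* Sketch: q_val_high_current = 40.0, projected_frame_size = 150000,
     * max_frame_bandwidth = 100000 -> q_val_high_new = 40.0 * 1.5 = 60.0.
     * av1_find_qindex() then picks the q index whose real quantizer best
     * matches 60.0 within [best_quality, worst_quality]. */
    static double rescale_q_high(double q_val_high, int projected, int max_bw) {
      return q_val_high * ((double)projected / (double)max_bw);
    }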
diff --git a/third_party/libaom/source/libaom/av1/encoder/rd.c b/third_party/libaom/source/libaom/av1/encoder/rd.c
index 389b4bfe3b..e361264f16 100644
--- a/third_party/libaom/source/libaom/av1/encoder/rd.c
+++ b/third_party/libaom/source/libaom/av1/encoder/rd.c
@@ -354,11 +354,45 @@ static const int rd_layer_depth_factor[7] = {
160, 160, 160, 160, 192, 208, 224
};
+// Returns the default rd multiplier for inter frames for a given qindex.
+// The function here is a first pass estimate based on data from
+// a previous Vizer run
+static double def_inter_rd_multiplier(int qindex) {
+ return 3.2 + (0.0035 * (double)qindex);
+}
+
+// Returns the default rd multiplier for ARF/Golden Frames for a given qindex.
+// The function here is a first pass estimate based on data from
+// a previous Vizer run
+static double def_arf_rd_multiplier(int qindex) {
+ return 3.25 + (0.0035 * (double)qindex);
+}
+
+// Returns the default rd multiplier for key frames for a given qindex.
+// The function here is a first pass estimate based on data from
+// a previous Vizer run
+static double def_kf_rd_multiplier(int qindex) {
+ return 3.3 + (0.0035 * (double)qindex);
+}
+
int av1_compute_rd_mult_based_on_qindex(const AV1_COMP *cpi, int qindex) {
- const int q = av1_dc_quant_QTX(qindex, 0, cpi->common.seq_params.bit_depth);
- int rdmult = (int)(((int64_t)88 * q * q) / 24);
+ const int q = av1_dc_quant_QTX(qindex, 0, cpi->common.seq_params->bit_depth);
+ const FRAME_UPDATE_TYPE update_type =
+ cpi->ppi->gf_group.update_type[cpi->gf_frame_index];
+ int rdmult = q * q;
+
+ if (update_type == KF_UPDATE) {
+ double def_rd_q_mult = def_kf_rd_multiplier(qindex);
+ rdmult = (int)((double)rdmult * def_rd_q_mult);
+ } else if ((update_type == GF_UPDATE) || (update_type == ARF_UPDATE)) {
+ double def_rd_q_mult = def_arf_rd_multiplier(qindex);
+ rdmult = (int)((double)rdmult * def_rd_q_mult);
+ } else {
+ double def_rd_q_mult = def_inter_rd_multiplier(qindex);
+ rdmult = (int)((double)rdmult * def_rd_q_mult);
+ }
- switch (cpi->common.seq_params.bit_depth) {
+ switch (cpi->common.seq_params->bit_depth) {
case AOM_BITS_8: break;
case AOM_BITS_10: rdmult = ROUND_POWER_OF_TWO(rdmult, 4); break;
case AOM_BITS_12: rdmult = ROUND_POWER_OF_TWO(rdmult, 8); break;
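Putting the new pieces together: rdmult now starts as q*q (with q the DC quantizer for the given qindex), is scaled by the update-type-specific multiplier, and is finally normalized for bit depth (no shift at 8-bit, >>4 at 10-bit, >>8 at 12-bit). A standalone sketch of the inter-frame path, using an illustrative q value rather than the real quantizer-table lookup:

    /* Sketch: qindex = 100, 8-bit. Suppose the DC quantizer lookup gives
     * q = 44 (illustrative). Then:
     *   multiplier = 3.2 + 0.0035 * 100 = 3.55
     *   rdmult     = (int)(44 * 44 * 3.55) = 6872 */
    static int inter_rdmult_sketch(int q, int qindex) {
      const double mult = 3.2 + 0.0035 * (double)qindex;
      return (int)((double)(q * q) * mult);
    }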
@@ -373,9 +407,10 @@ int av1_compute_rd_mult(const AV1_COMP *cpi, int qindex) {
int64_t rdmult = av1_compute_rd_mult_based_on_qindex(cpi, qindex);
if (is_stat_consumption_stage(cpi) &&
(cpi->common.current_frame.frame_type != KEY_FRAME)) {
- const GF_GROUP *const gf_group = &cpi->gf_group;
- const int boost_index = AOMMIN(15, (cpi->rc.gfu_boost / 100));
- const int layer_depth = AOMMIN(gf_group->layer_depth[gf_group->index], 6);
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ const int boost_index = AOMMIN(15, (cpi->ppi->p_rc.gfu_boost / 100));
+ const int layer_depth =
+ AOMMIN(gf_group->layer_depth[cpi->gf_frame_index], 6);
// Layer depth adjustment
rdmult = (rdmult * rd_layer_depth_factor[layer_depth]) >> 7;
@@ -386,21 +421,30 @@ int av1_compute_rd_mult(const AV1_COMP *cpi, int qindex) {
return (int)rdmult;
}
-int av1_get_deltaq_offset(const AV1_COMP *cpi, int qindex, double beta) {
+int av1_get_deltaq_offset(aom_bit_depth_t bit_depth, int qindex, double beta) {
assert(beta > 0.0);
- int q = av1_dc_quant_QTX(qindex, 0, cpi->common.seq_params.bit_depth);
+ int q = av1_dc_quant_QTX(qindex, 0, bit_depth);
int newq = (int)rint(q / sqrt(beta));
int orig_qindex = qindex;
+ if (newq == q) {
+ return 0;
+ }
if (newq < q) {
- do {
+ while (qindex > 0) {
qindex--;
- q = av1_dc_quant_QTX(qindex, 0, cpi->common.seq_params.bit_depth);
- } while (newq < q && qindex > 0);
+ q = av1_dc_quant_QTX(qindex, 0, bit_depth);
+ if (newq >= q) {
+ break;
+ }
+ }
} else {
- do {
+ while (qindex < MAXQ) {
qindex++;
- q = av1_dc_quant_QTX(qindex, 0, cpi->common.seq_params.bit_depth);
- } while (newq > q && qindex < MAXQ);
+ q = av1_dc_quant_QTX(qindex, 0, bit_depth);
+ if (newq <= q) {
+ break;
+ }
+ }
}
return qindex - orig_qindex;
}
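The refactored av1_get_deltaq_offset() scales the real quantizer by 1/sqrt(beta) and then walks the q-index table until it crosses the scaled value, so the returned delta is expressed in q-index units. A small worked example under assumed table values:

    #include <math.h>

    /* Sketch: beta = 4.0 asks for half the current quantizer step.
     * If qindex = 120 maps to q = 52 (illustrative), then
     *   newq = rint(52 / sqrt(4.0)) = 26
     * and the loop decrements qindex until the DC quantizer first drops to
     * <= 26, returning that index minus 120 (a negative offset). */
    static int scaled_quantizer(int q, double beta) {
      return (int)rint((double)q / sqrt(beta));
    }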
@@ -409,7 +453,7 @@ int av1_get_adaptive_rdmult(const AV1_COMP *cpi, double beta) {
assert(beta > 0.0);
const AV1_COMMON *cm = &cpi->common;
int q = av1_dc_quant_QTX(cm->quant_params.base_qindex, 0,
- cm->seq_params.bit_depth);
+ cm->seq_params->bit_depth);
return (int)(av1_compute_rd_mult(cpi, q) / beta);
}
@@ -433,7 +477,7 @@ static int compute_rd_thresh_factor(int qindex, aom_bit_depth_t bit_depth) {
}
void av1_set_sad_per_bit(const AV1_COMP *cpi, int *sadperbit, int qindex) {
- switch (cpi->common.seq_params.bit_depth) {
+ switch (cpi->common.seq_params->bit_depth) {
case AOM_BITS_8: *sadperbit = sad_per_bit_lut_8[qindex]; break;
case AOM_BITS_10: *sadperbit = sad_per_bit_lut_10[qindex]; break;
case AOM_BITS_12: *sadperbit = sad_per_bit_lut_12[qindex]; break;
@@ -450,7 +494,7 @@ static void set_block_thresholds(const AV1_COMMON *cm, RD_OPT *rd) {
av1_get_qindex(&cm->seg, segment_id, cm->quant_params.base_qindex) +
cm->quant_params.y_dc_delta_q,
0, MAXQ);
- const int q = compute_rd_thresh_factor(qindex, cm->seq_params.bit_depth);
+ const int q = compute_rd_thresh_factor(qindex, cm->seq_params->bit_depth);
for (bsize = 0; bsize < BLOCK_SIZES_ALL; ++bsize) {
// Threshold here seems unnecessarily harsh but fine given actual
@@ -577,6 +621,13 @@ void av1_fill_mv_costs(const nmv_context *nmvc, int integer_mv, int usehp,
}
}
+void av1_fill_dv_costs(const nmv_context *ndvc, IntraBCMVCosts *dv_costs) {
+ dv_costs->dv_costs[0] = &dv_costs->dv_costs_alloc[0][MV_MAX];
+ dv_costs->dv_costs[1] = &dv_costs->dv_costs_alloc[1][MV_MAX];
+ av1_build_nmv_cost_table(dv_costs->joint_mv, dv_costs->dv_costs, ndvc,
+ MV_SUBPEL_NONE);
+}
+
void av1_initialize_rd_consts(AV1_COMP *cpi) {
AV1_COMMON *const cm = &cpi->common;
MACROBLOCK *const x = &cpi->td.mb;
@@ -610,14 +661,9 @@ void av1_initialize_rd_consts(AV1_COMP *cpi) {
cost_upd_freq.mode == COST_UPD_TILE || fill_costs)
av1_fill_mode_rates(cm, &x->mode_costs, cm->fc);
- if (!use_nonrd_pick_mode && frame_is_intra_only(cm) &&
- cm->features.allow_screen_content_tools &&
+ if (!use_nonrd_pick_mode && av1_allow_intrabc(cm) &&
!is_stat_generation_stage(cpi)) {
- IntraBCMVCosts *const dv_costs = &cpi->dv_costs;
- int *dvcost[2] = { &dv_costs->mv_component[0][MV_MAX],
- &dv_costs->mv_component[1][MV_MAX] };
- av1_build_nmv_cost_table(dv_costs->joint_mv, dvcost, &cm->fc->ndvc,
- MV_SUBPEL_NONE);
+ av1_fill_dv_costs(&cm->fc->ndvc, x->dv_costs);
}
}
@@ -1016,12 +1062,16 @@ void av1_mv_pred(const AV1_COMP *cpi, MACROBLOCK *x, uint8_t *ref_y_buffer,
const uint8_t *const ref_y_ptr =
&ref_y_buffer[ref_y_stride * fp_row + fp_col];
// Find sad for current vector.
- const int this_sad = cpi->fn_ptr[block_size].sdf(
+ const int this_sad = cpi->ppi->fn_ptr[block_size].sdf(
src_y_ptr, x->plane[0].src.stride, ref_y_ptr, ref_y_stride);
// Note if it is the best so far.
if (this_sad < best_sad) {
best_sad = this_sad;
}
+ if (i == 0)
+ x->pred_mv0_sad[ref_frame] = this_sad;
+ else if (i == 1)
+ x->pred_mv1_sad[ref_frame] = this_sad;
}
// Note the index of the mv that worked best in the reference list.
@@ -1287,7 +1337,7 @@ void av1_update_rd_thresh_fact(const AV1_COMMON *const cm,
const THR_MODES top_mode = MAX_MODES;
const int max_rd_thresh_factor = use_adaptive_rd_thresh * RD_THRESH_MAX_FACT;
- const int bsize_is_1_to_4 = bsize > cm->seq_params.sb_size;
+ const int bsize_is_1_to_4 = bsize > cm->seq_params->sb_size;
BLOCK_SIZE min_size, max_size;
if (bsize_is_1_to_4) {
// This part handles block sizes with 1:4 and 4:1 aspect ratios
@@ -1296,7 +1346,7 @@ void av1_update_rd_thresh_fact(const AV1_COMMON *const cm,
max_size = bsize;
} else {
min_size = AOMMAX(bsize - 2, BLOCK_4X4);
- max_size = AOMMIN(bsize + 2, (int)cm->seq_params.sb_size);
+ max_size = AOMMIN(bsize + 2, (int)cm->seq_params->sb_size);
}
for (THR_MODES mode = 0; mode < top_mode; ++mode) {
diff --git a/third_party/libaom/source/libaom/av1/encoder/rd.h b/third_party/libaom/source/libaom/av1/encoder/rd.h
index e37c86b9d5..c1ba819ae2 100644
--- a/third_party/libaom/source/libaom/av1/encoder/rd.h
+++ b/third_party/libaom/source/libaom/av1/encoder/rd.h
@@ -81,20 +81,6 @@ typedef struct RD_OPT {
double r0;
} RD_OPT;
-typedef struct {
- // Cost of transmitting the actual motion vector.
- // mv_component[0][i] is the cost of motion vector with horizontal component
- // (mv_row) equal to i - MV_MAX.
- // mv_component[1][i] is the cost of motion vector with vertical component
- // (mv_col) equal to i - MV_MAX.
- int mv_component[2][MV_VALS];
-
- // joint_mv[i] is the cost of transmitting joint mv(MV_JOINT_TYPE) of
- // type i.
- // TODO(huisu@google.com): we can update dv_joint_cost per SB.
- int joint_mv[MV_JOINTS];
-} IntraBCMVCosts;
-
static INLINE void av1_init_rd_stats(RD_STATS *rd_stats) {
#if CONFIG_RD_DEBUG
int plane;
@@ -110,12 +96,6 @@ static INLINE void av1_init_rd_stats(RD_STATS *rd_stats) {
// encoded, as there will only be 1 plane
for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
rd_stats->txb_coeff_cost[plane] = 0;
- {
- int r, c;
- for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r)
- for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c)
- rd_stats->txb_coeff_cost_map[plane][r][c] = 0;
- }
}
#endif
}
@@ -135,19 +115,18 @@ static INLINE void av1_invalid_rd_stats(RD_STATS *rd_stats) {
// encoded, as there will only be 1 plane
for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
rd_stats->txb_coeff_cost[plane] = INT_MAX;
- {
- int r, c;
- for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r)
- for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c)
- rd_stats->txb_coeff_cost_map[plane][r][c] = INT16_MAX;
- }
}
#endif
}
static INLINE void av1_merge_rd_stats(RD_STATS *rd_stats_dst,
const RD_STATS *rd_stats_src) {
- assert(rd_stats_dst->rate != INT_MAX && rd_stats_src->rate != INT_MAX);
+ if (rd_stats_dst->rate == INT_MAX || rd_stats_src->rate == INT_MAX) {
+ // If rd_stats_dst or rd_stats_src has invalid rate, we will make
+ // rd_stats_dst invalid.
+ av1_invalid_rd_stats(rd_stats_dst);
+ return;
+ }
rd_stats_dst->rate = (int)AOMMIN(
((int64_t)rd_stats_dst->rate + (int64_t)rd_stats_src->rate), INT_MAX);
if (!rd_stats_dst->zero_rate)
@@ -160,18 +139,6 @@ static INLINE void av1_merge_rd_stats(RD_STATS *rd_stats_dst,
// encoded, as there will only be 1 plane
for (int plane = 0; plane < MAX_MB_PLANE; ++plane) {
rd_stats_dst->txb_coeff_cost[plane] += rd_stats_src->txb_coeff_cost[plane];
- {
- // TODO(angiebird): optimize this part
- int r, c;
- int ref_txb_coeff_cost = 0;
- for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r)
- for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
- rd_stats_dst->txb_coeff_cost_map[plane][r][c] +=
- rd_stats_src->txb_coeff_cost_map[plane][r][c];
- ref_txb_coeff_cost += rd_stats_dst->txb_coeff_cost_map[plane][r][c];
- }
- assert(ref_txb_coeff_cost == rd_stats_dst->txb_coeff_cost[plane]);
- }
}
#endif
}
@@ -375,9 +342,11 @@ void av1_fill_coeff_costs(CoeffCosts *coeff_costs, FRAME_CONTEXT *fc,
void av1_fill_mv_costs(const nmv_context *nmvc, int integer_mv, int usehp,
MvCosts *mv_costs);
+void av1_fill_dv_costs(const nmv_context *ndvc, IntraBCMVCosts *dv_costs);
+
int av1_get_adaptive_rdmult(const struct AV1_COMP *cpi, double beta);
-int av1_get_deltaq_offset(const struct AV1_COMP *cpi, int qindex, double beta);
+int av1_get_deltaq_offset(aom_bit_depth_t bit_depth, int qindex, double beta);
#ifdef __cplusplus
} // extern "C"
diff --git a/third_party/libaom/source/libaom/av1/encoder/rdopt.c b/third_party/libaom/source/libaom/av1/encoder/rdopt.c
index 6200ac11dd..3ca0cb4143 100644
--- a/third_party/libaom/source/libaom/av1/encoder/rdopt.c
+++ b/third_party/libaom/source/libaom/av1/encoder/rdopt.c
@@ -627,8 +627,8 @@ static int64_t get_sse(const AV1_COMP *cpi, const MACROBLOCK *x,
get_plane_block_size(mbmi->bsize, pd->subsampling_x, pd->subsampling_y);
unsigned int sse;
- cpi->fn_ptr[bs].vf(p->src.buf, p->src.stride, pd->dst.buf, pd->dst.stride,
- &sse);
+ cpi->ppi->fn_ptr[bs].vf(p->src.buf, p->src.stride, pd->dst.buf,
+ pd->dst.stride, &sse);
total_sse += sse;
if (!plane && sse_y) *sse_y = sse;
}
@@ -1156,13 +1156,16 @@ static int64_t handle_newmv(const AV1_COMP *const cpi, MACROBLOCK *const x,
int_mv best_mv;
av1_single_motion_search(cpi, x, bsize, ref_idx, rate_mv, search_range,
- mode_info, &best_mv);
+ mode_info, &best_mv, args);
if (best_mv.as_int == INVALID_MV) return INT64_MAX;
args->single_newmv[ref_mv_idx][refs[0]] = best_mv;
args->single_newmv_rate[ref_mv_idx][refs[0]] = *rate_mv;
args->single_newmv_valid[ref_mv_idx][refs[0]] = 1;
cur_mv[0].as_int = best_mv.as_int;
+
+ // Return after single_newmv is set.
+ if (mode_info[mbmi->ref_mv_idx].skip) return INT64_MAX;
}
return 0;
@@ -1276,7 +1279,7 @@ static int64_t motion_mode_rd(
uint8_t best_blk_skip[MAX_MIB_SIZE * MAX_MIB_SIZE];
uint8_t best_tx_type_map[MAX_MIB_SIZE * MAX_MIB_SIZE];
const int rate_mv0 = *rate_mv;
- const int interintra_allowed = cm->seq_params.enable_interintra_compound &&
+ const int interintra_allowed = cm->seq_params->enable_interintra_compound &&
is_interintra_allowed(mbmi) &&
mbmi->compound_idx;
WARP_SAMPLE_INFO *const warp_sample_info =
@@ -1319,7 +1322,7 @@ static int64_t motion_mode_rd(
const int switchable_rate =
av1_is_interp_needed(xd)
? av1_get_switchable_rate(x, xd, interp_filter,
- cm->seq_params.enable_dual_filter)
+ cm->seq_params->enable_dual_filter)
: 0;
int64_t best_rd = INT64_MAX;
int best_rate_mv = rate_mv0;
@@ -1355,11 +1358,18 @@ static int64_t motion_mode_rd(
// Do not search OBMC if the probability of selecting it is below a
// predetermined threshold for this update_type and block size.
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
- const int prune_obmc = cpi->frame_probs.obmc_probs[update_type][bsize] <
- cpi->sf.inter_sf.prune_obmc_prob_thresh;
- if ((!cpi->oxcf.motion_mode_cfg.enable_obmc ||
- cpi->sf.rt_sf.use_nonrd_pick_mode || prune_obmc) &&
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
+ int obmc_probability;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ obmc_probability =
+ cpi->ppi->temp_frame_probs.obmc_probs[update_type][bsize];
+#else
+ obmc_probability = cpi->frame_probs.obmc_probs[update_type][bsize];
+#endif
+ const int prune_obmc =
+ obmc_probability < cpi->sf.inter_sf.prune_obmc_prob_thresh;
+ if ((!cpi->oxcf.motion_mode_cfg.enable_obmc || prune_obmc) &&
mbmi->motion_mode == OBMC_CAUSAL)
continue;
@@ -1373,7 +1383,7 @@ static int64_t motion_mode_rd(
assert(!is_comp_pred);
if (have_newmv_in_inter_mode(this_mode)) {
av1_single_motion_search(cpi, x, bsize, 0, &tmp_rate_mv, INT_MAX, NULL,
- &mbmi->mv[0]);
+ &mbmi->mv[0], NULL);
tmp_rate2 = rate2_nocoeff - rate_mv0 + tmp_rate_mv;
}
if ((mbmi->mv[0].as_int != cur_mv) || eval_motion_mode) {
@@ -1897,10 +1907,11 @@ static bool ref_mv_idx_early_breakout(
}
// Compute the estimated RD cost for the motion vector with simple translation.
-static int64_t simple_translation_pred_rd(
- AV1_COMP *const cpi, MACROBLOCK *x, RD_STATS *rd_stats,
- HandleInterModeArgs *args, int ref_mv_idx, inter_mode_info *mode_info,
- int64_t ref_best_rd, BLOCK_SIZE bsize) {
+static int64_t simple_translation_pred_rd(AV1_COMP *const cpi, MACROBLOCK *x,
+ RD_STATS *rd_stats,
+ HandleInterModeArgs *args,
+ int ref_mv_idx, int64_t ref_best_rd,
+ BLOCK_SIZE bsize) {
MACROBLOCKD *xd = &x->e_mbd;
MB_MODE_INFO *mbmi = xd->mi[0];
MB_MODE_INFO_EXT *const mbmi_ext = &x->mbmi_ext;
@@ -1933,7 +1944,6 @@ static int64_t simple_translation_pred_rd(
const int drl_cost =
get_drl_cost(mbmi, mbmi_ext, mode_costs->drl_mode_cost0, ref_frame_type);
rd_stats->rate += drl_cost;
- mode_info[ref_mv_idx].drl_cost = drl_cost;
int_mv cur_mv[2];
if (!build_cur_mv(cur_mv, mbmi->mode, cm, x, 0)) {
@@ -1987,8 +1997,8 @@ static INLINE bool mask_check_bit(int mask, int index) {
static int ref_mv_idx_to_search(AV1_COMP *const cpi, MACROBLOCK *x,
RD_STATS *rd_stats,
HandleInterModeArgs *const args,
- int64_t ref_best_rd, inter_mode_info *mode_info,
- BLOCK_SIZE bsize, const int ref_set) {
+ int64_t ref_best_rd, BLOCK_SIZE bsize,
+ const int ref_set) {
AV1_COMMON *const cm = &cpi->common;
const MACROBLOCKD *const xd = &x->e_mbd;
const MB_MODE_INFO *const mbmi = xd->mi[0];
@@ -2027,7 +2037,7 @@ static int ref_mv_idx_to_search(AV1_COMP *const cpi, MACROBLOCK *x,
continue;
}
idx_rdcost[ref_mv_idx] = simple_translation_pred_rd(
- cpi, x, rd_stats, args, ref_mv_idx, mode_info, ref_best_rd, bsize);
+ cpi, x, rd_stats, args, ref_mv_idx, ref_best_rd, bsize);
}
// Find the index with the best RD cost.
int best_idx = 0;
@@ -2171,14 +2181,17 @@ typedef struct {
static AOM_INLINE void get_block_level_tpl_stats(
AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row, int mi_col, int *valid_refs,
PruneInfoFromTpl *inter_cost_info_from_tpl) {
- const GF_GROUP *const gf_group = &cpi->gf_group;
AV1_COMMON *const cm = &cpi->common;
- assert(IMPLIES(gf_group->size > 0, gf_group->index < gf_group->size));
- const int tpl_idx = gf_group->index;
- TplParams *const tpl_data = &cpi->tpl_data;
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
+ const int tpl_idx = cpi->gf_frame_index;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
+ if (tpl_idx >= MAX_TPL_FRAME_IDX) {
+ return;
+ }
const TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
- if (tpl_idx >= MAX_TPL_FRAME_IDX || !tpl_frame->is_valid) {
+ if (!tpl_frame->is_valid) {
return;
}
@@ -2274,101 +2287,6 @@ static AOM_INLINE int prune_modes_based_on_tpl_stats(
return 0;
}
-// If the current mode being searched is NEWMV, this function will look
-// at previously searched MVs and check if they are the same
-// as the current MV. If it finds that this MV is repeated, it compares
-// the cost to the previous MV and skips the rest of the search if it is
-// more expensive.
-static int skip_repeated_newmv(
- AV1_COMP *const cpi, MACROBLOCK *x, BLOCK_SIZE bsize,
- const int do_tx_search, const PREDICTION_MODE this_mode,
- MB_MODE_INFO *best_mbmi, motion_mode_candidate *motion_mode_cand,
- int64_t *ref_best_rd, RD_STATS *best_rd_stats, RD_STATS *best_rd_stats_y,
- RD_STATS *best_rd_stats_uv, inter_mode_info *mode_info,
- HandleInterModeArgs *args, int drl_cost, const int *refs, int_mv *cur_mv,
- int64_t *best_rd, const BUFFER_SET orig_dst, int ref_mv_idx) {
- // This feature only works for NEWMV when a previous mv has been searched
- if (this_mode != NEWMV || ref_mv_idx == 0) return 0;
- MACROBLOCKD *xd = &x->e_mbd;
- const AV1_COMMON *cm = &cpi->common;
- const int num_planes = av1_num_planes(cm);
-
- int skip = 0;
- int this_rate_mv = 0;
- int i;
- for (i = 0; i < ref_mv_idx; ++i) {
- // Check if the motion search result same as previous results
- if (cur_mv[0].as_int == args->single_newmv[i][refs[0]].as_int &&
- args->single_newmv_valid[i][refs[0]]) {
- // If the compared mode has no valid rd, it is unlikely this
- // mode will be the best mode
- if (mode_info[i].rd == INT64_MAX) {
- skip = 1;
- break;
- }
- // Compare the cost difference including drl cost and mv cost
- if (mode_info[i].mv.as_int != INVALID_MV) {
- const int compare_cost = mode_info[i].rate_mv + mode_info[i].drl_cost;
- const int_mv ref_mv = av1_get_ref_mv(x, 0);
- this_rate_mv = av1_mv_bit_cost(
- &mode_info[i].mv.as_mv, &ref_mv.as_mv, x->mv_costs->nmv_joint_cost,
- x->mv_costs->mv_cost_stack, MV_COST_WEIGHT);
- const int this_cost = this_rate_mv + drl_cost;
-
- if (compare_cost <= this_cost) {
- // Skip this mode if it is more expensive as the previous result
- // for this MV
- skip = 1;
- break;
- } else {
- // If the cost is less than current best result, make this
- // the best and update corresponding variables unless the
- // best_mv is the same as ref_mv. In this case we skip and
- // rely on NEAR(EST)MV instead
- if (best_mbmi->ref_mv_idx == i &&
- best_mbmi->mv[0].as_int != ref_mv.as_int) {
- assert(*best_rd != INT64_MAX);
- assert(best_mbmi->mv[0].as_int == mode_info[i].mv.as_int);
- best_mbmi->ref_mv_idx = ref_mv_idx;
- motion_mode_cand->rate_mv = this_rate_mv;
- best_rd_stats->rate += this_cost - compare_cost;
- *best_rd =
- RDCOST(x->rdmult, best_rd_stats->rate, best_rd_stats->dist);
- // We also need to update mode_info here because we are setting
- // (ref_)best_rd here. So we will not be able to search the same
- // mode again with the current configuration.
- mode_info[ref_mv_idx].mv.as_int = best_mbmi->mv[0].as_int;
- mode_info[ref_mv_idx].rate_mv = this_rate_mv;
- mode_info[ref_mv_idx].rd = *best_rd;
- if (*best_rd < *ref_best_rd) *ref_best_rd = *best_rd;
- break;
- }
- }
- }
- }
- }
- if (skip) {
- const THR_MODES mode_enum = get_prediction_mode_idx(
- best_mbmi->mode, best_mbmi->ref_frame[0], best_mbmi->ref_frame[1]);
- // Collect mode stats for multiwinner mode processing
- store_winner_mode_stats(
- &cpi->common, x, best_mbmi, best_rd_stats, best_rd_stats_y,
- best_rd_stats_uv, mode_enum, NULL, bsize, *best_rd,
- cpi->sf.winner_mode_sf.multi_winner_mode_type, do_tx_search);
- args->modelled_rd[this_mode][ref_mv_idx][refs[0]] =
- args->modelled_rd[this_mode][i][refs[0]];
- args->simple_rd[this_mode][ref_mv_idx][refs[0]] =
- args->simple_rd[this_mode][i][refs[0]];
- mode_info[ref_mv_idx].rd = mode_info[i].rd;
- mode_info[ref_mv_idx].rate_mv = this_rate_mv;
- mode_info[ref_mv_idx].mv.as_int = mode_info[i].mv.as_int;
-
- restore_dst_buf(xd, orig_dst, num_planes);
- return 1;
- }
- return 0;
-}
-
/*!\brief High level function to select parameters for compound mode.
*
* \ingroup inter_mode_search
@@ -2427,7 +2345,7 @@ static int process_compound_inter_mode(
MB_MODE_INFO *mbmi = xd->mi[0];
const AV1_COMMON *cm = &cpi->common;
const int masked_compound_used = is_any_masked_compound_used(bsize) &&
- cm->seq_params.enable_masked_compound;
+ cm->seq_params->enable_masked_compound;
int mode_search_mask = (1 << COMPOUND_AVERAGE) | (1 << COMPOUND_DISTWTD) |
(1 << COMPOUND_WEDGE) | (1 << COMPOUND_DIFFWTD);
@@ -2506,6 +2424,76 @@ static int prune_ref_mv_idx_search(int ref_mv_idx, int best_ref_mv_idx,
return 0;
}
+/*!\brief Prunes ZeroMV Search Using Best NEWMV's SSE
+ *
+ * \ingroup inter_mode_search
+ *
+ * Compares the SSE of the zero MV against the best SSE found during the
+ * single NEWMV search. If the SSE of the zero MV is higher, returns 1 to
+ * signal that the zero MV can be skipped; otherwise returns 0.
+ *
+ * Note that the SSE here comes from single_motion_search, so it is computed
+ * with the interpolation filter used in motion search, not the actual
+ * interpolation filter used in encoding.
+ *
+ * \param[in] fn_ptr A table of function pointers to compute SSE.
+ * \param[in] x Pointer to struct holding all the data for
+ * the current macroblock.
+ * \param[in] bsize The current block_size.
+ * \param[in] args The args to handle_inter_mode, used to track
+ * the best SSE.
+ * \return Returns 1 if zero_mv is pruned, 0 otherwise.
+ */
+static AOM_INLINE int prune_zero_mv_with_sse(
+ const aom_variance_fn_ptr_t *fn_ptr, const MACROBLOCK *x, BLOCK_SIZE bsize,
+ const HandleInterModeArgs *args) {
+ const MACROBLOCKD *xd = &x->e_mbd;
+ const MB_MODE_INFO *mbmi = xd->mi[0];
+
+ const int is_comp_pred = has_second_ref(mbmi);
+ const MV_REFERENCE_FRAME *refs = mbmi->ref_frame;
+
+ // Check that the global mv is the same as ZEROMV
+ assert(mbmi->mv[0].as_int == 0);
+ assert(IMPLIES(is_comp_pred, mbmi->mv[0].as_int == 0));
+ assert(xd->global_motion[refs[0]].wmtype == TRANSLATION ||
+ xd->global_motion[refs[0]].wmtype == IDENTITY);
+
+ // Don't prune if we have invalid data
+ for (int idx = 0; idx < 1 + is_comp_pred; idx++) {
+ assert(mbmi->mv[0].as_int == 0);
+ if (args->best_single_sse_in_refs[refs[idx]] == INT32_MAX) {
+ return 0;
+ }
+ }
+
+ // Sum up the sse of ZEROMV and best NEWMV
+ unsigned int this_sse_sum = 0;
+ unsigned int best_sse_sum = 0;
+ for (int idx = 0; idx < 1 + is_comp_pred; idx++) {
+ const struct macroblock_plane *const p = &x->plane[AOM_PLANE_Y];
+ const struct macroblockd_plane *pd = xd->plane;
+ const struct buf_2d *src_buf = &p->src;
+ const struct buf_2d *ref_buf = &pd->pre[idx];
+ const uint8_t *src = src_buf->buf;
+ const uint8_t *ref = ref_buf->buf;
+ const int src_stride = src_buf->stride;
+ const int ref_stride = ref_buf->stride;
+
+ unsigned int this_sse;
+ fn_ptr[bsize].vf(ref, ref_stride, src, src_stride, &this_sse);
+ this_sse_sum += this_sse;
+
+ const unsigned int best_sse = args->best_single_sse_in_refs[refs[idx]];
+ best_sse_sum += best_sse;
+ }
+ if (this_sse_sum > best_sse_sum) {
+ return 1;
+ }
+
+ return 0;
+}
+
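In effect, the new pruning compares the prediction error of the zero/global MV against the best error already measured during the NEWMV search for the same reference frame(s); for compound prediction both references contribute to each sum. A minimal sketch of that comparison, assuming the per-reference SSEs have been collected as in handle_inter_mode below:

    /* Sketch: skip ZEROMV when its summed SSE is worse than the summed best
     * NEWMV SSE over the one or two references used by this mode. */
    static int zero_mv_is_worse(const unsigned int *zero_mv_sse,
                                const unsigned int *best_newmv_sse,
                                int num_refs) {
      unsigned int zero_sum = 0, best_sum = 0;
      for (int i = 0; i < num_refs; ++i) {
        zero_sum += zero_mv_sse[i];
        best_sum += best_newmv_sse[i];
      }
      return zero_sum > best_sum;
    }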
/*!\brief AV1 inter mode RD computation
*
* \ingroup inter_mode_search
@@ -2589,12 +2577,11 @@ static int64_t handle_inter_mode(
const int is_comp_pred = has_second_ref(mbmi);
const PREDICTION_MODE this_mode = mbmi->mode;
- const GF_GROUP *const gf_group = &cpi->gf_group;
- const int tpl_idx = gf_group->index;
- TplDepFrame *tpl_frame = &cpi->tpl_data.tpl_frame[tpl_idx];
+ const int tpl_idx = cpi->gf_frame_index;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
const int prune_modes_based_on_tpl =
cpi->sf.inter_sf.prune_inter_modes_based_on_tpl &&
- tpl_idx < MAX_TPL_FRAME_IDX && tpl_frame->is_valid;
+ tpl_idx < MAX_TPL_FRAME_IDX && tpl_data->tpl_frame[tpl_idx].is_valid;
int i;
// Reference frames for this mode
const int refs[2] = { mbmi->ref_frame[0],
@@ -2606,10 +2593,10 @@ static int64_t handle_inter_mode(
// of these currently holds the best predictor, and use the other
// one for future predictions. In the end, copy from tmp_buf to
// dst if necessary.
- struct macroblockd_plane *p = xd->plane;
+ struct macroblockd_plane *pd = xd->plane;
const BUFFER_SET orig_dst = {
- { p[0].dst.buf, p[1].dst.buf, p[2].dst.buf },
- { p[0].dst.stride, p[1].dst.stride, p[2].dst.stride },
+ { pd[0].dst.buf, pd[1].dst.buf, pd[2].dst.buf },
+ { pd[0].dst.stride, pd[1].dst.stride, pd[2].dst.stride },
};
const BUFFER_SET tmp_dst = { { tmp_buf, tmp_buf + 1 * MAX_SB_SQUARE,
tmp_buf + 2 * MAX_SB_SQUARE },
@@ -2645,8 +2632,8 @@ static int64_t handle_inter_mode(
// Save MV results from first 2 ref_mv_idx.
int_mv save_mv[MAX_REF_MV_SEARCH - 1][2];
int best_ref_mv_idx = -1;
- const int idx_mask = ref_mv_idx_to_search(cpi, x, rd_stats, args, ref_best_rd,
- mode_info, bsize, ref_set);
+ const int idx_mask =
+ ref_mv_idx_to_search(cpi, x, rd_stats, args, ref_best_rd, bsize, ref_set);
const int16_t mode_ctx =
av1_mode_context_analyzer(mbmi_ext->mode_context, mbmi->ref_frame);
const ModeCosts *mode_costs = &x->mode_costs;
@@ -2669,9 +2656,14 @@ static int64_t handle_inter_mode(
// WARPED_CAUSAL)
// 6.) Update stats if best so far
for (int ref_mv_idx = 0; ref_mv_idx < ref_set; ++ref_mv_idx) {
+ mbmi->ref_mv_idx = ref_mv_idx;
+
mode_info[ref_mv_idx].full_search_mv.as_int = INVALID_MV;
- mode_info[ref_mv_idx].mv.as_int = INVALID_MV;
- mode_info[ref_mv_idx].rd = INT64_MAX;
+ mode_info[ref_mv_idx].full_mv_bestsme = INT_MAX;
+ const int drl_cost = get_drl_cost(
+ mbmi, mbmi_ext, mode_costs->drl_mode_cost0, ref_frame_type);
+ mode_info[ref_mv_idx].drl_cost = drl_cost;
+ mode_info[ref_mv_idx].skip = 0;
if (!mask_check_bit(idx_mask, ref_mv_idx)) {
// MV did not perform well in simple translation search. Skip it.
@@ -2695,14 +2687,10 @@ static int64_t handle_inter_mode(
mbmi->num_proj_ref = 0;
mbmi->motion_mode = SIMPLE_TRANSLATION;
- mbmi->ref_mv_idx = ref_mv_idx;
// Compute cost for signalling this DRL index
rd_stats->rate = base_rate;
- const int drl_cost = get_drl_cost(
- mbmi, mbmi_ext, mode_costs->drl_mode_cost0, ref_frame_type);
rd_stats->rate += drl_cost;
- mode_info[ref_mv_idx].drl_cost = drl_cost;
int rs = 0;
int compmode_interinter_cost = 0;
@@ -2731,17 +2719,16 @@ static int64_t handle_inter_mode(
if (newmv_ret_val != 0) continue;
- rd_stats->rate += rate_mv;
+ if (is_inter_singleref_mode(this_mode) &&
+ cur_mv[0].as_int != INVALID_MV) {
+ const MV_REFERENCE_FRAME ref = refs[0];
+ const unsigned int this_sse = x->pred_sse[ref];
+ if (this_sse < args->best_single_sse_in_refs[ref]) {
+ args->best_single_sse_in_refs[ref] = this_sse;
+ }
+ }
- // skip NEWMV mode in drl if the motion search result is the same
- // as a previous result
- if (cpi->sf.inter_sf.skip_repeated_newmv &&
- skip_repeated_newmv(cpi, x, bsize, do_tx_search, this_mode,
- &best_mbmi, motion_mode_cand, &ref_best_rd,
- &best_rd_stats, &best_rd_stats_y,
- &best_rd_stats_uv, mode_info, args, drl_cost,
- refs, cur_mv, &best_rd, orig_dst, ref_mv_idx))
- continue;
+ rd_stats->rate += rate_mv;
}
// Copy the motion vector for this mode into mbmi struct
for (i = 0; i < is_comp_pred + 1; ++i) {
@@ -2760,6 +2747,14 @@ static int64_t handle_inter_mode(
cpi->sf.inter_sf.prune_ref_mv_idx_search))
continue;
+ if (cpi->sf.gm_sf.prune_zero_mv_with_sse &&
+ cpi->sf.gm_sf.gm_search_type == GM_DISABLE_SEARCH &&
+ (this_mode == GLOBALMV || this_mode == GLOBAL_GLOBALMV)) {
+ if (prune_zero_mv_with_sse(cpi->ppi->fn_ptr, x, bsize, args)) {
+ continue;
+ }
+ }
+
#if CONFIG_COLLECT_COMPONENT_TIMING
start_timing(cpi, compound_type_rd_time);
#endif
@@ -2843,12 +2838,6 @@ static int64_t handle_inter_mode(
if (ret_val != INT64_MAX) {
int64_t tmp_rd = RDCOST(x->rdmult, rd_stats->rate, rd_stats->dist);
- if (tmp_rd < mode_info[ref_mv_idx].rd) {
- // Only update mode_info if the new result is actually better.
- mode_info[ref_mv_idx].mv.as_int = mbmi->mv[0].as_int;
- mode_info[ref_mv_idx].rate_mv = rate_mv;
- mode_info[ref_mv_idx].rd = tmp_rd;
- }
const THR_MODES mode_enum = get_prediction_mode_idx(
mbmi->mode, mbmi->ref_frame[0], mbmi->ref_frame[1]);
// Collect mode stats for multiwinner mode processing
@@ -2928,11 +2917,11 @@ static int64_t rd_pick_intrabc_mode_sb(const AV1_COMP *cpi, MACROBLOCK *x,
const int mi_col = xd->mi_col;
const int w = block_size_wide[bsize];
const int h = block_size_high[bsize];
- const int sb_row = mi_row >> cm->seq_params.mib_size_log2;
- const int sb_col = mi_col >> cm->seq_params.mib_size_log2;
+ const int sb_row = mi_row >> cm->seq_params->mib_size_log2;
+ const int sb_col = mi_col >> cm->seq_params->mib_size_log2;
MB_MODE_INFO_EXT *const mbmi_ext = &x->mbmi_ext;
- MV_REFERENCE_FRAME ref_frame = INTRA_FRAME;
+ const MV_REFERENCE_FRAME ref_frame = INTRA_FRAME;
av1_find_mv_refs(cm, xd, mbmi, ref_frame, mbmi_ext->ref_mv_count,
xd->ref_mv_stack, xd->weight, NULL, mbmi_ext->global_mvs,
mbmi_ext->mode_context);
@@ -2952,7 +2941,7 @@ static int64_t rd_pick_intrabc_mode_sb(const AV1_COMP *cpi, MACROBLOCK *x,
int_mv dv_ref = nearestmv.as_int == 0 ? nearmv : nearestmv;
if (dv_ref.as_int == 0) {
- av1_find_ref_dv(&dv_ref, tile, cm->seq_params.mib_size, mi_row);
+ av1_find_ref_dv(&dv_ref, tile, cm->seq_params->mib_size, mi_row);
}
// Ref DV should not have sub-pel.
assert((dv_ref.as_mv.col & 7) == 0);
@@ -2983,7 +2972,7 @@ static int64_t rd_pick_intrabc_mode_sb(const AV1_COMP *cpi, MACROBLOCK *x,
av1_make_default_fullpel_ms_params(&fullms_params, cpi, x, bsize,
&dv_ref.as_mv, lookahead_search_sites,
/*fine_search_interval=*/0);
- const IntraBCMVCosts *const dv_costs = &cpi->dv_costs;
+ const IntraBCMVCosts *const dv_costs = x->dv_costs;
av1_set_ms_to_intra_mode(&fullms_params, dv_costs);
for (enum IntrabcMotionDirection dir = IBC_MOTION_ABOVE;
@@ -2997,19 +2986,19 @@ static int64_t rd_pick_intrabc_mode_sb(const AV1_COMP *cpi, MACROBLOCK *x,
fullms_params.mv_limits.row_min =
(tile->mi_row_start - mi_row) * MI_SIZE;
fullms_params.mv_limits.row_max =
- (sb_row * cm->seq_params.mib_size - mi_row) * MI_SIZE - h;
+ (sb_row * cm->seq_params->mib_size - mi_row) * MI_SIZE - h;
break;
case IBC_MOTION_LEFT:
fullms_params.mv_limits.col_min =
(tile->mi_col_start - mi_col) * MI_SIZE;
fullms_params.mv_limits.col_max =
- (sb_col * cm->seq_params.mib_size - mi_col) * MI_SIZE - w;
+ (sb_col * cm->seq_params->mib_size - mi_col) * MI_SIZE - w;
// TODO(aconverse@google.com): Minimize the overlap between above and
// left areas.
fullms_params.mv_limits.row_min =
(tile->mi_row_start - mi_row) * MI_SIZE;
int bottom_coded_mi_edge =
- AOMMIN((sb_row + 1) * cm->seq_params.mib_size, tile->mi_row_end);
+ AOMMIN((sb_row + 1) * cm->seq_params->mib_size, tile->mi_row_end);
fullms_params.mv_limits.row_max =
(bottom_coded_mi_edge - mi_row) * MI_SIZE - h;
break;
@@ -3047,7 +3036,7 @@ static int64_t rd_pick_intrabc_mode_sb(const AV1_COMP *cpi, MACROBLOCK *x,
get_fullmv_from_mv(&dv)))
continue;
if (!av1_is_dv_valid(dv, cm, xd, mi_row, mi_col, bsize,
- cm->seq_params.mib_size_log2))
+ cm->seq_params->mib_size_log2))
continue;
// DV should not have sub-pel.
@@ -3065,12 +3054,10 @@ static int64_t rd_pick_intrabc_mode_sb(const AV1_COMP *cpi, MACROBLOCK *x,
av1_enc_build_inter_predictor(cm, xd, mi_row, mi_col, NULL, bsize, 0,
av1_num_planes(cm) - 1);
- int *dvcost[2] = { (int *)&dv_costs->mv_component[0][MV_MAX],
- (int *)&dv_costs->mv_component[1][MV_MAX] };
// TODO(aconverse@google.com): The full motion field defining discount
// in MV_COST_WEIGHT is too large. Explore other values.
const int rate_mv = av1_mv_bit_cost(&dv, &dv_ref.as_mv, dv_costs->joint_mv,
- dvcost, MV_COST_WEIGHT_SUB);
+ dv_costs->dv_costs, MV_COST_WEIGHT_SUB);
const int rate_mode = x->mode_costs.intrabc_cost[1];
RD_STATS rd_stats_yuv, rd_stats_y, rd_stats_uv;
if (!av1_txfm_search(cpi, x, bsize, &rd_stats_yuv, &rd_stats_y,
@@ -3186,7 +3173,6 @@ static AOM_INLINE void rd_pick_skip_mode(
const int num_planes = av1_num_planes(cm);
MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *const mbmi = xd->mi[0];
- const TxfmSearchParams *txfm_params = &x->txfm_search_params;
x->compound_idx = 1; // COMPOUND_AVERAGE
RD_STATS skip_mode_rd_stats;
@@ -3247,6 +3233,8 @@ static AOM_INLINE void rd_pick_skip_mode(
mbmi->motion_mode = SIMPLE_TRANSLATION;
mbmi->ref_mv_idx = 0;
mbmi->skip_mode = mbmi->skip_txfm = 1;
+ mbmi->palette_mode_info.palette_size[0] = 0;
+ mbmi->palette_mode_info.palette_size[1] = 0;
set_default_interp_filters(mbmi, cm->features.interp_filter);
@@ -3283,45 +3271,12 @@ static AOM_INLINE void rd_pick_skip_mode(
assert(mode_index != THR_INVALID);
search_state->best_mbmode.skip_mode = 1;
search_state->best_mbmode = *mbmi;
-
- search_state->best_mbmode.skip_mode = search_state->best_mbmode.skip_txfm =
- 1;
- search_state->best_mbmode.mode = NEAREST_NEARESTMV;
- search_state->best_mbmode.ref_frame[0] = mbmi->ref_frame[0];
- search_state->best_mbmode.ref_frame[1] = mbmi->ref_frame[1];
- search_state->best_mbmode.mv[0].as_int = mbmi->mv[0].as_int;
- search_state->best_mbmode.mv[1].as_int = mbmi->mv[1].as_int;
- search_state->best_mbmode.ref_mv_idx = 0;
-
- // Set up tx_size related variables for skip-specific loop filtering.
- search_state->best_mbmode.tx_size =
- block_signals_txsize(bsize)
- ? tx_size_from_tx_mode(bsize, txfm_params->tx_mode_search_type)
- : max_txsize_rect_lookup[bsize];
memset(search_state->best_mbmode.inter_tx_size,
search_state->best_mbmode.tx_size,
sizeof(search_state->best_mbmode.inter_tx_size));
set_txfm_ctxs(search_state->best_mbmode.tx_size, xd->width, xd->height,
search_state->best_mbmode.skip_txfm && is_inter_block(mbmi),
xd);
-
- // Set up color-related variables for skip mode.
- search_state->best_mbmode.uv_mode = UV_DC_PRED;
- search_state->best_mbmode.palette_mode_info.palette_size[0] = 0;
- search_state->best_mbmode.palette_mode_info.palette_size[1] = 0;
-
- search_state->best_mbmode.comp_group_idx = 0;
- search_state->best_mbmode.compound_idx = x->compound_idx;
- search_state->best_mbmode.interinter_comp.type = COMPOUND_AVERAGE;
- search_state->best_mbmode.motion_mode = SIMPLE_TRANSLATION;
-
- search_state->best_mbmode.interintra_mode =
- (INTERINTRA_MODE)(II_DC_PRED - 1);
- search_state->best_mbmode.filter_intra_mode_info.use_filter_intra = 0;
-
- set_default_interp_filters(&search_state->best_mbmode,
- cm->features.interp_filter);
-
search_state->best_mode_index = mode_index;
// Update rd_cost
@@ -3798,7 +3753,7 @@ static AOM_INLINE void set_params_rd_pick_inter_mode(
// compound ref.
if (skip_ref_frame_mask & (1 << ref_frame) &&
!is_ref_frame_used_by_compound_ref(ref_frame, skip_ref_frame_mask) &&
- !is_ref_frame_used_in_cache(ref_frame, x->intermode_cache)) {
+ !is_ref_frame_used_in_cache(ref_frame, x->mb_mode_cache)) {
continue;
}
assert(get_ref_frame_yv12_buf(cm, ref_frame) != NULL);
@@ -3824,7 +3779,7 @@ static AOM_INLINE void set_params_rd_pick_inter_mode(
}
if (skip_ref_frame_mask & (1 << ref_frame) &&
- !is_ref_frame_used_in_cache(ref_frame, x->intermode_cache)) {
+ !is_ref_frame_used_in_cache(ref_frame, x->mb_mode_cache)) {
continue;
}
// Ref mv list population is not required, when compound references are
@@ -3841,9 +3796,16 @@ static AOM_INLINE void set_params_rd_pick_inter_mode(
}
av1_count_overlappable_neighbors(cm, xd);
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
- const int prune_obmc = cpi->frame_probs.obmc_probs[update_type][bsize] <
- cpi->sf.inter_sf.prune_obmc_prob_thresh;
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
+ int obmc_probability;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ obmc_probability = cpi->ppi->temp_frame_probs.obmc_probs[update_type][bsize];
+#else
+ obmc_probability = cpi->frame_probs.obmc_probs[update_type][bsize];
+#endif
+ const int prune_obmc =
+ obmc_probability < cpi->sf.inter_sf.prune_obmc_prob_thresh;
if (cpi->oxcf.motion_mode_cfg.enable_obmc && !prune_obmc) {
if (check_num_overlappable_neighbors(mbmi) &&
is_motion_variation_allowed_bsize(bsize)) {
@@ -3874,6 +3836,10 @@ static AOM_INLINE void set_params_rd_pick_inter_mode(
set_mode_eval_params(cpi, x, MODE_EVAL);
x->comp_rd_stats_idx = 0;
+
+ for (int idx = 0; idx < REF_FRAMES; idx++) {
+ args->best_single_sse_in_refs[idx] = INT32_MAX;
+ }
}
static AOM_INLINE void init_inter_mode_search_state(
@@ -4060,8 +4026,8 @@ static int inter_mode_search_order_independent_skip(
}
// Reuse the prediction mode in cache
- if (x->use_intermode_cache) {
- const MB_MODE_INFO *cached_mi = x->intermode_cache;
+ if (x->use_mb_mode_cache) {
+ const MB_MODE_INFO *cached_mi = x->mb_mode_cache;
const PREDICTION_MODE cached_mode = cached_mi->mode;
const MV_REFERENCE_FRAME *cached_frame = cached_mi->ref_frame;
const int cached_mode_is_single = cached_frame[1] <= INTRA_FRAME;
@@ -4156,12 +4122,12 @@ static int inter_mode_search_order_independent_skip(
}
// If we are reusing the prediction from cache, and the current frame is
// required by the cache, then we cannot prune it.
- if (is_ref_frame_used_in_cache(ref_type, x->intermode_cache)) {
+ if (is_ref_frame_used_in_cache(ref_type, x->mb_mode_cache)) {
skip_ref = 0;
// If the cache only needs the current reference type for compound
// prediction, then we can skip motion mode search.
skip_motion_mode = (ref_type <= ALTREF_FRAME &&
- x->intermode_cache->ref_frame[1] > INTRA_FRAME);
+ x->mb_mode_cache->ref_frame[1] > INTRA_FRAME);
}
if (skip_ref) return 1;
}
@@ -4452,12 +4418,14 @@ static INLINE void match_ref_frame(const MB_MODE_INFO *const mbmi,
// Prune compound mode using ref frames of neighbor blocks.
static INLINE int compound_skip_using_neighbor_refs(
MACROBLOCKD *const xd, const PREDICTION_MODE this_mode,
- const MV_REFERENCE_FRAME *ref_frames, int prune_compound_using_neighbors) {
+ const MV_REFERENCE_FRAME *ref_frames, int prune_ext_comp_using_neighbors) {
// Exclude non-extended compound modes from pruning
if (this_mode == NEAREST_NEARESTMV || this_mode == NEAR_NEARMV ||
this_mode == NEW_NEWMV || this_mode == GLOBAL_GLOBALMV)
return 0;
+ if (prune_ext_comp_using_neighbors >= 3) return 1;
+
int is_ref_match[2] = { 0 }; // 0 - match for forward refs
// 1 - match for backward refs
// Check if ref frames of this block matches with left neighbor.
@@ -4472,7 +4440,7 @@ static INLINE int compound_skip_using_neighbor_refs(
const int track_ref_match = is_ref_match[0] + is_ref_match[1];
// Pruning based on ref frame match with neighbors.
- if (track_ref_match >= prune_compound_using_neighbors) return 0;
+ if (track_ref_match >= prune_ext_comp_using_neighbors) return 0;
return 1;
}
@@ -4629,10 +4597,10 @@ static AOM_INLINE void evaluate_motion_mode_for_winner_candidates(
if (!is_inter_singleref_mode(mbmi->mode)) continue;
x->txfm_search_info.skip_txfm = 0;
- struct macroblockd_plane *p = xd->plane;
+ struct macroblockd_plane *pd = xd->plane;
const BUFFER_SET orig_dst = {
- { p[0].dst.buf, p[1].dst.buf, p[2].dst.buf },
- { p[0].dst.stride, p[1].dst.stride, p[2].dst.stride },
+ { pd[0].dst.buf, pd[1].dst.buf, pd[2].dst.buf },
+ { pd[0].dst.stride, pd[1].dst.stride, pd[2].dst.stride },
};
set_ref_ptrs(cm, xd, mbmi->ref_frame[0], mbmi->ref_frame[1]);
@@ -4681,8 +4649,7 @@ typedef struct {
int skip_ref_frame_mask;
int reach_first_comp_mode;
int mode_thresh_mul_fact;
- int intra_mode_idx_ls[INTRA_MODES];
- int intra_mode_num;
+ int *intra_mode_idx_ls;
int num_single_modes_processed;
int prune_cpd_using_sr_stats_ready;
} InterModeSFArgs;
@@ -4693,7 +4660,6 @@ static int skip_inter_mode(AV1_COMP *cpi, MACROBLOCK *x, const BLOCK_SIZE bsize,
InterModeSFArgs *args) {
const SPEED_FEATURES *const sf = &cpi->sf;
MACROBLOCKD *const xd = &x->e_mbd;
- MB_MODE_INFO *const mbmi = xd->mi[0];
// Get the actual prediction mode we are trying in this iteration
const THR_MODES mode_enum = av1_default_mode_order[midx];
const MODE_DEFINITION *mode_def = &av1_mode_defs[mode_enum];
@@ -4703,6 +4669,8 @@ static int skip_inter_mode(AV1_COMP *cpi, MACROBLOCK *x, const BLOCK_SIZE bsize,
const MV_REFERENCE_FRAME second_ref_frame = ref_frames[1];
const int comp_pred = second_ref_frame > INTRA_FRAME;
+ if (ref_frame == INTRA_FRAME) return 1;
+
// Check if this mode should be skipped because it is incompatible with the
// current frame
if (inter_mode_compatible_skip(cpi, x, bsize, this_mode, ref_frames))
@@ -4739,23 +4707,6 @@ static int skip_inter_mode(AV1_COMP *cpi, MACROBLOCK *x, const BLOCK_SIZE bsize,
return 1;
}
- // Speed features to prune out INTRA frames
- if (ref_frame == INTRA_FRAME) {
- if ((!cpi->oxcf.intra_mode_cfg.enable_smooth_intra ||
- sf->intra_sf.disable_smooth_intra) &&
- (mbmi->mode == SMOOTH_PRED || mbmi->mode == SMOOTH_H_PRED ||
- mbmi->mode == SMOOTH_V_PRED))
- return 1;
- if (!cpi->oxcf.intra_mode_cfg.enable_paeth_intra &&
- mbmi->mode == PAETH_PRED)
- return 1;
-
- // Intra modes will be handled in another loop later.
- assert(args->intra_mode_num < INTRA_MODES);
- args->intra_mode_idx_ls[args->intra_mode_num++] = mode_enum;
- return 1;
- }
-
if (sf->inter_sf.prune_compound_using_single_ref && comp_pred) {
// After we done with single reference modes, find the 2nd best RD
// for a reference frame. Only search compound modes that have a reference
@@ -4770,10 +4721,10 @@ static int skip_inter_mode(AV1_COMP *cpi, MACROBLOCK *x, const BLOCK_SIZE bsize,
return 1;
}
- if (sf->inter_sf.prune_compound_using_neighbors && comp_pred) {
+ if (sf->inter_sf.prune_ext_comp_using_neighbors && comp_pred) {
if (compound_skip_using_neighbor_refs(
xd, this_mode, ref_frames,
- sf->inter_sf.prune_compound_using_neighbors))
+ sf->inter_sf.prune_ext_comp_using_neighbors))
return 1;
}
@@ -4851,8 +4802,9 @@ static void tx_search_best_inter_candidates(
: INT64_MAX;
*yrd = INT64_MAX;
int64_t best_rd_in_this_partition = INT64_MAX;
+ int num_inter_mode_cands = inter_modes_info->num;
// Iterate over best inter mode candidates and perform tx search
- for (int j = 0; j < inter_modes_info->num; ++j) {
+ for (int j = 0; j < num_inter_mode_cands; ++j) {
const int data_idx = inter_modes_info->rd_idx_pair_arr[j].idx;
*mbmi = inter_modes_info->mbmi_arr[data_idx];
int64_t curr_est_rd = inter_modes_info->est_rd_arr[data_idx];
@@ -4930,6 +4882,27 @@ static void tx_search_best_inter_candidates(
update_search_state(search_state, rd_cost, ctx, &rd_stats, &rd_stats_y,
&rd_stats_uv, mode_enum, x, txfm_search_done);
search_state->best_skip_rd[0] = skip_rd;
+ // Limit the total number of modes to be evaluated if the first
+ // candidate is valid and uses transform skip or compound prediction.
+ if (cpi->sf.inter_sf.inter_mode_txfm_breakout) {
+ if (!j && (search_state->best_mbmode.skip_txfm || rd_stats.skip_txfm)) {
+ // Evaluate more candidates at high quantizers where occurrence of
+ // transform skip is high.
+ const int max_cands_cap[5] = { 2, 3, 5, 7, 9 };
+ const int qindex_band = (5 * x->qindex) >> QINDEX_BITS;
+ num_inter_mode_cands =
+ AOMMIN(max_cands_cap[qindex_band], inter_modes_info->num);
+ } else if (!j && has_second_ref(&search_state->best_mbmode)) {
+ const int aggr = cpi->sf.inter_sf.inter_mode_txfm_breakout - 1;
+ // Evaluate more candidates at low quantizers where occurrence of
+ // single reference mode is high.
+ const int max_cands_cap_cmp[2][4] = { { 10, 7, 5, 4 },
+ { 10, 7, 5, 3 } };
+ const int qindex_band_cmp = (4 * x->qindex) >> QINDEX_BITS;
+ num_inter_mode_cands = AOMMIN(
+ max_cands_cap_cmp[aggr][qindex_band_cmp], inter_modes_info->num);
+ }
+ }
}
}
}
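The breakout above maps the frame's qindex onto a small table of candidate caps, so more candidates survive at high quantizers where transform skip is common; a second two-row table (not repeated here) handles the compound case. A worked example, assuming QINDEX_BITS is 8 (qindex in 0..255):

    /* Sketch: band = (5 * qindex) >> 8 selects the cap.
     *   qindex =  60 -> band = 1 -> at most 3 candidates
     *   qindex = 200 -> band = 3 -> at most 7 candidates */
    static int txfm_breakout_cap(int qindex) {
      static const int max_cands_cap[5] = { 2, 3, 5, 7, 9 };
      return max_cands_cap[(5 * qindex) >> 8];
    }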
@@ -5050,13 +5023,41 @@ static AOM_INLINE void search_intra_modes_in_interframe(
const int num_4x4 = bsize_to_num_blk(bsize);
// Performs luma search
- for (int j = 0; j < sf_args->intra_mode_num; ++j) {
+ int64_t best_model_rd = INT64_MAX;
+ int64_t top_intra_model_rd[TOP_INTRA_MODEL_COUNT];
+ for (int i = 0; i < TOP_INTRA_MODEL_COUNT; i++) {
+ top_intra_model_rd[i] = INT64_MAX;
+ }
+ for (int mode_idx = INTRA_MODE_START; mode_idx < LUMA_MODE_COUNT;
+ ++mode_idx) {
if (sf->intra_sf.skip_intra_in_interframe &&
search_state->intra_search_state.skip_intra_modes)
break;
- const THR_MODES mode_enum = sf_args->intra_mode_idx_ls[j];
- const MODE_DEFINITION *mode_def = &av1_mode_defs[mode_enum];
- const PREDICTION_MODE this_mode = mode_def->mode;
+ set_y_mode_and_delta_angle(mode_idx, mbmi);
+
+ // Use intra_y_mode_mask speed feature to skip intra mode evaluation.
+ if (sf_args->mode_skip_mask->pred_modes[INTRA_FRAME] & (1 << mbmi->mode))
+ continue;
+
+ THR_MODES mode_enum = 0;
+ for (int i = 0; i < INTRA_MODE_END; ++i) {
+ if (mbmi->mode == av1_mode_defs[sf_args->intra_mode_idx_ls[i]].mode) {
+ mode_enum = sf_args->intra_mode_idx_ls[i];
+ break;
+ }
+ }
+ if ((!cpi->oxcf.intra_mode_cfg.enable_smooth_intra ||
+ cpi->sf.intra_sf.disable_smooth_intra) &&
+ (mbmi->mode == SMOOTH_PRED || mbmi->mode == SMOOTH_H_PRED ||
+ mbmi->mode == SMOOTH_V_PRED))
+ continue;
+ if (!cpi->oxcf.intra_mode_cfg.enable_paeth_intra &&
+ mbmi->mode == PAETH_PRED)
+ continue;
+ if (av1_is_directional_mode(mbmi->mode) &&
+ av1_use_angle_delta(bsize) == 0 && mbmi->angle_delta[PLANE_TYPE_Y] != 0)
+ continue;
+ const PREDICTION_MODE this_mode = mbmi->mode;
assert(av1_mode_defs[mode_enum].ref_frame[0] == INTRA_FRAME);
assert(av1_mode_defs[mode_enum].ref_frame[1] == NONE_FRAME);
@@ -5084,7 +5085,8 @@ static AOM_INLINE void search_intra_modes_in_interframe(
int64_t intra_rd_y = INT64_MAX;
const int is_luma_result_valid = av1_handle_intra_y_mode(
intra_search_state, cpi, x, bsize, intra_ref_frame_cost, ctx,
- &intra_rd_stats_y, search_state->best_rd, &mode_cost_y, &intra_rd_y);
+ &intra_rd_stats_y, search_state->best_rd, &mode_cost_y, &intra_rd_y,
+ &best_model_rd, top_intra_model_rd);
if (is_luma_result_valid && intra_rd_y < yrd_threshold) {
is_best_y_mode_intra = 1;
if (intra_rd_y < best_rd_y) {
@@ -5147,12 +5149,6 @@ static AOM_INLINE void search_intra_modes_in_interframe(
intra_rd_stats_uv.rate +
intra_mode_info_cost_uv(cpi, x, mbmi, bsize, uv_mode_cost);
}
- if (mode != DC_PRED && mode != PAETH_PRED) {
- const int intra_cost_penalty = av1_get_intra_cost_penalty(
- cm->quant_params.base_qindex, cm->quant_params.y_dc_delta_q,
- cm->seq_params.bit_depth);
- intra_rd_stats.rate += intra_cost_penalty;
- }
// Intra block is always coded as non-skip
intra_rd_stats.skip_txfm = 0;
@@ -5189,6 +5185,84 @@ static AOM_INLINE void search_intra_modes_in_interframe(
}
}
+#if !CONFIG_REALTIME_ONLY
+// Prepare inter_cost and intra_cost from TPL stats, which are used as ML
+// features in intra mode pruning.
+static AOM_INLINE void calculate_cost_from_tpl_data(
+ const AV1_COMP *cpi, MACROBLOCK *x, BLOCK_SIZE bsize, int mi_row,
+ int mi_col, int64_t *inter_cost, int64_t *intra_cost) {
+ const AV1_COMMON *const cm = &cpi->common;
+ // Only consider full SB.
+ const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
+ const int tpl_bsize_1d = cpi->ppi->tpl_data.tpl_bsize_1d;
+ const int len = (block_size_wide[sb_size] / tpl_bsize_1d) *
+ (block_size_high[sb_size] / tpl_bsize_1d);
+ SuperBlockEnc *sb_enc = &x->sb_enc;
+ if (sb_enc->tpl_data_count == len) {
+ const BLOCK_SIZE tpl_bsize = convert_length_to_bsize(tpl_bsize_1d);
+ const int tpl_stride = sb_enc->tpl_stride;
+ const int tplw = mi_size_wide[tpl_bsize];
+ const int tplh = mi_size_high[tpl_bsize];
+ const int nw = mi_size_wide[bsize] / tplw;
+ const int nh = mi_size_high[bsize] / tplh;
+ if (nw >= 1 && nh >= 1) {
+ const int of_h = mi_row % mi_size_high[sb_size];
+ const int of_w = mi_col % mi_size_wide[sb_size];
+ const int start = of_h / tplh * tpl_stride + of_w / tplw;
+
+ for (int k = 0; k < nh; k++) {
+ for (int l = 0; l < nw; l++) {
+ *inter_cost += sb_enc->tpl_inter_cost[start + k * tpl_stride + l];
+ *intra_cost += sb_enc->tpl_intra_cost[start + k * tpl_stride + l];
+ }
+ }
+ *inter_cost /= nw * nh;
+ *intra_cost /= nw * nh;
+ }
+ }
+}
+#endif // !CONFIG_REALTIME_ONLY
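calculate_cost_from_tpl_data above averages the per-TPL-unit inter and intra costs that fall inside the current block. A reduced sketch of that accumulation over an nw x nh window of two row-major cost grids; the parameter names mirror the diff but the function itself is illustrative and assumes nw, nh >= 1.

#include <stdint.h>

/* Average two parallel cost grids over an nw x nh window that starts at
 * offset 'start' in a grid with the given stride, as in the double loop of
 * calculate_cost_from_tpl_data(). */
static void average_tpl_costs(const int64_t *inter_grid,
                              const int64_t *intra_grid, int stride, int start,
                              int nw, int nh, int64_t *inter_cost,
                              int64_t *intra_cost) {
  int64_t inter_sum = 0, intra_sum = 0;
  for (int k = 0; k < nh; ++k) {
    for (int l = 0; l < nw; ++l) {
      inter_sum += inter_grid[start + k * stride + l];
      intra_sum += intra_grid[start + k * stride + l];
    }
  }
  *inter_cost = inter_sum / (nw * nh);
  *intra_cost = intra_sum / (nw * nh);
}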
+
+// When the speed feature skip_intra_in_interframe > 0, enable ML model to prune
+// intra mode search.
+static AOM_INLINE void skip_intra_modes_in_interframe(
+ AV1_COMMON *const cm, struct macroblock *x, BLOCK_SIZE bsize,
+ InterModeSearchState *search_state, int64_t inter_cost, int64_t intra_cost,
+ int skip_intra_in_interframe) {
+ MACROBLOCKD *const xd = &x->e_mbd;
+ if (inter_cost >= 0 && intra_cost >= 0) {
+ aom_clear_system_state();
+ const NN_CONFIG *nn_config = (AOMMIN(cm->width, cm->height) <= 480)
+ ? &av1_intrap_nn_config
+ : &av1_intrap_hd_nn_config;
+ float nn_features[6];
+ float scores[2] = { 0.0f };
+
+ nn_features[0] = (float)search_state->best_mbmode.skip_txfm;
+ nn_features[1] = (float)mi_size_wide_log2[bsize];
+ nn_features[2] = (float)mi_size_high_log2[bsize];
+ nn_features[3] = (float)intra_cost;
+ nn_features[4] = (float)inter_cost;
+ const int ac_q = av1_ac_quant_QTX(x->qindex, 0, xd->bd);
+ const int ac_q_max = av1_ac_quant_QTX(255, 0, xd->bd);
+ nn_features[5] = (float)(ac_q_max / ac_q);
+
+ av1_nn_predict(nn_features, nn_config, 1, scores);
+ aom_clear_system_state();
+
+ // For two parameters, the max prob returned from av1_nn_softmax equals
+ // 1.0 / (1.0 + e^(-|diff_score|)). Here the scores are used directly to
+ // avoid calling av1_nn_softmax.
+ const float thresh[2] = { 1.4f, 1.4f };
+ if (scores[1] > scores[0] + thresh[skip_intra_in_interframe - 1]) {
+ search_state->intra_search_state.skip_intra_modes = 1;
+ }
+ } else if ((search_state->best_mbmode.skip_txfm) &&
+ (skip_intra_in_interframe >= 2)) {
+ search_state->intra_search_state.skip_intra_modes = 1;
+ }
+}
+
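skip_intra_modes_in_interframe replaces the earlier probs[1] > 0.8 softmax check (removed further down in this file's diff) with a raw score comparison. For a two-class softmax, p1 = 1 / (1 + e^-(s1 - s0)), so the 1.4 score gap used here corresponds to p1 of roughly 0.80. A self-contained check of that equivalence; the constants are the ones from the diff, the program is only illustrative.

#include <math.h>
#include <stdio.h>

/* Two-class softmax probability of class 1, written in terms of raw scores. */
static double softmax_p1(double s0, double s1) {
  return 1.0 / (1.0 + exp(s0 - s1));
}

int main(void) {
  /* A score gap of 1.4 gives p1 = 1 / (1 + e^-1.4) ~= 0.802, so testing
   * scores[1] > scores[0] + 1.4 matches the old probs[1] > 0.8 cut while
   * skipping the av1_nn_softmax call. */
  printf("p1 at gap 1.4: %.3f\n", softmax_p1(0.0, 1.4));
  printf("gap needed for p1 = 0.8: %.3f\n", log(0.8 / 0.2)); /* ln 4 ~= 1.386 */
  return 0;
}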
// TODO(chiyotsai@google.com): See the todo for av1_rd_pick_intra_mode_sb.
void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
struct macroblock *x, struct RD_STATS *rd_cost,
@@ -5231,6 +5305,7 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
-1,
-1,
-1,
+ { 0 },
{ 0 } };
for (i = 0; i < MODE_CTX_REF_FRAMES; ++i) args.cmp_mode[i] = -1;
// Indicates the appropriate number of simple translation winner modes for
@@ -5265,10 +5340,13 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
mbmi->partition != PARTITION_HORZ) ||
cpi->sf.inter_sf.prune_ref_frame_for_rect_partitions >= 2) {
picked_ref_frames_mask =
- fetch_picked_ref_frames_mask(x, bsize, cm->seq_params.mib_size);
+ fetch_picked_ref_frames_mask(x, bsize, cm->seq_params->mib_size);
}
}
+#if CONFIG_COLLECT_COMPONENT_TIMING
+ start_timing(cpi, set_params_rd_pick_inter_mode_time);
+#endif
// Skip ref frames that never selected by square blocks.
const int skip_ref_frame_mask =
picked_ref_frames_mask ? ~picked_ref_frames_mask : 0;
@@ -5280,6 +5358,9 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
set_params_rd_pick_inter_mode(cpi, x, &args, bsize, &mode_skip_mask,
skip_ref_frame_mask, ref_costs_single,
ref_costs_comp, yv12_mb);
+#if CONFIG_COLLECT_COMPONENT_TIMING
+ end_timing(cpi, set_params_rd_pick_inter_mode_time);
+#endif
int64_t best_est_rd = INT64_MAX;
const InterModeRdModel *md = &tile_data->inter_mode_rd_models[bsize];
@@ -5292,6 +5373,10 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
cpi->sf.rt_sf.force_tx_search_off);
InterModesInfo *inter_modes_info = x->inter_modes_info;
inter_modes_info->num = 0;
+ int intra_mode_idx_ls[INTRA_MODES];
+ for (i = 0; i < INTRA_MODES; ++i) {
+ intra_mode_idx_ls[i] = i + THR_DC;
+ }
// Temporary buffers used by handle_inter_mode().
uint8_t *const tmp_buf = get_buf_by_bd(xd, x->tmp_pred_bufs[0]);
@@ -5337,40 +5422,13 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
const int do_pruning =
(AOMMIN(cm->width, cm->height) > 480 && cpi->speed <= 1) ? 0 : 1;
if (do_pruning && sf->intra_sf.skip_intra_in_interframe &&
- cpi->oxcf.algo_cfg.enable_tpl_model) {
- // Only consider full SB.
- const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
- const int tpl_bsize_1d = cpi->tpl_data.tpl_bsize_1d;
- const int len = (block_size_wide[sb_size] / tpl_bsize_1d) *
- (block_size_high[sb_size] / tpl_bsize_1d);
- SuperBlockEnc *sb_enc = &x->sb_enc;
- if (sb_enc->tpl_data_count == len) {
- const BLOCK_SIZE tpl_bsize = convert_length_to_bsize(tpl_bsize_1d);
- const int tpl_stride = sb_enc->tpl_stride;
- const int tplw = mi_size_wide[tpl_bsize];
- const int tplh = mi_size_high[tpl_bsize];
- const int nw = mi_size_wide[bsize] / tplw;
- const int nh = mi_size_high[bsize] / tplh;
- if (nw >= 1 && nh >= 1) {
- const int of_h = mi_row % mi_size_high[sb_size];
- const int of_w = mi_col % mi_size_wide[sb_size];
- const int start = of_h / tplh * tpl_stride + of_w / tplw;
-
- for (int k = 0; k < nh; k++) {
- for (int l = 0; l < nw; l++) {
- inter_cost += sb_enc->tpl_inter_cost[start + k * tpl_stride + l];
- intra_cost += sb_enc->tpl_intra_cost[start + k * tpl_stride + l];
- }
- }
- inter_cost /= nw * nh;
- intra_cost /= nw * nh;
- }
- }
- }
+ cpi->oxcf.algo_cfg.enable_tpl_model)
+ calculate_cost_from_tpl_data(cpi, x, bsize, mi_row, mi_col, &inter_cost,
+ &intra_cost);
#endif // !CONFIG_REALTIME_ONLY
// Initialize best mode stats for winner mode processing
- av1_zero(x->winner_mode_stats);
+ av1_zero_array(x->winner_mode_stats, MAX_WINNER_MODE_COUNT_INTER);
x->winner_mode_count = 0;
store_winner_mode_stats(&cpi->common, x, mbmi, NULL, NULL, NULL, THR_INVALID,
NULL, bsize, best_rd_so_far,
@@ -5389,20 +5447,20 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
skip_ref_frame_mask,
0,
mode_thresh_mul_fact,
- { 0 },
- 0,
+ intra_mode_idx_ls,
0,
0 };
int64_t best_inter_yrd = INT64_MAX;
- // This is the main loop of this function. It loops over all possible modes
- // and calls handle_inter_mode() to compute the RD for each.
+ // This is the main loop of this function. It loops over all possible inter
+ // modes and calls handle_inter_mode() to compute the RD for each.
// Here midx is just an iterator index that should not be used by itself
// except to keep track of the number of modes searched. It should be used
// with av1_default_mode_order to get the enum that defines the mode, which
// can be used with av1_mode_defs to get the prediction mode and the ref
// frames.
- for (THR_MODES midx = THR_MODE_START; midx < THR_MODE_END; ++midx) {
+ for (THR_MODES midx = THR_INTER_MODE_START; midx < THR_INTER_MODE_END;
+ ++midx) {
// Get the actual prediction mode we are trying in this iteration
const THR_MODES mode_enum = av1_default_mode_order[midx];
const MODE_DEFINITION *mode_def = &av1_mode_defs[mode_enum];
@@ -5420,9 +5478,16 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
txfm_info->skip_txfm = 0;
sf_args.num_single_modes_processed += is_single_pred;
set_ref_ptrs(cm, xd, ref_frame, second_ref_frame);
-
+#if CONFIG_COLLECT_COMPONENT_TIMING
+ start_timing(cpi, skip_inter_mode_time);
+#endif
// Apply speed features to decide if this inter mode can be skipped
- if (skip_inter_mode(cpi, x, bsize, ref_frame_rd, midx, &sf_args)) continue;
+ const int is_skip_inter_mode =
+ skip_inter_mode(cpi, x, bsize, ref_frame_rd, midx, &sf_args);
+#if CONFIG_COLLECT_COMPONENT_TIMING
+ end_timing(cpi, skip_inter_mode_time);
+#endif
+ if (is_skip_inter_mode) continue;
// Select prediction reference frames.
for (i = 0; i < num_planes; i++) {
@@ -5549,36 +5614,11 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
// Gate intra mode evaluation if best of inter is skip except when source
// variance is extremely low
const unsigned int src_var_thresh_intra_skip = 1;
- if (sf->intra_sf.skip_intra_in_interframe &&
- (x->source_variance > src_var_thresh_intra_skip)) {
- if (inter_cost >= 0 && intra_cost >= 0) {
- aom_clear_system_state();
- const NN_CONFIG *nn_config = (AOMMIN(cm->width, cm->height) <= 480)
- ? &av1_intrap_nn_config
- : &av1_intrap_hd_nn_config;
- float nn_features[6];
- float scores[2] = { 0.0f };
- float probs[2] = { 0.0f };
-
- nn_features[0] = (float)search_state.best_mbmode.skip_txfm;
- nn_features[1] = (float)mi_size_wide_log2[bsize];
- nn_features[2] = (float)mi_size_high_log2[bsize];
- nn_features[3] = (float)intra_cost;
- nn_features[4] = (float)inter_cost;
- const int ac_q = av1_ac_quant_QTX(x->qindex, 0, xd->bd);
- const int ac_q_max = av1_ac_quant_QTX(255, 0, xd->bd);
- nn_features[5] = (float)(ac_q_max / ac_q);
-
- av1_nn_predict(nn_features, nn_config, 1, scores);
- aom_clear_system_state();
- av1_nn_softmax(scores, probs, 2);
-
- if (probs[1] > 0.8) search_state.intra_search_state.skip_intra_modes = 1;
- } else if ((search_state.best_mbmode.skip_txfm) &&
- (sf->intra_sf.skip_intra_in_interframe >= 2)) {
- search_state.intra_search_state.skip_intra_modes = 1;
- }
- }
+ const int skip_intra_in_interframe = sf->intra_sf.skip_intra_in_interframe;
+ if (skip_intra_in_interframe &&
+ (x->source_variance > src_var_thresh_intra_skip))
+ skip_intra_modes_in_interframe(cm, x, bsize, &search_state, inter_cost,
+ intra_cost, skip_intra_in_interframe);
const unsigned int intra_ref_frame_cost = ref_costs_single[INTRA_FRAME];
search_intra_modes_in_interframe(&search_state, cpi, x, rd_cost, bsize, ctx,
@@ -5588,6 +5628,9 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
end_timing(cpi, handle_intra_mode_time);
#endif
+#if CONFIG_COLLECT_COMPONENT_TIMING
+ start_timing(cpi, refine_winner_mode_tx_time);
+#endif
int winner_mode_count =
cpi->sf.winner_mode_sf.multi_winner_mode_type ? x->winner_mode_count : 1;
// In effect only when fast tx search speed features are enabled.
@@ -5595,6 +5638,9 @@ void av1_rd_pick_inter_mode(struct AV1_COMP *cpi, struct TileDataEnc *tile_data,
cpi, x, rd_cost, bsize, ctx, &search_state.best_mode_index,
&search_state.best_mbmode, yv12_mb, search_state.best_rate_y,
search_state.best_rate_uv, &search_state.best_skip2, winner_mode_count);
+#if CONFIG_COLLECT_COMPONENT_TIMING
+ end_timing(cpi, refine_winner_mode_tx_time);
+#endif
// Initialize default mode evaluation params
set_mode_eval_params(cpi, x, DEFAULT_EVAL);
@@ -5803,7 +5849,7 @@ void av1_rd_pick_inter_mode_sb_seg_skip(const AV1_COMP *cpi,
for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
mbmi->interp_filters = av1_broadcast_interp_filter(i);
rs = av1_get_switchable_rate(x, xd, interp_filter,
- cm->seq_params.enable_dual_filter);
+ cm->seq_params->enable_dual_filter);
if (rs < best_rs) {
best_rs = rs;
best_filter = mbmi->interp_filters.as_filters.y_filter;
@@ -5814,7 +5860,7 @@ void av1_rd_pick_inter_mode_sb_seg_skip(const AV1_COMP *cpi,
// Set the appropriate filter
mbmi->interp_filters = av1_broadcast_interp_filter(best_filter);
rate2 += av1_get_switchable_rate(x, xd, interp_filter,
- cm->seq_params.enable_dual_filter);
+ cm->seq_params->enable_dual_filter);
if (cm->current_frame.reference_mode == REFERENCE_MODE_SELECT)
rate2 += comp_inter_cost[comp_pred];
diff --git a/third_party/libaom/source/libaom/av1/encoder/rdopt.h b/third_party/libaom/source/libaom/av1/encoder/rdopt.h
index 362da7b798..055a49e9f1 100644
--- a/third_party/libaom/source/libaom/av1/encoder/rdopt.h
+++ b/third_party/libaom/source/libaom/av1/encoder/rdopt.h
@@ -217,10 +217,10 @@ static INLINE int av1_encoder_get_relative_dist(int a, int b) {
static INLINE int av1_get_sb_mi_size(const AV1_COMMON *const cm) {
const int mi_alloc_size_1d = mi_size_wide[cm->mi_params.mi_alloc_bsize];
int sb_mi_rows =
- (mi_size_wide[cm->seq_params.sb_size] + mi_alloc_size_1d - 1) /
+ (mi_size_wide[cm->seq_params->sb_size] + mi_alloc_size_1d - 1) /
mi_alloc_size_1d;
- assert(mi_size_wide[cm->seq_params.sb_size] ==
- mi_size_high[cm->seq_params.sb_size]);
+ assert(mi_size_wide[cm->seq_params->sb_size] ==
+ mi_size_high[cm->seq_params->sb_size]);
int sb_mi_size = sb_mi_rows * sb_mi_rows;
return sb_mi_size;
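For scale (an illustrative combination of values, not a specific encoder configuration): a 128x128 superblock has mi_size_wide equal to 32; with an 8x8 mi allocation unit, mi_alloc_size_1d is 2 and the rounded-up division gives sb_mi_rows = (32 + 2 - 1) / 2 = 16, so sb_mi_size = 16 * 16 = 256 mi units per superblock.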
diff --git a/third_party/libaom/source/libaom/av1/encoder/rdopt_utils.h b/third_party/libaom/source/libaom/av1/encoder/rdopt_utils.h
index ddd180f7ed..f00037992e 100644
--- a/third_party/libaom/source/libaom/av1/encoder/rdopt_utils.h
+++ b/third_party/libaom/source/libaom/av1/encoder/rdopt_utils.h
@@ -433,8 +433,10 @@ static INLINE void set_tx_type_prune(const SPEED_FEATURES *sf,
txfm_params->prune_2d_txfm_mode = sf->tx_sf.tx_type_search.prune_2d_txfm_mode;
if (!winner_mode_tx_type_pruning) return;
- const int prune_mode[2][2] = { { TX_TYPE_PRUNE_4, TX_TYPE_PRUNE_0 },
- { TX_TYPE_PRUNE_5, TX_TYPE_PRUNE_2 } };
+ const int prune_mode[4][2] = { { TX_TYPE_PRUNE_3, TX_TYPE_PRUNE_0 },
+ { TX_TYPE_PRUNE_4, TX_TYPE_PRUNE_0 },
+ { TX_TYPE_PRUNE_5, TX_TYPE_PRUNE_2 },
+ { TX_TYPE_PRUNE_5, TX_TYPE_PRUNE_3 } };
txfm_params->prune_2d_txfm_mode =
prune_mode[winner_mode_tx_type_pruning - 1][is_winner_mode];
}
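The table now carries one row per winner_mode_tx_type_pruning level (1 through 4), indexed by whether the block is in the winner-mode pass. A sketch of the same lookup with a defensive clamp; the PRUNE_* enumerators below are stand-ins for libaom's TX_TYPE_PRUNE_* constants, and the helper name is illustrative.

/* Illustrative stand-ins for libaom's TX_TYPE_PRUNE_* levels. */
enum { PRUNE_0 = 0, PRUNE_1, PRUNE_2, PRUNE_3, PRUNE_4, PRUNE_5 };

/* winner_mode_tx_type_pruning is expected in [1, 4]; is_winner_mode is 0/1. */
static int pick_2d_txfm_prune_mode(int winner_mode_tx_type_pruning,
                                   int is_winner_mode) {
  static const int prune_mode[4][2] = { { PRUNE_3, PRUNE_0 },
                                        { PRUNE_4, PRUNE_0 },
                                        { PRUNE_5, PRUNE_2 },
                                        { PRUNE_5, PRUNE_3 } };
  if (winner_mode_tx_type_pruning < 1 || winner_mode_tx_type_pruning > 4)
    return PRUNE_0; /* level 0: the table is not consulted at all */
  return prune_mode[winner_mode_tx_type_pruning - 1][!!is_winner_mode];
}

The winner-mode column uses milder prune levels than the default pass, consistent with refining only the surviving candidates.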
@@ -569,7 +571,7 @@ static INLINE CFL_ALLOWED_TYPE store_cfl_required_rdo(const AV1_COMMON *cm,
const MACROBLOCK *x) {
const MACROBLOCKD *xd = &x->e_mbd;
- if (cm->seq_params.monochrome || !xd->is_chroma_ref) return CFL_DISALLOWED;
+ if (cm->seq_params->monochrome || !xd->is_chroma_ref) return CFL_DISALLOWED;
if (!xd->is_chroma_ref) {
// For non-chroma-reference blocks, we should always store the luma pixels,
diff --git a/third_party/libaom/source/libaom/av1/encoder/segmentation.c b/third_party/libaom/source/libaom/av1/encoder/segmentation.c
index de17d571ff..edb6ef67fa 100644
--- a/third_party/libaom/source/libaom/av1/encoder/segmentation.c
+++ b/third_party/libaom/source/libaom/av1/encoder/segmentation.c
@@ -175,6 +175,14 @@ void av1_choose_segmap_coding_method(AV1_COMMON *cm, MACROBLOCKD *xd) {
int no_pred_cost;
int t_pred_cost = INT_MAX;
int tile_col, tile_row, mi_row, mi_col;
+
+ if (!seg->update_map) return;
+ if (cm->features.primary_ref_frame == PRIMARY_REF_NONE) {
+ seg->temporal_update = 0;
+ assert(seg->update_data == 1);
+ return;
+ }
+
unsigned temporal_predictor_count[SEG_TEMPORAL_PRED_CTXS][2] = { { 0 } };
unsigned no_pred_segcounts[MAX_SEGMENTS] = { 0 };
unsigned t_unpred_seg_counts[MAX_SEGMENTS] = { 0 };
@@ -194,15 +202,15 @@ void av1_choose_segmap_coding_method(AV1_COMMON *cm, MACROBLOCKD *xd) {
tile_info.mi_row_start * cm->mi_params.mi_stride +
tile_info.mi_col_start;
for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
- mi_row += cm->seq_params.mib_size,
- mi_ptr += cm->seq_params.mib_size * cm->mi_params.mi_stride) {
+ mi_row += cm->seq_params->mib_size,
+ mi_ptr += cm->seq_params->mib_size * cm->mi_params.mi_stride) {
MB_MODE_INFO **mi = mi_ptr;
for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
- mi_col += cm->seq_params.mib_size,
- mi += cm->seq_params.mib_size) {
+ mi_col += cm->seq_params->mib_size,
+ mi += cm->seq_params->mib_size) {
count_segs_sb(cm, xd, &tile_info, mi, no_pred_segcounts,
temporal_predictor_count, t_unpred_seg_counts, mi_row,
- mi_col, cm->seq_params.sb_size);
+ mi_col, cm->seq_params->sb_size);
}
}
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.c b/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.c
index 1c556c2a09..dbfcaabbd6 100644
--- a/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.c
+++ b/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.c
@@ -8,7 +8,6 @@
* Media Patent License 1.0 was not distributed with this source code in the
* PATENTS file, you can obtain it at www.aomedia.org/license/patent.
*/
-#include <float.h>
#include "av1/common/av1_common_int.h"
#include "av1/encoder/sparse_linear_solver.h"
#include "config/aom_config.h"
@@ -408,4 +407,4 @@ void av1_steepest_descent_sparse(const SPARSE_MTX *A, const double *b, int bl,
aom_free(Ad);
}
-#endif // CONFIG_OPFL
+#endif // CONFIG_OPTICAL_FLOW_API
diff --git a/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.h b/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.h
index 3cacb51b93..a3f2f7b964 100644
--- a/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.h
+++ b/third_party/libaom/source/libaom/av1/encoder/sparse_linear_solver.h
@@ -9,8 +9,8 @@
* PATENTS file, you can obtain it at www.aomedia.org/license/patent.
*/
-#ifndef AV1_COMMON_SPARSE_LINEAR_SOLVER_H_
-#define AV1_COMMON_SPARSE_LINEAR_SOLVER_H_
+#ifndef AOM_AV1_ENCODER_SPARSE_LINEAR_SOLVER_H_
+#define AOM_AV1_ENCODER_SPARSE_LINEAR_SOLVER_H_
#ifdef __cplusplus
extern "C" {
@@ -64,4 +64,4 @@ void av1_steepest_descent_sparse(const SPARSE_MTX *A, const double *b, int bl,
} // extern "C"
#endif
-#endif /* AV1_COMMON_SPARSE_LINEAR_SOLVER_H_ */
+#endif /* AOM_AV1_ENCODER_SPARSE_LINEAR_SOLVER_H_ */
diff --git a/third_party/libaom/source/libaom/av1/encoder/speed_features.c b/third_party/libaom/source/libaom/av1/encoder/speed_features.c
index 2244aaae91..916a818513 100644
--- a/third_party/libaom/source/libaom/av1/encoder/speed_features.c
+++ b/third_party/libaom/source/libaom/av1/encoder/speed_features.c
@@ -274,6 +274,20 @@ static void set_allintra_speed_feature_framesize_dependent(
sf->part_sf.use_square_partition_only_threshold = BLOCK_16X16;
}
+
+ if (speed >= 7) {
+ if (!is_480p_or_larger) {
+ sf->rt_sf.nonrd_check_partition_merge_mode = 2;
+ }
+ }
+
+ if (speed >= 8) {
+ // TODO(kyslov): add more speed features to control speed/quality
+ }
+
+ if (speed >= 9) {
+ // TODO(kyslov): add more speed features to control speed/quality
+ }
}
static void set_allintra_speed_features_framesize_independent(
@@ -289,8 +303,11 @@ static void set_allintra_speed_features_framesize_independent(
sf->part_sf.prune_part4_search = 2;
sf->part_sf.simple_motion_search_prune_rect = 1;
sf->part_sf.ml_predict_breakout_level = use_hbd ? 1 : 3;
+ sf->part_sf.reuse_prev_rd_results_for_part_ab = 1;
+ sf->part_sf.use_best_rd_for_pruning = 1;
sf->intra_sf.intra_pruning_with_hog = 1;
+ sf->intra_sf.dv_cost_upd_level = INTERNAL_COST_UPD_OFF;
sf->tx_sf.adaptive_txb_search_level = 1;
sf->tx_sf.intra_tx_size_search_init_depth_sqr = 1;
@@ -300,7 +317,7 @@ static void set_allintra_speed_features_framesize_independent(
sf->rt_sf.use_nonrd_pick_mode = 0;
sf->rt_sf.use_real_time_ref_set = 0;
- if (cpi->twopass.fr_content_type == FC_GRAPHICS_ANIMATION ||
+ if (cpi->ppi->twopass.fr_content_type == FC_GRAPHICS_ANIMATION ||
cpi->use_screen_content_tools) {
sf->mv_sf.exhaustive_searches_thresh = (1 << 20);
} else {
@@ -318,10 +335,12 @@ static void set_allintra_speed_features_framesize_independent(
// speed feature accordingly
sf->part_sf.simple_motion_search_split = allow_screen_content_tools ? 1 : 2;
sf->part_sf.ml_predict_breakout_level = use_hbd ? 2 : 3;
+ sf->part_sf.reuse_best_prediction_for_part_ab = 1;
sf->mv_sf.exhaustive_searches_thresh <<= 1;
sf->intra_sf.prune_palette_search_level = 1;
+ sf->intra_sf.top_intra_model_count_allowed = 3;
sf->tx_sf.adaptive_txb_search_level = 2;
sf->tx_sf.inter_tx_size_search_init_depth_rect = 1;
@@ -348,6 +367,7 @@ static void set_allintra_speed_features_framesize_independent(
sf->intra_sf.disable_smooth_intra = 1;
sf->intra_sf.intra_pruning_with_hog = 2;
+ sf->intra_sf.prune_filter_intra_level = 1;
sf->rd_sf.perform_coeff_opt = 3;
@@ -397,9 +417,6 @@ static void set_allintra_speed_features_framesize_independent(
sf->intra_sf.intra_uv_mode_mask[TX_16X16] = UV_INTRA_DC_H_V_CFL;
sf->intra_sf.intra_uv_mode_mask[TX_32X32] = UV_INTRA_DC_H_V_CFL;
sf->intra_sf.intra_uv_mode_mask[TX_64X64] = UV_INTRA_DC_H_V_CFL;
- sf->intra_sf.intra_y_mode_mask[TX_16X16] = INTRA_DC_H_V;
- sf->intra_sf.intra_y_mode_mask[TX_32X32] = INTRA_DC_H_V;
- sf->intra_sf.intra_y_mode_mask[TX_64X64] = INTRA_DC_H_V;
sf->intra_sf.prune_chroma_modes_using_luma_winner = 1;
sf->mv_sf.simple_motion_subpel_force_stop = HALF_PEL;
@@ -408,7 +425,7 @@ static void set_allintra_speed_features_framesize_independent(
sf->tpl_sf.subpel_force_stop = HALF_PEL;
sf->tpl_sf.search_method = FAST_BIGDIA;
- sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 1;
+ sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 2;
sf->tx_sf.tx_type_search.fast_intra_tx_type_search = 1;
sf->tx_sf.tx_type_search.prune_2d_txfm_mode = TX_TYPE_PRUNE_3;
sf->tx_sf.tx_type_search.prune_tx_type_est_rd = 1;
@@ -443,9 +460,10 @@ static void set_allintra_speed_features_framesize_independent(
}
if (speed >= 6) {
- sf->intra_sf.disable_filter_intra = 1;
+ sf->intra_sf.prune_filter_intra_level = 2;
sf->intra_sf.chroma_intra_pruning_with_hog = 4;
sf->intra_sf.intra_pruning_with_hog = 4;
+ sf->intra_sf.cfl_search_range = 1;
sf->part_sf.prune_rectangular_split_based_on_qidx =
allow_screen_content_tools ? 0 : 1;
@@ -458,7 +476,7 @@ static void set_allintra_speed_features_framesize_independent(
sf->mv_sf.use_bsize_dependent_search_method = 1;
- sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 2;
+ sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 3;
sf->tx_sf.tx_type_search.prune_tx_type_est_rd = 0;
// Use largest txfm block size for square coding blocks.
sf->tx_sf.intra_tx_size_search_init_depth_sqr = 2;
@@ -466,10 +484,39 @@ static void set_allintra_speed_features_framesize_independent(
sf->rd_sf.perform_coeff_opt = 6;
sf->lpf_sf.cdef_pick_method = CDEF_FAST_SEARCH_LVL4;
+ sf->lpf_sf.lpf_pick = LPF_PICK_FROM_Q;
sf->winner_mode_sf.multi_winner_mode_type = MULTI_WINNER_MODE_OFF;
}
+ if (speed >= 7) {
+ sf->part_sf.default_min_partition_size = BLOCK_8X8;
+ sf->part_sf.partition_search_type = VAR_BASED_PARTITION;
+
+ sf->lpf_sf.cdef_pick_method = CDEF_PICK_FROM_Q;
+
+ sf->rt_sf.mode_search_skip_flags |= FLAG_SKIP_INTRA_DIRMISMATCH;
+ sf->rt_sf.use_nonrd_pick_mode = 1;
+ sf->rt_sf.nonrd_check_partition_merge_mode = 1;
+ sf->rt_sf.nonrd_check_partition_split = 0;
+ sf->rt_sf.skip_intra_pred_if_tx_skip = 1;
+ // Set mask for intra modes.
+ for (int i = 0; i < BLOCK_SIZES; ++i)
+ if (i >= BLOCK_32X32)
+ sf->rt_sf.intra_y_mode_bsize_mask_nrd[i] = INTRA_DC;
+ else
+ // Use DC, H, V intra mode for block sizes < 32X32.
+ sf->rt_sf.intra_y_mode_bsize_mask_nrd[i] = INTRA_DC_H_V;
+ }
+
+ if (speed >= 8) {
+ // TODO(kyslov): add more speed features to control speed/quality
+ }
+
+ if (speed >= 9) {
+ // TODO(kyslov): add more speed features to control speed/quality
+ }
+
// Intra txb hash is currently not compatible with multi-winner mode as the
// hashes got reset during multi-winner mode processing.
assert(IMPLIES(
@@ -480,6 +527,7 @@ static void set_allintra_speed_features_framesize_independent(
static void set_good_speed_feature_framesize_dependent(
const AV1_COMP *const cpi, SPEED_FEATURES *const sf, int speed) {
const AV1_COMMON *const cm = &cpi->common;
+ const int is_480p_or_lesser = AOMMIN(cm->width, cm->height) <= 480;
const int is_480p_or_larger = AOMMIN(cm->width, cm->height) >= 480;
const int is_720p_or_larger = AOMMIN(cm->width, cm->height) >= 720;
const int is_1080p_or_larger = AOMMIN(cm->width, cm->height) >= 1080;
@@ -518,7 +566,16 @@ static void set_good_speed_feature_framesize_dependent(
sf->mv_sf.use_downsampled_sad = 1;
}
+ if (!is_720p_or_larger) {
+ const RateControlCfg *const rc_cfg = &cpi->oxcf.rc_cfg;
+ const int rate_tolerance =
+ AOMMIN(rc_cfg->under_shoot_pct, rc_cfg->over_shoot_pct);
+ sf->hl_sf.recode_tolerance = 25 + (rate_tolerance >> 2);
+ }
+
if (speed >= 1) {
+ if (is_480p_or_lesser) sf->inter_sf.skip_newmv_in_drl = 1;
+
if (is_720p_or_larger) {
sf->part_sf.use_square_partition_only_threshold = BLOCK_128X128;
} else if (is_480p_or_larger) {
@@ -561,6 +618,12 @@ static void set_good_speed_feature_framesize_dependent(
}
if (is_480p_or_larger) {
+ sf->inter_sf.disable_interintra_wedge_var_thresh = 100;
+ } else {
+ sf->inter_sf.disable_interintra_wedge_var_thresh = UINT_MAX;
+ }
+
+ if (is_480p_or_larger) {
sf->tx_sf.tx_type_search.prune_tx_type_using_stats = 1;
if (use_hbd) sf->tx_sf.prune_tx_size_level = 2;
} else {
@@ -573,6 +636,8 @@ static void set_good_speed_feature_framesize_dependent(
}
if (speed >= 3) {
+ sf->inter_sf.skip_newmv_in_drl = 2;
+
sf->part_sf.ml_early_term_after_part_split_level = 0;
if (is_720p_or_larger) {
@@ -584,6 +649,10 @@ static void set_good_speed_feature_framesize_dependent(
sf->part_sf.partition_search_breakout_rate_thr = 120;
}
if (use_hbd) sf->tx_sf.prune_tx_size_level = 3;
+
+ if (is_480p_or_larger) sf->intra_sf.top_intra_model_count_allowed = 2;
+
+ sf->inter_sf.disable_interintra_wedge_var_thresh = UINT_MAX;
}
if (speed >= 4) {
@@ -598,11 +667,14 @@ static void set_good_speed_feature_framesize_dependent(
}
sf->inter_sf.prune_obmc_prob_thresh = INT_MAX;
+ if (is_480p_or_lesser) sf->inter_sf.skip_newmv_in_drl = 3;
if (is_720p_or_larger)
sf->hl_sf.recode_tolerance = 32;
else
sf->hl_sf.recode_tolerance = 55;
+
+ sf->intra_sf.top_intra_model_count_allowed = 2;
}
if (speed >= 5) {
@@ -612,6 +684,8 @@ static void set_good_speed_feature_framesize_dependent(
sf->inter_sf.prune_warped_prob_thresh = 8;
}
if (is_720p_or_larger) sf->hl_sf.recode_tolerance = 40;
+
+ sf->inter_sf.skip_newmv_in_drl = 4;
}
if (speed >= 6) {
@@ -630,7 +704,9 @@ static void set_good_speed_feature_framesize_dependent(
}
if (!is_720p_or_larger) {
- sf->inter_sf.mv_cost_upd_level = 2;
+ sf->inter_sf.mv_cost_upd_level = INTERNAL_COST_UPD_SBROW_SET;
+ sf->inter_sf.coeff_cost_upd_level = INTERNAL_COST_UPD_SBROW;
+ sf->inter_sf.mode_cost_upd_level = INTERNAL_COST_UPD_SBROW;
}
if (is_720p_or_larger) {
@@ -650,10 +726,10 @@ static void set_good_speed_feature_framesize_dependent(
static void set_good_speed_features_framesize_independent(
const AV1_COMP *const cpi, SPEED_FEATURES *const sf, int speed) {
const AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const int boosted = frame_is_boosted(cpi);
const int is_boosted_arf2_bwd_type =
- boosted || gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE;
+ boosted || gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE;
const int allow_screen_content_tools =
cm->features.allow_screen_content_tools;
const int use_hbd = cpi->oxcf.use_highbitdepth;
@@ -670,6 +746,8 @@ static void set_good_speed_features_framesize_independent(
sf->part_sf.prune_part4_search = 2;
sf->part_sf.simple_motion_search_prune_rect = 1;
sf->part_sf.ml_predict_breakout_level = use_hbd ? 1 : 3;
+ sf->part_sf.reuse_prev_rd_results_for_part_ab = 1;
+ sf->part_sf.use_best_rd_for_pruning = 1;
// TODO(debargha): Test, tweak and turn on either 1 or 2
sf->inter_sf.inter_mode_rd_model_estimation = 1;
@@ -698,7 +776,7 @@ static void set_good_speed_features_framesize_independent(
sf->rt_sf.use_nonrd_pick_mode = 0;
sf->rt_sf.use_real_time_ref_set = 0;
- if (cpi->twopass.fr_content_type == FC_GRAPHICS_ANIMATION ||
+ if (cpi->ppi->twopass.fr_content_type == FC_GRAPHICS_ANIMATION ||
cpi->use_screen_content_tools) {
sf->mv_sf.exhaustive_searches_thresh = (1 << 20);
} else {
@@ -725,7 +803,6 @@ static void set_good_speed_features_framesize_independent(
sf->mv_sf.use_accurate_subpel_search = USE_4_TAPS;
sf->mv_sf.disable_extensive_joint_motion_search = 1;
- sf->inter_sf.disable_interinter_wedge_newmv_search = boosted ? 0 : 1;
sf->inter_sf.prune_comp_search_by_single_result = boosted ? 2 : 1;
sf->inter_sf.prune_comp_type_by_comp_avg = 1;
sf->inter_sf.prune_comp_type_by_model_rd = boosted ? 0 : 1;
@@ -736,7 +813,6 @@ static void set_good_speed_features_framesize_independent(
sf->inter_sf.reduce_inter_modes = boosted ? 1 : 3;
sf->inter_sf.reuse_inter_intra_mode = 1;
sf->inter_sf.selective_ref_frame = 2;
- sf->inter_sf.skip_repeated_newmv = 1;
sf->interp_sf.use_interp_filter = 1;
@@ -766,7 +842,11 @@ static void set_good_speed_features_framesize_independent(
if (speed >= 2) {
sf->hl_sf.recode_loop = ALLOW_RECODE_KFARFGF;
+ sf->fp_sf.skip_motion_search_threshold = 25;
+
sf->part_sf.allow_partition_search_skip = 1;
+ sf->part_sf.reuse_best_prediction_for_part_ab =
+ !frame_is_intra_only(&cpi->common);
sf->mv_sf.auto_mv_step_size = 1;
sf->mv_sf.simple_motion_subpel_force_stop = QUARTER_PEL;
@@ -778,20 +858,21 @@ static void set_good_speed_features_framesize_independent(
// bit more closely to figure out why.
sf->inter_sf.adaptive_rd_thresh = 1;
sf->inter_sf.comp_inter_joint_search_thresh = BLOCK_SIZES_ALL;
- sf->inter_sf.disable_interintra_wedge_var_thresh = 100;
sf->inter_sf.disable_interinter_wedge_var_thresh = 100;
sf->inter_sf.fast_interintra_wedge_search = 1;
sf->inter_sf.prune_comp_search_by_single_result = boosted ? 4 : 1;
- sf->inter_sf.prune_compound_using_neighbors = 1;
+ sf->inter_sf.prune_ext_comp_using_neighbors = 1;
sf->inter_sf.prune_comp_using_best_single_mode_ref = 2;
sf->inter_sf.prune_comp_type_by_comp_avg = 2;
- sf->inter_sf.reuse_best_prediction_for_part_ab = 1;
sf->inter_sf.selective_ref_frame = 3;
sf->inter_sf.use_dist_wtd_comp_flag = DIST_WTD_COMP_DISABLED;
// Enable fast search only for COMPOUND_DIFFWTD type.
sf->inter_sf.enable_fast_compound_mode_search = 1;
sf->inter_sf.reuse_mask_search_results = 1;
sf->inter_sf.txfm_rd_gate_level = boosted ? 0 : 1;
+ sf->inter_sf.disable_interinter_wedge_newmv_search =
+ is_boosted_arf2_bwd_type ? 0 : 1;
+ sf->inter_sf.inter_mode_txfm_breakout = boosted ? 0 : 1;
// TODO(Sachin): Enable/Enhance this speed feature for speed 2 & 3
sf->interp_sf.adaptive_interp_filter_search = 1;
@@ -831,7 +912,8 @@ static void set_good_speed_features_framesize_independent(
sf->mv_sf.search_method = DIAMOND;
sf->mv_sf.disable_second_mv = 2;
- sf->inter_sf.mv_cost_upd_level = 1;
+ sf->inter_sf.disable_interinter_wedge_newmv_search = boosted ? 0 : 1;
+ sf->inter_sf.mv_cost_upd_level = INTERNAL_COST_UPD_SBROW;
sf->inter_sf.disable_onesided_comp = 1;
// TODO(yunqing): evaluate this speed feature for speed 1 & 2, and combine
// it with cpi->sf.disable_wedge_search_var_thresh.
@@ -843,10 +925,11 @@ static void set_good_speed_features_framesize_independent(
sf->inter_sf.prune_comp_search_by_single_result = boosted ? 4 : 2;
sf->inter_sf.selective_ref_frame = 5;
sf->inter_sf.skip_repeated_ref_mv = 1;
- sf->inter_sf.skip_repeated_full_newmv = 1;
sf->inter_sf.reuse_compound_type_decision = 1;
sf->inter_sf.txfm_rd_gate_level =
boosted ? 0 : (is_boosted_arf2_bwd_type ? 1 : 2);
+ sf->inter_sf.enable_fast_wedge_mask_search = 1;
+ sf->inter_sf.inter_mode_txfm_breakout = boosted ? 0 : 2;
sf->interp_sf.adaptive_interp_filter_search = 2;
@@ -865,6 +948,8 @@ static void set_good_speed_features_framesize_independent(
sf->tx_sf.adaptive_txb_search_level = boosted ? 2 : 3;
sf->tx_sf.tx_type_search.use_skip_flag_prediction = 2;
sf->tx_sf.use_intra_txb_hash = 1;
+ sf->tx_sf.tx_type_search.prune_2d_txfm_mode = TX_TYPE_PRUNE_3;
+ sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 1;
// TODO(any): Refactor the code related to following winner mode speed
// features
@@ -874,10 +959,10 @@ static void set_good_speed_features_framesize_independent(
frame_is_intra_only(&cpi->common) ? 0 : 1;
sf->winner_mode_sf.enable_winner_mode_for_use_tx_domain_dist = 1;
sf->winner_mode_sf.motion_mode_for_winner_cand =
- boosted
- ? 0
- : gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE ? 1
- : 2;
+ boosted ? 0
+ : gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE
+ ? 1
+ : 2;
// TODO(any): evaluate if these lpf features can be moved to speed 2.
// For screen content, "prune_sgr_based_on_wiener = 2" cause large quality
@@ -889,6 +974,8 @@ static void set_good_speed_features_framesize_independent(
}
if (speed >= 4) {
+ sf->gm_sf.prune_zero_mv_with_sse = 1;
+
sf->mv_sf.subpel_search_method = SUBPEL_TREE_PRUNED_MORE;
sf->part_sf.simple_motion_search_prune_agg = 2;
@@ -901,7 +988,7 @@ static void set_good_speed_features_framesize_independent(
sf->inter_sf.txfm_rd_gate_level = boosted ? 0 : 3;
sf->inter_sf.prune_inter_modes_based_on_tpl = boosted ? 0 : 2;
- sf->inter_sf.prune_compound_using_neighbors = 2;
+ sf->inter_sf.prune_ext_comp_using_neighbors = 2;
sf->inter_sf.prune_obmc_prob_thresh = INT_MAX;
sf->interp_sf.cb_pred_filter_search = 1;
@@ -911,9 +998,10 @@ static void set_good_speed_features_framesize_independent(
sf->intra_sf.intra_uv_mode_mask[TX_16X16] = UV_INTRA_DC_H_V_CFL;
sf->intra_sf.intra_uv_mode_mask[TX_32X32] = UV_INTRA_DC_H_V_CFL;
sf->intra_sf.intra_uv_mode_mask[TX_64X64] = UV_INTRA_DC_H_V_CFL;
- sf->intra_sf.intra_y_mode_mask[TX_16X16] = INTRA_DC_H_V;
- sf->intra_sf.intra_y_mode_mask[TX_32X32] = INTRA_DC_H_V;
- sf->intra_sf.intra_y_mode_mask[TX_64X64] = INTRA_DC_H_V;
+ // TODO(any): "intra_y_mode_mask" doesn't help much at speed 4.
+ // sf->intra_sf.intra_y_mode_mask[TX_16X16] = INTRA_DC_H_V;
+ // sf->intra_sf.intra_y_mode_mask[TX_32X32] = INTRA_DC_H_V;
+ // sf->intra_sf.intra_y_mode_mask[TX_64X64] = INTRA_DC_H_V;
// TODO(any): Experiment with this speed feature set to 2 for higher quality
// presets as well
sf->intra_sf.skip_intra_in_interframe = 2;
@@ -923,10 +1011,10 @@ static void set_good_speed_features_framesize_independent(
sf->tpl_sf.prune_starting_mv = 2;
sf->tpl_sf.subpel_force_stop = HALF_PEL;
sf->tpl_sf.search_method = FAST_BIGDIA;
+ sf->tpl_sf.gop_length_decision_method = 1;
- sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 1;
+ sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 2;
sf->tx_sf.tx_type_search.fast_intra_tx_type_search = 1;
- sf->tx_sf.tx_type_search.prune_2d_txfm_mode = TX_TYPE_PRUNE_3;
sf->tx_sf.tx_type_search.prune_tx_type_est_rd = 1;
// TODO(any): Experiment with enabling of this speed feature as hash state
// is reset during winner mode processing
@@ -948,9 +1036,14 @@ static void set_good_speed_features_framesize_independent(
}
if (speed >= 5) {
+ sf->fp_sf.reduce_mv_step_param = 4;
+
sf->part_sf.simple_motion_search_prune_agg = 3;
sf->part_sf.ext_partition_eval_thresh =
allow_screen_content_tools ? BLOCK_8X8 : BLOCK_16X16;
+ sf->part_sf.prune_sub_8x8_partition_level =
+ (allow_screen_content_tools || frame_is_intra_only(&cpi->common)) ? 0
+ : 2;
sf->inter_sf.disable_interinter_wedge_var_thresh = UINT_MAX;
sf->inter_sf.prune_inter_modes_if_skippable = 1;
@@ -974,8 +1067,11 @@ static void set_good_speed_features_framesize_independent(
sf->tpl_sf.prune_starting_mv = 3;
sf->tpl_sf.use_y_only_rate_distortion = 1;
sf->tpl_sf.subpel_force_stop = FULL_PEL;
+ sf->tpl_sf.gop_length_decision_method = 2;
sf->winner_mode_sf.dc_blk_pred_level = 1;
+
+ sf->fp_sf.disable_recon = 1;
}
if (speed >= 6) {
@@ -986,9 +1082,14 @@ static void set_good_speed_features_framesize_independent(
sf->inter_sf.prune_inter_modes_based_on_tpl = boosted ? 0 : 3;
sf->inter_sf.prune_nearmv_using_neighbors = 1;
sf->inter_sf.selective_ref_frame = 6;
+ sf->inter_sf.prune_ext_comp_using_neighbors = 3;
sf->intra_sf.chroma_intra_pruning_with_hog = 4;
sf->intra_sf.intra_pruning_with_hog = 4;
+ sf->intra_sf.intra_uv_mode_mask[TX_32X32] = UV_INTRA_DC;
+ sf->intra_sf.intra_uv_mode_mask[TX_64X64] = UV_INTRA_DC;
+ sf->intra_sf.intra_y_mode_mask[TX_32X32] = INTRA_DC;
+ sf->intra_sf.intra_y_mode_mask[TX_64X64] = INTRA_DC;
sf->part_sf.prune_rectangular_split_based_on_qidx =
boosted || allow_screen_content_tools ? 0 : 1;
@@ -1000,10 +1101,10 @@ static void set_good_speed_features_framesize_independent(
sf->mv_sf.simple_motion_subpel_force_stop = FULL_PEL;
sf->mv_sf.use_bsize_dependent_search_method = 1;
- sf->tpl_sf.disable_gop_length_decision = 1;
+ sf->tpl_sf.gop_length_decision_method = 3;
sf->tpl_sf.disable_filtered_key_tpl = 1;
- sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 2;
+ sf->tx_sf.tx_type_search.winner_mode_tx_type_pruning = 4;
sf->tx_sf.use_intra_txb_hash = 1;
sf->tx_sf.tx_type_search.prune_tx_type_est_rd = 0;
@@ -1052,10 +1153,13 @@ static void set_rt_speed_feature_framesize_dependent(const AV1_COMP *const cpi,
#endif
}
} else {
- if (speed == 8 && !cpi->use_svc) {
+ if (speed == 8 && !cpi->ppi->use_svc) {
sf->rt_sf.short_circuit_low_temp_var = 0;
sf->rt_sf.use_nonrd_altref_frame = 1;
}
+ if (speed >= 9) {
+ sf->rt_sf.skip_cdef_sb = 1;
+ }
}
if (!is_480p_or_larger) {
if (speed == 7) {
@@ -1088,6 +1192,8 @@ static void set_rt_speed_features_framesize_independent(AV1_COMP *cpi,
sf->part_sf.less_rectangular_check_level = 1;
sf->part_sf.ml_prune_partition = 1;
sf->part_sf.prune_ext_partition_types_search_level = 1;
+ sf->part_sf.reuse_prev_rd_results_for_part_ab = 1;
+ sf->part_sf.use_best_rd_for_pruning = 1;
// TODO(debargha): Test, tweak and turn on either 1 or 2
sf->inter_sf.inter_mode_rd_model_estimation = 0;
@@ -1103,6 +1209,7 @@ static void set_rt_speed_features_framesize_independent(AV1_COMP *cpi,
sf->interp_sf.use_fast_interpolation_filter_search = 1;
+ sf->intra_sf.dv_cost_upd_level = INTERNAL_COST_UPD_OFF;
sf->intra_sf.intra_pruning_with_hog = 1;
sf->mv_sf.full_pixel_search_level = 1;
@@ -1140,7 +1247,6 @@ static void set_rt_speed_features_framesize_independent(AV1_COMP *cpi,
sf->inter_sf.prune_comp_search_by_single_result = 1;
sf->inter_sf.reuse_inter_intra_mode = 1;
sf->inter_sf.selective_ref_frame = 2;
- sf->inter_sf.skip_repeated_newmv = 1;
sf->inter_sf.disable_interintra_wedge_var_thresh = 0;
sf->inter_sf.disable_interinter_wedge_var_thresh = 0;
sf->inter_sf.prune_comp_type_by_comp_avg = 1;
@@ -1191,7 +1297,7 @@ static void set_rt_speed_features_framesize_independent(AV1_COMP *cpi,
if (speed >= 3) {
sf->hl_sf.recode_loop = ALLOW_RECODE_KFARFGF;
- sf->gm_sf.gm_search_type = GM_DISABLE_SEARCH;
+ sf->gm_sf.gm_search_type = GM_REDUCED_REF_SEARCH_SKIP_L2_L3_ARF2;
sf->part_sf.less_rectangular_check_level = 2;
@@ -1202,7 +1308,7 @@ static void set_rt_speed_features_framesize_independent(AV1_COMP *cpi,
// sf->mv_sf.adaptive_motion_search = 1;
sf->inter_sf.adaptive_rd_thresh = 2;
- sf->inter_sf.mv_cost_upd_level = 1;
+ sf->inter_sf.mv_cost_upd_level = INTERNAL_COST_UPD_SBROW;
// TODO(yunqing): evaluate this speed feature for speed 1 & 2, and combine
// it with cpi->sf.disable_wedge_search_var_thresh.
sf->inter_sf.disable_interintra_wedge_var_thresh = UINT_MAX;
@@ -1306,12 +1412,20 @@ static void set_rt_speed_features_framesize_independent(AV1_COMP *cpi,
sf->part_sf.default_min_partition_size = BLOCK_8X8;
sf->part_sf.partition_search_type = VAR_BASED_PARTITION;
+ sf->gm_sf.gm_search_type = GM_DISABLE_SEARCH;
+
sf->mv_sf.search_method = FAST_DIAMOND;
sf->mv_sf.subpel_force_stop = QUARTER_PEL;
sf->mv_sf.subpel_search_method = SUBPEL_TREE_PRUNED;
sf->inter_sf.inter_mode_rd_model_estimation = 2;
+ // Disable intra_y_mode_mask pruning since the performance at speed 7 isn't
+ // good. May need more study.
+ for (int i = 0; i < TX_SIZES; ++i) {
+ sf->intra_sf.intra_y_mode_mask[i] = INTRA_ALL;
+ }
+
sf->lpf_sf.lpf_pick = LPF_PICK_FROM_Q;
sf->rt_sf.mode_search_skip_flags |= FLAG_SKIP_INTRA_DIRMISMATCH;
@@ -1348,7 +1462,7 @@ static void set_rt_speed_features_framesize_independent(AV1_COMP *cpi,
// TODO(marpan): Look into why enabling skip_loopfilter_non_reference is
// not bitexact on rtc testset, its very close (< ~0.01 bdrate), but not
// always bitexact.
- if (cpi->use_svc && cpi->svc.non_reference_frame &&
+ if (cpi->ppi->use_svc && cpi->svc.non_reference_frame &&
sf->lpf_sf.cdef_pick_method == CDEF_PICK_FROM_Q &&
sf->lpf_sf.lpf_pick == LPF_PICK_FROM_Q)
sf->rt_sf.skip_loopfilter_non_reference = 1;
@@ -1398,8 +1512,14 @@ static AOM_INLINE void init_hl_sf(HIGH_LEVEL_SPEED_FEATURES *hl_sf) {
hl_sf->second_alt_ref_filtering = 1;
}
+static AOM_INLINE void init_fp_sf(FIRST_PASS_SPEED_FEATURES *fp_sf) {
+ fp_sf->reduce_mv_step_param = 3;
+ fp_sf->skip_motion_search_threshold = 0;
+ fp_sf->disable_recon = 0;
+}
+
static AOM_INLINE void init_tpl_sf(TPL_SPEED_FEATURES *tpl_sf) {
- tpl_sf->disable_gop_length_decision = 0;
+ tpl_sf->gop_length_decision_method = 0;
tpl_sf->prune_intra_modes = 0;
tpl_sf->prune_starting_mv = 0;
tpl_sf->reduce_first_step_size = 0;
@@ -1415,6 +1535,7 @@ static AOM_INLINE void init_tpl_sf(TPL_SPEED_FEATURES *tpl_sf) {
static AOM_INLINE void init_gm_sf(GLOBAL_MOTION_SPEED_FEATURES *gm_sf) {
gm_sf->gm_search_type = GM_FULL_SEARCH;
gm_sf->prune_ref_frame_for_gm_search = 0;
+ gm_sf->prune_zero_mv_with_sse = 0;
}
static AOM_INLINE void init_part_sf(PARTITION_SPEED_FEATURES *part_sf) {
@@ -1454,6 +1575,9 @@ static AOM_INLINE void init_part_sf(PARTITION_SPEED_FEATURES *part_sf) {
part_sf->ml_predict_breakout_level = 0;
part_sf->prune_sub_8x8_partition_level = 0;
part_sf->simple_motion_search_rect_split = 0;
+ part_sf->reuse_prev_rd_results_for_part_ab = 0;
+ part_sf->reuse_best_prediction_for_part_ab = 0;
+ part_sf->use_best_rd_for_pruning = 0;
}
static AOM_INLINE void init_mv_sf(MV_SPEED_FEATURES *mv_sf) {
@@ -1487,16 +1611,17 @@ static AOM_INLINE void init_inter_sf(INTER_MODE_SPEED_FEATURES *inter_sf) {
inter_sf->fast_wedge_sign_estimate = 0;
inter_sf->use_dist_wtd_comp_flag = DIST_WTD_COMP_ENABLED;
inter_sf->reuse_inter_intra_mode = 0;
- inter_sf->mv_cost_upd_level = 0;
+ inter_sf->mv_cost_upd_level = INTERNAL_COST_UPD_SB;
+ inter_sf->coeff_cost_upd_level = INTERNAL_COST_UPD_SB;
+ inter_sf->mode_cost_upd_level = INTERNAL_COST_UPD_SB;
inter_sf->prune_inter_modes_based_on_tpl = 0;
inter_sf->prune_nearmv_using_neighbors = 0;
inter_sf->prune_comp_search_by_single_result = 0;
inter_sf->skip_repeated_ref_mv = 0;
- inter_sf->skip_repeated_newmv = 0;
- inter_sf->skip_repeated_full_newmv = 0;
+ inter_sf->skip_newmv_in_drl = 0;
inter_sf->inter_mode_rd_model_estimation = 0;
inter_sf->prune_compound_using_single_ref = 0;
- inter_sf->prune_compound_using_neighbors = 0;
+ inter_sf->prune_ext_comp_using_neighbors = 0;
inter_sf->prune_comp_using_best_single_mode_ref = 0;
inter_sf->disable_onesided_comp = 0;
inter_sf->prune_mode_search_simple_translation = 0;
@@ -1514,9 +1639,10 @@ static AOM_INLINE void init_inter_sf(INTER_MODE_SPEED_FEATURES *inter_sf) {
inter_sf->txfm_rd_gate_level = 0;
inter_sf->prune_inter_modes_if_skippable = 0;
inter_sf->disable_masked_comp = 0;
- inter_sf->reuse_best_prediction_for_part_ab = 0;
inter_sf->enable_fast_compound_mode_search = 0;
inter_sf->reuse_mask_search_results = 0;
+ inter_sf->enable_fast_wedge_mask_search = 0;
+ inter_sf->inter_mode_txfm_breakout = 0;
}
static AOM_INLINE void init_interp_sf(INTERP_FILTER_SPEED_FEATURES *interp_sf) {
@@ -1529,6 +1655,7 @@ static AOM_INLINE void init_interp_sf(INTERP_FILTER_SPEED_FEATURES *interp_sf) {
}
static AOM_INLINE void init_intra_sf(INTRA_MODE_SPEED_FEATURES *intra_sf) {
+ intra_sf->dv_cost_upd_level = INTERNAL_COST_UPD_SB;
intra_sf->skip_intra_in_interframe = 1;
intra_sf->intra_pruning_with_hog = 0;
intra_sf->chroma_intra_pruning_with_hog = 0;
@@ -1539,8 +1666,10 @@ static AOM_INLINE void init_intra_sf(INTRA_MODE_SPEED_FEATURES *intra_sf) {
intra_sf->intra_uv_mode_mask[i] = UV_INTRA_ALL;
}
intra_sf->disable_smooth_intra = 0;
- intra_sf->disable_filter_intra = 0;
+ intra_sf->prune_filter_intra_level = 0;
intra_sf->prune_chroma_modes_using_luma_winner = 0;
+ intra_sf->cfl_search_range = 3;
+ intra_sf->top_intra_model_count_allowed = TOP_INTRA_MODEL_COUNT;
}
static AOM_INLINE void init_tx_sf(TX_SPEED_FEATURES *tx_sf) {
@@ -1650,9 +1779,11 @@ void av1_set_speed_features_framesize_dependent(AV1_COMP *cpi, int speed) {
break;
}
- if (!cpi->seq_params_locked) {
- cpi->common.seq_params.enable_masked_compound &=
+ if (!cpi->ppi->seq_params_locked) {
+ cpi->common.seq_params->enable_masked_compound &=
!sf->inter_sf.disable_masked_comp;
+ cpi->common.seq_params->enable_interintra_compound &=
+ (sf->inter_sf.disable_interintra_wedge_var_thresh != UINT_MAX);
}
// This is only used in motion vector unit test.
@@ -1662,9 +1793,9 @@ void av1_set_speed_features_framesize_dependent(AV1_COMP *cpi, int speed) {
cpi->mv_search_params.find_fractional_mv_step = av1_return_min_sub_pixel_mv;
if ((cpi->oxcf.row_mt == 1) && (cpi->oxcf.max_threads > 1)) {
- if (sf->inter_sf.mv_cost_upd_level > 1) {
+ if (sf->inter_sf.mv_cost_upd_level < INTERNAL_COST_UPD_SBROW) {
// Set mv_cost_upd_level to use row level update.
- sf->inter_sf.mv_cost_upd_level = 1;
+ sf->inter_sf.mv_cost_upd_level = INTERNAL_COST_UPD_SBROW;
}
}
}
@@ -1676,6 +1807,7 @@ void av1_set_speed_features_framesize_independent(AV1_COMP *cpi, int speed) {
int i;
init_hl_sf(&sf->hl_sf);
+ init_fp_sf(&sf->fp_sf);
init_tpl_sf(&sf->tpl_sf);
init_gm_sf(&sf->gm_sf);
init_part_sf(&sf->part_sf);
@@ -1701,12 +1833,12 @@ void av1_set_speed_features_framesize_independent(AV1_COMP *cpi, int speed) {
break;
}
- if (!cpi->seq_params_locked) {
- cpi->common.seq_params.enable_dual_filter &=
+ if (!cpi->ppi->seq_params_locked) {
+ cpi->common.seq_params->enable_dual_filter &=
!sf->interp_sf.disable_dual_filter;
- cpi->common.seq_params.enable_restoration &= !sf->lpf_sf.disable_lr_filter;
+ cpi->common.seq_params->enable_restoration &= !sf->lpf_sf.disable_lr_filter;
- cpi->common.seq_params.enable_interintra_compound &=
+ cpi->common.seq_params->enable_interintra_compound &=
(sf->inter_sf.disable_interintra_wedge_var_thresh != UINT_MAX);
}
@@ -1821,10 +1953,11 @@ void av1_set_speed_features_qindex_dependent(AV1_COMP *cpi, int speed) {
SPEED_FEATURES *const sf = &cpi->sf;
WinnerModeParams *const winner_mode_params = &cpi->winner_mode_params;
const int boosted = frame_is_boosted(cpi);
+ const int is_480p_or_larger = AOMMIN(cm->width, cm->height) >= 480;
const int is_720p_or_larger = AOMMIN(cm->width, cm->height) >= 720;
const int is_1080p_or_larger = AOMMIN(cm->width, cm->height) >= 1080;
const int is_arf2_bwd_type =
- cpi->gf_group.update_type[cpi->gf_group.index] == INTNL_ARF_UPDATE;
+ cpi->ppi->gf_group.update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE;
if (cpi->oxcf.mode == REALTIME) return;
@@ -1832,7 +1965,6 @@ void av1_set_speed_features_qindex_dependent(AV1_COMP *cpi, int speed) {
// qindex_thresh for resolution < 720p
const int qindex_thresh = boosted ? 70 : (is_arf2_bwd_type ? 110 : 140);
if (!is_720p_or_larger && cm->quant_params.base_qindex <= qindex_thresh) {
- sf->inter_sf.skip_repeated_newmv = 1;
sf->part_sf.simple_motion_search_split =
cm->features.allow_screen_content_tools ? 1 : 2;
sf->part_sf.simple_motion_search_early_term_none = 1;
@@ -1849,7 +1981,6 @@ void av1_set_speed_features_qindex_dependent(AV1_COMP *cpi, int speed) {
sf->tx_sf.inter_tx_size_search_init_depth_rect = 1;
sf->tx_sf.inter_tx_size_search_init_depth_sqr = 1;
sf->tx_sf.intra_tx_size_search_init_depth_rect = 1;
- sf->inter_sf.skip_repeated_newmv = 1;
sf->tx_sf.model_based_prune_tx_search_level = 0;
if (is_1080p_or_larger && cm->quant_params.base_qindex <= 108) {
@@ -1866,28 +1997,25 @@ void av1_set_speed_features_qindex_dependent(AV1_COMP *cpi, int speed) {
}
}
- if (speed >= 3) {
- // Disable extended partitions for lower quantizers
- const int qindex_thresh =
- cm->features.allow_screen_content_tools ? 50 : 100;
- if (cm->quant_params.base_qindex <= qindex_thresh && !boosted) {
- sf->part_sf.ext_partition_eval_thresh = BLOCK_128X128;
- }
- }
-
- if (speed >= 4) {
+ if (speed >= 2) {
// Disable extended partitions for lower quantizers
- const int qindex_thresh = boosted ? 80 : 120;
- if (cm->quant_params.base_qindex <= qindex_thresh &&
- !frame_is_intra_only(&cpi->common)) {
- sf->part_sf.ext_partition_eval_thresh = BLOCK_128X128;
+ const int aggr = AOMMIN(3, speed - 2);
+ const int qindex_thresh1[4] = { 50, 50, 80, 100 };
+ const int qindex_thresh2[4] = { 80, 100, 120, 160 };
+ int qindex_thresh;
+ int disable_ext_part;
+ if (aggr <= 1) {
+ const int qthresh2 =
+ (!aggr && !is_480p_or_larger) ? 70 : qindex_thresh2[aggr];
+ qindex_thresh = cm->features.allow_screen_content_tools
+ ? qindex_thresh1[aggr]
+ : qthresh2;
+ disable_ext_part = !boosted;
+ } else {
+ qindex_thresh = boosted ? qindex_thresh1[aggr] : qindex_thresh2[aggr];
+ disable_ext_part = !frame_is_intra_only(cm);
}
- }
-
- if (speed >= 5) {
- const int qindex_thresh = boosted ? 100 : 160;
- if (cm->quant_params.base_qindex <= qindex_thresh &&
- !frame_is_intra_only(&cpi->common)) {
+ if (cm->quant_params.base_qindex <= qindex_thresh && disable_ext_part) {
sf->part_sf.ext_partition_eval_thresh = BLOCK_128X128;
}
}
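The rework above folds the old speed-3/4/5 qindex gates into one table indexed by aggr = AOMMIN(3, speed - 2). A standalone sketch of the decision with the same constants; frame properties are passed as plain ints here instead of going through AV1_COMMON, and the function name is illustrative.

#include <assert.h>

static int min_int(int a, int b) { return a < b ? a : b; }

/* Returns 1 if extended partitions should be gated off for this frame, i.e.
 * ext_partition_eval_thresh raised to BLOCK_128X128, mirroring the
 * speed >= 2 block above. */
static int limit_ext_partitions(int speed, int base_qindex, int boosted,
                                int allow_screen_content, int is_intra_only,
                                int is_480p_or_larger) {
  static const int qindex_thresh1[4] = { 50, 50, 80, 100 };
  static const int qindex_thresh2[4] = { 80, 100, 120, 160 };
  const int aggr = min_int(3, speed - 2);
  int qindex_thresh, disable_ext_part;
  assert(speed >= 2);
  if (aggr <= 1) {
    const int qthresh2 =
        (!aggr && !is_480p_or_larger) ? 70 : qindex_thresh2[aggr];
    qindex_thresh = allow_screen_content ? qindex_thresh1[aggr] : qthresh2;
    disable_ext_part = !boosted;
  } else {
    qindex_thresh = boosted ? qindex_thresh1[aggr] : qindex_thresh2[aggr];
    disable_ext_part = !is_intra_only;
  }
  return base_qindex <= qindex_thresh && disable_ext_part;
}

At speed 5 and above (aggr = 3) a non-boosted inter frame is gated at base_qindex <= 160, which reproduces the old speed >= 5 branch; the aggr = 0 row covers speed 2, which previously had no such gate.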
diff --git a/third_party/libaom/source/libaom/av1/encoder/speed_features.h b/third_party/libaom/source/libaom/av1/encoder/speed_features.h
index 90765febfb..3cf4c3d10b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/speed_features.h
+++ b/third_party/libaom/source/libaom/av1/encoder/speed_features.h
@@ -287,17 +287,30 @@ enum {
SUPERRES_AUTO_DUAL, // Tries no superres and q-based superres ratios
SUPERRES_AUTO_SOLO, // Only apply the q-based superres ratio
} UENUM1BYTE(SUPERRES_AUTO_SEARCH_TYPE);
-
/*!\endcond */
+
+/*!\enum INTERNAL_COST_UPDATE_TYPE
+ * \brief This enum decides internally how often to update the entropy costs
+ *
+ * INTERNAL_COST_UPD_TYPE is similar to \ref COST_UPDATE_TYPE but has slightly
+ * more flexibility in update frequency. This enum is separate from \ref
+ * COST_UPDATE_TYPE because although \ref COST_UPDATE_TYPE is not exposed, its
+ * values are public so it cannot be modified without breaking public API.
+ */
+typedef enum {
+ INTERNAL_COST_UPD_OFF, /*!< Turn off cost updates. */
+ INTERNAL_COST_UPD_SBROW_SET, /*!< Update every row set of height 256 pixels. */
+ INTERNAL_COST_UPD_SBROW, /*!< Update once per sb row inside a tile. */
+ INTERNAL_COST_UPD_SB, /*!< Update every sb. */
+} INTERNAL_COST_UPDATE_TYPE;
+
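These levels are consumed by the mv/coeff/mode cost-update speed features added further down. A hypothetical gate showing how such a level might be interpreted per superblock; the function, its parameters, and the 256-pixel row-set grouping are assumptions drawn from the comments above, not libaom API.

/* Hypothetical: decide whether to refresh a cost table at the start of the
 * given superblock, using the INTERNAL_COST_UPDATE_TYPE introduced above.
 * sb_row/sb_col index superblocks inside the tile; sb_size_log2 is the log2
 * of the superblock height in pixels (6 for 64x64, 7 for 128x128). */
static int should_update_costs(INTERNAL_COST_UPDATE_TYPE level, int sb_row,
                               int sb_col, int sb_size_log2) {
  switch (level) {
    case INTERNAL_COST_UPD_OFF: return 0;
    case INTERNAL_COST_UPD_SBROW_SET: {
      /* One update per row set of roughly 256 pixels, i.e. every
       * (256 >> sb_size_log2) superblock rows, at the first SB of that row. */
      const int rows_per_set = 256 >> sb_size_log2;
      return sb_col == 0 && (sb_row % rows_per_set) == 0;
    }
    case INTERNAL_COST_UPD_SBROW: return sb_col == 0; /* first SB of each row */
    case INTERNAL_COST_UPD_SB: return 1;              /* every superblock */
  }
  return 1;
}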
/*!
* \brief Sequence/frame level speed vs quality features
*/
typedef struct HIGH_LEVEL_SPEED_FEATURES {
- /*!\cond */
- // Frame level coding parameter update
+ /*! Frame level coding parameter update. */
int frame_parameter_update;
- /*!\endcond */
/*!
* Cases and frame types for which the recode loop is enabled.
*/
@@ -309,25 +322,27 @@ typedef struct HIGH_LEVEL_SPEED_FEATURES {
*/
int recode_tolerance;
- /*!\cond */
- // Determine how motion vector precision is chosen. The possibilities are:
- // LAST_MV_DATA: use the mv data from the last coded frame
- // CURRENT_Q: use the current q as a threshold
- // QTR_ONLY: use quarter pel precision only.
+ /*!
+ * Determine how motion vector precision is chosen. The possibilities are:
+ * LAST_MV_DATA: use the mv data from the last coded frame
+ * CURRENT_Q: use the current q as a threshold
+ * QTR_ONLY: use quarter pel precision only.
+ */
MV_PREC_LOGIC high_precision_mv_usage;
- // Always set to 0. If on it enables 0 cost background transmission
- // (except for the initial transmission of the segmentation). The feature is
- // disabled because the addition of very large block sizes make the
- // backgrounds very to cheap to encode, and the segmentation we have
- // adds overhead.
+ /*!
+ * Always set to 0. If on it enables 0 cost background transmission
+ * (except for the initial transmission of the segmentation). The feature is
+ * disabled because the addition of very large block sizes makes the
+ * backgrounds very cheap to encode, and the segmentation we have
+ * adds overhead.
+ */
int static_segmentation;
/*!
* Superres-auto mode search type:
*/
SUPERRES_AUTO_SEARCH_TYPE superres_auto_search_type;
- /*!\endcond */
/*!
* Enable/disable extra screen content test by encoding key frame twice.
@@ -340,10 +355,39 @@ typedef struct HIGH_LEVEL_SPEED_FEATURES {
int second_alt_ref_filtering;
} HIGH_LEVEL_SPEED_FEATURES;
+/*!
+ * Speed features for the first pass.
+ */
+typedef struct FIRST_PASS_SPEED_FEATURES {
+ /*!
+ * \brief Reduces the mv search window.
+ * By default, the initial search window is around
+ * MIN(MIN(dims), MAX_FULL_PEL_VAL) = MIN(MIN(dims), 1023).
+ * Each step of reduction decreases the window size by about a factor of 2.
+ */
+ int reduce_mv_step_param;
+
+ /*!
+ * \brief Skips the motion search when the zero mv has small sse.
+ */
+ int skip_motion_search_threshold;
+
+ /*!
+ * \brief Skips reconstruction by using source buffers for prediction
+ */
+ int disable_recon;
+} FIRST_PASS_SPEED_FEATURES;
+
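The struct comment says the default window is about MIN(MIN(dims), 1023) and that each step of reduce_mv_step_param roughly halves it. A small arithmetic sketch under exactly that assumption (the real libaom step-size computation is not reproduced here); init_fp_sf earlier in this diff defaults the value to 3, and the speed >= 5 block raises it to 4.

#include <stdio.h>

static int min_int(int a, int b) { return a < b ? a : b; }

/* Rough first-pass search window under the halving assumption stated in the
 * FIRST_PASS_SPEED_FEATURES comment above. */
static int approx_fp_search_window(int width, int height, int step_param) {
  int window = min_int(min_int(width, height), 1023);
  for (int i = 0; i < step_param; ++i) window >>= 1;
  return window;
}

int main(void) {
  printf("1080p, step 3: ~%d\n", approx_fp_search_window(1920, 1080, 3)); /* ~127 */
  printf("1080p, step 4: ~%d\n", approx_fp_search_window(1920, 1080, 4)); /* ~63 */
  return 0;
}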
/*!\cond */
typedef struct TPL_SPEED_FEATURES {
- // Enable/disable GOP length adaptive decision.
- int disable_gop_length_decision;
+ // GOP length adaptive decision.
+ // If set to 0, tpl model decides whether a shorter gf interval is better.
+ // If set to 1, tpl stats of ARFs from base layer, (base+1) layer and
+ // (base+2) layer decide whether a shorter gf interval is better.
+ // If set to 2, tpl stats of ARFs from base layer, (base+1) layer and GF boost
+ // decide whether a shorter gf interval is better.
+ // If set to 3, gop length adaptive decision is disabled.
+ int gop_length_decision_method;
// Prune the intra modes search by tpl.
// If set to 0, we will search all intra modes from DC_PRED to PAETH_PRED.
// If set to 1, we only search DC_PRED, V_PRED, and H_PRED.
@@ -387,6 +431,10 @@ typedef struct GLOBAL_MOTION_SPEED_FEATURES {
// given direction(past/future), if the evaluated ref_frame in that direction
// yields gm_type as INVALID/TRANSLATION/IDENTITY
int prune_ref_frame_for_gm_search;
+
+ // When the current GM type is set to ZEROMV, prune ZEROMV if its performance
+ // is worse than NEWMV under SSE metric.
+ int prune_zero_mv_with_sse;
} GLOBAL_MOTION_SPEED_FEATURES;
typedef struct PARTITION_SPEED_FEATURES {
@@ -511,6 +559,53 @@ typedef struct PARTITION_SPEED_FEATURES {
// Prune rectangular split based on simple motion search split/no_split score.
// 0: disable pruning, 1: enable pruning
int simple_motion_search_rect_split;
+
+ // The current encoder adopts a DFS search for block partitions.
+ // Therefore the mode selection and associated rdcost is ready for smaller
+ // blocks before the mode selection for some partition types.
+ // AB partition could use previous rd information and skip mode search.
+ // An example is:
+ //
+ // current block
+ // +---+---+
+ // | |
+ // + +
+ // | |
+ // +-------+
+ //
+ // SPLIT partition has been searched first before trying HORZ_A
+ // +---+---+
+ // | R | R |
+ // +---+---+
+ // | R | R |
+ // +---+---+
+ //
+ // HORZ_A
+ // +---+---+
+ // | | |
+ // +---+---+
+ // | |
+ // +-------+
+ //
+ // With this speed feature, the top two sub blocks can directly use rdcost
+ // searched in split partition, and the mode info is also copied from
+ // saved info. Similarly, the bottom rectangular block can also use
+ // the available information from previous rectangular search.
+ int reuse_prev_rd_results_for_part_ab;
+
+ // Reuse the best prediction modes found in PARTITION_SPLIT and PARTITION_RECT
+ // when encoding PARTITION_AB.
+ int reuse_best_prediction_for_part_ab;
+
+ // The current partition search records the best rdcost so far and uses it
+ // in mode search and transform search to skip early when some criterion is
+ // met. For example, when the current rdcost is larger than the best rdcost,
+ // or the model rdcost is larger than the best rdcost times some thresholds.
+ // By default, this feature is turned on to speed up the encoder partition
+ // search.
+ // If it is disabled, at speed 0 over 30 frames, we could get about -0.25%
+ // quality gain (psnr, ssim, vmaf), with about a 13% slowdown.
+ int use_best_rd_for_pruning;
} PARTITION_SPEED_FEATURES;
typedef struct MV_SPEED_FEATURES {
@@ -621,16 +716,19 @@ typedef struct INTER_MODE_SPEED_FEATURES {
int alt_ref_search_fp;
- // flag to skip NEWMV mode in drl if the motion search result is the same
- int skip_repeated_newmv;
-
- // Skip the current ref_mv in NEW_MV mode if we have already encountered
- // another ref_mv in the drl such that:
- // 1. The other drl has the same fullpel_mv during the SIMPLE_TRANSLATION
- // search process as the current fullpel_mv.
- // 2. The rate needed to encode the current fullpel_mv is larger than that
- // for the other ref_mv.
- int skip_repeated_full_newmv;
+ // Skip the current ref_mv in NEW_MV mode based on mv, rate cost, etc.
+ // A value of 0 means no skipping.
+ // If the value is 1 or 2, skip the current ref_mv in NEW_MV mode if we have
+ // already encountered another ref_mv in the drl such that:
+ // 1. The other drl entry has the same mv during the SIMPLE_TRANSLATION
+ // search process as the current mv.
+ // 2. The rate needed to encode the current mv is larger than that for the
+ // other ref_mv.
+ // A value of 1 compares subpel mvs; a value of 2 compares fullpel mvs.
+ // If the value is >= 3, skip the current ref_mv in NEW_MV mode based on the
+ // known full_mv bestsme and drl cost.
+ int skip_newmv_in_drl;
// This speed feature checks duplicate ref MVs among NEARESTMV, NEARMV,
// GLOBALMV and skips NEARMV or GLOBALMV (in order) if a duplicate is found
@@ -677,12 +775,14 @@ typedef struct INTER_MODE_SPEED_FEATURES {
// the single reference modes, it is one of the two best performers.
int prune_compound_using_single_ref;
- // Skip extended compound mode using ref frames of above and left neighbor
+ // Skip extended compound mode (NEAREST_NEWMV, NEW_NEARESTMV, NEAR_NEWMV,
+ // NEW_NEARMV) using ref frames of above and left neighbor
// blocks.
// 0 : no pruning
- // 1 : prune extended compound mode (less aggressiveness)
- // 2 : prune extended compound mode (high aggressiveness)
- int prune_compound_using_neighbors;
+ // 1 : prune ext compound modes using neighbor blocks (less aggressiveness)
+ // 2 : prune ext compound modes using neighbor blocks (high aggressiveness)
+ // 3 : prune ext compound modes unconditionally (highest aggressiveness)
+ int prune_ext_comp_using_neighbors;
// Skip extended compound mode when ref frame corresponding to NEWMV does not
// have NEWMV as single mode winner.
@@ -722,12 +822,15 @@ typedef struct INTER_MODE_SPEED_FEATURES {
// Decide when and how to use joint_comp.
DIST_WTD_COMP_FLAG use_dist_wtd_comp_flag;
- // To skip cost update for mv.
- // mv_cost_upd_level indicates the aggressiveness of skipping.
- // 0: update happens at each sb level.
- // 1: update happens once for each sb row.
- // 2: update happens once for a set of rows.
- int mv_cost_upd_level;
+ // Clip the frequency of updating the mv cost.
+ INTERNAL_COST_UPDATE_TYPE mv_cost_upd_level;
+
+ // Clip the frequency of updating the coeff cost.
+ INTERNAL_COST_UPDATE_TYPE coeff_cost_upd_level;
+
+ // Clip the frequency of updating the mode cost.
+ INTERNAL_COST_UPDATE_TYPE mode_cost_upd_level;
+
// Prune inter modes based on tpl stats
// 0 : no pruning
// 1 - 3 indicate increasing aggressiveness in order.
@@ -750,15 +853,17 @@ typedef struct INTER_MODE_SPEED_FEATURES {
// Enable/disable masked compound.
int disable_masked_comp;
- // Reuse the best prediction modes found in PARTITION_SPLIT and PARTITION_RECT
- // when encoding PARTITION_AB.
- int reuse_best_prediction_for_part_ab;
-
// Enable/disable the fast compound mode search.
int enable_fast_compound_mode_search;
// Reuse masked compound type search results
int reuse_mask_search_results;
+
+ // Enable/disable fast search for wedge masks
+ int enable_fast_wedge_mask_search;
+
+ // Early breakout from transform search of inter modes
+ int inter_mode_txfm_breakout;
} INTER_MODE_SPEED_FEATURES;
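The skip_newmv_in_drl levels described above compare the current NEWMV result against earlier drl entries. A minimal sketch of levels 1 and 2 (illustrative only; the MV struct and rate values are simplified stand-ins, and level >= 3, which also uses the stored bestsme, is not modeled):

#include <stdint.h>

// Sketch only; simplified stand-in types, not libaom's own.
typedef struct {
  int16_t row, col;
} SketchMv;

static int same_mv(SketchMv a, SketchMv b) {
  return a.row == b.row && a.col == b.col;
}

// Returns 1 when the current ref_mv's NEWMV result can be skipped: an earlier
// drl entry produced the same mv at the configured precision (subpel for
// level 1, fullpel for level 2) and is cheaper to signal.
static int skip_newmv_for_this_drl(int level, SketchMv cur_subpel,
                                   SketchMv prev_subpel, SketchMv cur_fullpel,
                                   SketchMv prev_fullpel, int cur_mv_rate,
                                   int prev_mv_rate) {
  if (level != 1 && level != 2) return 0;
  const int mv_match = (level == 1) ? same_mv(cur_subpel, prev_subpel)
                                    : same_mv(cur_fullpel, prev_fullpel);
  return mv_match && cur_mv_rate > prev_mv_rate;
}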
typedef struct INTERP_FILTER_SPEED_FEATURES {
@@ -808,8 +913,11 @@ typedef struct INTRA_MODE_SPEED_FEATURES {
// Enable/disable smooth intra modes.
int disable_smooth_intra;
- // Enable/disable filter intra modes.
- int disable_filter_intra;
+ // Prune filter intra modes in intra frames.
+ // 0 : No pruning
+ // 1 : Evaluate applicable filter intra modes based on best intra mode so far
+ // 2 : Do not evaluate filter intra modes
+ int prune_filter_intra_level;
// prune palette search
// 0: No pruning
@@ -825,6 +933,27 @@ typedef struct INTRA_MODE_SPEED_FEATURES {
// 1: Prune chroma intra modes other than UV_DC_PRED, UV_SMOOTH_PRED,
// UV_CFL_PRED and the mode that corresponds to luma intra mode winner.
int prune_chroma_modes_using_luma_winner;
+
+ // Clip the frequency of updating the mv cost for intrabc.
+ INTERNAL_COST_UPDATE_TYPE dv_cost_upd_level;
+
+ // We use DCT_DCT transform followed by computing SATD (Sum of Absolute
+ // Transformed Differences) as an estimation of RD score to quickly find the
+ // best possible Chroma from Luma (CFL) parameter. Then we do a full RD search
+ // near the best possible parameter. The search range is set here.
+ // The range of cfl_search_range should be [1, 33], and the following are the
+ // recommended values.
+ // 1: Fastest mode.
+ // 3: Default mode that provides good speedup without losing compression
+ // performance at speed 0.
+ // 33: Exhaustive rd search (33 == CFL_MAGS_SIZE). This mode should only
+ // be used for debugging purposes.
+ int cfl_search_range;
+
+ // TOP_INTRA_MODEL_COUNT is 4, which is the number of top model rd values
+ // stored in the intra mode decision. This speed feature reduces that number
+ // for higher speeds.
+ int top_intra_model_count_allowed;
} INTRA_MODE_SPEED_FEATURES;
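cfl_search_range above bounds a coarse-to-fine search: a SATD estimate over DCT_DCT residuals picks a starting CFL parameter, and full RD is then evaluated only in a window around it. A sketch of the window clamping under one plausible reading of the [1, 33] range (illustrative only; rd_cost_of is a hypothetical callback, and the real search lives elsewhere in libaom):

#include <stdint.h>

#define SKETCH_CFL_MAGS_SIZE 33  // mirrors the CFL_MAGS_SIZE noted above

// Sketch only; returns the index with the lowest RD cost inside a window of
// +/- (cfl_search_range - 1) around the SATD winner, clamped to valid indices.
static int cfl_refine_around_satd_winner(int cfl_search_range,
                                         int satd_best_idx,
                                         int64_t (*rd_cost_of)(int idx)) {
  int lo = satd_best_idx - (cfl_search_range - 1);
  int hi = satd_best_idx + (cfl_search_range - 1);
  if (lo < 0) lo = 0;
  if (hi > SKETCH_CFL_MAGS_SIZE - 1) hi = SKETCH_CFL_MAGS_SIZE - 1;
  int best_idx = satd_best_idx;
  int64_t best_rd = rd_cost_of(best_idx);
  for (int idx = lo; idx <= hi; ++idx) {
    const int64_t rd = rd_cost_of(idx);
    if (rd < best_rd) {
      best_rd = rd;
      best_idx = idx;
    }
  }
  return best_idx;
}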
typedef struct TX_SPEED_FEATURES {
@@ -1082,6 +1211,11 @@ typedef struct REAL_TIME_SPEED_FEATURES {
// Skips mode checks more aggressively in nonRD mode
int nonrd_agressive_skip;
+
+ // Skip cdef on 64x64 blocks when NEWMV or INTRA is not picked or color
+ // sensitivity is off. When color sensitivity is on for a superblock, none
+ // of the 64x64 blocks within it are skipped.
+ int skip_cdef_sb;
} REAL_TIME_SPEED_FEATURES;
/*!\endcond */
@@ -1096,6 +1230,11 @@ typedef struct SPEED_FEATURES {
HIGH_LEVEL_SPEED_FEATURES hl_sf;
/*!
+ * Speed features for the first pass.
+ */
+ FIRST_PASS_SPEED_FEATURES fp_sf;
+
+ /*!
* Speed features related to how tpl's searches are done.
*/
TPL_SPEED_FEATURES tpl_sf;
diff --git a/third_party/libaom/source/libaom/av1/encoder/superres_scale.c b/third_party/libaom/source/libaom/av1/encoder/superres_scale.c
index bcd3fefdfe..283faabe61 100644
--- a/third_party/libaom/source/libaom/av1/encoder/superres_scale.c
+++ b/third_party/libaom/source/libaom/av1/encoder/superres_scale.c
@@ -80,7 +80,7 @@ static uint8_t calculate_next_resize_scale(const AV1_COMP *cpi) {
if (is_stat_generation_stage(cpi)) return SCALE_NUMERATOR;
uint8_t new_denom = SCALE_NUMERATOR;
- if (cpi->common.seq_params.reduced_still_picture_hdr) return SCALE_NUMERATOR;
+ if (cpi->common.seq_params->reduced_still_picture_hdr) return SCALE_NUMERATOR;
switch (resize_cfg->resize_mode) {
case RESIZE_NONE: new_denom = SCALE_NUMERATOR; break;
case RESIZE_FIXED:
@@ -109,12 +109,13 @@ int av1_superres_in_recode_allowed(const AV1_COMP *const cpi) {
#define SUPERRES_ENERGY_BY_AC_THRESH 0.2
static double get_energy_by_q2_thresh(const GF_GROUP *gf_group,
- const RATE_CONTROL *rc) {
+ const RATE_CONTROL *rc,
+ int gf_frame_index) {
// TODO(now): Return keyframe thresh * factor based on frame type / pyramid
// level.
- if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
+ if (gf_group->update_type[gf_frame_index] == ARF_UPDATE) {
return SUPERRES_ENERGY_BY_Q2_THRESH_ARFFRAME;
- } else if (gf_group->update_type[gf_group->index] == KF_UPDATE) {
+ } else if (gf_group->update_type[gf_frame_index] == KF_UPDATE) {
if (rc->frames_to_key <= 1)
return SUPERRES_ENERGY_BY_Q2_THRESH_KEYFRAME_SOLO;
else
@@ -142,15 +143,15 @@ static uint8_t get_superres_denom_from_qindex_energy(int qindex, double *energy,
static uint8_t get_superres_denom_for_qindex(const AV1_COMP *cpi, int qindex,
int sr_kf, int sr_arf) {
// Use superres for Key-frames and Alt-ref frames only.
- const GF_GROUP *gf_group = &cpi->gf_group;
- if (gf_group->update_type[gf_group->index] != KF_UPDATE &&
- gf_group->update_type[gf_group->index] != ARF_UPDATE) {
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
+ if (gf_group->update_type[cpi->gf_frame_index] != KF_UPDATE &&
+ gf_group->update_type[cpi->gf_frame_index] != ARF_UPDATE) {
return SCALE_NUMERATOR;
}
- if (gf_group->update_type[gf_group->index] == KF_UPDATE && !sr_kf) {
+ if (gf_group->update_type[cpi->gf_frame_index] == KF_UPDATE && !sr_kf) {
return SCALE_NUMERATOR;
}
- if (gf_group->update_type[gf_group->index] == ARF_UPDATE && !sr_arf) {
+ if (gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE && !sr_arf) {
return SCALE_NUMERATOR;
}
@@ -158,7 +159,7 @@ static uint8_t get_superres_denom_for_qindex(const AV1_COMP *cpi, int qindex,
analyze_hor_freq(cpi, energy);
const double energy_by_q2_thresh =
- get_energy_by_q2_thresh(gf_group, &cpi->rc);
+ get_energy_by_q2_thresh(gf_group, &cpi->rc, cpi->gf_frame_index);
int denom = get_superres_denom_from_qindex_energy(
qindex, energy, energy_by_q2_thresh, SUPERRES_ENERGY_BY_AC_THRESH);
/*
@@ -166,8 +167,8 @@ static uint8_t get_superres_denom_for_qindex(const AV1_COMP *cpi, int qindex,
for (int k = 1; k < 16; ++k) printf("%f, ", energy[k]);
printf("]\n");
printf("boost = %d\n",
- (gf_group->update_type[gf_group->index] == KF_UPDATE)
- ? cpi->rc.kf_boost
+ (gf_group->update_type[cpi->gf_frame_index] == KF_UPDATE)
+ ? cpi->ppi->p_rc.kf_boost
: cpi->rc.gfu_boost);
printf("denom = %d\n", denom);
*/
@@ -194,8 +195,8 @@ static uint8_t calculate_next_superres_scale(AV1_COMP *cpi) {
// Make sure that superres mode of the frame is consistent with the
// sequence-level flag.
assert(IMPLIES(superres_cfg->superres_mode != AOM_SUPERRES_NONE,
- cpi->common.seq_params.enable_superres));
- assert(IMPLIES(!cpi->common.seq_params.enable_superres,
+ cpi->common.seq_params->enable_superres));
+ assert(IMPLIES(!cpi->common.seq_params->enable_superres,
superres_cfg->superres_mode == AOM_SUPERRES_NONE));
// Make sure that superres mode for current encoding is consistent with user
// provided superres mode.
@@ -222,8 +223,8 @@ static uint8_t calculate_next_superres_scale(AV1_COMP *cpi) {
// Now decide the use of superres based on 'q'.
int bottom_index, top_index;
const int q = av1_rc_pick_q_and_bounds(
- cpi, &cpi->rc, frm_dim_cfg->width, frm_dim_cfg->height,
- cpi->gf_group.index, &bottom_index, &top_index);
+ cpi, frm_dim_cfg->width, frm_dim_cfg->height, cpi->gf_frame_index,
+ &bottom_index, &top_index);
const int qthresh = (frame_is_intra_only(&cpi->common))
? superres_cfg->superres_kf_qthresh
@@ -243,8 +244,8 @@ static uint8_t calculate_next_superres_scale(AV1_COMP *cpi) {
// Now decide the use of superres based on 'q'.
int bottom_index, top_index;
const int q = av1_rc_pick_q_and_bounds(
- cpi, &cpi->rc, frm_dim_cfg->width, frm_dim_cfg->height,
- cpi->gf_group.index, &bottom_index, &top_index);
+ cpi, frm_dim_cfg->width, frm_dim_cfg->height, cpi->gf_frame_index,
+ &bottom_index, &top_index);
const SUPERRES_AUTO_SEARCH_TYPE sr_search_type =
cpi->sf.hl_sf.superres_auto_search_type;
@@ -345,7 +346,7 @@ static size_params_type calculate_next_size_params(AV1_COMP *cpi) {
size_params_type rsz = { frm_dim_cfg->width, frm_dim_cfg->height,
SCALE_NUMERATOR };
int resize_denom = SCALE_NUMERATOR;
- if (has_no_stats_stage(cpi) && cpi->use_svc &&
+ if (has_no_stats_stage(cpi) && cpi->ppi->use_svc &&
cpi->svc.spatial_layer_id < cpi->svc.number_spatial_layers - 1) {
rsz.resize_width = cpi->common.width;
rsz.resize_height = cpi->common.height;
diff --git a/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.c b/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.c
index 17109201e6..5cff958a85 100644
--- a/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.c
+++ b/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.c
@@ -30,6 +30,7 @@ void av1_init_layer_context(AV1_COMP *const cpi) {
svc->current_superframe = 0;
svc->force_zero_mode_spatial_ref = 1;
svc->num_encoded_top_layer = 0;
+ svc->use_flexible_mode = 0;
for (int sl = 0; sl < svc->number_spatial_layers; ++sl) {
for (int tl = 0; tl < svc->number_temporal_layers; ++tl) {
@@ -90,6 +91,7 @@ void av1_init_layer_context(AV1_COMP *const cpi) {
void av1_update_layer_context_change_config(AV1_COMP *const cpi,
const int64_t target_bandwidth) {
const RATE_CONTROL *const rc = &cpi->rc;
+ const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
SVC *const svc = &cpi->svc;
int layer = 0;
int64_t spatial_layer_target = 0;
@@ -106,17 +108,18 @@ void av1_update_layer_context_change_config(AV1_COMP *const cpi,
LAYER_CONTEXT *const lc =
&svc->layer_context[sl * svc->number_temporal_layers + tl];
RATE_CONTROL *const lrc = &lc->rc;
+ PRIMARY_RATE_CONTROL *const lp_rc = &lc->p_rc;
lc->spatial_layer_target_bandwidth = spatial_layer_target;
bitrate_alloc = (float)lc->target_bandwidth / target_bandwidth;
- lrc->starting_buffer_level =
- (int64_t)(rc->starting_buffer_level * bitrate_alloc);
- lrc->optimal_buffer_level =
- (int64_t)(rc->optimal_buffer_level * bitrate_alloc);
- lrc->maximum_buffer_size =
- (int64_t)(rc->maximum_buffer_size * bitrate_alloc);
+ lp_rc->starting_buffer_level =
+ (int64_t)(p_rc->starting_buffer_level * bitrate_alloc);
+ lp_rc->optimal_buffer_level =
+ (int64_t)(p_rc->optimal_buffer_level * bitrate_alloc);
+ lp_rc->maximum_buffer_size =
+ (int64_t)(p_rc->maximum_buffer_size * bitrate_alloc);
lrc->bits_off_target =
- AOMMIN(lrc->bits_off_target, lrc->maximum_buffer_size);
- lrc->buffer_level = AOMMIN(lrc->buffer_level, lrc->maximum_buffer_size);
+ AOMMIN(lrc->bits_off_target, lp_rc->maximum_buffer_size);
+ lrc->buffer_level = AOMMIN(lrc->buffer_level, lp_rc->maximum_buffer_size);
lc->framerate = cpi->framerate / lc->framerate_factor;
lrc->avg_frame_bandwidth = (int)(lc->target_bandwidth / lc->framerate);
lrc->max_frame_bandwidth = rc->max_frame_bandwidth;
@@ -164,7 +167,6 @@ void av1_update_temporal_layer_framerate(AV1_COMP *const cpi) {
}
void av1_restore_layer_context(AV1_COMP *const cpi) {
- GF_GROUP *const gf_group = &cpi->gf_group;
SVC *const svc = &cpi->svc;
const AV1_COMMON *const cm = &cpi->common;
LAYER_CONTEXT *const lc = get_layer_context(cpi);
@@ -172,8 +174,9 @@ void av1_restore_layer_context(AV1_COMP *const cpi) {
const int old_frame_to_key = cpi->rc.frames_to_key;
// Restore layer rate control.
cpi->rc = lc->rc;
+ cpi->ppi->p_rc = lc->p_rc;
cpi->oxcf.rc_cfg.target_bandwidth = lc->target_bandwidth;
- gf_group->index = 0;
+ cpi->gf_frame_index = 0;
cpi->mv_search_params.max_mv_magnitude = lc->max_mv_magnitude;
if (cpi->mv_search_params.max_mv_magnitude == 0)
cpi->mv_search_params.max_mv_magnitude = AOMMAX(cm->width, cm->height);
@@ -198,7 +201,7 @@ void av1_restore_layer_context(AV1_COMP *const cpi) {
// This is to skip searching mv for that reference if it was last
// refreshed (i.e., buffer slot holding that reference was refreshed) on the
// previous spatial layer(s) at the same time (current_superframe).
- if (svc->external_ref_frame_config && svc->force_zero_mode_spatial_ref) {
+ if (svc->set_ref_frame_config && svc->force_zero_mode_spatial_ref) {
int ref_frame_idx = svc->ref_idx[LAST_FRAME - 1];
if (svc->buffer_time_index[ref_frame_idx] == svc->current_superframe &&
svc->buffer_spatial_layer[ref_frame_idx] <= svc->spatial_layer_id - 1)
@@ -211,13 +214,13 @@ void av1_restore_layer_context(AV1_COMP *const cpi) {
}
void av1_save_layer_context(AV1_COMP *const cpi) {
- GF_GROUP *const gf_group = &cpi->gf_group;
SVC *const svc = &cpi->svc;
const AV1_COMMON *const cm = &cpi->common;
LAYER_CONTEXT *lc = get_layer_context(cpi);
lc->rc = cpi->rc;
+ lc->p_rc = cpi->ppi->p_rc;
lc->target_bandwidth = (int)cpi->oxcf.rc_cfg.target_bandwidth;
- lc->group_index = gf_group->index;
+ lc->group_index = cpi->gf_frame_index;
lc->max_mv_magnitude = cpi->mv_search_params.max_mv_magnitude;
if (svc->spatial_layer_id == 0) svc->base_framerate = cpi->framerate;
// For spatial-svc, allow cyclic-refresh to be applied on the spatial layers,
@@ -243,7 +246,7 @@ void av1_save_layer_context(AV1_COMP *const cpi) {
svc->buffer_time_index[i] = svc->current_superframe;
svc->buffer_spatial_layer[i] = svc->spatial_layer_id;
}
- } else if (cpi->svc.external_ref_frame_config) {
+ } else if (cpi->svc.set_ref_frame_config) {
for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
int ref_frame_map_idx = svc->ref_idx[i];
if (cpi->svc.refresh[ref_frame_map_idx]) {
@@ -342,3 +345,171 @@ void av1_one_pass_cbr_svc_start_layer(AV1_COMP *const cpi) {
cpi->common.height = height;
av1_update_frame_size(cpi);
}
+
+enum {
+ SVC_LAST_FRAME = 0,
+ SVC_LAST2_FRAME,
+ SVC_LAST3_FRAME,
+ SVC_GOLDEN_FRAME,
+ SVC_BWDREF_FRAME,
+ SVC_ALTREF2_FRAME,
+ SVC_ALTREF_FRAME
+};
+
+// For fixed svc mode: fixed pattern is set based on the number of
+// spatial and temporal layers, and the ksvc_fixed_mode.
+void av1_set_svc_fixed_mode(AV1_COMP *const cpi) {
+ SVC *const svc = &cpi->svc;
+ int i;
+ assert(svc->use_flexible_mode == 0);
+ // Fixed SVC mode supports at most 3 spatial or temporal layers.
+ assert(svc->number_spatial_layers >= 1 && svc->number_spatial_layers <= 3 &&
+ svc->number_temporal_layers >= 1 && svc->number_temporal_layers <= 3);
+ svc->set_ref_frame_config = 1;
+ int superframe_cnt = svc->current_superframe;
+ // Set the reference map buffer idx for the 7 references:
+ // LAST_FRAME (0), LAST2_FRAME(1), LAST3_FRAME(2), GOLDEN_FRAME(3),
+ // BWDREF_FRAME(4), ALTREF2_FRAME(5), ALTREF_FRAME(6).
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = i;
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->reference[i] = 0;
+ for (i = 0; i < REF_FRAMES; i++) svc->refresh[i] = 0;
+ // Always reference LAST, and reference GOLDEN on SL > 0.
+ // For KSVC: GOLDEN reference will be removed on INTER_FRAMES later
+ // when frame_type is set.
+ svc->reference[SVC_LAST_FRAME] = 1;
+ if (svc->spatial_layer_id > 0) svc->reference[SVC_GOLDEN_FRAME] = 1;
+ if (svc->temporal_layer_id == 0) {
+ // Base temporal layer.
+ if (svc->spatial_layer_id == 0) {
+ // Set all buffer_idx to 0. Update slot 0 (LAST).
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 0;
+ svc->refresh[0] = 1;
+ } else if (svc->spatial_layer_id == 1) {
+ // Set buffer_idx for LAST to slot 1, GOLDEN (and all other refs) to
+ // slot 0. Update slot 1 (LAST).
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 0;
+ svc->ref_idx[SVC_LAST_FRAME] = 1;
+ svc->refresh[1] = 1;
+ } else if (svc->spatial_layer_id == 2) {
+ // Set buffer_idx for LAST to slot 2, GOLDEN (and all other refs) to
+ // slot 1. Update slot 2 (LAST).
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 1;
+ svc->ref_idx[SVC_LAST_FRAME] = 2;
+ svc->refresh[2] = 1;
+ }
+ } else if (svc->temporal_layer_id == 2 && (superframe_cnt - 1) % 4 == 0) {
+ // First top temporal enhancement layer.
+ if (svc->spatial_layer_id == 0) {
+ // Reference LAST (slot 0).
+ // Set GOLDEN to slot 3 and update slot 3.
+ // Set all other buffer_idx to slot 0.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 0;
+ if (svc->spatial_layer_id < svc->number_spatial_layers - 1) {
+ svc->ref_idx[SVC_GOLDEN_FRAME] = 3;
+ svc->refresh[3] = 1;
+ }
+ } else if (svc->spatial_layer_id == 1) {
+ // Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 1,
+ // GOLDEN (and all other refs) to slot 3.
+ // Set LAST2 to slot 4 and Update slot 4.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 3;
+ svc->ref_idx[SVC_LAST_FRAME] = 1;
+ if (svc->spatial_layer_id < svc->number_spatial_layers - 1) {
+ svc->ref_idx[SVC_LAST2_FRAME] = 4;
+ svc->refresh[4] = 1;
+ }
+ } else if (svc->spatial_layer_id == 2) {
+ // Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 2,
+ // GOLDEN (and all other refs) to slot 4.
+ // No update.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 4;
+ svc->ref_idx[SVC_LAST_FRAME] = 2;
+ }
+ } else if (svc->temporal_layer_id == 1) {
+ // Middle temporal enhancement layer.
+ if (svc->spatial_layer_id == 0) {
+ // Reference LAST.
+ // Set all buffer_idx to 0.
+ // Set GOLDEN to slot 5 and update slot 5.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 0;
+ if (svc->temporal_layer_id < svc->number_temporal_layers - 1) {
+ svc->ref_idx[SVC_GOLDEN_FRAME] = 5;
+ svc->refresh[5] = 1;
+ }
+ } else if (svc->spatial_layer_id == 1) {
+ // Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 1,
+ // GOLDEN (and all other refs) to slot 5.
+ // Set LAST3 to slot 6 and update slot 6.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 5;
+ svc->ref_idx[SVC_LAST_FRAME] = 1;
+ if (svc->temporal_layer_id < svc->number_temporal_layers - 1) {
+ svc->ref_idx[SVC_LAST3_FRAME] = 6;
+ svc->refresh[6] = 1;
+ }
+ } else if (svc->spatial_layer_id == 2) {
+ // Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 2,
+ // GOLDEN (and all other refs) to slot 6.
+ // Set LAST3 to slot 7 and update slot 7.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 6;
+ svc->ref_idx[SVC_LAST_FRAME] = 2;
+ if (svc->temporal_layer_id < svc->number_temporal_layers - 1) {
+ svc->ref_idx[SVC_LAST3_FRAME] = 7;
+ svc->refresh[7] = 1;
+ }
+ }
+ } else if (svc->temporal_layer_id == 2 && (superframe_cnt - 3) % 4 == 0) {
+ // Second top temporal enhancement layer.
+ if (svc->spatial_layer_id == 0) {
+ // Set LAST to slot 5 and reference LAST.
+ // Set GOLDEN to slot 3 and update slot 3.
+ // Set all other buffer_idx to 0.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 0;
+ svc->ref_idx[SVC_LAST_FRAME] = 5;
+ if (svc->spatial_layer_id < svc->number_spatial_layers - 1) {
+ svc->ref_idx[SVC_GOLDEN_FRAME] = 3;
+ svc->refresh[3] = 1;
+ }
+ } else if (svc->spatial_layer_id == 1) {
+ // Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 6,
+ // GOLDEN to slot 3. Set LAST2 to slot 4 and update slot 4.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 0;
+ svc->ref_idx[SVC_LAST_FRAME] = 6;
+ svc->ref_idx[SVC_GOLDEN_FRAME] = 3;
+ if (svc->spatial_layer_id < svc->number_spatial_layers - 1) {
+ svc->ref_idx[SVC_LAST2_FRAME] = 4;
+ svc->refresh[4] = 1;
+ }
+ } else if (svc->spatial_layer_id == 2) {
+ // Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 7,
+ // GOLDEN to slot 4. No update.
+ for (i = 0; i < INTER_REFS_PER_FRAME; i++) svc->ref_idx[i] = 0;
+ svc->ref_idx[SVC_LAST_FRAME] = 7;
+ svc->ref_idx[SVC_GOLDEN_FRAME] = 4;
+ }
+ }
+}
+
+void av1_svc_check_reset_layer_rc_flag(AV1_COMP *const cpi) {
+ SVC *const svc = &cpi->svc;
+ for (int sl = 0; sl < svc->number_spatial_layers; ++sl) {
+ // Check for reset based on avg_frame_bandwidth for spatial layer sl.
+ int layer = LAYER_IDS_TO_IDX(sl, svc->number_temporal_layers - 1,
+ svc->number_temporal_layers);
+ LAYER_CONTEXT *lc = &svc->layer_context[layer];
+ RATE_CONTROL *lrc = &lc->rc;
+ if (lrc->avg_frame_bandwidth > (3 * lrc->prev_avg_frame_bandwidth >> 1) ||
+ lrc->avg_frame_bandwidth < (lrc->prev_avg_frame_bandwidth >> 1)) {
+ // Reset for all temporal layers with spatial layer sl.
+ for (int tl = 0; tl < svc->number_temporal_layers; ++tl) {
+ int layer2 = LAYER_IDS_TO_IDX(sl, tl, svc->number_temporal_layers);
+ LAYER_CONTEXT *lc2 = &svc->layer_context[layer2];
+ RATE_CONTROL *lrc2 = &lc2->rc;
+ PRIMARY_RATE_CONTROL *const lp_rc = &lc2->p_rc;
+ lrc2->rc_1_frame = 0;
+ lrc2->rc_2_frame = 0;
+ lrc2->bits_off_target = lp_rc->optimal_buffer_level;
+ lrc2->buffer_level = lp_rc->optimal_buffer_level;
+ }
+ }
+ }
+}
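The reset trigger in av1_svc_check_reset_layer_rc_flag above is an integer form of "more than 1.5x, or less than 0.5x, of the previous average frame bandwidth". Restated as a standalone predicate for clarity (same arithmetic as the patch, not additional encoder logic):

// Same condition as in av1_svc_check_reset_layer_rc_flag above, written as a
// standalone predicate. For prev_avg_frame_bandwidth = 1000, this resets when
// the new average rises above 1500 or falls below 500.
static int layer_rc_needs_reset(int avg_frame_bandwidth,
                                int prev_avg_frame_bandwidth) {
  return avg_frame_bandwidth > (3 * prev_avg_frame_bandwidth >> 1) ||
         avg_frame_bandwidth < (prev_avg_frame_bandwidth >> 1);
}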
diff --git a/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.h b/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.h
index 1eeba5e273..817e3620b0 100644
--- a/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.h
+++ b/third_party/libaom/source/libaom/av1/encoder/svc_layercontext.h
@@ -26,6 +26,7 @@ extern "C" {
typedef struct {
/*!\cond */
RATE_CONTROL rc;
+ PRIMARY_RATE_CONTROL p_rc;
int framerate_factor;
int64_t layer_target_bitrate;
int scaling_factor_num;
@@ -94,8 +95,10 @@ typedef struct SVC {
int temporal_layer_id;
int number_spatial_layers;
int number_temporal_layers;
- int external_ref_frame_config;
+ int set_ref_frame_config;
int non_reference_frame;
+ int use_flexible_mode;
+ int ksvc_fixed_mode;
/*!\endcond */
/*!
@@ -271,6 +274,11 @@ int av1_svc_primary_ref_frame(const struct AV1_COMP *const cpi);
void av1_get_layer_resolution(const int width_org, const int height_org,
const int num, const int den, int *width_out,
int *height_out);
+
+void av1_set_svc_fixed_mode(struct AV1_COMP *const cpi);
+
+void av1_svc_check_reset_layer_rc_flag(struct AV1_COMP *const cpi);
+
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/third_party/libaom/source/libaom/av1/encoder/temporal_filter.c b/third_party/libaom/source/libaom/av1/encoder/temporal_filter.c
index 676e110e60..6833ac8a40 100644
--- a/third_party/libaom/source/libaom/av1/encoder/temporal_filter.c
+++ b/third_party/libaom/source/libaom/av1/encoder/temporal_filter.c
@@ -155,7 +155,7 @@ static void tf_motion_search(AV1_COMP *cpi, MACROBLOCK *mb,
best_mv.as_mv.row = GET_MV_SUBPEL(mv_row);
best_mv.as_mv.col = GET_MV_SUBPEL(mv_col);
const int mv_offset = mv_row * y_stride + mv_col;
- error = cpi->fn_ptr[block_size].vf(
+ error = cpi->ppi->fn_ptr[block_size].vf(
ref_frame->y_buffer + y_offset + mv_offset, y_stride,
frame_to_filter->y_buffer + y_offset, y_stride, &sse);
block_mse = DIVIDE_AND_ROUND(error, mb_pels);
@@ -561,9 +561,16 @@ void av1_apply_temporal_filter_c(
(double)TF_WINDOW_BLOCK_BALANCE_WEIGHT * inv_factor;
// Decay factors for non-local mean approach.
double decay_factor[MAX_MB_PLANE] = { 0 };
- // Smaller q -> smaller filtering weight.
+ // Adjust filtering based on q.
+ // Larger q -> stronger filtering -> larger weight.
+ // Smaller q -> weaker filtering -> smaller weight.
double q_decay = pow((double)q_factor / TF_Q_DECAY_THRESHOLD, 2);
q_decay = CLIP(q_decay, 1e-5, 1);
+ if (q_factor >= TF_QINDEX_CUTOFF) {
+ // Max q_factor is 255, therefore the upper bound of q_decay is 8.
+ // We do not need a clip here.
+ q_decay = 0.5 * pow((double)q_factor / 64, 2);
+ }
// Smaller strength -> smaller filtering weight.
double s_decay = pow((double)filter_strength / TF_STRENGTH_THRESHOLD, 2);
s_decay = CLIP(s_decay, 1e-5, 1);
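For reference, the high-q branch added above works out as follows (a worked example of the formula, not additional encoder logic): at the cutoff q_factor = 128 (TF_QINDEX_CUTOFF, defined as 128 in temporal_filter.h below), q_decay = 0.5 * (128 / 64)^2 = 2.0, and at the maximum q_factor = 255, q_decay = 0.5 * (255 / 64)^2 ≈ 7.94, which matches the "upper bound of q_decay is 8" note in the comment.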
@@ -745,10 +752,19 @@ static void tf_normalize_filtered_frame(
}
int av1_get_q(const AV1_COMP *cpi) {
- const GF_GROUP *gf_group = &cpi->gf_group;
- const FRAME_TYPE frame_type = gf_group->frame_type[gf_group->index];
- const int q = (int)av1_convert_qindex_to_q(
- cpi->rc.avg_frame_qindex[frame_type], cpi->common.seq_params.bit_depth);
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
+ const FRAME_TYPE frame_type = gf_group->frame_type[cpi->gf_frame_index];
+ int avg_frame_qindex;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ avg_frame_qindex =
+ (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
+ ? cpi->ppi->temp_avg_frame_qindex[frame_type]
+ : cpi->rc.avg_frame_qindex[frame_type];
+#else
+ avg_frame_qindex = cpi->rc.avg_frame_qindex[frame_type];
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ const int q = (int)av1_convert_qindex_to_q(avg_frame_qindex,
+ cpi->common.seq_params->bit_depth);
return q;
}
@@ -855,23 +871,24 @@ void av1_tf_do_filtering_row(AV1_COMP *cpi, ThreadData *td, int mb_row) {
}
}
tf_normalize_filtered_frame(mbd, block_size, mb_row, mb_col, num_planes,
- accum, count, &cpi->alt_ref_buffer);
+ accum, count, &cpi->ppi->alt_ref_buffer);
if (check_show_existing) {
const int y_height = mb_height >> mbd->plane[0].subsampling_y;
const int y_width = mb_width >> mbd->plane[0].subsampling_x;
const int source_y_stride = frame_to_filter->y_stride;
- const int filter_y_stride = cpi->alt_ref_buffer.y_stride;
+ const int filter_y_stride = cpi->ppi->alt_ref_buffer.y_stride;
const int source_offset =
mb_row * y_height * source_y_stride + mb_col * y_width;
const int filter_offset =
mb_row * y_height * filter_y_stride + mb_col * y_width;
unsigned int sse = 0;
- cpi->fn_ptr[block_size].vf(
+ cpi->ppi->fn_ptr[block_size].vf(
frame_to_filter->y_buffer + source_offset, source_y_stride,
- cpi->alt_ref_buffer.y_buffer + filter_offset, filter_y_stride, &sse);
+ cpi->ppi->alt_ref_buffer.y_buffer + filter_offset, filter_y_stride,
+ &sse);
diff->sum += sse;
- diff->sse += sse * sse;
+ diff->sse += sse * (int64_t)sse;
}
}
}
@@ -939,8 +956,9 @@ static void tf_setup_filtering_buffer(AV1_COMP *cpi,
const int lookahead_depth =
av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage);
- int arf_src_offset = cpi->gf_group.arf_src_offset[cpi->gf_group.index];
- const FRAME_TYPE frame_type = cpi->gf_group.frame_type[cpi->gf_group.index];
+ int arf_src_offset = cpi->ppi->gf_group.arf_src_offset[cpi->gf_frame_index];
+ const FRAME_TYPE frame_type =
+ cpi->ppi->gf_group.frame_type[cpi->gf_frame_index];
// Temporal filtering should not go beyond key frames
const int key_to_curframe =
@@ -949,10 +967,10 @@ static void tf_setup_filtering_buffer(AV1_COMP *cpi,
AOMMAX(cpi->rc.frames_to_key - arf_src_offset - 1, 0);
// Number of buffered frames before the to-filter frame.
- const int max_before = AOMMIN(filter_frame_lookahead_idx, key_to_curframe);
+ int max_before = AOMMIN(filter_frame_lookahead_idx, key_to_curframe);
// Number of buffered frames after the to-filter frame.
- const int max_after =
+ int max_after =
AOMMIN(lookahead_depth - filter_frame_lookahead_idx - 1, curframe_to_key);
// Estimate noises for each plane.
@@ -964,26 +982,34 @@ static void tf_setup_filtering_buffer(AV1_COMP *cpi,
double *noise_levels = tf_ctx->noise_levels;
for (int plane = 0; plane < num_planes; ++plane) {
noise_levels[plane] = av1_estimate_noise_from_single_plane(
- to_filter_frame, plane, cpi->common.seq_params.bit_depth);
+ to_filter_frame, plane, cpi->common.seq_params->bit_depth);
}
// Get quantization factor.
const int q = av1_get_q(cpi);
- // Get correlation estimates from first-pass
- RATE_CONTROL *rc = &cpi->rc;
- const double *coeff = rc->cor_coeff;
- const int offset = rc->regions_offset;
- int cur_frame_idx =
- filter_frame_lookahead_idx + rc->frames_since_key - offset;
-
+ // Get correlation estimates from the first-pass stats.
+ const FIRSTPASS_STATS *stats =
+ cpi->ppi->twopass.stats_in - (cpi->rc.frames_since_key == 0);
double accu_coeff0 = 1.0, accu_coeff1 = 1.0;
for (int i = 1; i <= max_after; i++) {
- accu_coeff1 *= coeff[cur_frame_idx + i];
+ if (stats + filter_frame_lookahead_idx + i >=
+ cpi->ppi->twopass.stats_buf_ctx->stats_in_end) {
+ max_after = i - 1;
+ break;
+ }
+ accu_coeff1 *=
+ AOMMAX(stats[filter_frame_lookahead_idx + i].cor_coeff, 0.001);
}
if (max_after >= 1) {
accu_coeff1 = pow(accu_coeff1, 1.0 / (double)max_after);
}
for (int i = 1; i <= max_before; i++) {
- accu_coeff0 *= coeff[cur_frame_idx - i + 1];
+ if (stats + filter_frame_lookahead_idx - i + 1 <=
+ cpi->ppi->twopass.stats_buf_ctx->stats_in_start) {
+ max_before = i - 1;
+ break;
+ }
+ accu_coeff0 *=
+ AOMMAX(stats[filter_frame_lookahead_idx - i + 1].cor_coeff, 0.001);
}
if (max_before >= 1) {
accu_coeff0 = pow(accu_coeff0, 1.0 / (double)max_before);
@@ -1008,7 +1034,7 @@ static void tf_setup_filtering_buffer(AV1_COMP *cpi,
num_before = AOMMIN(num_frames - 1, max_before);
num_after = 0;
} else {
- num_frames = AOMMIN(num_frames, cpi->rc.gfu_boost / 150);
+ num_frames = AOMMIN(num_frames, cpi->ppi->p_rc.gfu_boost / 150);
num_frames += !(num_frames & 1); // Make the number odd.
// Only use 2 neighbours for the second ARF.
if (is_second_arf) num_frames = AOMMIN(num_frames, 3);
@@ -1051,10 +1077,10 @@ static void tf_setup_filtering_buffer(AV1_COMP *cpi,
assert(frames[tf_ctx->filter_frame_idx] == to_filter_frame);
av1_setup_src_planes(&cpi->td.mb, &to_filter_buf->img, 0, 0, num_planes,
- cpi->common.seq_params.sb_size);
+ cpi->common.seq_params->sb_size);
av1_setup_block_planes(&cpi->td.mb.e_mbd,
- cpi->common.seq_params.subsampling_x,
- cpi->common.seq_params.subsampling_y, num_planes);
+ cpi->common.seq_params->subsampling_x,
+ cpi->common.seq_params->subsampling_y, num_planes);
}
/*!\cond */
@@ -1174,8 +1200,8 @@ int av1_temporal_filter(AV1_COMP *cpi, const int filter_frame_lookahead_idx,
int *show_existing_arf) {
MultiThreadInfo *const mt_info = &cpi->mt_info;
// Basic information of the current frame.
- const GF_GROUP *const gf_group = &cpi->gf_group;
- const uint8_t group_idx = gf_group->index;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
+ const uint8_t group_idx = cpi->gf_frame_index;
TemporalFilterCtx *tf_ctx = &cpi->tf_ctx;
TemporalFilterData *tf_data = &cpi->td.tf_data;
// Filter one more ARF if the lookahead index is leq 7 (w.r.t. 9-th frame).
@@ -1236,9 +1262,9 @@ int av1_temporal_filter(AV1_COMP *cpi, const int filter_frame_lookahead_idx,
int top_index = 0;
int bottom_index = 0;
const int q = av1_rc_pick_q_and_bounds(
- cpi, &cpi->rc, cpi->oxcf.frm_dim_cfg.width,
- cpi->oxcf.frm_dim_cfg.height, group_idx, &bottom_index, &top_index);
- const int ac_q = av1_ac_quant_QTX(q, 0, cpi->common.seq_params.bit_depth);
+ cpi, cpi->oxcf.frm_dim_cfg.width, cpi->oxcf.frm_dim_cfg.height,
+ group_idx, &bottom_index, &top_index);
+ const int ac_q = av1_ac_quant_QTX(q, 0, cpi->common.seq_params->bit_depth);
const float threshold = 0.7f * ac_q * ac_q;
if (!is_second_arf) {
diff --git a/third_party/libaom/source/libaom/av1/encoder/temporal_filter.h b/third_party/libaom/source/libaom/av1/encoder/temporal_filter.h
index 2ae7dd4bda..3b9563755c 100644
--- a/third_party/libaom/source/libaom/av1/encoder/temporal_filter.h
+++ b/third_party/libaom/source/libaom/av1/encoder/temporal_filter.h
@@ -64,6 +64,14 @@ struct ThreadData;
// then the actual threshold will be 720 * 0.1 = 72. Similarly, the threshold
// for 360p videos will be 360 * 0.1 = 36.
#define TF_SEARCH_DISTANCE_THRESHOLD 0.1
+// 6. Threshold to identify if the q is in a relatively high range.
+// Above this cutoff q, stronger filtering is applied.
+// For a high q, the quantization throws away more information, and thus
+// stronger filtering is less likely to distort the encoded quality, while it
+// can reduce the bit rate.
+// For a low q, more detail is expected to be retained, so filtering is
+// more conservative.
+#define TF_QINDEX_CUTOFF 128
#define NOISE_ESTIMATION_EDGE_THRESHOLD 50
@@ -276,11 +284,6 @@ static AOM_INLINE void tf_dealloc_data(TemporalFilterData *tf_data,
aom_free(tf_data->pred);
}
-// Helper function to compute number of blocks on either side of the frame.
-static INLINE int get_num_blocks(const int frame_length, const int mb_length) {
- return (frame_length + mb_length - 1) / mb_length;
-}
-
// Saves the state prior to temporal filter process.
// Inputs:
// mbd: Pointer to the block for filtering.
diff --git a/third_party/libaom/source/libaom/av1/encoder/tokenize.c b/third_party/libaom/source/libaom/av1/encoder/tokenize.c
index bc63cc00ae..7e16b29a9a 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tokenize.c
+++ b/third_party/libaom/source/libaom/av1/encoder/tokenize.c
@@ -155,16 +155,18 @@ static void tokenize_vartx(ThreadData *td, TX_SIZE tx_size,
const int bsw = tx_size_wide_unit[sub_txs];
const int bsh = tx_size_high_unit[sub_txs];
const int step = bsw * bsh;
+ const int row_end =
+ AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
+ const int col_end =
+ AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
assert(bsw > 0 && bsh > 0);
- for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh) {
- for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
- const int offsetr = blk_row + row;
+ for (int row = 0; row < row_end; row += bsh) {
+ const int offsetr = blk_row + row;
+ for (int col = 0; col < col_end; col += bsw) {
const int offsetc = blk_col + col;
- if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
-
tokenize_vartx(td, sub_txs, plane_bsize, offsetr, offsetc, block, plane,
arg);
block += step;
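The loop restructuring above is behavior-preserving: the old code skipped a sub-block (and its block-index advance) whenever offsetr >= max_blocks_high or offsetc >= max_blocks_wide, and clamping the loop bounds to row_end = min(tx_size_high_unit[tx_size], max_blocks_high - blk_row) and the analogous col_end excludes exactly the same (row, col) pairs while avoiding the per-iteration check.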
diff --git a/third_party/libaom/source/libaom/av1/encoder/tokenize.h b/third_party/libaom/source/libaom/av1/encoder/tokenize.h
index 51eb28cee6..f31dc96958 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tokenize.h
+++ b/third_party/libaom/source/libaom/av1/encoder/tokenize.h
@@ -119,8 +119,8 @@ static INLINE unsigned int get_token_alloc(int mb_rows, int mb_cols,
// Allocate memory for token related info.
static AOM_INLINE void alloc_token_info(AV1_COMMON *cm, TokenInfo *token_info) {
int mi_rows_aligned_to_sb =
- ALIGN_POWER_OF_TWO(cm->mi_params.mi_rows, cm->seq_params.mib_size_log2);
- int sb_rows = mi_rows_aligned_to_sb >> cm->seq_params.mib_size_log2;
+ ALIGN_POWER_OF_TWO(cm->mi_params.mi_rows, cm->seq_params->mib_size_log2);
+ int sb_rows = mi_rows_aligned_to_sb >> cm->seq_params->mib_size_log2;
const int num_planes = av1_num_planes(cm);
unsigned int tokens =
get_token_alloc(cm->mi_params.mb_rows, cm->mi_params.mb_cols,
diff --git a/third_party/libaom/source/libaom/av1/encoder/tpl_model.c b/third_party/libaom/source/libaom/av1/encoder/tpl_model.c
index 6ae957d4e5..e07ab3e311 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tpl_model.c
+++ b/third_party/libaom/source/libaom/av1/encoder/tpl_model.c
@@ -35,38 +35,48 @@
#include "av1/encoder/reconinter_enc.h"
#include "av1/encoder/tpl_model.h"
-static AOM_INLINE int tpl_use_multithread(const AV1_COMP *cpi) {
- return cpi->mt_info.num_workers > 1 && !cpi->sf.tpl_sf.allow_compound_pred;
+static INLINE double exp_bounded(double v) {
+ // When v > 700 or <-700, the exp function will be close to overflow
+ // For details, see the "Notes" in the following link.
+ // https://en.cppreference.com/w/c/numeric/math/exp
+ if (v > 700) {
+ return DBL_MAX;
+ } else if (v < -700) {
+ return 0;
+ }
+ return exp(v);
}
-static AOM_INLINE void tpl_stats_record_txfm_block(TplDepFrame *tpl_frame,
- const tran_low_t *coeff) {
- aom_clear_system_state();
- // For transform larger than 16x16, the scale of coeff need to be adjusted.
- // It's not LOSSLESS_Q_STEP.
- assert(tpl_frame->coeff_num <= 256);
- for (int i = 0; i < tpl_frame->coeff_num; ++i) {
- tpl_frame->abs_coeff_sum[i] += abs(coeff[i]) / (double)LOSSLESS_Q_STEP;
+void av1_init_tpl_txfm_stats(TplTxfmStats *tpl_txfm_stats) {
+ tpl_txfm_stats->coeff_num = 256;
+ tpl_txfm_stats->txfm_block_count = 0;
+ memset(tpl_txfm_stats->abs_coeff_sum, 0,
+ sizeof(tpl_txfm_stats->abs_coeff_sum[0]) * tpl_txfm_stats->coeff_num);
+}
+
+void av1_accumulate_tpl_txfm_stats(const TplTxfmStats *sub_stats,
+ TplTxfmStats *accumulated_stats) {
+ accumulated_stats->txfm_block_count += sub_stats->txfm_block_count;
+ for (int i = 0; i < accumulated_stats->coeff_num; ++i) {
+ accumulated_stats->abs_coeff_sum[i] += sub_stats->abs_coeff_sum[i];
}
- ++tpl_frame->txfm_block_count;
}
-static AOM_INLINE void tpl_stats_update_abs_coeff_mean(TplDepFrame *tpl_frame) {
- aom_clear_system_state();
- for (int i = 0; i < tpl_frame->coeff_num; ++i) {
- tpl_frame->abs_coeff_mean[i] =
- tpl_frame->abs_coeff_sum[i] / tpl_frame->txfm_block_count;
+void av1_record_tpl_txfm_block(TplTxfmStats *tpl_txfm_stats,
+ const tran_low_t *coeff) {
+ // For transforms larger than 16x16, the scale of coeff needs to be
+ // adjusted; it is not LOSSLESS_Q_STEP.
+ assert(tpl_txfm_stats->coeff_num <= 256);
+ for (int i = 0; i < tpl_txfm_stats->coeff_num; ++i) {
+ tpl_txfm_stats->abs_coeff_sum[i] += abs(coeff[i]) / (double)LOSSLESS_Q_STEP;
}
+ ++tpl_txfm_stats->txfm_block_count;
}
-void av1_tpl_stats_init_txfm_stats(TplDepFrame *tpl_frame, int tpl_bsize_1d) {
- aom_clear_system_state();
- tpl_frame->txfm_block_count = 0;
- tpl_frame->coeff_num = tpl_bsize_1d * tpl_bsize_1d;
- memset(tpl_frame->abs_coeff_sum, 0, sizeof(tpl_frame->abs_coeff_sum));
- assert(sizeof(tpl_frame->abs_coeff_mean) /
- sizeof(tpl_frame->abs_coeff_mean[0]) ==
- tpl_frame->coeff_num);
+static AOM_INLINE void av1_tpl_store_txfm_stats(
+ TplParams *tpl_data, const TplTxfmStats *tpl_txfm_stats,
+ const int frame_index) {
+ tpl_data->txfm_stats_list[frame_index] = *tpl_txfm_stats;
}
static AOM_INLINE void get_quantize_error(const MACROBLOCK *x, int plane,
@@ -118,9 +128,11 @@ static AOM_INLINE void set_tpl_stats_block_size(uint8_t *block_mis_log2,
assert(*tpl_bsize_1d >= 16);
}
-void av1_setup_tpl_buffers(AV1_COMMON *const cm, TplParams *const tpl_data,
- int lag_in_frames) {
- CommonModeInfoParams *const mi_params = &cm->mi_params;
+void av1_setup_tpl_buffers(AV1_PRIMARY *const ppi,
+ CommonModeInfoParams *const mi_params, int width,
+ int height, int byte_alignment, int lag_in_frames) {
+ SequenceHeader *const seq_params = &ppi->seq_params;
+ TplParams *const tpl_data = &ppi->tpl_data;
set_tpl_stats_block_size(&tpl_data->tpl_stats_block_mis_log2,
&tpl_data->tpl_bsize_1d);
const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
@@ -139,7 +151,6 @@ void av1_setup_tpl_buffers(AV1_COMMON *const cm, TplParams *const tpl_data,
tpl_frame->stride = tpl_data->tpl_stats_buffer[frame].width;
tpl_frame->mi_rows = mi_params->mi_rows;
tpl_frame->mi_cols = mi_params->mi_cols;
- av1_tpl_stats_init_txfm_stats(tpl_frame, tpl_data->tpl_bsize_1d);
}
tpl_data->tpl_frame = &tpl_data->tpl_stats_buffer[REF_FRAMES + 1];
@@ -150,47 +161,33 @@ void av1_setup_tpl_buffers(AV1_COMMON *const cm, TplParams *const tpl_data,
// TODO(aomedia:2873): Explore the allocation of tpl buffers based on
// lag_in_frames.
for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
- CHECK_MEM_ERROR(
- cm, tpl_data->tpl_stats_pool[frame],
+ AOM_CHECK_MEM_ERROR(
+ &ppi->error, tpl_data->tpl_stats_pool[frame],
aom_calloc(tpl_data->tpl_stats_buffer[frame].width *
tpl_data->tpl_stats_buffer[frame].height,
sizeof(*tpl_data->tpl_stats_buffer[frame].tpl_stats_ptr)));
- if (aom_alloc_frame_buffer(
- &tpl_data->tpl_rec_pool[frame], cm->width, cm->height,
- cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
- cm->seq_params.use_highbitdepth, tpl_data->border_in_pixels,
- cm->features.byte_alignment))
- aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
+
+ if (aom_alloc_frame_buffer(&tpl_data->tpl_rec_pool[frame], width, height,
+ seq_params->subsampling_x,
+ seq_params->subsampling_y,
+ seq_params->use_highbitdepth,
+ tpl_data->border_in_pixels, byte_alignment))
+ aom_internal_error(&ppi->error, AOM_CODEC_MEM_ERROR,
"Failed to allocate frame buffer");
}
}
-static AOM_INLINE void tpl_fwd_txfm(const int16_t *src_diff, int bw,
- tran_low_t *coeff, TX_SIZE tx_size,
- int bit_depth, int is_hbd) {
- TxfmParam txfm_param;
- txfm_param.tx_type = DCT_DCT;
- txfm_param.tx_size = tx_size;
- txfm_param.lossless = 0;
- txfm_param.tx_set_type = EXT_TX_SET_ALL16;
-
- txfm_param.bd = bit_depth;
- txfm_param.is_hbd = is_hbd;
- av1_fwd_txfm(src_diff, coeff, bw, &txfm_param);
-}
-
-static AOM_INLINE int64_t tpl_get_satd_cost(const MACROBLOCK *x,
+static AOM_INLINE int64_t tpl_get_satd_cost(BitDepthInfo bd_info,
int16_t *src_diff, int diff_stride,
const uint8_t *src, int src_stride,
const uint8_t *dst, int dst_stride,
tran_low_t *coeff, int bw, int bh,
TX_SIZE tx_size) {
- const MACROBLOCKD *xd = &x->e_mbd;
const int pix_num = bw * bh;
- av1_subtract_block(xd, bh, bw, src_diff, diff_stride, src, src_stride, dst,
- dst_stride);
- tpl_fwd_txfm(src_diff, bw, coeff, tx_size, xd->bd, is_cur_buf_hbd(xd));
+ av1_subtract_block(bd_info, bh, bw, src_diff, diff_stride, src, src_stride,
+ dst, dst_stride);
+ av1_quick_txfm(/*use_hadamard=*/0, tx_size, bd_info, src_diff, bw, coeff);
return aom_satd(coeff, pix_num);
}
@@ -198,7 +195,6 @@ static int rate_estimator(const tran_low_t *qcoeff, int eob, TX_SIZE tx_size) {
const SCAN_ORDER *const scan_order = &av1_scan_orders[tx_size][DCT_DCT];
assert((1 << num_pels_log2_lookup[txsize_to_bsize[tx_size]]) >= eob);
- aom_clear_system_state();
int rate_cost = 1;
for (int idx = 0; idx < eob; ++idx) {
@@ -215,11 +211,11 @@ static AOM_INLINE void txfm_quant_rdcost(
tran_low_t *qcoeff, tran_low_t *dqcoeff, int bw, int bh, TX_SIZE tx_size,
int *rate_cost, int64_t *recon_error, int64_t *sse) {
const MACROBLOCKD *xd = &x->e_mbd;
+ const BitDepthInfo bd_info = get_bit_depth_info(xd);
uint16_t eob;
- av1_subtract_block(xd, bh, bw, src_diff, diff_stride, src, src_stride, dst,
- dst_stride);
- tpl_fwd_txfm(src_diff, diff_stride, coeff, tx_size, xd->bd,
- is_cur_buf_hbd(xd));
+ av1_subtract_block(bd_info, bh, bw, src_diff, diff_stride, src, src_stride,
+ dst, dst_stride);
+ av1_quick_txfm(/*use_hadamard=*/0, tx_size, bd_info, src_diff, bw, coeff);
get_quantize_error(x, 0, coeff, qcoeff, dqcoeff, tx_size, &eob, recon_error,
sse);
@@ -316,13 +312,16 @@ static int is_alike_mv(int_mv candidate_mv, center_mv_t *center_mvs,
}
static void get_rate_distortion(
- int *rate_cost, int64_t *recon_error, int16_t *src_diff, tran_low_t *coeff,
- tran_low_t *qcoeff, tran_low_t *dqcoeff, AV1_COMMON *cm, MACROBLOCK *x,
+ int *rate_cost, int64_t *recon_error, int64_t *pred_error,
+ int16_t *src_diff, tran_low_t *coeff, tran_low_t *qcoeff,
+ tran_low_t *dqcoeff, AV1_COMMON *cm, MACROBLOCK *x,
const YV12_BUFFER_CONFIG *ref_frame_ptr[2], uint8_t *rec_buffer_pool[3],
const int rec_stride_pool[3], TX_SIZE tx_size, PREDICTION_MODE best_mode,
int mi_row, int mi_col, int use_y_only_rate_distortion) {
+ const SequenceHeader *seq_params = cm->seq_params;
*rate_cost = 0;
*recon_error = 1;
+ *pred_error = 1;
MACROBLOCKD *xd = &x->e_mbd;
int is_compound = (best_mode == NEW_NEWMV);
@@ -356,7 +355,8 @@ static void get_rate_distortion(
for (int ref = 0; ref < 1 + is_compound; ++ref) {
if (!is_inter_mode(best_mode)) {
av1_predict_intra_block(
- cm, xd, block_size_wide[bsize_plane], block_size_high[bsize_plane],
+ xd, seq_params->sb_size, seq_params->enable_intra_edge_filter,
+ block_size_wide[bsize_plane], block_size_high[bsize_plane],
max_txsize_rect_lookup[bsize_plane], best_mode, 0, 0,
FILTER_INTRA_MODES, dst_buffer, dst_buffer_stride, dst_buffer,
dst_buffer_stride, 0, 0, plane);
@@ -405,21 +405,24 @@ static void get_rate_distortion(
&this_rate, &this_recon_error, &sse);
*recon_error += this_recon_error;
+ *pred_error += sse;
*rate_cost += this_rate;
}
}
-static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
- int mi_col, BLOCK_SIZE bsize,
- TX_SIZE tx_size,
+static AOM_INLINE void mode_estimation(AV1_COMP *cpi,
+ TplTxfmStats *tpl_txfm_stats,
+ MACROBLOCK *x, int mi_row, int mi_col,
+ BLOCK_SIZE bsize, TX_SIZE tx_size,
TplDepStats *tpl_stats) {
AV1_COMMON *cm = &cpi->common;
- const GF_GROUP *gf_group = &cpi->gf_group;
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
(void)gf_group;
MACROBLOCKD *xd = &x->e_mbd;
- TplParams *tpl_data = &cpi->tpl_data;
+ const BitDepthInfo bd_info = get_bit_depth_info(xd);
+ TplParams *tpl_data = &cpi->ppi->tpl_data;
TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_data->frame_idx];
const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
@@ -471,6 +474,7 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
uint8_t *predictor =
is_cur_buf_hbd(xd) ? CONVERT_TO_BYTEPTR(predictor8) : predictor8;
int64_t recon_error = 1;
+ int64_t pred_error = 1;
memset(tpl_stats, 0, sizeof(*tpl_stats));
tpl_stats->ref_frame_index[0] = -1;
@@ -493,7 +497,6 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
// Pre-load the bottom left line.
if (xd->left_available &&
mi_row + tx_size_high_unit[tx_size] < xd->tile.mi_row_end) {
-#if CONFIG_AV1_HIGHBITDEPTH
if (is_cur_buf_hbd(xd)) {
uint16_t *dst = CONVERT_TO_SHORTPTR(dst_buffer);
for (int i = 0; i < bw; ++i)
@@ -504,26 +507,24 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
dst_buffer[(bw + i) * dst_buffer_stride - 1] =
dst_buffer[(bw - 1) * dst_buffer_stride - 1];
}
-#else
- for (int i = 0; i < bw; ++i)
- dst_buffer[(bw + i) * dst_buffer_stride - 1] =
- dst_buffer[(bw - 1) * dst_buffer_stride - 1];
-#endif
}
// if cpi->sf.tpl_sf.prune_intra_modes is on, then search only DC_PRED,
// H_PRED, and V_PRED
const PREDICTION_MODE last_intra_mode =
cpi->sf.tpl_sf.prune_intra_modes ? D45_PRED : INTRA_MODE_END;
+ const SequenceHeader *seq_params = cm->seq_params;
for (PREDICTION_MODE mode = INTRA_MODE_START; mode < last_intra_mode;
++mode) {
- av1_predict_intra_block(cm, xd, block_size_wide[bsize],
- block_size_high[bsize], tx_size, mode, 0, 0,
- FILTER_INTRA_MODES, dst_buffer, dst_buffer_stride,
- predictor, bw, 0, 0, 0);
+ av1_predict_intra_block(xd, seq_params->sb_size,
+ seq_params->enable_intra_edge_filter,
+ block_size_wide[bsize], block_size_high[bsize],
+ tx_size, mode, 0, 0, FILTER_INTRA_MODES, dst_buffer,
+ dst_buffer_stride, predictor, bw, 0, 0, 0);
- intra_cost = tpl_get_satd_cost(x, src_diff, bw, src_mb_buffer, src_stride,
- predictor, bw, coeff, bw, bh, tx_size);
+ intra_cost =
+ tpl_get_satd_cost(bd_info, src_diff, bw, src_mb_buffer, src_stride,
+ predictor, bw, coeff, bw, bh, tx_size);
if (intra_cost < best_intra_cost) {
best_intra_cost = intra_cost;
@@ -607,7 +608,7 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
for (idx = 0; idx < refmv_count; ++idx) {
FULLPEL_MV mv = get_fullmv_from_mv(&center_mvs[idx].mv.as_mv);
clamp_fullmv(&mv, &x->mv_limits);
- center_mvs[idx].sad = (int)cpi->fn_ptr[bsize].sdf(
+ center_mvs[idx].sad = (int)cpi->ppi->fn_ptr[bsize].sdf(
src_mb_buffer, src_stride, &ref_mb[mv.row * ref_stride + mv.col],
ref_stride);
}
@@ -653,8 +654,9 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
av1_enc_build_one_inter_predictor(predictor, bw, &best_rfidx_mv.as_mv,
&inter_pred_params);
- inter_cost = tpl_get_satd_cost(x, src_diff, bw, src_mb_buffer, src_stride,
- predictor, bw, coeff, bw, bh, tx_size);
+ inter_cost =
+ tpl_get_satd_cost(bd_info, src_diff, bw, src_mb_buffer, src_stride,
+ predictor, bw, coeff, bw, bh, tx_size);
// Store inter cost for each ref frame
tpl_stats->pred_error[rf_idx] = AOMMAX(1, inter_cost);
@@ -732,8 +734,9 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
av1_enc_build_one_inter_predictor(predictor, bw, &tmp_mv[ref].as_mv,
&inter_pred_params);
}
- inter_cost = tpl_get_satd_cost(x, src_diff, bw, src_mb_buffer, src_stride,
- predictor, bw, coeff, bw, bh, tx_size);
+ inter_cost =
+ tpl_get_satd_cost(bd_info, src_diff, bw, src_mb_buffer, src_stride,
+ predictor, bw, coeff, bw, bh, tx_size);
if (inter_cost < best_inter_cost) {
best_cmp_rf_idx = cmp_rf_idx;
best_inter_cost = inter_cost;
@@ -760,8 +763,8 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
: NULL,
};
int rate_cost = 1;
- get_rate_distortion(&rate_cost, &recon_error, src_diff, coeff, qcoeff,
- dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
+ get_rate_distortion(&rate_cost, &recon_error, &pred_error, src_diff, coeff,
+ qcoeff, dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
rec_stride_pool, tx_size, best_mode, mi_row, mi_col,
use_y_only_rate_distortion);
tpl_stats->srcrf_rate = rate_cost << TPL_DEP_COST_SCALE_LOG2;
@@ -772,7 +775,8 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
tpl_stats->inter_cost = best_inter_cost << TPL_DEP_COST_SCALE_LOG2;
tpl_stats->intra_cost = best_intra_cost << TPL_DEP_COST_SCALE_LOG2;
- tpl_stats->srcrf_dist = recon_error << (TPL_DEP_COST_SCALE_LOG2);
+ tpl_stats->srcrf_dist = recon_error << TPL_DEP_COST_SCALE_LOG2;
+ tpl_stats->srcrf_sse = pred_error << TPL_DEP_COST_SCALE_LOG2;
// Final encode
int rate_cost = 0;
@@ -786,21 +790,19 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
best_mode == NEW_NEWMV
? tpl_data->ref_frame[comp_ref_frames[best_cmp_rf_idx][1]]
: NULL;
- get_rate_distortion(&rate_cost, &recon_error, src_diff, coeff, qcoeff,
- dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
+ get_rate_distortion(&rate_cost, &recon_error, &pred_error, src_diff, coeff,
+ qcoeff, dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
rec_stride_pool, tx_size, best_mode, mi_row, mi_col,
use_y_only_rate_distortion);
- if (!tpl_use_multithread(cpi)) {
- // TODO(angiebird): make this work for multithread
- tpl_stats_record_txfm_block(tpl_frame, coeff);
- }
+ av1_record_tpl_txfm_block(tpl_txfm_stats, coeff);
tpl_stats->recrf_dist = recon_error << (TPL_DEP_COST_SCALE_LOG2);
tpl_stats->recrf_rate = rate_cost << TPL_DEP_COST_SCALE_LOG2;
if (!is_inter_mode(best_mode)) {
tpl_stats->srcrf_dist = recon_error << (TPL_DEP_COST_SCALE_LOG2);
tpl_stats->srcrf_rate = rate_cost << TPL_DEP_COST_SCALE_LOG2;
+ tpl_stats->srcrf_sse = pred_error << TPL_DEP_COST_SCALE_LOG2;
}
tpl_stats->recrf_dist = AOMMAX(tpl_stats->srcrf_dist, tpl_stats->recrf_dist);
@@ -810,8 +812,8 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
ref_frame_ptr[0] = tpl_data->ref_frame[comp_ref_frames[best_cmp_rf_idx][0]];
ref_frame_ptr[1] =
tpl_data->src_ref_frame[comp_ref_frames[best_cmp_rf_idx][1]];
- get_rate_distortion(&rate_cost, &recon_error, src_diff, coeff, qcoeff,
- dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
+ get_rate_distortion(&rate_cost, &recon_error, &pred_error, src_diff, coeff,
+ qcoeff, dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
rec_stride_pool, tx_size, best_mode, mi_row, mi_col,
use_y_only_rate_distortion);
tpl_stats->cmp_recrf_dist[0] = recon_error << TPL_DEP_COST_SCALE_LOG2;
@@ -831,8 +833,8 @@ static AOM_INLINE void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
ref_frame_ptr[0] =
tpl_data->src_ref_frame[comp_ref_frames[best_cmp_rf_idx][0]];
ref_frame_ptr[1] = tpl_data->ref_frame[comp_ref_frames[best_cmp_rf_idx][1]];
- get_rate_distortion(&rate_cost, &recon_error, src_diff, coeff, qcoeff,
- dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
+ get_rate_distortion(&rate_cost, &recon_error, &pred_error, src_diff, coeff,
+ qcoeff, dqcoeff, cm, x, ref_frame_ptr, rec_buffer_pool,
rec_stride_pool, tx_size, best_mode, mi_row, mi_col,
use_y_only_rate_distortion);
tpl_stats->cmp_recrf_dist[1] = recon_error << TPL_DEP_COST_SCALE_LOG2;
@@ -887,41 +889,24 @@ static int round_floor(int ref_pos, int bsize_pix) {
return round;
}
-static int get_overlap_area(int grid_pos_row, int grid_pos_col, int ref_pos_row,
- int ref_pos_col, int block, BLOCK_SIZE bsize) {
- int width = 0, height = 0;
- int bw = 4 << mi_size_wide_log2[bsize];
- int bh = 4 << mi_size_high_log2[bsize];
-
- switch (block) {
- case 0:
- width = grid_pos_col + bw - ref_pos_col;
- height = grid_pos_row + bh - ref_pos_row;
- break;
- case 1:
- width = ref_pos_col + bw - grid_pos_col;
- height = grid_pos_row + bh - ref_pos_row;
- break;
- case 2:
- width = grid_pos_col + bw - ref_pos_col;
- height = ref_pos_row + bh - grid_pos_row;
- break;
- case 3:
- width = ref_pos_col + bw - grid_pos_col;
- height = ref_pos_row + bh - grid_pos_row;
- break;
- default: assert(0);
+int av1_get_overlap_area(int row_a, int col_a, int row_b, int col_b, int width,
+ int height) {
+ int min_row = AOMMAX(row_a, row_b);
+ int max_row = AOMMIN(row_a + height, row_b + height);
+ int min_col = AOMMAX(col_a, col_b);
+ int max_col = AOMMIN(col_a + width, col_b + width);
+ if (min_row < max_row && min_col < max_col) {
+ return (max_row - min_row) * (max_col - min_col);
}
- int overlap_area = width * height;
- return overlap_area;
+ return 0;
}
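As a quick sanity check of the rewritten helper above (values chosen for illustration): two 16x16 blocks anchored at (row, col) = (0, 0) and (8, 8) intersect in an 8x8 region, so av1_get_overlap_area(0, 0, 8, 8, 16, 16) returns 64, while fully disjoint blocks return 0.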
int av1_tpl_ptr_pos(int mi_row, int mi_col, int stride, uint8_t right_shift) {
return (mi_row >> right_shift) * stride + (mi_col >> right_shift);
}
-static int64_t delta_rate_cost(int64_t delta_rate, int64_t recrf_dist,
- int64_t srcrf_dist, int pix_num) {
+int64_t av1_delta_rate_cost(int64_t delta_rate, int64_t recrf_dist,
+ int64_t srcrf_dist, int pix_num) {
double beta = (double)srcrf_dist / recrf_dist;
int64_t rate_cost = delta_rate;
@@ -952,7 +937,6 @@ static int64_t delta_rate_cost(int64_t delta_rate, int64_t recrf_dist,
static AOM_INLINE void tpl_model_update_b(TplParams *const tpl_data, int mi_row,
int mi_col, const BLOCK_SIZE bsize,
int frame_idx, int ref) {
- aom_clear_system_state();
TplDepFrame *tpl_frame_ptr = &tpl_data->tpl_frame[frame_idx];
TplDepStats *tpl_ptr = tpl_frame_ptr->tpl_stats_ptr;
TplDepFrame *tpl_frame = tpl_data->tpl_frame;
@@ -998,8 +982,8 @@ static AOM_INLINE void tpl_model_update_b(TplParams *const tpl_data, int mi_row,
tpl_stats_ptr->recrf_dist));
int64_t delta_rate = tpl_stats_ptr->recrf_rate - srcrf_rate;
int64_t mc_dep_rate =
- delta_rate_cost(tpl_stats_ptr->mc_dep_rate, tpl_stats_ptr->recrf_dist,
- srcrf_dist, pix_num);
+ av1_delta_rate_cost(tpl_stats_ptr->mc_dep_rate, tpl_stats_ptr->recrf_dist,
+ srcrf_dist, pix_num);
for (block = 0; block < 4; ++block) {
int grid_pos_row = grid_pos_row_base + bh * (block >> 1);
@@ -1007,8 +991,8 @@ static AOM_INLINE void tpl_model_update_b(TplParams *const tpl_data, int mi_row,
if (grid_pos_row >= 0 && grid_pos_row < ref_tpl_frame->mi_rows * MI_SIZE &&
grid_pos_col >= 0 && grid_pos_col < ref_tpl_frame->mi_cols * MI_SIZE) {
- int overlap_area = get_overlap_area(
- grid_pos_row, grid_pos_col, ref_pos_row, ref_pos_col, block, bsize);
+ int overlap_area = av1_get_overlap_area(grid_pos_row, grid_pos_col,
+ ref_pos_row, ref_pos_col, bw, bh);
int ref_mi_row = round_floor(grid_pos_row, bh) * mi_height;
int ref_mi_col = round_floor(grid_pos_col, bw) * mi_width;
assert((1 << block_mis_log2) == mi_height);
@@ -1043,6 +1027,7 @@ static AOM_INLINE void tpl_model_store(TplDepStats *tpl_stats_ptr, int mi_row,
tpl_ptr->intra_cost = AOMMAX(1, tpl_ptr->intra_cost);
tpl_ptr->inter_cost = AOMMAX(1, tpl_ptr->inter_cost);
tpl_ptr->srcrf_dist = AOMMAX(1, tpl_ptr->srcrf_dist);
+ tpl_ptr->srcrf_sse = AOMMAX(1, tpl_ptr->srcrf_sse);
tpl_ptr->recrf_dist = AOMMAX(1, tpl_ptr->recrf_dist);
tpl_ptr->srcrf_rate = AOMMAX(1, tpl_ptr->srcrf_rate);
tpl_ptr->recrf_rate = AOMMAX(1, tpl_ptr->recrf_rate);
@@ -1068,12 +1053,12 @@ static AOM_INLINE int get_gop_length(const GF_GROUP *gf_group) {
// Initialize the mc_flow parameters used in computing tpl data.
static AOM_INLINE void init_mc_flow_dispenser(AV1_COMP *cpi, int frame_idx,
int pframe_qindex) {
- TplParams *const tpl_data = &cpi->tpl_data;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
TplDepFrame *tpl_frame = &tpl_data->tpl_frame[frame_idx];
const YV12_BUFFER_CONFIG *this_frame = tpl_frame->gf_picture;
const YV12_BUFFER_CONFIG *ref_frames_ordered[INTER_REFS_PER_FRAME];
uint32_t ref_frame_display_indices[INTER_REFS_PER_FRAME];
- GF_GROUP *gf_group = &cpi->gf_group;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
int ref_pruning_enabled = is_frame_eligible_for_ref_pruning(
gf_group, cpi->sf.inter_sf.selective_ref_frame,
cpi->sf.tpl_sf.prune_ref_frames_in_tpl, frame_idx);
@@ -1084,6 +1069,7 @@ static AOM_INLINE void init_mc_flow_dispenser(AV1_COMP *cpi, int frame_idx,
ThreadData *td = &cpi->td;
MACROBLOCK *x = &td->mb;
MACROBLOCKD *xd = &x->e_mbd;
+ TplTxfmStats *tpl_txfm_stats = &td->tpl_txfm_stats;
tpl_data->frame_idx = frame_idx;
tpl_reset_src_ref_frames(tpl_data);
av1_tile_init(&xd->tile, cm, 0, 0);
@@ -1161,18 +1147,21 @@ static AOM_INLINE void init_mc_flow_dispenser(AV1_COMP *cpi, int frame_idx,
tpl_frame->base_rdmult =
av1_compute_rd_mult_based_on_qindex(cpi, pframe_qindex) / 6;
+
+ av1_init_tpl_txfm_stats(tpl_txfm_stats);
}
// This function stores the motion estimation dependencies of all the blocks in
// a row
-void av1_mc_flow_dispenser_row(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
- BLOCK_SIZE bsize, TX_SIZE tx_size) {
+void av1_mc_flow_dispenser_row(AV1_COMP *cpi, TplTxfmStats *tpl_txfm_stats,
+ MACROBLOCK *x, int mi_row, BLOCK_SIZE bsize,
+ TX_SIZE tx_size) {
AV1_COMMON *const cm = &cpi->common;
MultiThreadInfo *const mt_info = &cpi->mt_info;
AV1TplRowMultiThreadInfo *const tpl_row_mt = &mt_info->tpl_row_mt;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
const int mi_width = mi_size_wide[bsize];
- TplParams *const tpl_data = &cpi->tpl_data;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_data->frame_idx];
MACROBLOCKD *xd = &x->e_mbd;
@@ -1194,7 +1183,8 @@ void av1_mc_flow_dispenser_row(AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
xd->mb_to_left_edge = -GET_MV_SUBPEL(mi_col * MI_SIZE);
xd->mb_to_right_edge =
GET_MV_SUBPEL(mi_params->mi_cols - mi_width - mi_col);
- mode_estimation(cpi, x, mi_row, mi_col, bsize, tx_size, &tpl_stats);
+ mode_estimation(cpi, tpl_txfm_stats, x, mi_row, mi_col, bsize, tx_size,
+ &tpl_stats);
// Motion flow dependency dispenser.
tpl_model_store(tpl_frame->tpl_stats_ptr, mi_row, mi_col, tpl_frame->stride,
@@ -1210,40 +1200,36 @@ static AOM_INLINE void mc_flow_dispenser(AV1_COMP *cpi) {
ThreadData *td = &cpi->td;
MACROBLOCK *x = &td->mb;
MACROBLOCKD *xd = &x->e_mbd;
- const BLOCK_SIZE bsize = convert_length_to_bsize(cpi->tpl_data.tpl_bsize_1d);
+ const BLOCK_SIZE bsize =
+ convert_length_to_bsize(cpi->ppi->tpl_data.tpl_bsize_1d);
const TX_SIZE tx_size = max_txsize_lookup[bsize];
const int mi_height = mi_size_high[bsize];
for (int mi_row = 0; mi_row < mi_params->mi_rows; mi_row += mi_height) {
// Motion estimation row boundary
av1_set_mv_row_limits(mi_params, &x->mv_limits, mi_row, mi_height,
- cpi->tpl_data.border_in_pixels);
+ cpi->ppi->tpl_data.border_in_pixels);
xd->mb_to_top_edge = -GET_MV_SUBPEL(mi_row * MI_SIZE);
xd->mb_to_bottom_edge =
GET_MV_SUBPEL((mi_params->mi_rows - mi_height - mi_row) * MI_SIZE);
- av1_mc_flow_dispenser_row(cpi, x, mi_row, bsize, tx_size);
- }
- if (!tpl_use_multithread(cpi)) {
- // TODO(angiebird): make this work for multithread
- TplDepFrame *tpl_frame = &cpi->tpl_data.tpl_frame[cpi->tpl_data.frame_idx];
- tpl_stats_update_abs_coeff_mean(tpl_frame);
+ av1_mc_flow_dispenser_row(cpi, &td->tpl_txfm_stats, x, mi_row, bsize,
+ tx_size);
}
}
-static void mc_flow_synthesizer(AV1_COMP *cpi, int frame_idx) {
- AV1_COMMON *cm = &cpi->common;
- TplParams *const tpl_data = &cpi->tpl_data;
-
+static void mc_flow_synthesizer(TplParams *tpl_data, int frame_idx, int mi_rows,
+ int mi_cols) {
+ if (!frame_idx) {
+ return;
+ }
const BLOCK_SIZE bsize = convert_length_to_bsize(tpl_data->tpl_bsize_1d);
const int mi_height = mi_size_high[bsize];
const int mi_width = mi_size_wide[bsize];
assert(mi_height == (1 << tpl_data->tpl_stats_block_mis_log2));
assert(mi_width == (1 << tpl_data->tpl_stats_block_mis_log2));
- for (int mi_row = 0; mi_row < cm->mi_params.mi_rows; mi_row += mi_height) {
- for (int mi_col = 0; mi_col < cm->mi_params.mi_cols; mi_col += mi_width) {
- if (frame_idx) {
- tpl_model_update(tpl_data, mi_row, mi_col, frame_idx);
- }
+ for (int mi_row = 0; mi_row < mi_rows; mi_row += mi_height) {
+ for (int mi_col = 0; mi_col < mi_cols; mi_col += mi_width) {
+ tpl_model_update(tpl_data, mi_row, mi_col, frame_idx);
}
}
}
@@ -1253,12 +1239,17 @@ static AOM_INLINE void init_gop_frames_for_tpl(
GF_GROUP *gf_group, int gop_eval, int *tpl_group_frames,
const EncodeFrameInput *const frame_input, int *pframe_qindex) {
AV1_COMMON *cm = &cpi->common;
- int cur_frame_idx = gf_group->index;
+ int cur_frame_idx = cpi->gf_frame_index;
*pframe_qindex = 0;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ RefFrameMapPair ref_frame_map_pairs[REF_FRAMES];
+ init_ref_map_pair(cpi, ref_frame_map_pairs);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
RefBufferStack ref_buffer_stack = cpi->ref_buffer_stack;
EncodeFrameParams frame_params = *init_frame_params;
- TplParams *const tpl_data = &cpi->tpl_data;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
int ref_picture_map[REF_FRAMES];
@@ -1288,7 +1279,7 @@ static AOM_INLINE void init_gop_frames_for_tpl(
TplDepFrame *tpl_frame = &tpl_data->tpl_frame[gf_index];
FRAME_UPDATE_TYPE frame_update_type = gf_group->update_type[gf_index];
int frame_display_index = gf_index == gf_group->size
- ? cpi->rc.baseline_gf_interval
+ ? cpi->ppi->p_rc.baseline_gf_interval
: gf_group->cur_frame_idx[gf_index] +
gf_group->arf_src_offset[gf_index];
@@ -1317,7 +1308,7 @@ static AOM_INLINE void init_gop_frames_for_tpl(
}
if (gop_eval && cpi->rc.frames_since_key > 0 &&
gf_group->arf_index == gf_index)
- tpl_frame->gf_picture = &cpi->alt_ref_buffer;
+ tpl_frame->gf_picture = &cpi->ppi->alt_ref_buffer;
// 'cm->current_frame.frame_number' is the display number
// of the current frame.
@@ -1338,15 +1329,45 @@ static AOM_INLINE void init_gop_frames_for_tpl(
tpl_frame->tpl_stats_ptr = tpl_data->tpl_stats_pool[process_frame_count];
++process_frame_count;
}
-
- av1_get_ref_frames(cpi, &ref_buffer_stack);
- int refresh_mask = av1_get_refresh_frame_flags(
- cpi, &frame_params, frame_update_type, &ref_buffer_stack);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ const int true_disp = (int)(tpl_frame->frame_display_index);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
+ av1_get_ref_frames(&ref_buffer_stack,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cpi, ref_frame_map_pairs, true_disp,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ cm->remapped_ref_idx);
+
+ int refresh_mask =
+ av1_get_refresh_frame_flags(cpi, &frame_params, frame_update_type,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ true_disp, ref_frame_map_pairs,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ &ref_buffer_stack);
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+  // Mark frames flagged as is_frame_non_ref as non-reference frames.
+ if (cpi->ppi->gf_group.is_frame_non_ref[gf_index]) refresh_mask = 0;
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
int refresh_frame_map_index = av1_get_refresh_ref_frame_map(refresh_mask);
+#if !CONFIG_FRAME_PARALLEL_ENCODE
av1_update_ref_frame_map(cpi, frame_update_type, frame_params.frame_type,
frame_params.show_existing_frame,
refresh_frame_map_index, &ref_buffer_stack);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ if (refresh_frame_map_index < REF_FRAMES &&
+ refresh_frame_map_index != INVALID_IDX) {
+ ref_frame_map_pairs[refresh_frame_map_index].disp_order =
+ AOMMAX(0, true_disp);
+ ref_frame_map_pairs[refresh_frame_map_index].pyr_level =
+ get_true_pyr_level(gf_group->layer_depth[gf_index], true_disp,
+ cpi->ppi->gf_group.max_layer_depth);
+ }
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
for (int i = LAST_FRAME; i <= ALTREF_FRAME; ++i)
tpl_frame->ref_map_index[i - LAST_FRAME] =
@@ -1360,8 +1381,9 @@ static AOM_INLINE void init_gop_frames_for_tpl(
if (cpi->rc.frames_since_key == 0) return;
int extend_frame_count = 0;
- int extend_frame_length = AOMMIN(
- MAX_TPL_EXTEND, cpi->rc.frames_to_key - cpi->rc.baseline_gf_interval);
+ int extend_frame_length =
+ AOMMIN(MAX_TPL_EXTEND,
+ cpi->rc.frames_to_key - cpi->ppi->p_rc.baseline_gf_interval);
int frame_display_index = gf_group->cur_frame_idx[gop_length - 1] +
gf_group->arf_src_offset[gop_length - 1] + 1;
@@ -1400,14 +1422,37 @@ static AOM_INLINE void init_gop_frames_for_tpl(
gf_group->update_type[gf_index] = LF_UPDATE;
gf_group->q_val[gf_index] = *pframe_qindex;
-
- av1_get_ref_frames(cpi, &ref_buffer_stack);
- int refresh_mask = av1_get_refresh_frame_flags(
- cpi, &frame_params, frame_update_type, &ref_buffer_stack);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ const int true_disp = (int)(tpl_frame->frame_display_index);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ av1_get_ref_frames(&ref_buffer_stack,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cpi, ref_frame_map_pairs, true_disp,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ cm->remapped_ref_idx);
+ int refresh_mask =
+ av1_get_refresh_frame_flags(cpi, &frame_params, frame_update_type,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ true_disp, ref_frame_map_pairs,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ &ref_buffer_stack);
int refresh_frame_map_index = av1_get_refresh_ref_frame_map(refresh_mask);
+#if !CONFIG_FRAME_PARALLEL_ENCODE
av1_update_ref_frame_map(cpi, frame_update_type, frame_params.frame_type,
frame_params.show_existing_frame,
refresh_frame_map_index, &ref_buffer_stack);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ if (refresh_frame_map_index < REF_FRAMES &&
+ refresh_frame_map_index != INVALID_IDX) {
+ ref_frame_map_pairs[refresh_frame_map_index].disp_order =
+ AOMMAX(0, true_disp);
+ ref_frame_map_pairs[refresh_frame_map_index].pyr_level =
+ get_true_pyr_level(gf_group->layer_depth[gf_index], true_disp,
+ cpi->ppi->gf_group.max_layer_depth);
+ }
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
for (int i = LAST_FRAME; i <= ALTREF_FRAME; ++i)
tpl_frame->ref_map_index[i - LAST_FRAME] =
@@ -1424,8 +1469,16 @@ static AOM_INLINE void init_gop_frames_for_tpl(
++extend_frame_count;
++frame_display_index;
}
-
- av1_get_ref_frames(cpi, &cpi->ref_buffer_stack);
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ TplDepFrame *tpl_frame = &tpl_data->tpl_frame[cur_frame_idx];
+ const int true_disp = (int)(tpl_frame->frame_display_index);
+ init_ref_map_pair(cpi, ref_frame_map_pairs);
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ av1_get_ref_frames(&cpi->ref_buffer_stack,
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ cpi, ref_frame_map_pairs, true_disp,
+#endif // CONFIG_FRAME_PARALLEL_ENCODE
+ cm->remapped_ref_idx);
}
void av1_init_tpl_stats(TplParams *const tpl_data) {
@@ -1440,9 +1493,47 @@ void av1_init_tpl_stats(TplParams *const tpl_data) {
sizeof(*tpl_frame->tpl_stats_ptr));
tpl_frame->is_valid = 0;
}
- for (frame_idx = 0; frame_idx < MAX_LENGTH_TPL_FRAME_STATS; ++frame_idx) {
- TplDepFrame *tpl_frame = &tpl_data->tpl_stats_buffer[frame_idx];
- av1_tpl_stats_init_txfm_stats(tpl_frame, tpl_data->tpl_bsize_1d);
+#if CONFIG_BITRATE_ACCURACY
+ tpl_data->estimated_gop_bitrate = 0;
+ tpl_data->actual_gop_bitrate = 0;
+#endif
+}
+
+static AOM_INLINE int eval_gop_length(double *beta, int gop_eval) {
+ switch (gop_eval) {
+ case 1:
+ // Allow larger GOP size if the base layer ARF has higher dependency
+ // factor than the intermediate ARF and both ARFs have reasonably high
+ // dependency factors.
+ return (beta[0] >= beta[1] + 0.7) && beta[0] > 8.0;
+ case 2:
+ if ((beta[0] >= beta[1] + 0.4) && beta[0] > 1.6)
+ return 1; // Don't shorten the gf interval
+ else if ((beta[0] < beta[1] + 0.1) || beta[0] <= 1.4)
+ return 0; // Shorten the gf interval
+ else
+ return 2; // Cannot decide the gf interval, so redo the
+ // tpl stats calculation.
+ case 3: return beta[0] > 1.1;
+ default: return 2;
+ }
+}
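The helper above folds the old inline GOP-length decision into one switch and, for gop_eval == 2, adds a third outcome that asks the caller to redo the tpl stats pass. A small sketch of the gop_eval == 2 thresholds with invented beta values (beta[0]/beta[1] are the dependency factors of the base-layer and intermediate ARFs):

#include <stdio.h>

/* Same thresholds as the gop_eval == 2 branch of eval_gop_length(). */
static int decide_gop_eval2(const double *beta) {
  if (beta[0] >= beta[1] + 0.4 && beta[0] > 1.6) return 1; /* keep long GOP */
  if (beta[0] < beta[1] + 0.1 || beta[0] <= 1.4) return 0; /* shorten GOP */
  return 2; /* cannot decide yet: redo the tpl stats calculation */
}

int main(void) {
  const double strong_base[2] = { 2.5, 1.2 }; /* -> 1 */
  const double weak_base[2] = { 1.3, 1.3 };   /* -> 0 */
  const double borderline[2] = { 1.55, 1.3 }; /* -> 2 */
  printf("%d %d %d\n", decide_gop_eval2(strong_base),
         decide_gop_eval2(weak_base), decide_gop_eval2(borderline));
  return 0;
}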
+
+// TODO(jingning): Restructure av1_rc_pick_q_and_bounds() to narrow down
+// the scope of input arguments.
+void av1_tpl_preload_rc_estimate(AV1_COMP *cpi,
+ const EncodeFrameParams *const frame_params) {
+ AV1_COMMON *cm = &cpi->common;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
+ int bottom_index, top_index;
+ cm->current_frame.frame_type = frame_params->frame_type;
+ for (int gf_index = cpi->gf_frame_index; gf_index < gf_group->size;
+ ++gf_index) {
+ cm->current_frame.frame_type = gf_group->frame_type[gf_index];
+ cm->show_frame = gf_group->update_type[gf_index] != ARF_UPDATE &&
+ gf_group->update_type[gf_index] != INTNL_ARF_UPDATE;
+ gf_group->q_val[gf_index] = av1_rc_pick_q_and_bounds(
+ cpi, cm->width, cm->height, gf_index, &bottom_index, &top_index);
}
}
@@ -1455,10 +1546,17 @@ int av1_tpl_setup_stats(AV1_COMP *cpi, int gop_eval,
AV1_COMMON *cm = &cpi->common;
MultiThreadInfo *const mt_info = &cpi->mt_info;
AV1TplRowMultiThreadInfo *const tpl_row_mt = &mt_info->tpl_row_mt;
- GF_GROUP *gf_group = &cpi->gf_group;
- int bottom_index, top_index;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
EncodeFrameParams this_frame_params = *frame_params;
- TplParams *const tpl_data = &cpi->tpl_data;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
+ int approx_gop_eval = (gop_eval > 1);
+ int num_arf_layers = MAX_ARF_LAYERS;
+
+ // When gop_eval is set to 2, tpl stats calculation is done for ARFs from base
+ // layer, (base+1) layer and (base+2) layer. When gop_eval is set to 3,
+ // tpl stats calculation is limited to ARFs from base layer and (base+1)
+ // layer.
+ if (approx_gop_eval) num_arf_layers = (gop_eval == 2) ? 3 : 2;
if (cpi->superres_mode != AOM_SUPERRES_NONE) {
assert(cpi->superres_mode != AOM_SUPERRES_AUTO);
@@ -1467,7 +1565,8 @@ int av1_tpl_setup_stats(AV1_COMP *cpi, int gop_eval,
}
cm->current_frame.frame_type = frame_params->frame_type;
- for (int gf_index = gf_group->index; gf_index < gf_group->size; ++gf_index) {
+ for (int gf_index = cpi->gf_frame_index; gf_index < gf_group->size;
+ ++gf_index) {
cm->current_frame.frame_type = gf_group->frame_type[gf_index];
av1_configure_buffer_updates(cpi, &this_frame_params.refresh_frame,
gf_group->update_type[gf_index],
@@ -1475,13 +1574,6 @@ int av1_tpl_setup_stats(AV1_COMP *cpi, int gop_eval,
memcpy(&cpi->refresh_frame, &this_frame_params.refresh_frame,
sizeof(cpi->refresh_frame));
-
- cm->show_frame = gf_group->update_type[gf_index] != ARF_UPDATE &&
- gf_group->update_type[gf_index] != INTNL_ARF_UPDATE;
-
- gf_group->q_val[gf_index] =
- av1_rc_pick_q_and_bounds(cpi, &cpi->rc, cm->width, cm->height, gf_index,
- &bottom_index, &top_index);
}
int pframe_qindex;
@@ -1489,7 +1581,7 @@ int av1_tpl_setup_stats(AV1_COMP *cpi, int gop_eval,
init_gop_frames_for_tpl(cpi, frame_params, gf_group, gop_eval,
&tpl_gf_group_frames, frame_input, &pframe_qindex);
- cpi->rc.base_layer_qp = pframe_qindex;
+ cpi->ppi->p_rc.base_layer_qp = pframe_qindex;
av1_init_tpl_stats(tpl_data);
@@ -1505,37 +1597,59 @@ int av1_tpl_setup_stats(AV1_COMP *cpi, int gop_eval,
av1_fill_mv_costs(&cm->fc->nmvc, cm->features.cur_frame_force_integer_mv,
cm->features.allow_high_precision_mv, cpi->td.mb.mv_costs);
+ const int gop_length = get_gop_length(gf_group);
// Backward propagation from tpl_group_frames to 1.
- for (int frame_idx = gf_group->index; frame_idx < tpl_gf_group_frames;
+ for (int frame_idx = cpi->gf_frame_index; frame_idx < tpl_gf_group_frames;
++frame_idx) {
if (gf_group->update_type[frame_idx] == INTNL_OVERLAY_UPDATE ||
gf_group->update_type[frame_idx] == OVERLAY_UPDATE)
continue;
+ // When approx_gop_eval = 1, skip tpl stats calculation for higher layer
+ // frames and for frames beyond gop length.
+ if (approx_gop_eval && (gf_group->layer_depth[frame_idx] > num_arf_layers ||
+ frame_idx >= gop_length))
+ continue;
+
init_mc_flow_dispenser(cpi, frame_idx, pframe_qindex);
- if (tpl_use_multithread(cpi)) {
+ if (mt_info->num_workers > 1) {
tpl_row_mt->sync_read_ptr = av1_tpl_row_mt_sync_read;
tpl_row_mt->sync_write_ptr = av1_tpl_row_mt_sync_write;
av1_mc_flow_dispenser_mt(cpi);
} else {
mc_flow_dispenser(cpi);
}
+ av1_tpl_store_txfm_stats(tpl_data, &cpi->td.tpl_txfm_stats, frame_idx);
aom_extend_frame_borders(tpl_data->tpl_frame[frame_idx].rec_picture,
av1_num_planes(cm));
}
- for (int frame_idx = tpl_gf_group_frames - 1; frame_idx >= gf_group->index;
- --frame_idx) {
+#if CONFIG_BITRATE_ACCURACY
+ tpl_data->estimated_gop_bitrate = av1_estimate_gop_bitrate(
+ gf_group->q_val, gf_group->size, tpl_data->txfm_stats_list);
+ if (gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE &&
+ gop_eval == 0) {
+ printf("\nestimated bitrate: %f\n", tpl_data->estimated_gop_bitrate);
+ }
+#endif
+
+ for (int frame_idx = tpl_gf_group_frames - 1;
+ frame_idx >= cpi->gf_frame_index; --frame_idx) {
if (gf_group->update_type[frame_idx] == INTNL_OVERLAY_UPDATE ||
gf_group->update_type[frame_idx] == OVERLAY_UPDATE)
continue;
- mc_flow_synthesizer(cpi, frame_idx);
+ if (approx_gop_eval && (gf_group->layer_depth[frame_idx] > num_arf_layers ||
+ frame_idx >= gop_length))
+ continue;
+
+ mc_flow_synthesizer(tpl_data, frame_idx, cm->mi_params.mi_rows,
+ cm->mi_params.mi_cols);
}
av1_configure_buffer_updates(cpi, &this_frame_params.refresh_frame,
- gf_group->update_type[gf_group->index],
+ gf_group->update_type[cpi->gf_frame_index],
frame_params->frame_type, 0);
cm->current_frame.frame_type = frame_params->frame_type;
cm->show_frame = frame_params->show_frame;
@@ -1592,21 +1706,17 @@ int av1_tpl_setup_stats(AV1_COMP *cpi, int gop_eval,
#if CONFIG_COLLECT_COMPONENT_TIMING
end_timing(cpi, av1_tpl_setup_stats_time);
#endif
-
- // Allow larger GOP size if the base layer ARF has higher dependency factor
- // than the intermediate ARF and both ARFs have reasonably high dependency
- // factors.
- return (beta[0] >= beta[1] + 0.7) && beta[0] > 8.0;
+ return eval_gop_length(beta, gop_eval);
}
void av1_tpl_rdmult_setup(AV1_COMP *cpi) {
const AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *const gf_group = &cpi->gf_group;
- const int tpl_idx = gf_group->index;
+ const int tpl_idx = cpi->gf_frame_index;
- assert(IMPLIES(gf_group->size > 0, tpl_idx < gf_group->size));
+ assert(
+ IMPLIES(cpi->ppi->gf_group.size > 0, tpl_idx < cpi->ppi->gf_group.size));
- TplParams *const tpl_data = &cpi->tpl_data;
+ TplParams *const tpl_data = &cpi->ppi->tpl_data;
const TplDepFrame *const tpl_frame = &tpl_data->tpl_frame[tpl_idx];
if (!tpl_frame->is_valid) return;
@@ -1623,8 +1733,6 @@ void av1_tpl_rdmult_setup(AV1_COMP *cpi) {
const double c = 1.2;
const int step = 1 << tpl_data->tpl_stats_block_mis_log2;
- aom_clear_system_state();
-
// Loop through each 'block_size' X 'block_size' block.
for (int row = 0; row < num_rows; row++) {
for (int col = 0; col < num_cols; col++) {
@@ -1647,24 +1755,23 @@ void av1_tpl_rdmult_setup(AV1_COMP *cpi) {
}
const double rk = intra_cost / mc_dep_cost;
const int index = row * num_cols + col;
- cpi->tpl_rdmult_scaling_factors[index] = rk / cpi->rd.r0 + c;
+ cpi->ppi->tpl_rdmult_scaling_factors[index] = rk / cpi->rd.r0 + c;
}
}
- aom_clear_system_state();
}
void av1_tpl_rdmult_setup_sb(AV1_COMP *cpi, MACROBLOCK *const x,
BLOCK_SIZE sb_size, int mi_row, int mi_col) {
AV1_COMMON *const cm = &cpi->common;
- GF_GROUP *gf_group = &cpi->gf_group;
- assert(IMPLIES(cpi->gf_group.size > 0,
- cpi->gf_group.index < cpi->gf_group.size));
- const int tpl_idx = cpi->gf_group.index;
- TplDepFrame *tpl_frame = &cpi->tpl_data.tpl_frame[tpl_idx];
-
- if (tpl_frame->is_valid == 0) return;
- if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return;
+ GF_GROUP *gf_group = &cpi->ppi->gf_group;
+ assert(IMPLIES(cpi->ppi->gf_group.size > 0,
+ cpi->gf_frame_index < cpi->ppi->gf_group.size));
+ const int tpl_idx = cpi->gf_frame_index;
+
if (tpl_idx >= MAX_TPL_FRAME_IDX) return;
+ TplDepFrame *tpl_frame = &cpi->ppi->tpl_data.tpl_frame[tpl_idx];
+ if (!tpl_frame->is_valid) return;
+ if (!is_frame_tpl_eligible(gf_group, cpi->gf_frame_index)) return;
if (cpi->oxcf.q_cfg.aq_mode != NO_AQ) return;
const int mi_col_sr =
@@ -1685,13 +1792,12 @@ void av1_tpl_rdmult_setup_sb(AV1_COMP *cpi, MACROBLOCK *const x,
double base_block_count = 0.0;
double log_sum = 0.0;
- aom_clear_system_state();
for (row = mi_row / num_mi_w;
row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) {
for (col = mi_col_sr / num_mi_h;
col < num_cols && col < mi_col_sr / num_mi_h + num_bcols; ++col) {
const int index = row * num_cols + col;
- log_sum += log(cpi->tpl_rdmult_scaling_factors[index]);
+ log_sum += log(cpi->ppi->tpl_rdmult_scaling_factors[index]);
base_block_count += 1.0;
}
}
@@ -1705,33 +1811,30 @@ void av1_tpl_rdmult_setup_sb(AV1_COMP *cpi, MACROBLOCK *const x,
const double scaling_factor = (double)new_rdmult / (double)orig_rdmult;
double scale_adj = log(scaling_factor) - log_sum / base_block_count;
- scale_adj = exp(scale_adj);
+ scale_adj = exp_bounded(scale_adj);
for (row = mi_row / num_mi_w;
row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) {
for (col = mi_col_sr / num_mi_h;
col < num_cols && col < mi_col_sr / num_mi_h + num_bcols; ++col) {
const int index = row * num_cols + col;
- cpi->tpl_sb_rdmult_scaling_factors[index] =
- scale_adj * cpi->tpl_rdmult_scaling_factors[index];
+ cpi->ppi->tpl_sb_rdmult_scaling_factors[index] =
+ scale_adj * cpi->ppi->tpl_rdmult_scaling_factors[index];
}
}
- aom_clear_system_state();
}
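Switching exp() to exp_bounded() above does not change the normalization being applied, which is easy to misread in diff form: with s_i the per-block tpl_rdmult_scaling_factors covered by the superblock and r = new_rdmult / orig_rdmult, the code computes

  scale_adj = exp( log(r) - (1/N) * sum_i log(s_i) )

so the geometric mean of the stored values scale_adj * s_i equals r. In other words, the superblock's average rdmult scaling matches the ratio implied by its tpl stats while the relative per-block differences are preserved; exp_bounded presumably just clamps the argument so the exponential stays finite for extreme inputs.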
-#define EPSILON (0.0000001)
-
double av1_exponential_entropy(double q_step, double b) {
- aom_clear_system_state();
- double z = fmax(exp(-q_step / b), EPSILON);
+ b = AOMMAX(b, TPL_EPSILON);
+ double z = fmax(exp_bounded(-q_step / b), TPL_EPSILON);
return -log2(1 - z) - z * log2(z) / (1 - z);
}
double av1_laplace_entropy(double q_step, double b, double zero_bin_ratio) {
- aom_clear_system_state();
// zero bin's size is zero_bin_ratio * q_step
// non-zero bin's size is q_step
- double z = fmax(exp(-zero_bin_ratio / 2 * q_step / b), EPSILON);
+ b = AOMMAX(b, TPL_EPSILON);
+ double z = fmax(exp_bounded(-zero_bin_ratio / 2 * q_step / b), TPL_EPSILON);
double h = av1_exponential_entropy(q_step, b);
double r = -(1 - z) * log2(1 - z) - z * log2(z) + z * (h + 1);
return r;
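For reference, the closed forms behind the two entropy helpers above (unchanged by this diff apart from the clamping): with z0 = exp(-zero_bin_ratio/2 * q_step / b) the probability that a Laplace(b) coefficient falls outside the zero bin, and z = exp(-q_step / b),

  H_exp     = -log2(1 - z) - z * log2(z) / (1 - z)                              (av1_exponential_entropy)
  H_laplace = -(1 - z0) * log2(1 - z0) - z0 * log2(z0) + z0 * (H_exp + 1)       (av1_laplace_entropy)

with the +1 term accounting for the sign of a nonzero coefficient. The new TPL_EPSILON clamps on b and z only keep the logarithms finite when a coefficient position has a (near-)zero mean absolute deviation.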
@@ -1740,7 +1843,6 @@ double av1_laplace_entropy(double q_step, double b, double zero_bin_ratio) {
double av1_laplace_estimate_frame_rate(int q_index, int block_count,
const double *abs_coeff_mean,
int coeff_num) {
- aom_clear_system_state();
double zero_bin_ratio = 2;
double dc_q_step = av1_dc_quant_QTX(q_index, 0, AOM_BITS_8) / 4.;
double ac_q_step = av1_ac_quant_QTX(q_index, 0, AOM_BITS_8) / 4.;
@@ -1755,3 +1857,58 @@ double av1_laplace_estimate_frame_rate(int q_index, int block_count,
est_rate *= block_count;
return est_rate;
}
+
+double av1_estimate_gop_bitrate(const unsigned char *q_index_list,
+ const int frame_count,
+ const TplTxfmStats *stats_list) {
+ double gop_bitrate = 0;
+ for (int frame_index = 0; frame_index < frame_count; frame_index++) {
+ int q_index = q_index_list[frame_index];
+ TplTxfmStats frame_stats = stats_list[frame_index];
+
+ /* Convert to mean absolute deviation */
+ double abs_coeff_mean[256] = { 0 };
+ for (int i = 0; i < 256; i++) {
+ abs_coeff_mean[i] =
+ frame_stats.abs_coeff_sum[i] / frame_stats.txfm_block_count;
+ }
+
+ double frame_bitrate = av1_laplace_estimate_frame_rate(
+ q_index, frame_stats.txfm_block_count, abs_coeff_mean, 256);
+ gop_bitrate += frame_bitrate;
+ }
+ return gop_bitrate;
+}
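A hedged usage sketch of the new estimator: one TplTxfmStats per frame (abs_coeff_sum accumulated over txfm_block_count 16x16 blocks) plus the per-frame q_index. The numbers are invented for illustration; only the function and struct come from this patch.

#include "av1/encoder/tpl_model.h"

static double toy_gop_estimate(void) {
  unsigned char q_index_list[2] = { 96, 112 };
  TplTxfmStats stats[2];
  for (int f = 0; f < 2; ++f) {
    stats[f].coeff_num = 256;        /* 16x16 transform block */
    stats[f].txfm_block_count = 100; /* blocks accumulated for this frame */
    for (int i = 0; i < 256; ++i) {
      /* larger accumulated magnitude for DC than for AC positions */
      stats[f].abs_coeff_sum[i] = (i == 0) ? 5000.0 : 120.0;
    }
  }
  /* Sums av1_laplace_estimate_frame_rate() over the two frames. */
  return av1_estimate_gop_bitrate(q_index_list, 2, stats);
}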
+
+double av1_estimate_coeff_entropy(double q_step, double b,
+ double zero_bin_ratio, int qcoeff) {
+ b = AOMMAX(b, TPL_EPSILON);
+ int abs_qcoeff = abs(qcoeff);
+ double z0 = fmax(exp_bounded(-zero_bin_ratio / 2 * q_step / b), TPL_EPSILON);
+ if (abs_qcoeff == 0) {
+ double r = -log2(1 - z0);
+ return r;
+ } else {
+ double z = fmax(exp_bounded(-q_step / b), TPL_EPSILON);
+ double r = 1 - log2(z0) - log2(1 - z) - (abs_qcoeff - 1) * log2(z);
+ return r;
+ }
+}
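The branchy formula above reads as -log2 of the bin probability under the dead-zone quantizer already assumed by av1_laplace_entropy(): with z0 and z as defined there,

  P(qcoeff == 0)   = 1 - z0
  P(|qcoeff| == k) = (1/2) * z0 * (1 - z) * z^(k - 1),   k >= 1

so the leading 1 in the k > 0 branch is the sign bit, and the (abs_qcoeff - 1) * log2(z) term grows linearly with the coefficient magnitude.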
+
+double av1_estimate_txfm_block_entropy(int q_index,
+ const double *abs_coeff_mean,
+ int *qcoeff_arr, int coeff_num) {
+ double zero_bin_ratio = 2;
+ double dc_q_step = av1_dc_quant_QTX(q_index, 0, AOM_BITS_8) / 4.;
+ double ac_q_step = av1_ac_quant_QTX(q_index, 0, AOM_BITS_8) / 4.;
+ double est_rate = 0;
+ // dc coeff
+ est_rate += av1_estimate_coeff_entropy(dc_q_step, abs_coeff_mean[0],
+ zero_bin_ratio, qcoeff_arr[0]);
+ // ac coeff
+ for (int i = 1; i < coeff_num; ++i) {
+ est_rate += av1_estimate_coeff_entropy(ac_q_step, abs_coeff_mean[i],
+ zero_bin_ratio, qcoeff_arr[i]);
+ }
+ return est_rate;
+}
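And a matching sketch for the per-block variant, which applies the DC quantizer step to position 0 and the AC step to the rest (values invented; assumes tpl_model.h is included and libaom is linked, as in the previous sketch):

static double toy_block_estimate(void) {
  int qcoeff[256] = { 0 };
  double abs_coeff_mean[256];
  qcoeff[0] = 3;  /* nonzero DC coefficient */
  qcoeff[1] = -1; /* one nonzero AC coefficient */
  for (int i = 0; i < 256; ++i) abs_coeff_mean[i] = (i == 0) ? 40.0 : 1.5;
  return av1_estimate_txfm_block_entropy(/*q_index=*/96, abs_coeff_mean,
                                         qcoeff, 256);
}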
diff --git a/third_party/libaom/source/libaom/av1/encoder/tpl_model.h b/third_party/libaom/source/libaom/av1/encoder/tpl_model.h
index 4b85740f3e..c764d92239 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tpl_model.h
+++ b/third_party/libaom/source/libaom/av1/encoder/tpl_model.h
@@ -18,11 +18,20 @@ extern "C" {
/*!\cond */
+struct AV1_PRIMARY;
struct AV1_COMP;
+struct AV1_SEQ_CODING_TOOLS;
struct EncodeFrameParams;
struct EncodeFrameInput;
-#include "av1/encoder/encoder.h"
+#include "config/aom_config.h"
+
+#include "aom_scale/yv12config.h"
+
+#include "av1/common/mv.h"
+#include "av1/common/scale.h"
+#include "av1/encoder/block.h"
+#include "av1/encoder/lookahead.h"
static INLINE BLOCK_SIZE convert_length_to_bsize(int length) {
switch (length) {
@@ -82,6 +91,14 @@ typedef struct AV1TplRowMultiThreadInfo {
#define MAX_TPL_EXTEND (MAX_LAG_BUFFERS - MAX_GF_INTERVAL)
#define TPL_DEP_COST_SCALE_LOG2 4
+#define TPL_EPSILON 0.0000001
+
+typedef struct TplTxfmStats {
+ double abs_coeff_sum[256]; // Assume we are using 16x16 transform block
+ int txfm_block_count;
+ int coeff_num;
+} TplTxfmStats;
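This struct replaces the per-frame coefficient fields removed from TplDepFrame further down. A hedged sketch of how a transform block presumably folds into it (av1_record_tpl_txfm_block's body is not part of this hunk; the accumulation below is inferred from the abs_coeff_sum / txfm_block_count division in av1_estimate_gop_bitrate):

#include <math.h>

static void record_block_sketch(TplTxfmStats *stats, const tran_low_t *coeff) {
  /* Accumulate |coeff[i]| so that abs_coeff_sum[i] / txfm_block_count later
   * yields the per-position mean absolute deviation used by the Laplace
   * rate estimators in tpl_model.c. */
  for (int i = 0; i < stats->coeff_num; ++i) {
    stats->abs_coeff_sum[i] += fabs((double)coeff[i]);
  }
  ++stats->txfm_block_count;
}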
+
typedef struct TplDepStats {
int64_t intra_cost;
int64_t inter_cost;
@@ -90,6 +107,7 @@ typedef struct TplDepStats {
int64_t cmp_recrf_dist[2];
int64_t srcrf_rate;
int64_t recrf_rate;
+ int64_t srcrf_sse;
int64_t cmp_recrf_rate[2];
int64_t mc_dep_rate;
int64_t mc_dep_dist;
@@ -111,10 +129,6 @@ typedef struct TplDepFrame {
int mi_cols;
int base_rdmult;
uint32_t frame_display_index;
- double abs_coeff_sum[256]; // Assume we are using 16x16 transform block
- double abs_coeff_mean[256];
- int coeff_num; // number of coefficients in a transform block
- int txfm_block_count;
} TplDepFrame;
/*!\endcond */
@@ -147,6 +161,12 @@ typedef struct TplParams {
TplDepStats *tpl_stats_pool[MAX_LAG_BUFFERS];
/*!
+ * Buffer to store tpl transform stats per frame.
+ * txfm_stats_list[i] stores the TplTxfmStats of the ith frame in a gf group.
+ */
+ TplTxfmStats txfm_stats_list[MAX_LENGTH_TPL_FRAME_STATS];
+
+ /*!
* Buffer to store tpl reconstructed frame.
* tpl_rec_pool[i] stores the reconstructed frame of ith frame in a gf group.
*/
@@ -192,10 +212,13 @@ typedef struct TplParams {
*/
int border_in_pixels;
- /*!
- * Skip tpl setup when tpl data from gop length decision can be reused.
+#if CONFIG_BITRATE_ACCURACY
+ /*
+ * Estimated and actual GOP bitrate.
*/
- int skip_tpl_setup_stats;
+ double estimated_gop_bitrate;
+ double actual_gop_bitrate;
+#endif
} TplParams;
/*!\brief Allocate buffers used by tpl model
@@ -206,8 +229,9 @@ typedef struct TplParams {
* \param[out] tpl_data tpl data structure
*/
-void av1_setup_tpl_buffers(AV1_COMMON *const cm, TplParams *const tpl_data,
- int lag_in_frames);
+void av1_setup_tpl_buffers(struct AV1_PRIMARY *const ppi,
+ CommonModeInfoParams *const mi_params, int width,
+ int height, int byte_alignment, int lag_in_frames);
/*!\brief Implements temporal dependency modelling for a GOP (GF/ARF
* group) and selects between 16 and 32 frame GOP structure.
@@ -227,6 +251,9 @@ int av1_tpl_setup_stats(struct AV1_COMP *cpi, int gop_eval,
/*!\cond */
+void av1_tpl_preload_rc_estimate(
+ struct AV1_COMP *cpi, const struct EncodeFrameParams *const frame_params);
+
int av1_tpl_ptr_pos(int mi_row, int mi_col, int stride, uint8_t right_shift);
void av1_init_tpl_stats(TplParams *const tpl_data);
@@ -236,8 +263,9 @@ void av1_tpl_rdmult_setup(struct AV1_COMP *cpi);
void av1_tpl_rdmult_setup_sb(struct AV1_COMP *cpi, MACROBLOCK *const x,
BLOCK_SIZE sb_size, int mi_row, int mi_col);
-void av1_mc_flow_dispenser_row(struct AV1_COMP *cpi, MACROBLOCK *x, int mi_row,
- BLOCK_SIZE bsize, TX_SIZE tx_size);
+void av1_mc_flow_dispenser_row(struct AV1_COMP *cpi,
+ TplTxfmStats *tpl_txfm_stats, MACROBLOCK *x,
+ int mi_row, BLOCK_SIZE bsize, TX_SIZE tx_size);
/*!\brief Compute the entropy of an exponential probability distribution
* function (pdf) subjected to uniform quantization.
@@ -271,7 +299,7 @@ double av1_laplace_entropy(double q_step, double b, double zero_bin_ratio);
/*!\brief Compute the frame rate using transform block stats
*
* Assume each position i in the transform block is of Laplace distribution
- * with maximum absolute deviation abs_coeff_mean[i]
+ * with mean absolute deviation abs_coeff_mean[i]
*
* Then we can use av1_laplace_entropy() to compute the expected frame
* rate.
@@ -280,7 +308,7 @@ double av1_laplace_entropy(double q_step, double b, double zero_bin_ratio);
*
* \param[in] q_index quantizer index
* \param[in] block_count number of transform blocks
- * \param[in] abs_coeff_mean array of maximum absolute deviation
+ * \param[in] abs_coeff_mean array of mean absolute deviation
* \param[in] coeff_num number of coefficients per transform block
*
* \return expected frame rate
@@ -289,15 +317,104 @@ double av1_laplace_estimate_frame_rate(int q_index, int block_count,
const double *abs_coeff_mean,
int coeff_num);
-/*!\brief Init data structure storing transform stats
+/*!\brief Compute the number of bits needed to encode a GOP
+ *
+ * \param[in]    q_index_list    array of q_index, one per frame
+ * \param[in]    frame_count     number of frames in the GOP
+ * \param[in]    stats           array of transform stats, one per frame
+ *
+ * \return estimated number of bits needed to encode the GOP
+ */
+double av1_estimate_gop_bitrate(const unsigned char *q_index_list,
+ const int frame_count,
+ const TplTxfmStats *stats);
+
+/*!\brief Init TplTxfmStats
+ *
+ * \param[out]   tpl_txfm_stats  a structure for storing transform stats
+ *
+ */
+void av1_init_tpl_txfm_stats(TplTxfmStats *tpl_txfm_stats);
+
+/*!\brief Accumulate TplTxfmStats
+ *
+ * \param[in]  sub_stats          a structure for storing sub transform stats
+ * \param[out] accumulated_stats  a structure for storing accumulated
+ *                                transform stats
+ *
+ */
+void av1_accumulate_tpl_txfm_stats(const TplTxfmStats *sub_stats,
+ TplTxfmStats *accumulated_stats);
+
+/*!\brief Record a transform block into TplTxfmStats
+ *
+ * \param[out] tpl_txfm_stats  A structure for storing transform stats
+ * \param[in]  coeff           An array of transform coefficients. Its size
+ *                             should equal tpl_txfm_stats.coeff_num.
+ *
+ */
+void av1_record_tpl_txfm_block(TplTxfmStats *tpl_txfm_stats,
+ const tran_low_t *coeff);
+
+/*!\brief Estimate coefficient entropy using Laplace distribution
*
*\ingroup tpl_modelling
*
- * \param[in] tpl_frame pointer of tpl frame data structure
+ * This function is equivalent to -log2(laplace_prob()), where laplace_prob() is
+ * defined in tpl_model_test.cc
+ *
+ * \param[in] q_step quantizer step size without any scaling
+ * \param[in] b mean absolute deviation of Laplace distribution
+ * \param[in] zero_bin_ratio zero bin's size is zero_bin_ratio * q_step
+ * \param[in] qcoeff quantized coefficient
+ *
+ * \return estimated coefficient entropy
+ *
+ */
+double av1_estimate_coeff_entropy(double q_step, double b,
+ double zero_bin_ratio, int qcoeff);
+
+/*!\brief Estimate entropy of a transform block using Laplace distribution
+ *
+ *\ingroup tpl_modelling
+ *
+ * \param[in] q_index quantizer index
+ * \param[in] abs_coeff_mean array of mean absolute deviations
+ * \param[in] qcoeff_arr array of quantized coefficients
* \param[in] coeff_num number of coefficients per transform block
*
+ * \return estimated transform block entropy
+ *
+ */
+double av1_estimate_txfm_block_entropy(int q_index,
+ const double *abs_coeff_mean,
+ int *qcoeff_arr, int coeff_num);
+
+// TODO(angiebird): Add doxygen description here.
+int64_t av1_delta_rate_cost(int64_t delta_rate, int64_t recrf_dist,
+ int64_t srcrf_dist, int pix_num);
+
+/*!\brief Compute the overlap area between two blocks with the same size
+ *
+ *\ingroup tpl_modelling
+ *
+ * If there is no overlap, this function should return zero.
+ *
+ * \param[in] row_a row position of the first block
+ * \param[in] col_a column position of the first block
+ * \param[in] row_b row position of the second block
+ * \param[in] col_b column position of the second block
+ * \param[in] width width shared by the two blocks
+ * \param[in] height height shared by the two blocks
+ *
+ * \return overlap area of the two blocks
*/
-void av1_tpl_stats_init_txfm_stats(TplDepFrame *tpl_frame, int coeff_num);
+int av1_get_overlap_area(int row_a, int col_a, int row_b, int col_b, int width,
+ int height);
/*!\endcond */
#ifdef __cplusplus
diff --git a/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.c b/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.c
index 39940e8aa6..f82e910595 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.c
+++ b/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.c
@@ -15,24 +15,34 @@
#include "aom_dsp/butteraugli.h"
#include "aom_ports/system_state.h"
-#include "av1/encoder/rdopt.h"
+#include "av1/encoder/encodeframe.h"
+#include "av1/encoder/encoder_utils.h"
#include "av1/encoder/extend.h"
+#include "av1/encoder/var_based_part.h"
static const int resize_factor = 2;
-void set_mb_butteraugli_rdmult_scaling(AV1_COMP *cpi,
- const YV12_BUFFER_CONFIG *source,
- const YV12_BUFFER_CONFIG *recon) {
+static void set_mb_butteraugli_rdmult_scaling(AV1_COMP *cpi,
+ const YV12_BUFFER_CONFIG *source,
+ const YV12_BUFFER_CONFIG *recon,
+ const double K) {
AV1_COMMON *const cm = &cpi->common;
+ SequenceHeader *const seq_params = cm->seq_params;
const CommonModeInfoParams *const mi_params = &cm->mi_params;
+ const aom_color_range_t color_range =
+ seq_params->color_range != 0 ? AOM_CR_FULL_RANGE : AOM_CR_STUDIO_RANGE;
const int bit_depth = cpi->td.mb.e_mbd.bd;
const int width = source->y_crop_width;
const int height = source->y_crop_height;
+ const int ss_x = source->subsampling_x;
+ const int ss_y = source->subsampling_y;
float *diffmap;
CHECK_MEM_ERROR(cm, diffmap, aom_malloc(width * height * sizeof(*diffmap)));
- if (!aom_calc_butteraugli(source, recon, bit_depth, diffmap)) {
- aom_internal_error(&cm->error, AOM_CODEC_ERROR,
+ if (!aom_calc_butteraugli(source, recon, bit_depth,
+ seq_params->matrix_coefficients, color_range,
+ diffmap)) {
+ aom_internal_error(cm->error, AOM_CODEC_ERROR,
"Failed to calculate Butteraugli distances.");
}
@@ -55,6 +65,7 @@ void set_mb_butteraugli_rdmult_scaling(AV1_COMP *cpi,
const int x_start = col * block_w;
float dbutteraugli = 0.0f;
float dmse = 0.0f;
+ float px_count = 0.0f;
// Loop through each pixel.
for (int y = y_start; y < y_start + block_h && y < height; y++) {
@@ -63,25 +74,28 @@ void set_mb_butteraugli_rdmult_scaling(AV1_COMP *cpi,
float px_diff = source->y_buffer[y * source->y_stride + x] -
recon->y_buffer[y * recon->y_stride + x];
dmse += px_diff * px_diff;
+ px_count += 1.0f;
}
}
- for (int y = y_start; y < y_start + block_h && y < height; y += 2) {
- for (int x = x_start; x < x_start + block_w && x < width; x += 2) {
- const int src_px_index = y / 2 * source->uv_stride + x / 2;
- const int recon_px_index = y / 2 * recon->uv_stride + x / 2;
+ const int y_end = AOMMIN((y_start >> ss_y) + (block_h >> ss_y),
+ (height + ss_y) >> ss_y);
+ for (int y = y_start >> ss_y; y < y_end; y++) {
+ const int x_end = AOMMIN((x_start >> ss_x) + (block_w >> ss_x),
+ (width + ss_x) >> ss_x);
+ for (int x = x_start >> ss_x; x < x_end; x++) {
+ const int src_px_index = y * source->uv_stride + x;
+ const int recon_px_index = y * recon->uv_stride + x;
const float px_diff_u = (float)(source->u_buffer[src_px_index] -
recon->u_buffer[recon_px_index]);
const float px_diff_v = (float)(source->v_buffer[src_px_index] -
recon->v_buffer[recon_px_index]);
dmse += px_diff_u * px_diff_u + px_diff_v * px_diff_v;
+ px_count += 2.0f;
}
}
dbutteraugli = powf(dbutteraugli, 1.0f / 12.0f);
- dmse = dmse / (2.0f * (float)block_w * (float)block_h);
- // 'K' is used to balance the rate-distortion distribution between PSNR
- // and Butteraugli.
- const double K = 0.4;
+ dmse = dmse / px_count;
const float eps = 0.01f;
double weight;
if (dbutteraugli < eps || dmse < eps) {
@@ -166,10 +180,12 @@ static void copy_img(const YV12_BUFFER_CONFIG *src, YV12_BUFFER_CONFIG *dst,
int width, int height) {
copy_plane(src->y_buffer, src->y_stride, dst->y_buffer, dst->y_stride, width,
height);
+ const int width_uv = (width + src->subsampling_x) >> src->subsampling_x;
+ const int height_uv = (height + src->subsampling_y) >> src->subsampling_y;
copy_plane(src->u_buffer, src->uv_stride, dst->u_buffer, dst->uv_stride,
- width / 2, height / 2);
+ width_uv, height_uv);
copy_plane(src->v_buffer, src->uv_stride, dst->v_buffer, dst->uv_stride,
- width / 2, height / 2);
+ width_uv, height_uv);
}
static void zero_plane(uint8_t *dst, int dst_stride, int h) {
@@ -192,9 +208,11 @@ void av1_setup_butteraugli_source(AV1_COMP *cpi) {
const int width = cpi->source->y_crop_width;
const int height = cpi->source->y_crop_height;
const int bit_depth = cpi->td.mb.e_mbd.bd;
+ const int ss_x = cpi->source->subsampling_x;
+ const int ss_y = cpi->source->subsampling_y;
if (dst->buffer_alloc_sz == 0) {
aom_alloc_frame_buffer(
- dst, width, height, 1, 1, cm->seq_params.use_highbitdepth,
+ dst, width, height, ss_x, ss_y, cm->seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
}
av1_copy_and_extend_frame(cpi->source, dst);
@@ -202,8 +220,8 @@ void av1_setup_butteraugli_source(AV1_COMP *cpi) {
YV12_BUFFER_CONFIG *const resized_dst = &cpi->butteraugli_info.resized_source;
if (resized_dst->buffer_alloc_sz == 0) {
aom_alloc_frame_buffer(
- resized_dst, width / resize_factor, height / resize_factor, 1, 1,
- cm->seq_params.use_highbitdepth, cpi->oxcf.border_in_pixels,
+ resized_dst, width / resize_factor, height / resize_factor, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
cm->features.byte_alignment);
}
av1_resize_and_extend_frame_nonnormative(cpi->source, resized_dst, bit_depth,
@@ -215,25 +233,86 @@ void av1_setup_butteraugli_source(AV1_COMP *cpi) {
aom_clear_system_state();
}
-void av1_restore_butteraugli_source(AV1_COMP *cpi) {
+void av1_setup_butteraugli_rdmult_and_restore_source(AV1_COMP *cpi, double K) {
aom_clear_system_state();
av1_copy_and_extend_frame(&cpi->butteraugli_info.source, cpi->source);
AV1_COMMON *const cm = &cpi->common;
const int width = cpi->source->y_crop_width;
const int height = cpi->source->y_crop_height;
+ const int ss_x = cpi->source->subsampling_x;
+ const int ss_y = cpi->source->subsampling_y;
YV12_BUFFER_CONFIG resized_recon;
memset(&resized_recon, 0, sizeof(resized_recon));
aom_alloc_frame_buffer(
- &resized_recon, width / resize_factor, height / resize_factor, 1, 1,
- cm->seq_params.use_highbitdepth, cpi->oxcf.border_in_pixels,
+ &resized_recon, width / resize_factor, height / resize_factor, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
cm->features.byte_alignment);
copy_img(&cpi->common.cur_frame->buf, &resized_recon, width / resize_factor,
height / resize_factor);
set_mb_butteraugli_rdmult_scaling(cpi, &cpi->butteraugli_info.resized_source,
- &resized_recon);
+ &resized_recon, K);
cpi->butteraugli_info.recon_set = true;
aom_free_frame_buffer(&resized_recon);
aom_clear_system_state();
}
+
+void av1_setup_butteraugli_rdmult(AV1_COMP *cpi) {
+ AV1_COMMON *const cm = &cpi->common;
+ const AV1EncoderConfig *const oxcf = &cpi->oxcf;
+ const QuantizationCfg *const q_cfg = &oxcf->q_cfg;
+ const int q_index = 96;
+ aom_clear_system_state();
+
+ // Setup necessary params for encoding, including frame source, etc.
+ if (cm->current_frame.frame_type == KEY_FRAME) copy_frame_prob_info(cpi);
+ av1_set_frame_size(cpi, cm->superres_upscaled_width,
+ cm->superres_upscaled_height);
+
+ cpi->source =
+ av1_scale_if_required(cm, cpi->unscaled_source, &cpi->scaled_source,
+ cm->features.interp_filter, 0, false, false);
+ if (cpi->unscaled_last_source != NULL) {
+ cpi->last_source = av1_scale_if_required(
+ cm, cpi->unscaled_last_source, &cpi->scaled_last_source,
+ cm->features.interp_filter, 0, false, false);
+ }
+
+ av1_setup_butteraugli_source(cpi);
+ av1_setup_frame(cpi);
+
+ if (cm->seg.enabled) {
+ if (!cm->seg.update_data && cm->prev_frame) {
+ segfeatures_copy(&cm->seg, &cm->prev_frame->seg);
+ cm->seg.enabled = cm->prev_frame->seg.enabled;
+ } else {
+ av1_calculate_segdata(&cm->seg);
+ }
+ } else {
+ memset(&cm->seg, 0, sizeof(cm->seg));
+ }
+ segfeatures_copy(&cm->cur_frame->seg, &cm->seg);
+ cm->cur_frame->seg.enabled = cm->seg.enabled;
+
+ const PARTITION_SEARCH_TYPE partition_search_type =
+ cpi->sf.part_sf.partition_search_type;
+ const BLOCK_SIZE fixed_partition_size = cpi->sf.part_sf.fixed_partition_size;
+ // Enable a quicker pass by uncommenting the following lines:
+ // cpi->sf.part_sf.partition_search_type = FIXED_PARTITION;
+ // cpi->sf.part_sf.fixed_partition_size = BLOCK_32X32;
+
+ av1_set_quantizer(cm, q_cfg->qm_minlevel, q_cfg->qm_maxlevel, q_index,
+ q_cfg->enable_chroma_deltaq);
+ av1_set_speed_features_qindex_dependent(cpi, oxcf->speed);
+ if (q_cfg->deltaq_mode != NO_DELTA_Q || q_cfg->enable_chroma_deltaq)
+ av1_init_quantizer(&cpi->enc_quant_dequant_params, &cm->quant_params,
+ cm->seq_params->bit_depth);
+
+ av1_set_variance_partition_thresholds(cpi, q_index, 0);
+ av1_encode_frame(cpi);
+
+ av1_setup_butteraugli_rdmult_and_restore_source(cpi, 0.3);
+ cpi->sf.part_sf.partition_search_type = partition_search_type;
+ cpi->sf.part_sf.fixed_partition_size = fixed_partition_size;
+}
diff --git a/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.h b/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.h
index a4af31c718..7b7b0b64d3 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.h
+++ b/third_party/libaom/source/libaom/av1/encoder/tune_butteraugli.h
@@ -38,6 +38,10 @@ void av1_setup_butteraugli_recon(AV1_COMP *cpi,
void av1_setup_butteraugli_source(AV1_COMP *cpi);
-void av1_restore_butteraugli_source(AV1_COMP *cpi);
+// 'K' is used to balance the rate-distortion distribution between PSNR
+// and Butteraugli.
+void av1_setup_butteraugli_rdmult_and_restore_source(AV1_COMP *cpi, double K);
+
+void av1_setup_butteraugli_rdmult(AV1_COMP *cpi);
#endif // AOM_AV1_ENCODER_TUNE_BUTTERAUGLI_H_
diff --git a/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.c b/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.c
index f5b6129407..0c28cebefa 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.c
+++ b/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.c
@@ -15,9 +15,7 @@
#include "aom_ports/system_state.h"
#include "av1/encoder/extend.h"
#include "av1/encoder/rdopt.h"
-#if CONFIG_USE_VMAF_RC
#include "config/aom_scale_rtcd.h"
-#endif
static const double kBaselineVmaf = 97.42773;
@@ -89,9 +87,9 @@ static unsigned int residual_variance(const AV1_COMP *cpi,
assert(y_stride == ref->y_stride);
const int y_offset = mb_row * mb_height * y_stride + mb_col * mb_width;
const int mv_offset = ref_mv.row * y_stride + ref_mv.col;
- const unsigned int var =
- cpi->fn_ptr[block_size].vf(ref->y_buffer + y_offset + mv_offset, y_stride,
- src->y_buffer + y_offset, y_stride, sse);
+ const unsigned int var = cpi->ppi->fn_ptr[block_size].vf(
+ ref->y_buffer + y_offset + mv_offset, y_stride, src->y_buffer + y_offset,
+ y_stride, sse);
return var;
}
@@ -117,7 +115,7 @@ static double frame_average_variance(const AV1_COMP *const cpi,
buf.buf = (uint8_t *)y_buffer + row_offset_y * y_stride + col_offset_y;
buf.stride = y_stride;
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
assert(frame->flags & YV12_FLAG_HIGHBITDEPTH);
var += av1_high_get_sby_perpixel_variance(cpi, &buf, block_size,
bit_depth);
@@ -234,7 +232,7 @@ static AOM_INLINE void unsharp(const AV1_COMP *const cpi,
const YV12_BUFFER_CONFIG *blurred,
const YV12_BUFFER_CONFIG *dst, double amount) {
const int bit_depth = cpi->td.mb.e_mbd.bd;
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
assert(source->flags & YV12_FLAG_HIGHBITDEPTH);
assert(blurred->flags & YV12_FLAG_HIGHBITDEPTH);
assert(dst->flags & YV12_FLAG_HIGHBITDEPTH);
@@ -294,38 +292,27 @@ static AOM_INLINE void gaussian_blur(const int bit_depth,
}
static AOM_INLINE double cal_approx_vmaf(const AV1_COMP *const cpi,
-#if CONFIG_USE_VMAF_RC
- VmafContext *vmaf_context,
- int *vmaf_cal_index,
-#endif
double source_variance,
YV12_BUFFER_CONFIG *const source,
YV12_BUFFER_CONFIG *const sharpened) {
const int bit_depth = cpi->td.mb.e_mbd.bd;
+ const bool cal_vmaf_neg =
+ cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN;
double new_vmaf;
-#if CONFIG_USE_VMAF_RC
- aom_calc_vmaf_at_index_rc(vmaf_context, cpi->vmaf_info.vmaf_model, source,
- sharpened, bit_depth, *vmaf_cal_index, &new_vmaf);
- (*vmaf_cal_index)++;
-#else
- aom_calc_vmaf(cpi->oxcf.tune_cfg.vmaf_model_path, source, sharpened,
- bit_depth, &new_vmaf);
-#endif
+ aom_calc_vmaf(cpi->vmaf_info.vmaf_model, source, sharpened, bit_depth,
+ cal_vmaf_neg, &new_vmaf);
const double sharpened_var = frame_average_variance(cpi, sharpened);
return source_variance / sharpened_var * (new_vmaf - kBaselineVmaf);
}
static double find_best_frame_unsharp_amount_loop(
- const AV1_COMP *const cpi,
-#if CONFIG_USE_VMAF_RC
- VmafContext *vmaf_context, int *vmaf_cal_index,
-#endif
- YV12_BUFFER_CONFIG *const source, YV12_BUFFER_CONFIG *const blurred,
- YV12_BUFFER_CONFIG *const sharpened, double best_vmaf,
- const double baseline_variance, const double unsharp_amount_start,
- const double step_size, const int max_loop_count, const double max_amount) {
+ const AV1_COMP *const cpi, YV12_BUFFER_CONFIG *const source,
+ YV12_BUFFER_CONFIG *const blurred, YV12_BUFFER_CONFIG *const sharpened,
+ double best_vmaf, const double baseline_variance,
+ const double unsharp_amount_start, const double step_size,
+ const int max_loop_count, const double max_amount) {
const double min_amount = 0.0;
int loop_count = 0;
double approx_vmaf = best_vmaf;
@@ -335,11 +322,7 @@ static double find_best_frame_unsharp_amount_loop(
unsharp_amount += step_size;
if (unsharp_amount > max_amount || unsharp_amount < min_amount) break;
unsharp(cpi, source, blurred, sharpened, unsharp_amount);
- approx_vmaf = cal_approx_vmaf(cpi,
-#if CONFIG_USE_VMAF_RC
- vmaf_context, vmaf_cal_index,
-#endif
- baseline_variance, source, sharpened);
+ approx_vmaf = cal_approx_vmaf(cpi, baseline_variance, source, sharpened);
loop_count++;
} while (approx_vmaf > best_vmaf && loop_count < max_loop_count);
@@ -358,73 +341,43 @@ static double find_best_frame_unsharp_amount(const AV1_COMP *const cpi,
const AV1_COMMON *const cm = &cpi->common;
const int width = source->y_width;
const int height = source->y_height;
-#if CONFIG_USE_VMAF_RC
- VmafContext *vmaf_context;
- aom_init_vmaf_context_rc(
- &vmaf_context, cpi->vmaf_info.vmaf_model,
- cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN);
- int vmaf_cal_index = 0;
-#endif
YV12_BUFFER_CONFIG sharpened;
memset(&sharpened, 0, sizeof(sharpened));
aom_alloc_frame_buffer(
- &sharpened, width, height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
+ &sharpened, width, height, source->subsampling_x, source->subsampling_y,
+ cm->seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
const double baseline_variance = frame_average_variance(cpi, source);
double unsharp_amount;
if (unsharp_amount_start <= step_size) {
unsharp_amount = find_best_frame_unsharp_amount_loop(
- cpi,
-#if CONFIG_USE_VMAF_RC
- vmaf_context, &vmaf_cal_index,
-#endif
- source, blurred, &sharpened, 0.0, baseline_variance, 0.0, step_size,
- max_loop_count, max_filter_amount);
+ cpi, source, blurred, &sharpened, 0.0, baseline_variance, 0.0,
+ step_size, max_loop_count, max_filter_amount);
} else {
double a0 = unsharp_amount_start - step_size, a1 = unsharp_amount_start;
double v0, v1;
unsharp(cpi, source, blurred, &sharpened, a0);
- v0 = cal_approx_vmaf(cpi,
-#if CONFIG_USE_VMAF_RC
- vmaf_context, &vmaf_cal_index,
-#endif
- baseline_variance, source, &sharpened);
+ v0 = cal_approx_vmaf(cpi, baseline_variance, source, &sharpened);
unsharp(cpi, source, blurred, &sharpened, a1);
- v1 = cal_approx_vmaf(cpi,
-#if CONFIG_USE_VMAF_RC
- vmaf_context, &vmaf_cal_index,
-#endif
- baseline_variance, source, &sharpened);
+ v1 = cal_approx_vmaf(cpi, baseline_variance, source, &sharpened);
if (fabs(v0 - v1) < 0.01) {
unsharp_amount = a0;
} else if (v0 > v1) {
unsharp_amount = find_best_frame_unsharp_amount_loop(
- cpi,
-#if CONFIG_USE_VMAF_RC
- vmaf_context, &vmaf_cal_index,
-#endif
- source, blurred, &sharpened, v0, baseline_variance, a0, -step_size,
- max_loop_count, max_filter_amount);
+ cpi, source, blurred, &sharpened, v0, baseline_variance, a0,
+ -step_size, max_loop_count, max_filter_amount);
} else {
unsharp_amount = find_best_frame_unsharp_amount_loop(
- cpi,
-#if CONFIG_USE_VMAF_RC
- vmaf_context, &vmaf_cal_index,
-#endif
- source, blurred, &sharpened, v1, baseline_variance, a1, step_size,
- max_loop_count, max_filter_amount);
+ cpi, source, blurred, &sharpened, v1, baseline_variance, a1,
+ step_size, max_loop_count, max_filter_amount);
}
}
aom_free_frame_buffer(&sharpened);
-#if CONFIG_USE_VMAF_RC
- aom_close_vmaf_context_rc(vmaf_context);
-#endif
return unsharp_amount;
}
-#if CONFIG_USE_VMAF_RC
void av1_vmaf_neg_preprocessing(AV1_COMP *const cpi,
YV12_BUFFER_CONFIG *const source) {
aom_clear_system_state();
@@ -433,9 +386,9 @@ void av1_vmaf_neg_preprocessing(AV1_COMP *const cpi,
const int width = source->y_width;
const int height = source->y_height;
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const int layer_depth =
- AOMMIN(gf_group->layer_depth[gf_group->index], MAX_ARF_LAYERS - 1);
+ AOMMIN(gf_group->layer_depth[cpi->gf_frame_index], MAX_ARF_LAYERS - 1);
const double best_frame_unsharp_amount =
get_layer_value(cpi->vmaf_info.last_frame_unsharp_amount, layer_depth);
@@ -444,15 +397,15 @@ void av1_vmaf_neg_preprocessing(AV1_COMP *const cpi,
YV12_BUFFER_CONFIG blurred;
memset(&blurred, 0, sizeof(blurred));
aom_alloc_frame_buffer(
- &blurred, width, height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
+ &blurred, width, height, source->subsampling_x, source->subsampling_y,
+ cm->seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
gaussian_blur(bit_depth, source, &blurred);
unsharp(cpi, source, &blurred, source, best_frame_unsharp_amount);
aom_free_frame_buffer(&blurred);
aom_clear_system_state();
}
-#endif
void av1_vmaf_frame_preprocessing(AV1_COMP *const cpi,
YV12_BUFFER_CONFIG *const source) {
@@ -466,19 +419,21 @@ void av1_vmaf_frame_preprocessing(AV1_COMP *const cpi,
memset(&source_extended, 0, sizeof(source_extended));
memset(&blurred, 0, sizeof(blurred));
aom_alloc_frame_buffer(
- &source_extended, width, height, 1, 1, cm->seq_params.use_highbitdepth,
+ &source_extended, width, height, source->subsampling_x,
+ source->subsampling_y, cm->seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
aom_alloc_frame_buffer(
- &blurred, width, height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
+ &blurred, width, height, source->subsampling_x, source->subsampling_y,
+ cm->seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
av1_copy_and_extend_frame(source, &source_extended);
gaussian_blur(bit_depth, &source_extended, &blurred);
aom_free_frame_buffer(&source_extended);
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const int layer_depth =
- AOMMIN(gf_group->layer_depth[gf_group->index], MAX_ARF_LAYERS - 1);
+ AOMMIN(gf_group->layer_depth[cpi->gf_frame_index], MAX_ARF_LAYERS - 1);
const double last_frame_unsharp_amount =
get_layer_value(cpi->vmaf_info.last_frame_unsharp_amount, layer_depth);
@@ -500,24 +455,27 @@ void av1_vmaf_blk_preprocessing(AV1_COMP *const cpi,
const int width = source->y_width;
const int height = source->y_height;
const int bit_depth = cpi->td.mb.e_mbd.bd;
+ const int ss_x = source->subsampling_x;
+ const int ss_y = source->subsampling_y;
YV12_BUFFER_CONFIG source_extended, blurred;
memset(&blurred, 0, sizeof(blurred));
memset(&source_extended, 0, sizeof(source_extended));
aom_alloc_frame_buffer(
- &blurred, width, height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
- aom_alloc_frame_buffer(
- &source_extended, width, height, 1, 1, cm->seq_params.use_highbitdepth,
+ &blurred, width, height, ss_x, ss_y, cm->seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&source_extended, width, height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
av1_copy_and_extend_frame(source, &source_extended);
gaussian_blur(bit_depth, &source_extended, &blurred);
aom_free_frame_buffer(&source_extended);
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const int layer_depth =
- AOMMIN(gf_group->layer_depth[gf_group->index], MAX_ARF_LAYERS - 1);
+ AOMMIN(gf_group->layer_depth[cpi->gf_frame_index], MAX_ARF_LAYERS - 1);
const double last_frame_unsharp_amount =
get_layer_value(cpi->vmaf_info.last_frame_unsharp_amount, layer_depth);
@@ -540,12 +498,14 @@ void av1_vmaf_blk_preprocessing(AV1_COMP *const cpi,
YV12_BUFFER_CONFIG source_block, blurred_block;
memset(&source_block, 0, sizeof(source_block));
memset(&blurred_block, 0, sizeof(blurred_block));
- aom_alloc_frame_buffer(
- &source_block, block_w, block_h, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
- aom_alloc_frame_buffer(
- &blurred_block, block_w, block_h, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&source_block, block_w, block_h, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&blurred_block, block_w, block_h, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
for (int row = 0; row < num_rows; ++row) {
for (int col = 0; col < num_cols; ++col) {
@@ -555,7 +515,7 @@ void av1_vmaf_blk_preprocessing(AV1_COMP *const cpi,
const int block_height = AOMMIN(height - row_offset_y, block_h);
const int index = col + row * num_cols;
- if (cm->seq_params.use_highbitdepth) {
+ if (cm->seq_params->use_highbitdepth) {
assert(source->flags & YV12_FLAG_HIGHBITDEPTH);
assert(blurred.flags & YV12_FLAG_HIGHBITDEPTH);
uint16_t *frame_src_buf = CONVERT_TO_SHORTPTR(source->y_buffer) +
@@ -624,7 +584,7 @@ void av1_vmaf_blk_preprocessing(AV1_COMP *const cpi,
const int block_height = AOMMIN(source->y_height - row_offset_y, block_h);
const int index = col + row * num_cols;
- if (cm->seq_params.use_highbitdepth) {
+ if (cm->seq_params->use_highbitdepth) {
assert(source->flags & YV12_FLAG_HIGHBITDEPTH);
assert(blurred.flags & YV12_FLAG_HIGHBITDEPTH);
uint16_t *src_buf = CONVERT_TO_SHORTPTR(source->y_buffer) +
@@ -654,93 +614,6 @@ void av1_vmaf_blk_preprocessing(AV1_COMP *const cpi,
aom_clear_system_state();
}
-#if !CONFIG_USE_VMAF_RC
-typedef struct FrameData {
- const YV12_BUFFER_CONFIG *source, *blurred;
- int block_w, block_h, num_rows, num_cols, row, col, bit_depth;
-} FrameData;
-
-// A callback function used to pass data to VMAF.
-// Returns 0 after reading a frame.
-// Returns 2 when there is no more frame to read.
-static int update_frame(float *ref_data, float *main_data, float *temp_data,
- int stride, void *user_data) {
- FrameData *frames = (FrameData *)user_data;
- const int width = frames->source->y_width;
- const int height = frames->source->y_height;
- const int row = frames->row;
- const int col = frames->col;
- const int num_rows = frames->num_rows;
- const int num_cols = frames->num_cols;
- const int block_w = frames->block_w;
- const int block_h = frames->block_h;
- const YV12_BUFFER_CONFIG *source = frames->source;
- const YV12_BUFFER_CONFIG *blurred = frames->blurred;
- const int bit_depth = frames->bit_depth;
- const float scale_factor = 1.0f / (float)(1 << (bit_depth - 8));
- (void)temp_data;
- stride /= (int)sizeof(*ref_data);
-
- for (int i = 0; i < height; ++i) {
- float *ref, *main;
- ref = ref_data + i * stride;
- main = main_data + i * stride;
- if (source->flags & YV12_FLAG_HIGHBITDEPTH) {
- uint16_t *src;
- src = CONVERT_TO_SHORTPTR(source->y_buffer) + i * source->y_stride;
- for (int j = 0; j < width; ++j) {
- ref[j] = main[j] = scale_factor * (float)src[j];
- }
- } else {
- uint8_t *src;
- src = source->y_buffer + i * source->y_stride;
- for (int j = 0; j < width; ++j) {
- ref[j] = main[j] = (float)src[j];
- }
- }
- }
- if (row < num_rows && col < num_cols) {
- // Set current block
- const int row_offset = row * block_h;
- const int col_offset = col * block_w;
- const int block_width = AOMMIN(width - col_offset, block_w);
- const int block_height = AOMMIN(height - row_offset, block_h);
-
- float *main_buf = main_data + col_offset + row_offset * stride;
- if (source->flags & YV12_FLAG_HIGHBITDEPTH) {
- uint16_t *blurred_buf = CONVERT_TO_SHORTPTR(blurred->y_buffer) +
- row_offset * blurred->y_stride + col_offset;
- for (int i = 0; i < block_height; ++i) {
- for (int j = 0; j < block_width; ++j) {
- main_buf[j] = scale_factor * (float)blurred_buf[j];
- }
- main_buf += stride;
- blurred_buf += blurred->y_stride;
- }
- } else {
- uint8_t *blurred_buf =
- blurred->y_buffer + row_offset * blurred->y_stride + col_offset;
- for (int i = 0; i < block_height; ++i) {
- for (int j = 0; j < block_width; ++j) {
- main_buf[j] = (float)blurred_buf[j];
- }
- main_buf += stride;
- blurred_buf += blurred->y_stride;
- }
- }
-
- frames->col++;
- if (frames->col >= num_cols) {
- frames->col = 0;
- frames->row++;
- }
- return 0;
- } else {
- return 2;
- }
-}
-#endif
-
void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi) {
AV1_COMMON *cm = &cpi->common;
const int y_width = cpi->source->y_width;
@@ -748,13 +621,15 @@ void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi) {
const int resized_block_size = BLOCK_32X32;
const int resize_factor = 2;
const int bit_depth = cpi->td.mb.e_mbd.bd;
+ const int ss_x = cpi->source->subsampling_x;
+ const int ss_y = cpi->source->subsampling_y;
aom_clear_system_state();
YV12_BUFFER_CONFIG resized_source;
memset(&resized_source, 0, sizeof(resized_source));
aom_alloc_frame_buffer(
- &resized_source, y_width / resize_factor, y_height / resize_factor, 1, 1,
- cm->seq_params.use_highbitdepth, cpi->oxcf.border_in_pixels,
+ &resized_source, y_width / resize_factor, y_height / resize_factor, ss_x,
+ ss_y, cm->seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
cm->features.byte_alignment);
av1_resize_and_extend_frame_nonnormative(cpi->source, &resized_source,
bit_depth, av1_num_planes(cm));
@@ -770,42 +645,26 @@ void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi) {
YV12_BUFFER_CONFIG blurred;
memset(&blurred, 0, sizeof(blurred));
- aom_alloc_frame_buffer(&blurred, resized_y_width, resized_y_height, 1, 1,
- cm->seq_params.use_highbitdepth,
+ aom_alloc_frame_buffer(&blurred, resized_y_width, resized_y_height, ss_x,
+ ss_y, cm->seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels,
cm->features.byte_alignment);
gaussian_blur(bit_depth, &resized_source, &blurred);
-#if CONFIG_USE_VMAF_RC
YV12_BUFFER_CONFIG recon;
memset(&recon, 0, sizeof(recon));
- aom_alloc_frame_buffer(&recon, resized_y_width, resized_y_height, 1, 1,
- cm->seq_params.use_highbitdepth,
+ aom_alloc_frame_buffer(&recon, resized_y_width, resized_y_height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels,
cm->features.byte_alignment);
aom_yv12_copy_frame(&resized_source, &recon, 1);
VmafContext *vmaf_context;
- aom_init_vmaf_context_rc(
- &vmaf_context, cpi->vmaf_info.vmaf_model,
- cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN);
-#else
- double *scores = aom_malloc(sizeof(*scores) * (num_rows * num_cols));
- memset(scores, 0, sizeof(*scores) * (num_rows * num_cols));
- FrameData frame_data;
- frame_data.source = &resized_source;
- frame_data.blurred = &blurred;
- frame_data.block_w = resized_block_w;
- frame_data.block_h = resized_block_h;
- frame_data.num_rows = num_rows;
- frame_data.num_cols = num_cols;
- frame_data.row = 0;
- frame_data.col = 0;
- frame_data.bit_depth = bit_depth;
- aom_calc_vmaf_multi_frame(&frame_data, cpi->oxcf.tune_cfg.vmaf_model_path,
- update_frame, resized_y_width, resized_y_height,
- bit_depth, scores);
-#endif
+ const bool cal_vmaf_neg =
+ cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN;
+ aom_init_vmaf_context(&vmaf_context, cpi->vmaf_info.vmaf_model, cal_vmaf_neg);
+ unsigned int *sses = aom_malloc(sizeof(*sses) * (num_rows * num_cols));
+ memset(sses, 0, sizeof(*sses) * (num_rows * num_cols));
// Loop through each 'block_size' block.
for (int row = 0; row < num_rows; ++row) {
@@ -820,15 +679,14 @@ void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi) {
uint8_t *const blurred_buf =
blurred.y_buffer + row_offset_y * blurred.y_stride + col_offset_y;
- unsigned int sse;
- cpi->fn_ptr[resized_block_size].vf(orig_buf, resized_source.y_stride,
- blurred_buf, blurred.y_stride, &sse);
+ cpi->ppi->fn_ptr[resized_block_size].vf(orig_buf, resized_source.y_stride,
+ blurred_buf, blurred.y_stride,
+ &sses[index]);
-#if CONFIG_USE_VMAF_RC
uint8_t *const recon_buf =
recon.y_buffer + row_offset_y * recon.y_stride + col_offset_y;
// Set recon buf
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
highbd_unsharp_rect(CONVERT_TO_SHORTPTR(blurred_buf), blurred.y_stride,
CONVERT_TO_SHORTPTR(blurred_buf), blurred.y_stride,
CONVERT_TO_SHORTPTR(recon_buf), recon.y_stride,
@@ -839,13 +697,11 @@ void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi) {
resized_block_w, resized_block_h, 0.0);
}
- double vmaf;
- aom_calc_vmaf_at_index_rc(vmaf_context, cpi->vmaf_info.vmaf_model,
- &resized_source, &recon, bit_depth, index,
- &vmaf);
+ aom_read_vmaf_image(vmaf_context, &resized_source, &recon, bit_depth,
+ index);
// Restore recon buf
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
highbd_unsharp_rect(
CONVERT_TO_SHORTPTR(orig_buf), resized_source.y_stride,
CONVERT_TO_SHORTPTR(orig_buf), resized_source.y_stride,
@@ -856,13 +712,18 @@ void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi) {
resized_source.y_stride, recon_buf, recon.y_stride,
resized_block_w, resized_block_h, 0.0);
}
-#else
- const double vmaf = scores[index];
-#endif
+ }
+ }
+ aom_flush_vmaf_context(vmaf_context);
+ for (int row = 0; row < num_rows; ++row) {
+ for (int col = 0; col < num_cols; ++col) {
+ const int index = row * num_cols + col;
+ const double vmaf = aom_calc_vmaf_at_index(
+ vmaf_context, cpi->vmaf_info.vmaf_model, index);
const double dvmaf = kBaselineVmaf - vmaf;
const double mse =
- (double)sse / (double)(resized_y_width * resized_y_height);
+ (double)sses[index] / (double)(resized_y_width * resized_y_height);
double weight;
const double eps = 0.01 / (num_rows * num_cols);
if (dvmaf < eps || mse < eps) {
@@ -879,11 +740,8 @@ void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi) {
aom_free_frame_buffer(&resized_source);
aom_free_frame_buffer(&blurred);
-#if CONFIG_USE_VMAF_RC
- aom_close_vmaf_context_rc(vmaf_context);
-#else
- aom_free(scores);
-#endif
+ aom_close_vmaf_context(vmaf_context);
+ aom_free(sses);
aom_clear_system_state();
}
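
For orientation, a minimal sketch of the batched VmafContext flow that av1_set_mb_vmaf_rdmult_scaling now follows, with buffer allocation, the per-block unsharp masking and the rdmult math elided; the calls and their argument lists are taken from the hunks above, not verified against the libaom header:

    VmafContext *vmaf_context;
    const bool cal_vmaf_neg =
        cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN;
    aom_init_vmaf_context(&vmaf_context, cpi->vmaf_info.vmaf_model,
                          cal_vmaf_neg);
    // Pass 1: queue one (source, reconstruction) pair per block.
    for (int index = 0; index < num_rows * num_cols; ++index) {
      aom_read_vmaf_image(vmaf_context, &resized_source, &recon, bit_depth,
                          index);
    }
    // Finish all queued VMAF computations in one go.
    aom_flush_vmaf_context(vmaf_context);
    // Pass 2: fetch the per-block scores and turn them into rdmult weights.
    for (int index = 0; index < num_rows * num_cols; ++index) {
      const double vmaf = aom_calc_vmaf_at_index(
          vmaf_context, cpi->vmaf_info.vmaf_model, index);
      // The weight is derived from kBaselineVmaf - vmaf and the block MSE.
    }
    aom_close_vmaf_context(vmaf_context);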
@@ -967,27 +825,32 @@ static double calc_vmaf_motion_score(const AV1_COMP *const cpi,
const int y_height = cur->y_height;
YV12_BUFFER_CONFIG blurred_cur, blurred_last, blurred_next;
const int bit_depth = cpi->td.mb.e_mbd.bd;
+ const int ss_x = cur->subsampling_x;
+ const int ss_y = cur->subsampling_y;
memset(&blurred_cur, 0, sizeof(blurred_cur));
memset(&blurred_last, 0, sizeof(blurred_last));
memset(&blurred_next, 0, sizeof(blurred_next));
- aom_alloc_frame_buffer(
- &blurred_cur, y_width, y_height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
- aom_alloc_frame_buffer(
- &blurred_last, y_width, y_height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
- aom_alloc_frame_buffer(
- &blurred_next, y_width, y_height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&blurred_cur, y_width, y_height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&blurred_last, y_width, y_height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&blurred_next, y_width, y_height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
gaussian_blur(bit_depth, cur, &blurred_cur);
gaussian_blur(bit_depth, last, &blurred_last);
if (next) gaussian_blur(bit_depth, next, &blurred_next);
double motion1, motion2 = 65536.0;
- if (cm->seq_params.use_highbitdepth) {
+ if (cm->seq_params->use_highbitdepth) {
assert(blurred_cur.flags & YV12_FLAG_HIGHBITDEPTH);
assert(blurred_last.flags & YV12_FLAG_HIGHBITDEPTH);
const float scale_factor = 1.0f / (float)(1 << (bit_depth - 8));
@@ -1026,9 +889,9 @@ static AOM_INLINE void get_neighbor_frames(const AV1_COMP *const cpi,
YV12_BUFFER_CONFIG **last,
YV12_BUFFER_CONFIG **next) {
const AV1_COMMON *const cm = &cpi->common;
- const GF_GROUP *gf_group = &cpi->gf_group;
+ const GF_GROUP *gf_group = &cpi->ppi->gf_group;
const int src_index =
- cm->show_frame != 0 ? 0 : gf_group->arf_src_offset[gf_group->index];
+ cm->show_frame != 0 ? 0 : gf_group->arf_src_offset[cpi->gf_frame_index];
struct lookahead_entry *last_entry = av1_lookahead_peek(
cpi->ppi->lookahead, src_index - 1, cpi->compressor_stage);
struct lookahead_entry *next_entry = av1_lookahead_peek(
@@ -1046,9 +909,9 @@ int av1_get_vmaf_base_qindex(const AV1_COMP *const cpi, int current_qindex) {
return current_qindex;
}
aom_clear_system_state();
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const int layer_depth =
- AOMMIN(gf_group->layer_depth[gf_group->index], MAX_ARF_LAYERS - 1);
+ AOMMIN(gf_group->layer_depth[cpi->gf_frame_index], MAX_ARF_LAYERS - 1);
const double last_frame_ysse =
get_layer_value(cpi->vmaf_info.last_frame_ysse, layer_depth);
const double last_frame_vmaf =
@@ -1065,7 +928,7 @@ int av1_get_vmaf_base_qindex(const AV1_COMP *const cpi, int current_qindex) {
}
YV12_BUFFER_CONFIG *cur_buf = cpi->source;
if (cm->show_frame == 0) {
- const int src_index = gf_group->arf_src_offset[gf_group->index];
+ const int src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
struct lookahead_entry *cur_entry = av1_lookahead_peek(
cpi->ppi->lookahead, src_index, cpi->compressor_stage);
cur_buf = &cur_entry->img;
@@ -1084,7 +947,8 @@ int av1_get_vmaf_base_qindex(const AV1_COMP *const cpi, int current_qindex) {
const double dsse = dvmaf * approx_sse / approx_dvmaf;
const double beta = approx_sse / (dsse + approx_sse);
- const int offset = av1_get_deltaq_offset(cpi, current_qindex, beta);
+ const int offset =
+ av1_get_deltaq_offset(cm->seq_params->bit_depth, current_qindex, beta);
int qindex = current_qindex + offset;
qindex = AOMMIN(qindex, MAXQ);
@@ -1094,23 +958,23 @@ int av1_get_vmaf_base_qindex(const AV1_COMP *const cpi, int current_qindex) {
return qindex;
}
-#if CONFIG_USE_VMAF_RC
static AOM_INLINE double cal_approx_score(
- AV1_COMP *const cpi, VmafContext *vmaf_context, int vmaf_cal_index,
- double src_variance, double new_variance, double src_score,
- YV12_BUFFER_CONFIG *const src, YV12_BUFFER_CONFIG *const recon_sharpened) {
+ AV1_COMP *const cpi, double src_variance, double new_variance,
+ double src_score, YV12_BUFFER_CONFIG *const src,
+ YV12_BUFFER_CONFIG *const recon_sharpened) {
double score;
const uint32_t bit_depth = cpi->td.mb.e_mbd.bd;
- aom_calc_vmaf_at_index_rc(vmaf_context, cpi->vmaf_info.vmaf_model, src,
- recon_sharpened, bit_depth, vmaf_cal_index, &score);
+ const bool cal_vmaf_neg =
+ cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN;
+ aom_calc_vmaf(cpi->vmaf_info.vmaf_model, src, recon_sharpened, bit_depth,
+ cal_vmaf_neg, &score);
return src_variance / new_variance * (score - src_score);
}
static double find_best_frame_unsharp_amount_loop_neg(
- AV1_COMP *const cpi, VmafContext *vmaf_context, double src_variance,
- double base_score, YV12_BUFFER_CONFIG *const src,
- YV12_BUFFER_CONFIG *const recon, YV12_BUFFER_CONFIG *const ref,
- YV12_BUFFER_CONFIG *const src_blurred,
+ AV1_COMP *const cpi, double src_variance, double base_score,
+ YV12_BUFFER_CONFIG *const src, YV12_BUFFER_CONFIG *const recon,
+ YV12_BUFFER_CONFIG *const ref, YV12_BUFFER_CONFIG *const src_blurred,
YV12_BUFFER_CONFIG *const recon_blurred,
YV12_BUFFER_CONFIG *const src_sharpened,
YV12_BUFFER_CONFIG *const recon_sharpened, FULLPEL_MV *mvs,
@@ -1120,7 +984,6 @@ static double find_best_frame_unsharp_amount_loop_neg(
int loop_count = 0;
double approx_score = best_score;
double unsharp_amount = unsharp_amount_start;
- int vmaf_cal_index = 3;
do {
best_score = approx_score;
@@ -1130,9 +993,8 @@ static double find_best_frame_unsharp_amount_loop_neg(
unsharp(cpi, src, src_blurred, src_sharpened, unsharp_amount);
const double new_variance =
residual_frame_average_variance(cpi, src_sharpened, ref, mvs);
- approx_score =
- cal_approx_score(cpi, vmaf_context, vmaf_cal_index++, src_variance,
- new_variance, base_score, src, recon_sharpened);
+ approx_score = cal_approx_score(cpi, src_variance, new_variance, base_score,
+ src, recon_sharpened);
loop_count++;
} while (approx_score > best_score && loop_count < max_loop_count);
@@ -1143,11 +1005,11 @@ static double find_best_frame_unsharp_amount_loop_neg(
}
static double find_best_frame_unsharp_amount_neg(
- AV1_COMP *const cpi, VmafContext *vmaf_context,
- YV12_BUFFER_CONFIG *const src, YV12_BUFFER_CONFIG *const recon,
- YV12_BUFFER_CONFIG *const ref, double base_score,
- const double unsharp_amount_start, const double step_size,
- const int max_loop_count, const double max_filter_amount) {
+ AV1_COMP *const cpi, YV12_BUFFER_CONFIG *const src,
+ YV12_BUFFER_CONFIG *const recon, YV12_BUFFER_CONFIG *const ref,
+ double base_score, const double unsharp_amount_start,
+ const double step_size, const int max_loop_count,
+ const double max_filter_amount) {
FULLPEL_MV *mvs = NULL;
const double src_variance =
residual_frame_average_variance(cpi, src, ref, mvs);
@@ -1156,22 +1018,28 @@ static double find_best_frame_unsharp_amount_neg(
const int width = recon->y_width;
const int height = recon->y_height;
const int bit_depth = cpi->td.mb.e_mbd.bd;
+ const int ss_x = recon->subsampling_x;
+ const int ss_y = recon->subsampling_y;
+
YV12_BUFFER_CONFIG src_blurred, recon_blurred, src_sharpened, recon_sharpened;
memset(&recon_sharpened, 0, sizeof(recon_sharpened));
memset(&src_sharpened, 0, sizeof(src_sharpened));
memset(&recon_blurred, 0, sizeof(recon_blurred));
memset(&src_blurred, 0, sizeof(src_blurred));
+ aom_alloc_frame_buffer(&recon_sharpened, width, height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&src_sharpened, width, height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
+ aom_alloc_frame_buffer(&recon_blurred, width, height, ss_x, ss_y,
+ cm->seq_params->use_highbitdepth,
+ cpi->oxcf.border_in_pixels,
+ cm->features.byte_alignment);
aom_alloc_frame_buffer(
- &recon_sharpened, width, height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
- aom_alloc_frame_buffer(
- &src_sharpened, width, height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
- aom_alloc_frame_buffer(
- &recon_blurred, width, height, 1, 1, cm->seq_params.use_highbitdepth,
- cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
- aom_alloc_frame_buffer(
- &src_blurred, width, height, 1, 1, cm->seq_params.use_highbitdepth,
+ &src_blurred, width, height, ss_x, ss_y, cm->seq_params->use_highbitdepth,
cpi->oxcf.border_in_pixels, cm->features.byte_alignment);
gaussian_blur(bit_depth, recon, &recon_blurred);
@@ -1181,32 +1049,28 @@ static double find_best_frame_unsharp_amount_neg(
unsharp(cpi, src, &src_blurred, &src_sharpened, unsharp_amount_start);
const double variance_start =
residual_frame_average_variance(cpi, &src_sharpened, ref, mvs);
- const double score_start =
- cal_approx_score(cpi, vmaf_context, 1, src_variance, variance_start,
- base_score, src, &recon_sharpened);
+ const double score_start = cal_approx_score(
+ cpi, src_variance, variance_start, base_score, src, &recon_sharpened);
const double unsharp_amount_next = unsharp_amount_start + step_size;
unsharp(cpi, recon, &recon_blurred, &recon_sharpened, unsharp_amount_next);
unsharp(cpi, src, &src_blurred, &src_sharpened, unsharp_amount_next);
const double variance_next =
residual_frame_average_variance(cpi, &src_sharpened, ref, mvs);
- const double score_next =
- cal_approx_score(cpi, vmaf_context, 2, src_variance, variance_next,
- base_score, src, &recon_sharpened);
+ const double score_next = cal_approx_score(cpi, src_variance, variance_next,
+ base_score, src, &recon_sharpened);
double unsharp_amount;
if (score_next > score_start) {
unsharp_amount = find_best_frame_unsharp_amount_loop_neg(
- cpi, vmaf_context, src_variance, base_score, src, recon, ref,
- &src_blurred, &recon_blurred, &src_sharpened, &recon_sharpened, mvs,
- score_next, unsharp_amount_next, step_size, max_loop_count,
- max_filter_amount);
+ cpi, src_variance, base_score, src, recon, ref, &src_blurred,
+ &recon_blurred, &src_sharpened, &recon_sharpened, mvs, score_next,
+ unsharp_amount_next, step_size, max_loop_count, max_filter_amount);
} else {
unsharp_amount = find_best_frame_unsharp_amount_loop_neg(
- cpi, vmaf_context, src_variance, base_score, src, recon, ref,
- &src_blurred, &recon_blurred, &src_sharpened, &recon_sharpened, mvs,
- score_start, unsharp_amount_start, -step_size, max_loop_count,
- max_filter_amount);
+ cpi, src_variance, base_score, src, recon, ref, &src_blurred,
+ &recon_blurred, &src_sharpened, &recon_sharpened, mvs, score_start,
+ unsharp_amount_start, -step_size, max_loop_count, max_filter_amount);
}
aom_free_frame_buffer(&recon_sharpened);
@@ -1216,29 +1080,21 @@ static double find_best_frame_unsharp_amount_neg(
aom_free(mvs);
return unsharp_amount;
}
-#endif // CONFIG_USE_VMAF_RC
void av1_update_vmaf_curve(AV1_COMP *cpi) {
YV12_BUFFER_CONFIG *source = cpi->source;
YV12_BUFFER_CONFIG *recon = &cpi->common.cur_frame->buf;
const int bit_depth = cpi->td.mb.e_mbd.bd;
- const GF_GROUP *const gf_group = &cpi->gf_group;
+ const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
const int layer_depth =
- AOMMIN(gf_group->layer_depth[gf_group->index], MAX_ARF_LAYERS - 1);
-#if CONFIG_USE_VMAF_RC
+ AOMMIN(gf_group->layer_depth[cpi->gf_frame_index], MAX_ARF_LAYERS - 1);
double base_score;
- VmafContext *vmaf_context;
- aom_init_vmaf_context_rc(
- &vmaf_context, cpi->vmaf_info.vmaf_model,
- cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN);
- aom_calc_vmaf_at_index_rc(vmaf_context, cpi->vmaf_info.vmaf_model, source,
- recon, bit_depth, 0, &base_score);
+ const bool cal_vmaf_neg =
+ cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN;
+ aom_calc_vmaf(cpi->vmaf_info.vmaf_model, source, recon, bit_depth,
+ cal_vmaf_neg, &base_score);
cpi->vmaf_info.last_frame_vmaf[layer_depth] = base_score;
-#else
- aom_calc_vmaf(cpi->oxcf.tune_cfg.vmaf_model_path, source, recon, bit_depth,
- &cpi->vmaf_info.last_frame_vmaf[layer_depth]);
-#endif // CONFIG_USE_VMAF_RC
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
assert(source->flags & YV12_FLAG_HIGHBITDEPTH);
assert(recon->flags & YV12_FLAG_HIGHBITDEPTH);
cpi->vmaf_info.last_frame_ysse[layer_depth] =
@@ -1248,7 +1104,6 @@ void av1_update_vmaf_curve(AV1_COMP *cpi) {
(double)aom_get_y_sse(source, recon);
}
-#if CONFIG_USE_VMAF_RC
if (cpi->oxcf.tune_cfg.tuning == AOM_TUNE_VMAF_NEG_MAX_GAIN) {
YV12_BUFFER_CONFIG *last, *next;
get_neighbor_frames(cpi, &last, &next);
@@ -1256,10 +1111,8 @@ void av1_update_vmaf_curve(AV1_COMP *cpi) {
get_layer_value(cpi->vmaf_info.last_frame_unsharp_amount, layer_depth);
const int max_loop_count = 5;
cpi->vmaf_info.last_frame_unsharp_amount[layer_depth] =
- find_best_frame_unsharp_amount_neg(
- cpi, vmaf_context, source, recon, last, base_score,
- best_unsharp_amount_start, 0.025, max_loop_count, 1.01);
+ find_best_frame_unsharp_amount_neg(cpi, source, recon, last, base_score,
+ best_unsharp_amount_start, 0.025,
+ max_loop_count, 1.01);
}
- aom_close_vmaf_context_rc(vmaf_context);
-#endif // CONFIG_USE_VMAF_RC
}
diff --git a/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.h b/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.h
index 01c3068bf0..4625fb9061 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.h
+++ b/third_party/libaom/source/libaom/av1/encoder/tune_vmaf.h
@@ -36,10 +36,8 @@ typedef struct {
// Stores the original qindex before scaling.
int original_qindex;
-#if CONFIG_USE_VMAF_RC
// VMAF model used in VMAF calculations.
VmafModel *vmaf_model;
-#endif
} TuneVMAFInfo;
typedef struct AV1_COMP AV1_COMP;
@@ -48,9 +46,7 @@ void av1_vmaf_blk_preprocessing(AV1_COMP *cpi, YV12_BUFFER_CONFIG *source);
void av1_vmaf_frame_preprocessing(AV1_COMP *cpi, YV12_BUFFER_CONFIG *source);
-#ifdef CONFIG_USE_VMAF_RC
void av1_vmaf_neg_preprocessing(AV1_COMP *cpi, YV12_BUFFER_CONFIG *source);
-#endif
void av1_set_mb_vmaf_rdmult_scaling(AV1_COMP *cpi);
diff --git a/third_party/libaom/source/libaom/av1/encoder/tx_search.c b/third_party/libaom/source/libaom/av1/encoder/tx_search.c
index 30aac0a349..e65b70f788 100644
--- a/third_party/libaom/source/libaom/av1/encoder/tx_search.c
+++ b/third_party/libaom/source/libaom/av1/encoder/tx_search.c
@@ -618,7 +618,7 @@ static AOM_INLINE void get_energy_distribution_fine(
assert(bw <= 32);
assert(bh <= 32);
assert(((bw - 1) >> w_shift) + (((bh - 1) >> h_shift) << 2) == 15);
- if (cpi->common.seq_params.use_highbitdepth) {
+ if (cpi->common.seq_params->use_highbitdepth) {
const uint16_t *src16 = CONVERT_TO_SHORTPTR(src);
const uint16_t *dst16 = CONVERT_TO_SHORTPTR(dst);
for (int i = 0; i < bh; ++i)
@@ -643,43 +643,43 @@ static AOM_INLINE void get_energy_distribution_fine(
const BLOCK_SIZE subsize = (BLOCK_SIZE)f_index;
assert(block_size_wide[bsize] == 4 * block_size_wide[subsize]);
assert(block_size_high[bsize] == 4 * block_size_high[subsize]);
- cpi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[0]);
- cpi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4, dst_stride,
- &esq[1]);
- cpi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2, dst_stride,
- &esq[2]);
- cpi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
- dst_stride, &esq[3]);
+ cpi->ppi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[0]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4,
+ dst_stride, &esq[1]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2,
+ dst_stride, &esq[2]);
+ cpi->ppi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
+ dst_stride, &esq[3]);
src += bh / 4 * src_stride;
dst += bh / 4 * dst_stride;
- cpi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[4]);
- cpi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4, dst_stride,
- &esq[5]);
- cpi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2, dst_stride,
- &esq[6]);
- cpi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
- dst_stride, &esq[7]);
+ cpi->ppi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[4]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4,
+ dst_stride, &esq[5]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2,
+ dst_stride, &esq[6]);
+ cpi->ppi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
+ dst_stride, &esq[7]);
src += bh / 4 * src_stride;
dst += bh / 4 * dst_stride;
- cpi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[8]);
- cpi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4, dst_stride,
- &esq[9]);
- cpi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2, dst_stride,
- &esq[10]);
- cpi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
- dst_stride, &esq[11]);
+ cpi->ppi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[8]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4,
+ dst_stride, &esq[9]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2,
+ dst_stride, &esq[10]);
+ cpi->ppi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
+ dst_stride, &esq[11]);
src += bh / 4 * src_stride;
dst += bh / 4 * dst_stride;
- cpi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[12]);
- cpi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4, dst_stride,
- &esq[13]);
- cpi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2, dst_stride,
- &esq[14]);
- cpi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
- dst_stride, &esq[15]);
+ cpi->ppi->fn_ptr[subsize].vf(src, src_stride, dst, dst_stride, &esq[12]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 4, src_stride, dst + bw / 4,
+ dst_stride, &esq[13]);
+ cpi->ppi->fn_ptr[subsize].vf(src + bw / 2, src_stride, dst + bw / 2,
+ dst_stride, &esq[14]);
+ cpi->ppi->fn_ptr[subsize].vf(src + 3 * bw / 4, src_stride, dst + 3 * bw / 4,
+ dst_stride, &esq[15]);
}
double total = (double)esq[0] + esq[1] + esq[2] + esq[3] + esq[4] + esq[5] +
@@ -769,13 +769,13 @@ static AOM_INLINE void get_2x2_normalized_sses_and_sads(
if (sse_norm_arr) {
unsigned int this_sse;
- cpi->fn_ptr[tx_bsize_half].vf(this_src, src_stride, this_dst,
- dst_stride, &this_sse);
+ cpi->ppi->fn_ptr[tx_bsize_half].vf(this_src, src_stride, this_dst,
+ dst_stride, &this_sse);
sse_norm_arr[row * 2 + col] = (double)this_sse / num_samples_half;
}
if (sad_norm_arr) {
- const unsigned int this_sad = cpi->fn_ptr[tx_bsize_half].sdf(
+ const unsigned int this_sad = cpi->ppi->fn_ptr[tx_bsize_half].sdf(
this_src, src_stride, this_dst, dst_stride);
sad_norm_arr[row * 2 + col] = (double)this_sad / num_samples_half;
}
@@ -832,11 +832,11 @@ static AOM_INLINE void PrintTransformUnitStats(
const uint8_t *const dst =
&pd->dst.buf[(blk_row * dst_stride + blk_col) << MI_SIZE_LOG2];
unsigned int sse;
- cpi->fn_ptr[tx_bsize].vf(src, src_stride, dst, dst_stride, &sse);
+ cpi->ppi->fn_ptr[tx_bsize].vf(src, src_stride, dst, dst_stride, &sse);
const double sse_norm = (double)sse / num_samples;
const unsigned int sad =
- cpi->fn_ptr[tx_bsize].sdf(src, src_stride, dst, dst_stride);
+ cpi->ppi->fn_ptr[tx_bsize].sdf(src, src_stride, dst, dst_stride);
const double sad_norm = (double)sad / num_samples;
fprintf(fout, " %g %g", sse_norm, sad_norm);
@@ -905,8 +905,8 @@ static int64_t get_sse(const AV1_COMP *cpi, const MACROBLOCK *x) {
if (x->skip_chroma_rd && plane) continue;
- cpi->fn_ptr[bs].vf(p->src.buf, p->src.stride, pd->dst.buf, pd->dst.stride,
- &sse);
+ cpi->ppi->fn_ptr[bs].vf(p->src.buf, p->src.stride, pd->dst.buf,
+ pd->dst.stride, &sse);
total_sse += sse;
}
total_sse <<= 4;
@@ -1030,7 +1030,7 @@ static AOM_INLINE void PrintPredictionUnitStats(const AV1_COMP *const cpi,
const double sse_norm = (double)sse / num_samples;
const unsigned int sad =
- cpi->fn_ptr[plane_bsize].sdf(src, src_stride, dst, dst_stride);
+ cpi->ppi->fn_ptr[plane_bsize].sdf(src, src_stride, dst, dst_stride);
const double sad_norm =
(double)sad / (1 << num_pels_log2_lookup[plane_bsize]);
@@ -1183,7 +1183,7 @@ static unsigned pixel_dist_visible_only(
unsigned sse;
if (txb_rows == visible_rows && txb_cols == visible_cols) {
- cpi->fn_ptr[tx_bsize].vf(src, src_stride, dst, dst_stride, &sse);
+ cpi->ppi->fn_ptr[tx_bsize].vf(src, src_stride, dst, dst_stride, &sse);
return sse;
}
@@ -2024,9 +2024,15 @@ get_tx_mask(const AV1_COMP *cpi, MACROBLOCK *x, int plane, int block,
assert(plane == 0);
allowed_tx_mask = ext_tx_used_flag;
int num_allowed = 0;
- const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
- const int *tx_type_probs =
- cpi->frame_probs.tx_type_probs[update_type][tx_size];
+ const FRAME_UPDATE_TYPE update_type =
+ get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
+ int *tx_type_probs;
+#if CONFIG_FRAME_PARALLEL_ENCODE
+ tx_type_probs =
+ (int *)cpi->ppi->temp_frame_probs.tx_type_probs[update_type][tx_size];
+#else
+ tx_type_probs = (int *)cpi->frame_probs.tx_type_probs[update_type][tx_size];
+#endif
int i;
if (cpi->sf.tx_sf.tx_type_search.prune_tx_type_using_stats) {
@@ -2097,25 +2103,8 @@ get_tx_mask(const AV1_COMP *cpi, MACROBLOCK *x, int plane, int block,
#if CONFIG_RD_DEBUG
static INLINE void update_txb_coeff_cost(RD_STATS *rd_stats, int plane,
- TX_SIZE tx_size, int blk_row,
- int blk_col, int txb_coeff_cost) {
- (void)blk_row;
- (void)blk_col;
- (void)tx_size;
+ int txb_coeff_cost) {
rd_stats->txb_coeff_cost[plane] += txb_coeff_cost;
-
- {
- const int txb_h = tx_size_high_unit[tx_size];
- const int txb_w = tx_size_wide_unit[tx_size];
- int idx, idy;
- for (idy = 0; idy < txb_h; ++idy)
- for (idx = 0; idx < txb_w; ++idx)
- rd_stats->txb_coeff_cost_map[plane][blk_row + idy][blk_col + idx] = 0;
-
- rd_stats->txb_coeff_cost_map[plane][blk_row][blk_col] = txb_coeff_cost;
- }
- assert(blk_row < TXB_COEFF_COST_MAP_SIZE);
- assert(blk_col < TXB_COEFF_COST_MAP_SIZE);
}
#endif
@@ -2674,8 +2663,7 @@ static AOM_INLINE void try_tx_block_no_split(
RDCOST(x->rdmult, zero_blk_rate, rd_stats->sse));
if (pick_skip_txfm) {
#if CONFIG_RD_DEBUG
- update_txb_coeff_cost(rd_stats, 0, tx_size, blk_row, blk_col,
- zero_blk_rate - rd_stats->rate);
+ update_txb_coeff_cost(rd_stats, 0, zero_blk_rate - rd_stats->rate);
#endif // CONFIG_RD_DEBUG
rd_stats->rate = zero_blk_rate;
rd_stats->dist = rd_stats->sse;
@@ -2720,11 +2708,12 @@ static AOM_INLINE void try_tx_block_split(
x->mode_costs.txfm_partition_cost[txfm_partition_ctx][1];
for (int r = 0, blk_idx = 0; r < txb_height; r += sub_txb_height) {
+ const int offsetr = blk_row + r;
+ if (offsetr >= max_blocks_high) break;
for (int c = 0; c < txb_width; c += sub_txb_width, ++blk_idx) {
assert(blk_idx < 4);
- const int offsetr = blk_row + r;
const int offsetc = blk_col + c;
- if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
+ if (offsetc >= max_blocks_wide) continue;
RD_STATS this_rd_stats;
int this_cost_valid = 1;
@@ -3173,8 +3162,7 @@ static AOM_INLINE void block_rd_txfm(int plane, int block, int blk_row,
}
#if CONFIG_RD_DEBUG
- update_txb_coeff_cost(&this_rd_stats, plane, tx_size, blk_row, blk_col,
- this_rd_stats.rate);
+ update_txb_coeff_cost(&this_rd_stats, plane, this_rd_stats.rate);
#endif // CONFIG_RD_DEBUG
av1_set_txb_context(x, plane, block, tx_size, a, l);
@@ -3452,15 +3440,18 @@ static AOM_INLINE void tx_block_yrd(
const int txb_width = tx_size_wide_unit[sub_txs];
const int txb_height = tx_size_high_unit[sub_txs];
const int step = txb_height * txb_width;
+ const int row_end =
+ AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
+ const int col_end =
+ AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
RD_STATS pn_rd_stats;
int64_t this_rd = 0;
assert(txb_width > 0 && txb_height > 0);
- for (int row = 0; row < tx_size_high_unit[tx_size]; row += txb_height) {
- for (int col = 0; col < tx_size_wide_unit[tx_size]; col += txb_width) {
- const int offsetr = blk_row + row;
+ for (int row = 0; row < row_end; row += txb_height) {
+ const int offsetr = blk_row + row;
+ for (int col = 0; col < col_end; col += txb_width) {
const int offsetc = blk_col + col;
- if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
av1_init_rd_stats(&pn_rd_stats);
tx_block_yrd(cpi, x, offsetr, offsetc, block, sub_txs, plane_bsize,
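
The tx_block_yrd change above is the same loop-bound hoisting applied earlier in try_tx_block_split: instead of bounds-checking every candidate offset inside the body, the visible range is clamped once before the loops. In schematic form, using the names from the hunks above:

    // Before: test every (offsetr, offsetc) inside the loop body.
    for (int row = 0; row < tx_size_high_unit[tx_size]; row += txb_height) {
      for (int col = 0; col < tx_size_wide_unit[tx_size]; col += txb_width) {
        const int offsetr = blk_row + row;
        const int offsetc = blk_col + col;
        if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
        // ... recurse into (offsetr, offsetc) ...
      }
    }

    // After: clamp the loop limits once; the body only sees visible blocks.
    const int row_end =
        AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
    const int col_end =
        AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);
    for (int row = 0; row < row_end; row += txb_height) {
      const int offsetr = blk_row + row;
      for (int col = 0; col < col_end; col += txb_width) {
        const int offsetc = blk_col + col;
        // ... recurse into (offsetr, offsetc) ...
      }
    }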
diff --git a/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.c b/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.c
index 31b86abe64..884d0a9e8b 100644
--- a/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.c
+++ b/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.c
@@ -327,16 +327,8 @@ int av1_optimize_txb(const struct AV1_COMP *cpi, MACROBLOCK *x, int plane,
const LV_MAP_EOB_COST *txb_eob_costs =
&coeff_costs->eob_costs[eob_multi_size][plane_type];
- const int rshift =
- (sharpness +
- (cpi->oxcf.q_cfg.aq_mode == VARIANCE_AQ && mbmi->segment_id < 4
- ? 7 - mbmi->segment_id
- : 2) +
- (cpi->oxcf.q_cfg.aq_mode != VARIANCE_AQ &&
- cpi->oxcf.q_cfg.deltaq_mode == DELTA_Q_PERCEPTUAL &&
- cm->delta_q_info.delta_q_present_flag && x->sb_energy_level < 0
- ? (3 - x->sb_energy_level)
- : 0));
+ const int rshift = sharpness + 2;
+
const int64_t rdmult =
(((int64_t)x->rdmult *
(plane_rd_mult[is_inter][plane_type] << (2 * (xd->bd - 8)))) +
diff --git a/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.h b/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.h
index e86caaa06e..70b322a2e1 100644
--- a/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.h
+++ b/third_party/libaom/source/libaom/av1/encoder/txb_rdopt.h
@@ -44,11 +44,11 @@ extern "C" {
* skip flag (tx_skip) and the sign of DC coefficient (dc_sign).
* \param[out] rate_cost The entropy cost of coding the transform block
* after adjustment of coefficients.
- * \param[in] sharpness When sharpness == 1, the function will be less
- * aggressive toward lowering the magnitude of coefficients.
+ * \param[in] sharpness When sharpness > 0, the function will be less
+ * aggressive towards lowering the magnitude of coefficients.
* In this way, the transform block will contain more high-frequency
- coefficients
- * and therefore preserve the sharpness of the reconstructed block.
+ * coefficients and therefore will preserve the sharpness of the reconstructed
+ * block.
*/
int av1_optimize_txb(const struct AV1_COMP *cpi, MACROBLOCK *x, int plane,
int block, TX_SIZE tx_size, TX_TYPE tx_type,
diff --git a/third_party/libaom/source/libaom/av1/encoder/var_based_part.c b/third_party/libaom/source/libaom/av1/encoder/var_based_part.c
index a42be4553f..8907d0d0ba 100644
--- a/third_party/libaom/source/libaom/av1/encoder/var_based_part.c
+++ b/third_party/libaom/source/libaom/av1/encoder/var_based_part.c
@@ -341,7 +341,7 @@ static int64_t scale_part_thresh_content(int64_t threshold_base, int speed,
static AOM_INLINE void set_vbp_thresholds(AV1_COMP *cpi, int64_t thresholds[],
int q, int content_lowsumdiff,
- int segment_id) {
+ int source_sad, int segment_id) {
AV1_COMMON *const cm = &cpi->common;
const int is_key_frame = frame_is_intra_only(cm);
const int threshold_multiplier = is_key_frame ? 120 : 1;
@@ -394,7 +394,6 @@ static AOM_INLINE void set_vbp_thresholds(AV1_COMP *cpi, int64_t thresholds[],
scale_part_thresh_content(threshold_base, cpi->oxcf.speed, cm->width,
cm->height, cpi->svc.non_reference_frame);
#endif
-
thresholds[0] = threshold_base >> 1;
thresholds[1] = threshold_base;
thresholds[3] = threshold_base << cpi->oxcf.speed;
@@ -436,20 +435,45 @@ static AOM_INLINE void set_vbp_thresholds(AV1_COMP *cpi, int64_t thresholds[],
thresholds[2] = (5 * threshold_base) >> 1;
}
if (cpi->sf.rt_sf.force_large_partition_blocks) {
+ double weight;
+ const int win = 20;
+ if (current_qindex < QINDEX_LARGE_BLOCK_THR - win)
+ weight = 1.0;
+ else if (current_qindex > QINDEX_LARGE_BLOCK_THR + win)
+ weight = 0.0;
+ else
+ weight =
+ 1.0 - (current_qindex - QINDEX_LARGE_BLOCK_THR + win) / (2 * win);
+ if (cm->width * cm->height > 640 * 480) {
+ for (int i = 0; i < 4; i++) {
+ thresholds[i] <<= 1;
+ }
+ }
if (cm->width * cm->height <= 352 * 288) {
thresholds[1] <<= 2;
thresholds[2] <<= 5;
thresholds[3] = INT32_MAX;
- } else if (cm->width * cm->height > 640 * 480 && segment_id == 0) {
+ // Condition the increase of partition thresholds on the segment
+ // and the content. Avoid the increase for superblocks which have
+ // high source sad, unless the whole frame has very high motion
+      // (i.e., cpi->rc.avg_source_sad is very large, in which case all blocks
+ // have high source sad).
+ } else if (cm->width * cm->height > 640 * 480 && segment_id == 0 &&
+ (source_sad != kHighSad || cpi->rc.avg_source_sad > 50000)) {
thresholds[0] = (3 * thresholds[0]) >> 1;
thresholds[3] = INT32_MAX;
- if (current_qindex >= QINDEX_LARGE_BLOCK_THR) {
- thresholds[1] <<= 1;
- thresholds[2] <<= 1;
+ if (current_qindex > QINDEX_LARGE_BLOCK_THR) {
+ thresholds[1] = (int)((1 - weight) * (thresholds[1] << 1) +
+ weight * thresholds[1]);
+ thresholds[2] = (int)((1 - weight) * (thresholds[2] << 1) +
+ weight * thresholds[2]);
}
- } else if (current_qindex > QINDEX_LARGE_BLOCK_THR && segment_id == 0) {
- thresholds[1] <<= 2;
- thresholds[2] <<= 5;
+ } else if (current_qindex > QINDEX_LARGE_BLOCK_THR && segment_id == 0 &&
+ (source_sad != kHighSad || cpi->rc.avg_source_sad > 50000)) {
+ thresholds[1] =
+ (int)((1 - weight) * (thresholds[1] << 2) + weight * thresholds[1]);
+ thresholds[2] =
+ (int)((1 - weight) * (thresholds[2] << 4) + weight * thresholds[2]);
thresholds[3] = INT32_MAX;
}
}
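
The qindex gating above replaces a hard QINDEX_LARGE_BLOCK_THR cutoff with a linear ramp over a +/-20 qindex window, blending each boosted threshold with its original value. A small self-contained sketch of the blend; it forces a floating-point division so the ramp actually takes intermediate values:

    // Illustrative helper: blend a boosted threshold back toward its original
    // value as current_qindex moves through the window around the cutoff.
    static int64_t blend_threshold(int64_t base, int shift, int current_qindex,
                                   int cutoff /* QINDEX_LARGE_BLOCK_THR */) {
      const int win = 20;
      double weight;
      if (current_qindex < cutoff - win)
        weight = 1.0;
      else if (current_qindex > cutoff + win)
        weight = 0.0;
      else
        weight = 1.0 - (double)(current_qindex - cutoff + win) / (2 * win);
      // weight == 1 keeps the original threshold, weight == 0 applies the full
      // boost (base << shift); in between, the two are linearly interpolated.
      return (int64_t)((1 - weight) * (double)(base << shift) +
                       weight * (double)base);
    }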
@@ -605,7 +629,7 @@ static AOM_INLINE void set_low_temp_var_flag(
xd->mi[0]->mv[0].as_mv.col > -mv_thr &&
xd->mi[0]->mv[0].as_mv.row < mv_thr &&
xd->mi[0]->mv[0].as_mv.row > -mv_thr))) {
- const int is_small_sb = (cm->seq_params.sb_size == BLOCK_64X64);
+ const int is_small_sb = (cm->seq_params->sb_size == BLOCK_64X64);
if (is_small_sb)
set_low_temp_var_flag_64x64(&cm->mi_params, part_info, xd,
&(vt->split[0]), thresholds, mi_col, mi_row);
@@ -621,7 +645,8 @@ void av1_set_variance_partition_thresholds(AV1_COMP *cpi, int q,
if (sf->part_sf.partition_search_type != VAR_BASED_PARTITION) {
return;
} else {
- set_vbp_thresholds(cpi, cpi->vbp_info.thresholds, q, content_lowsumdiff, 0);
+ set_vbp_thresholds(cpi, cpi->vbp_info.thresholds, q, content_lowsumdiff, 0,
+ 0);
// The threshold below is not changed locally.
cpi->vbp_info.threshold_minmax = 15 + (q >> 3);
}
@@ -643,10 +668,17 @@ static AOM_INLINE void chroma_check(AV1_COMP *cpi, MACROBLOCK *x,
get_plane_block_size(bsize, pd->subsampling_x, pd->subsampling_y);
if (bs != BLOCK_INVALID)
- uv_sad = cpi->fn_ptr[bs].sdf(p->src.buf, p->src.stride, pd->dst.buf,
- pd->dst.stride);
-
- x->color_sensitivity[i - 1] = uv_sad > (y_sad >> 2);
+ uv_sad = cpi->ppi->fn_ptr[bs].sdf(p->src.buf, p->src.stride, pd->dst.buf,
+ pd->dst.stride);
+
+ if (uv_sad > (y_sad >> 1))
+ x->color_sensitivity_sb[i - 1] = 1;
+ else if (uv_sad < (y_sad >> 3))
+ x->color_sensitivity_sb[i - 1] = 0;
+ // Borderline case: to be refined at coding block level in nonrd_pickmode,
+ // for coding block size < sb_size.
+ else
+ x->color_sensitivity_sb[i - 1] = 2;
}
}
@@ -658,7 +690,7 @@ static void fill_variance_tree_leaves(
AV1_COMMON *cm = &cpi->common;
MACROBLOCKD *xd = &x->e_mbd;
const int is_key_frame = frame_is_intra_only(cm);
- const int is_small_sb = (cm->seq_params.sb_size == BLOCK_64X64);
+ const int is_small_sb = (cm->seq_params->sb_size == BLOCK_64X64);
const int num_64x64_blocks = is_small_sb ? 1 : 4;
// TODO(kyslov) Bring back compute_minmax_variance with content type detection
const int compute_minmax_variance = 0;
@@ -772,7 +804,7 @@ static void setup_planes(AV1_COMP *cpi, MACROBLOCK *x, unsigned int *y_sad,
AV1_COMMON *const cm = &cpi->common;
MACROBLOCKD *xd = &x->e_mbd;
const int num_planes = av1_num_planes(cm);
- const int is_small_sb = (cm->seq_params.sb_size == BLOCK_64X64);
+ const int is_small_sb = (cm->seq_params->sb_size == BLOCK_64X64);
BLOCK_SIZE bsize = is_small_sb ? BLOCK_64X64 : BLOCK_128X128;
// TODO(kyslov): we are assuming that the ref is LAST_FRAME! Check if it
// is!!
@@ -783,13 +815,13 @@ static void setup_planes(AV1_COMP *cpi, MACROBLOCK *x, unsigned int *y_sad,
// For non-SVC GOLDEN is another temporal reference. Check if it should be
// used as reference for partitioning.
- if (!cpi->use_svc && (cpi->ref_frame_flags & AOM_GOLD_FLAG) &&
+ if (!cpi->ppi->use_svc && (cpi->ref_frame_flags & AOM_GOLD_FLAG) &&
cpi->sf.rt_sf.use_nonrd_pick_mode) {
yv12_g = get_ref_frame_yv12_buf(cm, GOLDEN_FRAME);
if (yv12_g && yv12_g != yv12) {
av1_setup_pre_planes(xd, 0, yv12_g, mi_row, mi_col,
get_ref_scale_factors(cm, GOLDEN_FRAME), num_planes);
- *y_sad_g = cpi->fn_ptr[bsize].sdf(
+ *y_sad_g = cpi->ppi->fn_ptr[bsize].sdf(
x->plane[0].src.buf, x->plane[0].src.stride, xd->plane[0].pre[0].buf,
xd->plane[0].pre[0].stride);
}
@@ -799,20 +831,20 @@ static void setup_planes(AV1_COMP *cpi, MACROBLOCK *x, unsigned int *y_sad,
get_ref_scale_factors(cm, LAST_FRAME), num_planes);
mi->ref_frame[0] = LAST_FRAME;
mi->ref_frame[1] = NONE_FRAME;
- mi->bsize = cm->seq_params.sb_size;
+ mi->bsize = cm->seq_params->sb_size;
mi->mv[0].as_int = 0;
mi->interp_filters = av1_broadcast_interp_filter(BILINEAR);
if (cpi->sf.rt_sf.estimate_motion_for_var_based_partition) {
if (xd->mb_to_right_edge >= 0 && xd->mb_to_bottom_edge >= 0) {
const MV dummy_mv = { 0, 0 };
- *y_sad = av1_int_pro_motion_estimation(cpi, x, cm->seq_params.sb_size,
+ *y_sad = av1_int_pro_motion_estimation(cpi, x, cm->seq_params->sb_size,
mi_row, mi_col, &dummy_mv);
}
}
if (*y_sad == UINT_MAX) {
- *y_sad = cpi->fn_ptr[bsize].sdf(x->plane[0].src.buf, x->plane[0].src.stride,
- xd->plane[0].pre[0].buf,
- xd->plane[0].pre[0].stride);
+ *y_sad = cpi->ppi->fn_ptr[bsize].sdf(
+ x->plane[0].src.buf, x->plane[0].src.stride, xd->plane[0].pre[0].buf,
+ xd->plane[0].pre[0].stride);
}
// Pick the ref frame for partitioning, use golden frame only if its
@@ -834,7 +866,7 @@ static void setup_planes(AV1_COMP *cpi, MACROBLOCK *x, unsigned int *y_sad,
set_ref_ptrs(cm, xd, mi->ref_frame[0], mi->ref_frame[1]);
av1_enc_build_inter_predictor(cm, xd, mi_row, mi_col, NULL,
- cm->seq_params.sb_size, AOM_PLANE_Y,
+ cm->seq_params->sb_size, AOM_PLANE_Y,
AOM_PLANE_Y);
}
@@ -869,12 +901,12 @@ int av1_choose_var_based_partitioning(AV1_COMP *cpi, const TileInfo *const tile,
int is_key_frame =
(frame_is_intra_only(cm) ||
- (cpi->use_svc &&
+ (cpi->ppi->use_svc &&
cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame));
- assert(cm->seq_params.sb_size == BLOCK_64X64 ||
- cm->seq_params.sb_size == BLOCK_128X128);
- const int is_small_sb = (cm->seq_params.sb_size == BLOCK_64X64);
+ assert(cm->seq_params->sb_size == BLOCK_64X64 ||
+ cm->seq_params->sb_size == BLOCK_128X128);
+ const int is_small_sb = (cm->seq_params->sb_size == BLOCK_64X64);
const int num_64x64_blocks = is_small_sb ? 1 : 4;
unsigned int y_sad = UINT_MAX;
@@ -900,10 +932,12 @@ int av1_choose_var_based_partitioning(AV1_COMP *cpi, const TileInfo *const tile,
cyclic_refresh_segment_id_boosted(segment_id) &&
cpi->sf.rt_sf.use_nonrd_pick_mode) {
int q = av1_get_qindex(&cm->seg, segment_id, cm->quant_params.base_qindex);
- set_vbp_thresholds(cpi, thresholds, q, x->content_state_sb.low_sumdiff, 1);
+ set_vbp_thresholds(cpi, thresholds, q, x->content_state_sb.low_sumdiff,
+ x->content_state_sb.source_sad, 1);
} else {
set_vbp_thresholds(cpi, thresholds, cm->quant_params.base_qindex,
- x->content_state_sb.low_sumdiff, 0);
+ x->content_state_sb.low_sumdiff,
+ x->content_state_sb.source_sad, 0);
}
// For non keyframes, disable 4x4 average for low resolution when speed = 8
@@ -1025,7 +1059,7 @@ int av1_choose_var_based_partitioning(AV1_COMP *cpi, const TileInfo *const tile,
if (!is_key_frame &&
(max_var_32x32[m] - min_var_32x32[m]) > 3 * (thresholds[1] >> 3) &&
max_var_32x32[m] > thresholds[1] >> 1 &&
- (noise_level >= kMedium || cpi->use_svc ||
+ (noise_level >= kMedium || cpi->ppi->use_svc ||
cpi->sf.rt_sf.force_large_partition_blocks ||
!cpi->sf.rt_sf.use_nonrd_pick_mode)) {
force_split[1 + m] = 1;
diff --git a/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_avx2.c b/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_avx2.c
index b5477ec9ba..68509fa106 100644
--- a/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_avx2.c
+++ b/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_avx2.c
@@ -352,10 +352,16 @@ void av1_highbd_apply_temporal_filter_avx2(
TF_SEARCH_ERROR_NORM_WEIGHT);
const double weight_factor =
(double)TF_WINDOW_BLOCK_BALANCE_WEIGHT * inv_factor;
- // Decay factors for non-local mean approach.
- // Smaller q -> smaller filtering weight.
+ // Adjust filtering based on q.
+ // Larger q -> stronger filtering -> larger weight.
+ // Smaller q -> weaker filtering -> smaller weight.
double q_decay = pow((double)q_factor / TF_Q_DECAY_THRESHOLD, 2);
q_decay = CLIP(q_decay, 1e-5, 1);
+ if (q_factor >= TF_QINDEX_CUTOFF) {
+ // Max q_factor is 255, therefore the upper bound of q_decay is 8.
+ // We do not need a clip here.
+ q_decay = 0.5 * pow((double)q_factor / 64, 2);
+ }
// Smaller strength -> smaller filtering weight.
double s_decay = pow((double)filter_strength / TF_STRENGTH_THRESHOLD, 2);
s_decay = CLIP(s_decay, 1e-5, 1);
@@ -393,6 +399,7 @@ void av1_highbd_apply_temporal_filter_avx2(
const double inv_num_ref_pixels = 1.0 / num_ref_pixels;
// Larger noise -> larger filtering weight.
const double n_decay = 0.5 + log(2 * noise_levels[plane] + 5.0);
+ // Decay factors for non-local mean approach.
const double decay_factor = 1 / (n_decay * q_decay * s_decay);
// Filter U-plane and V-plane using Y-plane. This is because motion
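
The comment in the TF_QINDEX_CUTOFF branch above (repeated in the SSE2 and 8-bit variants below) follows from plugging the maximum qindex into the new formula:

    // q_factor is at most 255, so the unclipped branch is bounded by
    //   0.5 * (255.0 / 64)^2 = 0.5 * 3.9844^2 ≈ 7.94, which is below 8.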
diff --git a/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_sse2.c b/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_sse2.c
index bbb3771543..1bfdaf72e1 100644
--- a/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_sse2.c
+++ b/third_party/libaom/source/libaom/av1/encoder/x86/highbd_temporal_filter_sse2.c
@@ -227,10 +227,16 @@ void av1_highbd_apply_temporal_filter_sse2(
TF_SEARCH_ERROR_NORM_WEIGHT);
const double weight_factor =
(double)TF_WINDOW_BLOCK_BALANCE_WEIGHT * inv_factor;
- // Decay factors for non-local mean approach.
- // Smaller q -> smaller filtering weight.
+ // Adjust filtering based on q.
+ // Larger q -> stronger filtering -> larger weight.
+ // Smaller q -> weaker filtering -> smaller weight.
double q_decay = pow((double)q_factor / TF_Q_DECAY_THRESHOLD, 2);
q_decay = CLIP(q_decay, 1e-5, 1);
+ if (q_factor >= TF_QINDEX_CUTOFF) {
+ // Max q_factor is 255, therefore the upper bound of q_decay is 8.
+ // We do not need a clip here.
+ q_decay = 0.5 * pow((double)q_factor / 64, 2);
+ }
// Smaller strength -> smaller filtering weight.
double s_decay = pow((double)filter_strength / TF_STRENGTH_THRESHOLD, 2);
s_decay = CLIP(s_decay, 1e-5, 1);
@@ -268,6 +274,7 @@ void av1_highbd_apply_temporal_filter_sse2(
const double inv_num_ref_pixels = 1.0 / num_ref_pixels;
// Larger noise -> larger filtering weight.
const double n_decay = 0.5 + log(2 * noise_levels[plane] + 5.0);
+ // Decay factors for non-local mean approach.
const double decay_factor = 1 / (n_decay * q_decay * s_decay);
// Filter U-plane and V-plane using Y-plane. This is because motion
diff --git a/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_avx2.c b/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_avx2.c
index 72914e1781..8aa07641aa 100644
--- a/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_avx2.c
+++ b/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_avx2.c
@@ -238,10 +238,16 @@ void av1_apply_temporal_filter_avx2(
TF_SEARCH_ERROR_NORM_WEIGHT);
const double weight_factor =
(double)TF_WINDOW_BLOCK_BALANCE_WEIGHT * inv_factor;
- // Decay factors for non-local mean approach.
- // Smaller q -> smaller filtering weight.
+ // Adjust filtering based on q.
+ // Larger q -> stronger filtering -> larger weight.
+ // Smaller q -> weaker filtering -> smaller weight.
double q_decay = pow((double)q_factor / TF_Q_DECAY_THRESHOLD, 2);
q_decay = CLIP(q_decay, 1e-5, 1);
+ if (q_factor >= TF_QINDEX_CUTOFF) {
+ // Max q_factor is 255, therefore the upper bound of q_decay is 8.
+ // We do not need a clip here.
+ q_decay = 0.5 * pow((double)q_factor / 64, 2);
+ }
// Smaller strength -> smaller filtering weight.
double s_decay = pow((double)filter_strength / TF_STRENGTH_THRESHOLD, 2);
s_decay = CLIP(s_decay, 1e-5, 1);
@@ -277,6 +283,7 @@ void av1_apply_temporal_filter_avx2(
const double inv_num_ref_pixels = 1.0 / num_ref_pixels;
// Larger noise -> larger filtering weight.
const double n_decay = 0.5 + log(2 * noise_levels[plane] + 5.0);
+ // Decay factors for non-local mean approach.
const double decay_factor = 1 / (n_decay * q_decay * s_decay);
// Filter U-plane and V-plane using Y-plane. This is because motion
diff --git a/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_sse2.c b/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_sse2.c
index d70792c644..26c3926dca 100644
--- a/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_sse2.c
+++ b/third_party/libaom/source/libaom/av1/encoder/x86/temporal_filter_sse2.c
@@ -215,10 +215,16 @@ void av1_apply_temporal_filter_sse2(
TF_SEARCH_ERROR_NORM_WEIGHT);
const double weight_factor =
(double)TF_WINDOW_BLOCK_BALANCE_WEIGHT * inv_factor;
- // Decay factors for non-local mean approach.
- // Smaller q -> smaller filtering weight.
+ // Adjust filtering based on q.
+ // Larger q -> stronger filtering -> larger weight.
+ // Smaller q -> weaker filtering -> smaller weight.
double q_decay = pow((double)q_factor / TF_Q_DECAY_THRESHOLD, 2);
q_decay = CLIP(q_decay, 1e-5, 1);
+ if (q_factor >= TF_QINDEX_CUTOFF) {
+ // Max q_factor is 255, therefore the upper bound of q_decay is 8.
+ // We do not need a clip here.
+ q_decay = 0.5 * pow((double)q_factor / 64, 2);
+ }
// Smaller strength -> smaller filtering weight.
double s_decay = pow((double)filter_strength / TF_STRENGTH_THRESHOLD, 2);
s_decay = CLIP(s_decay, 1e-5, 1);
@@ -254,6 +260,7 @@ void av1_apply_temporal_filter_sse2(
const double inv_num_ref_pixels = 1.0 / num_ref_pixels;
// Larger noise -> larger filtering weight.
const double n_decay = 0.5 + log(2 * noise_levels[plane] + 5.0);
+ // Decay factors for non-local mean approach.
const double decay_factor = 1 / (n_decay * q_decay * s_decay);
// Filter U-plane and V-plane using Y-plane. This is because motion
diff --git a/third_party/libaom/source/libaom/common/args.c b/third_party/libaom/source/libaom/common/args.c
index 64d6e03383..ed622943e3 100644
--- a/third_party/libaom/source/libaom/common/args.c
+++ b/third_party/libaom/source/libaom/common/args.c
@@ -92,7 +92,6 @@ int parse_cfg(const char *file, cfg_options_t *config) {
GET_PARAMS(disable_intrabc);
GET_PARAMS(disable_cfl);
GET_PARAMS(disable_smooth_intra);
- GET_PARAMS(disable_diagonal_intra);
GET_PARAMS(disable_filter_intra);
GET_PARAMS(disable_dual_filter);
GET_PARAMS(disable_intra_angle_delta);
diff --git a/third_party/libaom/source/libaom/examples/aom_cx_set_ref.c b/third_party/libaom/source/libaom/examples/aom_cx_set_ref.c
index 3aea2cfdd6..da36d9fe13 100644
--- a/third_party/libaom/source/libaom/examples/aom_cx_set_ref.c
+++ b/third_party/libaom/source/libaom/examples/aom_cx_set_ref.c
@@ -271,7 +271,11 @@ int main(int argc, char **argv) {
printf("Using %s\n", aom_codec_iface_name(encoder));
+#if CONFIG_REALTIME_ONLY
+ res = aom_codec_enc_config_default(encoder, &cfg, 1);
+#else
res = aom_codec_enc_config_default(encoder, &cfg, 0);
+#endif
if (res) die_codec(&ecodec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
@@ -334,6 +338,12 @@ int main(int argc, char **argv) {
die_codec(&ecodec, "Failed to set encoder reference frame");
printf(" <SET_REF>");
+#if CONFIG_REALTIME_ONLY
+ // Set cpu speed in encoder.
+ if (aom_codec_control(&ecodec, AOME_SET_CPUUSED, 7))
+ die_codec(&ecodec, "Failed to set cpu speed");
+#endif
+
// If set_reference in decoder is commented out, the enc/dec mismatch
// would be seen.
if (test_decode) {
diff --git a/third_party/libaom/source/libaom/examples/set_maps.c b/third_party/libaom/source/libaom/examples/set_maps.c
index 69b4bccbe6..5a84faa565 100644
--- a/third_party/libaom/source/libaom/examples/set_maps.c
+++ b/third_party/libaom/source/libaom/examples/set_maps.c
@@ -129,6 +129,14 @@ int main(int argc, char **argv) {
const int fps = 2; // TODO(dkovalev) add command line argument
const double bits_per_pixel_per_frame = 0.067;
+#if CONFIG_REALTIME_ONLY
+ const int usage = 1;
+ const int speed = 7;
+#else
+ const int usage = 0;
+ const int speed = 2;
+#endif
+
exec_name = argv[0];
if (argc != 6) die("Invalid number of arguments");
@@ -157,7 +165,7 @@ int main(int argc, char **argv) {
printf("Using %s\n", aom_codec_iface_name(encoder));
- res = aom_codec_enc_config_default(encoder, &cfg, 0);
+ res = aom_codec_enc_config_default(encoder, &cfg, usage);
if (res) die_codec(&codec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
@@ -177,7 +185,7 @@ int main(int argc, char **argv) {
if (aom_codec_enc_init(&codec, encoder, &cfg, 0))
die("Failed to initialize encoder");
- if (aom_codec_control(&codec, AOME_SET_CPUUSED, 2))
+ if (aom_codec_control(&codec, AOME_SET_CPUUSED, speed))
die_codec(&codec, "Failed to set cpu-used");
// Encode frames.
diff --git a/third_party/libaom/source/libaom/examples/simple_encoder.c b/third_party/libaom/source/libaom/examples/simple_encoder.c
index 682fe9842b..c026706555 100644
--- a/third_party/libaom/source/libaom/examples/simple_encoder.c
+++ b/third_party/libaom/source/libaom/examples/simple_encoder.c
@@ -163,6 +163,13 @@ int main(int argc, char **argv) {
const char *infile_arg = NULL;
const char *outfile_arg = NULL;
const char *keyframe_interval_arg = NULL;
+#if CONFIG_REALTIME_ONLY
+ const int usage = 1;
+ const int speed = 7;
+#else
+ const int usage = 0;
+ const int speed = 2;
+#endif
exec_name = argv[0];
@@ -204,7 +211,7 @@ int main(int argc, char **argv) {
printf("Using %s\n", aom_codec_iface_name(encoder));
- res = aom_codec_enc_config_default(encoder, &cfg, 0);
+ res = aom_codec_enc_config_default(encoder, &cfg, usage);
if (res) die_codec(&codec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
@@ -223,7 +230,7 @@ int main(int argc, char **argv) {
if (aom_codec_enc_init(&codec, encoder, &cfg, 0))
die("Failed to initialize encoder");
- if (aom_codec_control(&codec, AOME_SET_CPUUSED, 2))
+ if (aom_codec_control(&codec, AOME_SET_CPUUSED, speed))
die_codec(&codec, "Failed to set cpu-used");
// Encode frames.
diff --git a/third_party/libaom/source/libaom/examples/svc_encoder_rtc.c b/third_party/libaom/source/libaom/examples/svc_encoder_rtc.c
index 87e3aa95f1..44bed38318 100644
--- a/third_party/libaom/source/libaom/examples/svc_encoder_rtc.c
+++ b/third_party/libaom/source/libaom/examples/svc_encoder_rtc.c
@@ -24,6 +24,7 @@
#include "common/args.h"
#include "common/tools_common.h"
#include "common/video_writer.h"
+#include "examples/encoder_util.h"
#include "aom_ports/aom_timer.h"
#define OPTION_BUFFER_SIZE 1024
@@ -286,6 +287,9 @@ static void parse_command_line(int argc, const char **argv_,
if (app_input->speed > 9) {
warn("Mapping speed %d to speed 9.\n", app_input->speed);
}
+ if (app_input->speed <= 6) {
+ die("Encoder speed setting should be in [7, 9].\n");
+ }
} else if (arg_match(&arg, &aqmode_arg, argi)) {
app_input->aq_mode = arg_parse_uint(&arg);
} else if (arg_match(&arg, &threads_arg, argi)) {
@@ -567,7 +571,7 @@ static void set_layer_pattern(int layering_mode, int superframe_cnt,
layer_id->spatial_layer_id = spatial_layer_id;
int lag_index = 0;
int base_count = superframe_cnt >> 2;
- // Set the referende map buffer idx for the 7 references:
+ // Set the reference map buffer idx for the 7 references:
// LAST_FRAME (0), LAST2_FRAME(1), LAST3_FRAME(2), GOLDEN_FRAME(3),
// BWDREF_FRAME(4), ALTREF2_FRAME(5), ALTREF_FRAME(6).
for (i = 0; i < INTER_REFS_PER_FRAME; i++) ref_frame_config->ref_idx[i] = i;
@@ -795,12 +799,10 @@ static void set_layer_pattern(int layering_mode, int superframe_cnt,
} else if (layer_id->spatial_layer_id == 1) {
// Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 1,
// GOLDEN (and all other refs) to slot 3.
- // Set LAST2 to slot 4 and Update slot 4.
+ // No update.
for (i = 0; i < INTER_REFS_PER_FRAME; i++)
ref_frame_config->ref_idx[i] = 3;
ref_frame_config->ref_idx[SVC_LAST_FRAME] = 1;
- ref_frame_config->ref_idx[SVC_LAST2_FRAME] = 4;
- ref_frame_config->refresh[4] = 1;
}
} else if ((superframe_cnt - 2) % 4 == 0) {
// Middle temporal enhancement layer.
@@ -837,13 +839,11 @@ static void set_layer_pattern(int layering_mode, int superframe_cnt,
ref_frame_config->refresh[3] = 1;
} else if (layer_id->spatial_layer_id == 1) {
// Reference LAST and GOLDEN. Set buffer_idx for LAST to slot 6,
- // GOLDEN to slot 3. Set LAST2 to slot 4 and update slot 4.
+ // GOLDEN to slot 3. No update.
for (i = 0; i < INTER_REFS_PER_FRAME; i++)
ref_frame_config->ref_idx[i] = 0;
ref_frame_config->ref_idx[SVC_LAST_FRAME] = 6 - shift;
ref_frame_config->ref_idx[SVC_GOLDEN_FRAME] = 3;
- ref_frame_config->ref_idx[SVC_LAST2_FRAME] = 4;
- ref_frame_config->refresh[4] = 1;
}
}
if (layer_id->spatial_layer_id > 0 && !ksvc_mode) {
@@ -998,6 +998,64 @@ static void set_layer_pattern(int layering_mode, int superframe_cnt,
}
}
+#if CONFIG_AV1_DECODER
+static void test_decode(aom_codec_ctx_t *encoder, aom_codec_ctx_t *decoder,
+ const int frames_out, int *mismatch_seen) {
+ aom_image_t enc_img, dec_img;
+
+ if (*mismatch_seen) return;
+
+ /* Get the internal reference frame */
+ AOM_CODEC_CONTROL_TYPECHECKED(encoder, AV1_GET_NEW_FRAME_IMAGE, &enc_img);
+ AOM_CODEC_CONTROL_TYPECHECKED(decoder, AV1_GET_NEW_FRAME_IMAGE, &dec_img);
+
+#if CONFIG_AV1_HIGHBITDEPTH
+ if ((enc_img.fmt & AOM_IMG_FMT_HIGHBITDEPTH) !=
+ (dec_img.fmt & AOM_IMG_FMT_HIGHBITDEPTH)) {
+ if (enc_img.fmt & AOM_IMG_FMT_HIGHBITDEPTH) {
+ aom_image_t enc_hbd_img;
+ aom_img_alloc(&enc_hbd_img, enc_img.fmt - AOM_IMG_FMT_HIGHBITDEPTH,
+ enc_img.d_w, enc_img.d_h, 16);
+ aom_img_truncate_16_to_8(&enc_hbd_img, &enc_img);
+ enc_img = enc_hbd_img;
+ }
+ if (dec_img.fmt & AOM_IMG_FMT_HIGHBITDEPTH) {
+ aom_image_t dec_hbd_img;
+ aom_img_alloc(&dec_hbd_img, dec_img.fmt - AOM_IMG_FMT_HIGHBITDEPTH,
+ dec_img.d_w, dec_img.d_h, 16);
+ aom_img_truncate_16_to_8(&dec_hbd_img, &dec_img);
+ dec_img = dec_hbd_img;
+ }
+ }
+#endif
+
+ if (!aom_compare_img(&enc_img, &dec_img)) {
+ int y[4], u[4], v[4];
+#if CONFIG_AV1_HIGHBITDEPTH
+ if (enc_img.fmt & AOM_IMG_FMT_HIGHBITDEPTH) {
+ aom_find_mismatch_high(&enc_img, &dec_img, y, u, v);
+ } else {
+ aom_find_mismatch(&enc_img, &dec_img, y, u, v);
+ }
+#else
+ aom_find_mismatch(&enc_img, &dec_img, y, u, v);
+#endif
+ decoder->err = 1;
+ printf(
+ "Encode/decode mismatch on frame %d at"
+ " Y[%d, %d] {%d/%d},"
+ " U[%d, %d] {%d/%d},"
+ " V[%d, %d] {%d/%d}",
+ frames_out, y[0], y[1], y[2], y[3], u[0], u[1], u[2], u[3], v[0], v[1],
+ v[2], v[3]);
+ *mismatch_seen = frames_out;
+ }
+
+ aom_img_free(&enc_img);
+ aom_img_free(&dec_img);
+}
+#endif // CONFIG_AV1_DECODER
+
int main(int argc, const char **argv) {
AppInput app_input;
AvxVideoWriter *outfile[AOM_MAX_LAYERS] = { NULL };
@@ -1017,6 +1075,17 @@ int main(int argc, const char **argv) {
aom_svc_params_t svc_params;
aom_svc_ref_frame_config_t ref_frame_config;
+#if CONFIG_INTERNAL_STATS
+ FILE *stats_file = fopen("opsnr.stt", "a");
+ if (stats_file == NULL) {
+ die("Cannot open opsnr.stt\n");
+ }
+#endif
+#if CONFIG_AV1_DECODER
+ int mismatch_seen = 0;
+ aom_codec_ctx_t decoder;
+#endif
+
struct RateControlMetrics rc;
int64_t cx_time = 0;
int64_t cx_time_sl[3]; // max number of spatial layers.
@@ -1039,11 +1108,12 @@ int main(int argc, const char **argv) {
app_input.input_ctx.framerate.denominator = 1;
app_input.input_ctx.only_i420 = 1;
app_input.input_ctx.bit_depth = 0;
+ app_input.speed = 7;
exec_name = argv[0];
// start with default encoder configuration
- aom_codec_err_t res =
- aom_codec_enc_config_default(aom_codec_av1_cx(), &cfg, 0);
+ aom_codec_err_t res = aom_codec_enc_config_default(aom_codec_av1_cx(), &cfg,
+ AOM_USAGE_REALTIME);
if (res) {
die("Failed to get config: %s\n", aom_codec_err_to_string(res));
}
@@ -1071,10 +1141,13 @@ int main(int argc, const char **argv) {
unsigned int width = cfg.g_w;
unsigned int height = cfg.g_h;
- if (ts_number_layers !=
- mode_to_num_temporal_layers[app_input.layering_mode] ||
- ss_number_layers != mode_to_num_spatial_layers[app_input.layering_mode]) {
- die("Number of layers doesn't match layering mode.");
+ if (app_input.layering_mode >= 0) {
+ if (ts_number_layers !=
+ mode_to_num_temporal_layers[app_input.layering_mode] ||
+ ss_number_layers !=
+ mode_to_num_spatial_layers[app_input.layering_mode]) {
+ die("Number of layers doesn't match layering mode.");
+ }
}
// Y4M reader has its own allocation.
@@ -1109,20 +1182,16 @@ int main(int argc, const char **argv) {
svc_params.framerate_factor[2] = 1;
}
- framerate = cfg.g_timebase.den / cfg.g_timebase.num;
- set_rate_control_metrics(&rc, framerate, ss_number_layers, ts_number_layers);
-
if (app_input.input_ctx.file_type == FILE_TYPE_Y4M) {
- if (app_input.input_ctx.width != cfg.g_w ||
- app_input.input_ctx.height != cfg.g_h) {
- die("Incorrect width or height: %d x %d", cfg.g_w, cfg.g_h);
- }
- if (app_input.input_ctx.framerate.numerator != cfg.g_timebase.den ||
- app_input.input_ctx.framerate.denominator != cfg.g_timebase.num) {
- die("Incorrect framerate: numerator %d denominator %d",
- cfg.g_timebase.num, cfg.g_timebase.den);
- }
+ // Override these settings with the info from Y4M file.
+ cfg.g_w = app_input.input_ctx.width;
+ cfg.g_h = app_input.input_ctx.height;
+ // g_timebase is the reciprocal of frame rate.
+ cfg.g_timebase.num = app_input.input_ctx.framerate.denominator;
+ cfg.g_timebase.den = app_input.input_ctx.framerate.numerator;
}
+ framerate = cfg.g_timebase.den / cfg.g_timebase.num;
+ set_rate_control_metrics(&rc, framerate, ss_number_layers, ts_number_layers);
AvxVideoInfo info;
info.codec_fourcc = get_fourcc_by_aom_encoder(encoder);
@@ -1162,6 +1231,12 @@ int main(int argc, const char **argv) {
if (aom_codec_enc_init(&codec, encoder, &cfg, 0))
die("Failed to initialize encoder");
+#if CONFIG_AV1_DECODER
+ if (aom_codec_dec_init(&decoder, get_aom_decoder_by_index(0), NULL, 0)) {
+ die("Failed to initialize decoder");
+ }
+#endif
+
aom_codec_control(&codec, AOME_SET_CPUUSED, app_input.speed);
aom_codec_control(&codec, AV1E_SET_AQ_MODE, app_input.aq_mode ? 3 : 0);
aom_codec_control(&codec, AV1E_SET_GF_CBR_BOOST_PCT, 0);
@@ -1172,6 +1247,7 @@ int main(int argc, const char **argv) {
aom_codec_control(&codec, AV1E_SET_COEFF_COST_UPD_FREQ, 3);
aom_codec_control(&codec, AV1E_SET_MODE_COST_UPD_FREQ, 3);
aom_codec_control(&codec, AV1E_SET_MV_COST_UPD_FREQ, 3);
+ aom_codec_control(&codec, AV1E_SET_DV_COST_UPD_FREQ, 3);
aom_codec_control(&codec, AV1E_SET_CDF_UPDATE_MODE, 1);
aom_codec_control(&codec, AV1E_SET_TILE_COLUMNS,
cfg.g_threads ? get_msb(cfg.g_threads) : 0);
@@ -1196,8 +1272,8 @@ int main(int argc, const char **argv) {
svc_params.scaling_factor_num[1] = 1;
svc_params.scaling_factor_den[1] = 2;
}
-
aom_codec_control(&codec, AV1E_SET_SVC_PARAMS, &svc_params);
+ // TODO(aomedia:3032): Configure KSVC in fixed mode.
// This controls the maximum target size of the key frame.
// For generating smaller key frames, use a smaller max_intra_size_pct
@@ -1220,15 +1296,34 @@ int main(int argc, const char **argv) {
const aom_codec_cx_pkt_t *pkt;
int layer = 0;
- // Set the reference/update flags, layer_id, and reference_map
- // buffer index.
- set_layer_pattern(app_input.layering_mode, frame_cnt, &layer_id,
- &ref_frame_config, &use_svc_control, slx, is_key_frame,
- (app_input.layering_mode == 10));
- aom_codec_control(&codec, AV1E_SET_SVC_LAYER_ID, &layer_id);
- if (use_svc_control)
- aom_codec_control(&codec, AV1E_SET_SVC_REF_FRAME_CONFIG,
- &ref_frame_config);
+ // For flexible mode:
+ if (app_input.layering_mode >= 0) {
+ // Set the reference/update flags, layer_id, and reference_map
+ // buffer index.
+ set_layer_pattern(app_input.layering_mode, frame_cnt, &layer_id,
+ &ref_frame_config, &use_svc_control, slx,
+ is_key_frame, (app_input.layering_mode == 10));
+ aom_codec_control(&codec, AV1E_SET_SVC_LAYER_ID, &layer_id);
+ if (use_svc_control)
+ aom_codec_control(&codec, AV1E_SET_SVC_REF_FRAME_CONFIG,
+ &ref_frame_config);
+ } else {
+ // Only up to 3 temporal layers supported in fixed mode.
+ // Only need to set spatial and temporal layer_id: reference
+ // prediction, refresh, and buffer_idx are set internally.
+ layer_id.spatial_layer_id = slx;
+ layer_id.temporal_layer_id = 0;
+ if (ts_number_layers == 2) {
+ layer_id.temporal_layer_id = (frame_cnt % 2) != 0;
+ } else if (ts_number_layers == 3) {
+ if (frame_cnt % 2 != 0)
+ layer_id.temporal_layer_id = 2;
+ else if ((frame_cnt > 1) && ((frame_cnt - 2) % 4 == 0))
+ layer_id.temporal_layer_id = 1;
+ }
+ aom_codec_control(&codec, AV1E_SET_SVC_LAYER_ID, &layer_id);
+ }
+
if (set_err_resil_frame) {
// Set error_resilient per frame: off/0 for base layer and
// on/1 for enhancement layer frames.
@@ -1332,14 +1427,31 @@ int main(int argc, const char **argv) {
sum_bitrate2 = 0.0;
}
}
+
+#if CONFIG_AV1_DECODER
+ if (aom_codec_decode(&decoder, pkt->data.frame.buf,
+ (unsigned int)pkt->data.frame.sz, NULL))
+ die_codec(&decoder, "Failed to decode frame.");
+#endif
+
break;
default: break;
}
}
+#if CONFIG_AV1_DECODER
+ // Don't look for mismatch on top spatial and top temporal layers as they
+  // are non-reference frames.
+ if ((ss_number_layers > 1 || ts_number_layers > 1) &&
+ !(layer_id.temporal_layer_id > 0 &&
+ layer_id.temporal_layer_id == (int)ts_number_layers - 1)) {
+ test_decode(&codec, &decoder, frame_cnt, &mismatch_seen);
+ }
+#endif
} // loop over spatial layers
++frame_cnt;
pts += frame_duration;
}
+
close_input_file(&(app_input.input_ctx));
printout_rate_control_summary(&rc, frame_cnt, ss_number_layers,
ts_number_layers);
@@ -1358,6 +1470,15 @@ int main(int argc, const char **argv) {
if (aom_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec");
+#if CONFIG_INTERNAL_STATS
+ if (mismatch_seen) {
+ fprintf(stats_file, "First mismatch occurred in frame %d\n", mismatch_seen);
+ } else {
+ fprintf(stats_file, "No mismatch detected in recon buffers\n");
+ }
+ fclose(stats_file);
+#endif
+
// Try to rewrite the output file headers with the actual frame count.
for (i = 0; i < ss_number_layers * ts_number_layers; ++i)
aom_video_writer_close(outfile[i]);
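The fixed-mode branch added to svc_encoder_rtc.c derives the temporal layer id directly from the frame counter instead of going through set_layer_pattern(). A standalone sketch of that mapping (same arithmetic as the hunk above, pulled into a helper whose name is ours, so the 2- and 3-layer cadences are easier to read):

/* Temporal layer id for frame_cnt in the fixed-mode path:
 * 2 layers: TL0 on even frames, TL1 on odd frames.
 * 3 layers: TL2 on odd frames, TL1 on frames 2, 6, 10, ...,
 *           TL0 on frames 0, 4, 8, ... */
static int fixed_mode_temporal_layer_id(int frame_cnt, int ts_number_layers) {
  int tl = 0;
  if (ts_number_layers == 2) {
    tl = (frame_cnt % 2) != 0;
  } else if (ts_number_layers == 3) {
    if (frame_cnt % 2 != 0)
      tl = 2;
    else if (frame_cnt > 1 && (frame_cnt - 2) % 4 == 0)
      tl = 1;
  }
  return tl;
}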
diff --git a/third_party/libaom/source/libaom/test/active_map_test.cc b/third_party/libaom/source/libaom/test/active_map_test.cc
index 4e30f55f81..2bbc3b64fb 100644
--- a/third_party/libaom/source/libaom/test/active_map_test.cc
+++ b/third_party/libaom/source/libaom/test/active_map_test.cc
@@ -38,6 +38,9 @@ class ActiveMapTest
::libaom_test::Encoder *encoder) {
if (video->frame() == 0) {
encoder->Control(AOME_SET_CPUUSED, cpu_used_);
+ encoder->Control(AV1E_SET_ALLOW_WARPED_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_OBMC, 0);
} else if (video->frame() == 3) {
aom_active_map_t map = aom_active_map_t();
/* clang-format off */
@@ -87,14 +90,6 @@ class ActiveMapTest
TEST_P(ActiveMapTest, Test) { DoTest(); }
-class ActiveMapTestLarge : public ActiveMapTest {};
-
-TEST_P(ActiveMapTestLarge, Test) { DoTest(); }
-
-AV1_INSTANTIATE_TEST_SUITE(ActiveMapTestLarge,
- ::testing::Values(::libaom_test::kRealTime),
- ::testing::Range(0, 5));
-
AV1_INSTANTIATE_TEST_SUITE(ActiveMapTest,
::testing::Values(::libaom_test::kRealTime),
::testing::Range(5, 9));
diff --git a/third_party/libaom/source/libaom/test/altref_test.cc b/third_party/libaom/source/libaom/test/altref_test.cc
index 1334b4af57..002a206967 100644
--- a/third_party/libaom/source/libaom/test/altref_test.cc
+++ b/third_party/libaom/source/libaom/test/altref_test.cc
@@ -133,9 +133,7 @@ const gfIntervalParam gfTestParams[] = {
{ ::libaom_test::kTwoPassGood, 5, 10 },
{ ::libaom_test::kTwoPassGood, 8, 16 },
{ ::libaom_test::kTwoPassGood, 16, 32 },
- // disabled below test case because it causes failure
- // TODO(anyone): enable below test case once issue is fixed.
- // { ::libaom_test::kTwoPassGood, 20, 32 },
+ { ::libaom_test::kTwoPassGood, 20, 32 },
};
// This class is used to test if the gf interval bounds configured by the user
diff --git a/third_party/libaom/source/libaom/test/aom_image_test.cc b/third_party/libaom/source/libaom/test/aom_image_test.cc
new file mode 100644
index 0000000000..7ff82d7273
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/aom_image_test.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include "aom/aom_image.h"
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+
+TEST(AomImageTest, AomImgWrapInvalidAlign) {
+ const int kWidth = 128;
+ const int kHeight = 128;
+ unsigned char buf[kWidth * kHeight * 3];
+
+ aom_image_t img;
+ // Set img_data and img_data_owner to junk values. aom_img_wrap() should
+ // not read these values on failure.
+ img.img_data = (unsigned char *)"";
+ img.img_data_owner = 1;
+
+ aom_img_fmt_t format = AOM_IMG_FMT_I444;
+ // 'align' must be a power of 2 but is not. This causes the aom_img_wrap()
+ // call to fail. The test verifies we do not read the junk values in 'img'.
+ unsigned int align = 31;
+ EXPECT_EQ(aom_img_wrap(&img, format, kWidth, kHeight, align, buf), nullptr);
+}
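The new aom_image_test.cc case depends on aom_img_wrap() rejecting a non-power-of-two alignment without touching the caller's struct. As a reminder of the usual check behind that kind of validation (our own illustration, not the body of aom_img_wrap):

/* Returns 1 if align is a positive power of two (1, 2, 4, 8, ...). */
static int is_valid_align(unsigned int align) {
  return align != 0 && (align & (align - 1)) == 0;
}
/* is_valid_align(31) == 0, so the wrap call with align == 31 must fail. */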
diff --git a/third_party/libaom/source/libaom/test/aq_segment_test.cc b/third_party/libaom/source/libaom/test/aq_segment_test.cc
index 4e52b55dbe..b4a8b612bf 100644
--- a/third_party/libaom/source/libaom/test/aq_segment_test.cc
+++ b/third_party/libaom/source/libaom/test/aq_segment_test.cc
@@ -19,6 +19,13 @@
namespace {
+const libaom_test::TestMode kTestModeParams[] =
+#if CONFIG_REALTIME_ONLY
+ { ::libaom_test::kRealTime };
+#else
+ { ::libaom_test::kRealTime, ::libaom_test::kOnePassGood };
+#endif
+
class AqSegmentTest
: public ::libaom_test::CodecTestWith3Params<libaom_test::TestMode, int,
int>,
@@ -40,6 +47,11 @@ class AqSegmentTest
encoder->Control(AV1E_SET_AQ_MODE, aq_mode_);
encoder->Control(AV1E_SET_DELTAQ_MODE, deltaq_mode_);
encoder->Control(AOME_SET_MAX_INTRA_BITRATE_PCT, 100);
+ if (mode_ == ::libaom_test::kRealTime) {
+ encoder->Control(AV1E_SET_ALLOW_WARPED_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_OBMC, 0);
+ }
}
}
@@ -69,10 +81,7 @@ class AqSegmentTest
// 3-cyclic_refresh_aq) encodes and decodes without a mismatch.
TEST_P(AqSegmentTest, TestNoMisMatch) { DoTest(GET_PARAM(3)); }
-class AqSegmentTestLarge : public AqSegmentTest {};
-
-TEST_P(AqSegmentTestLarge, TestNoMisMatch) { DoTest(GET_PARAM(3)); }
-
+#if !CONFIG_REALTIME_ONLY
// Validate that this delta q mode
// encodes and decodes without a mismatch.
TEST_P(AqSegmentTest, TestNoMisMatchExtDeltaQ) {
@@ -84,13 +93,18 @@ TEST_P(AqSegmentTest, TestNoMisMatchExtDeltaQ) {
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
+#endif
-AV1_INSTANTIATE_TEST_SUITE(AqSegmentTest,
- ::testing::Values(::libaom_test::kRealTime,
- ::libaom_test::kOnePassGood),
+AV1_INSTANTIATE_TEST_SUITE(AqSegmentTest, ::testing::ValuesIn(kTestModeParams),
::testing::Range(5, 9), ::testing::Range(0, 4));
+
+#if !CONFIG_REALTIME_ONLY
+class AqSegmentTestLarge : public AqSegmentTest {};
+
+TEST_P(AqSegmentTestLarge, TestNoMisMatch) { DoTest(GET_PARAM(3)); }
+
AV1_INSTANTIATE_TEST_SUITE(AqSegmentTestLarge,
- ::testing::Values(::libaom_test::kRealTime,
- ::libaom_test::kOnePassGood),
+ ::testing::Values(::libaom_test::kOnePassGood),
::testing::Range(3, 5), ::testing::Range(0, 4));
+#endif
} // namespace
diff --git a/third_party/libaom/source/libaom/test/arf_freq_test.cc b/third_party/libaom/source/libaom/test/arf_freq_test.cc
index 0bf47e6ec4..d12f5ccee6 100644
--- a/third_party/libaom/source/libaom/test/arf_freq_test.cc
+++ b/third_party/libaom/source/libaom/test/arf_freq_test.cc
@@ -56,9 +56,13 @@ const TestVideoParam kTestVectors[] = {
};
const TestEncodeParam kEncodeVectors[] = {
- { ::libaom_test::kOnePassGood, 2 }, { ::libaom_test::kOnePassGood, 5 },
- { ::libaom_test::kTwoPassGood, 1 }, { ::libaom_test::kTwoPassGood, 2 },
- { ::libaom_test::kTwoPassGood, 5 }, { ::libaom_test::kRealTime, 5 },
+#if CONFIG_REALTIME_ONLY
+ { ::libaom_test::kRealTime, 5 },
+#else
+ { ::libaom_test::kRealTime, 5 }, { ::libaom_test::kOnePassGood, 2 },
+ { ::libaom_test::kOnePassGood, 5 }, { ::libaom_test::kTwoPassGood, 1 },
+ { ::libaom_test::kTwoPassGood, 2 }, { ::libaom_test::kTwoPassGood, 5 },
+#endif
};
const int kMinArfVectors[] = {
diff --git a/third_party/libaom/source/libaom/test/av1_convolve_scale_test.cc b/third_party/libaom/source/libaom/test/av1_convolve_scale_test.cc
index a1c5746637..65300140ba 100644
--- a/third_party/libaom/source/libaom/test/av1_convolve_scale_test.cc
+++ b/third_party/libaom/source/libaom/test/av1_convolve_scale_test.cc
@@ -293,8 +293,8 @@ class ConvolveScaleTestBase : public ::testing::Test {
convolve_params_.do_average = do_average;
} else {
convolve_params_.use_dist_wtd_comp_avg = use_dist_wtd_comp_avg;
- convolve_params_.fwd_offset = quant_dist_lookup_table[i][j][0];
- convolve_params_.bck_offset = quant_dist_lookup_table[i][j][1];
+ convolve_params_.fwd_offset = quant_dist_lookup_table[j][i];
+ convolve_params_.bck_offset = quant_dist_lookup_table[j][1 - i];
convolve_params_.is_compound = is_compound;
convolve_params_.do_average = do_average;
}
diff --git a/third_party/libaom/source/libaom/test/av1_convolve_test.cc b/third_party/libaom/source/libaom/test/av1_convolve_test.cc
index 0c902808ad..4d61f02298 100644
--- a/third_party/libaom/source/libaom/test/av1_convolve_test.cc
+++ b/third_party/libaom/source/libaom/test/av1_convolve_test.cc
@@ -1172,8 +1172,8 @@ std::vector<CompoundParam> GetCompoundParams() {
result.push_back(CompoundParam(false, 0, 0));
for (int k = 0; k < 2; ++k) {
for (int l = 0; l < 4; ++l) {
- result.push_back(CompoundParam(true, quant_dist_lookup_table[k][l][0],
- quant_dist_lookup_table[k][l][1]));
+ result.push_back(CompoundParam(true, quant_dist_lookup_table[l][k],
+ quant_dist_lookup_table[l][1 - k]));
}
}
return result;
diff --git a/third_party/libaom/source/libaom/test/av1_external_partition_test.cc b/third_party/libaom/source/libaom/test/av1_external_partition_test.cc
new file mode 100644
index 0000000000..4fe61c7843
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/av1_external_partition_test.cc
@@ -0,0 +1,309 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include <fstream>
+#include <new>
+#include <sstream>
+#include <string>
+
+#include "aom/aom_codec.h"
+#include "aom/aom_external_partition.h"
+#include "av1/common/blockd.h"
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+#include "test/codec_factory.h"
+#include "test/encode_test_driver.h"
+#include "test/y4m_video_source.h"
+#include "test/util.h"
+
+#if CONFIG_AV1_ENCODER
+#if !CONFIG_REALTIME_ONLY
+namespace {
+
+constexpr int kFrameNum = 8;
+constexpr int kVersion = 1;
+
+typedef struct TestData {
+ int version = kVersion;
+} TestData;
+
+typedef struct ToyModel {
+ TestData *data;
+ aom_ext_part_config_t config;
+ aom_ext_part_funcs_t funcs;
+} ToyModel;
+
+// Feature files written during encoding, as defined in partition_strategy.c.
+std::string feature_file_names[] = {
+ "feature_before_partition_none",
+ "feature_before_partition_none_prune_rect",
+ "feature_after_partition_none_prune",
+ "feature_after_partition_none_terminate",
+ "feature_after_partition_split_terminate",
+ "feature_after_partition_split_prune_rect",
+ "feature_after_partition_rect",
+ "feature_after_partition_ab",
+};
+
+// Files written here in the test, where the feature data is received
+// from the API.
+std::string test_feature_file_names[] = {
+ "test_feature_before_partition_none",
+ "test_feature_before_partition_none_prune_rect",
+ "test_feature_after_partition_none_prune",
+ "test_feature_after_partition_none_terminate",
+ "test_feature_after_partition_split_terminate",
+ "test_feature_after_partition_split_prune_rect",
+ "test_feature_after_partition_rect",
+ "test_feature_after_partition_ab",
+};
+
+static void write_features_to_file(const float *features,
+ const int feature_size, const int id) {
+ char filename[256];
+ snprintf(filename, sizeof(filename), "%s",
+ test_feature_file_names[id].c_str());
+ FILE *pfile = fopen(filename, "a");
+ for (int i = 0; i < feature_size; ++i) {
+ fprintf(pfile, "%.6f", features[i]);
+ if (i < feature_size - 1) fprintf(pfile, ",");
+ }
+ fprintf(pfile, "\n");
+ fclose(pfile);
+}
+
+aom_ext_part_status_t ext_part_create_model(
+ void *priv, const aom_ext_part_config_t *part_config,
+ aom_ext_part_model_t *ext_part_model) {
+ TestData *received_data = reinterpret_cast<TestData *>(priv);
+ EXPECT_EQ(received_data->version, kVersion);
+ ToyModel *toy_model = new (std::nothrow) ToyModel;
+ EXPECT_NE(toy_model, nullptr);
+ toy_model->data = received_data;
+ *ext_part_model = toy_model;
+ EXPECT_EQ(part_config->superblock_size, BLOCK_64X64);
+ return AOM_EXT_PART_OK;
+}
+
+aom_ext_part_status_t ext_part_create_model_test(
+ void *priv, const aom_ext_part_config_t *part_config,
+ aom_ext_part_model_t *ext_part_model) {
+ (void)priv;
+ (void)ext_part_model;
+ EXPECT_EQ(part_config->superblock_size, BLOCK_64X64);
+ return AOM_EXT_PART_TEST;
+}
+
+aom_ext_part_status_t ext_part_send_features(
+ aom_ext_part_model_t ext_part_model,
+ const aom_partition_features_t *part_features) {
+ (void)ext_part_model;
+ (void)part_features;
+ return AOM_EXT_PART_OK;
+}
+
+aom_ext_part_status_t ext_part_send_features_test(
+ aom_ext_part_model_t ext_part_model,
+ const aom_partition_features_t *part_features) {
+ (void)ext_part_model;
+ if (part_features->id == FEATURE_BEFORE_PART_NONE) {
+ write_features_to_file(part_features->before_part_none.f, SIZE_DIRECT_SPLIT,
+ 0);
+ } else if (part_features->id == FEATURE_BEFORE_PART_NONE_PART2) {
+ write_features_to_file(part_features->before_part_none.f_part2,
+ SIZE_PRUNE_PART, 1);
+ } else if (part_features->id == FEATURE_AFTER_PART_NONE) {
+ write_features_to_file(part_features->after_part_none.f, SIZE_PRUNE_NONE,
+ 2);
+ } else if (part_features->id == FEATURE_AFTER_PART_NONE_PART2) {
+ write_features_to_file(part_features->after_part_none.f_terminate,
+ SIZE_TERM_NONE, 3);
+ } else if (part_features->id == FEATURE_AFTER_PART_SPLIT) {
+ write_features_to_file(part_features->after_part_split.f_terminate,
+ SIZE_TERM_SPLIT, 4);
+ } else if (part_features->id == FEATURE_AFTER_PART_SPLIT_PART2) {
+ write_features_to_file(part_features->after_part_split.f_prune_rect,
+ SIZE_PRUNE_RECT, 5);
+ } else if (part_features->id == FEATURE_AFTER_PART_RECT) {
+ write_features_to_file(part_features->after_part_rect.f, SIZE_PRUNE_AB, 6);
+ } else if (part_features->id == FEATURE_AFTER_PART_AB) {
+ write_features_to_file(part_features->after_part_ab.f, SIZE_PRUNE_4_WAY, 7);
+ }
+ return AOM_EXT_PART_TEST;
+}
+
+aom_ext_part_status_t ext_part_get_partition_decision(
+ aom_ext_part_model_t ext_part_model,
+ aom_partition_decision_t *ext_part_decision) {
+ (void)ext_part_model;
+ (void)ext_part_decision;
+ return AOM_EXT_PART_ERROR;
+}
+
+aom_ext_part_status_t ext_part_send_partition_stats(
+ aom_ext_part_model_t ext_part_model,
+ const aom_partition_stats_t *ext_part_stats) {
+ (void)ext_part_model;
+ (void)ext_part_stats;
+ return AOM_EXT_PART_OK;
+}
+
+aom_ext_part_status_t ext_part_delete_model(
+ aom_ext_part_model_t ext_part_model) {
+ ToyModel *toy_model = static_cast<ToyModel *>(ext_part_model);
+ EXPECT_EQ(toy_model->data->version, kVersion);
+ delete toy_model;
+ return AOM_EXT_PART_OK;
+}
+
+class ExternalPartitionTest
+ : public ::libaom_test::CodecTestWith2Params<libaom_test::TestMode, int>,
+ public ::libaom_test::EncoderTest {
+ protected:
+ ExternalPartitionTest()
+ : EncoderTest(GET_PARAM(0)), encoding_mode_(GET_PARAM(1)),
+ cpu_used_(GET_PARAM(2)), psnr_(0.0), nframes_(0) {}
+ virtual ~ExternalPartitionTest() {}
+
+ virtual void SetUp() {
+ InitializeConfig(encoding_mode_);
+ const aom_rational timebase = { 1, 30 };
+ cfg_.g_timebase = timebase;
+ cfg_.rc_end_usage = AOM_VBR;
+ cfg_.g_threads = 1;
+ cfg_.g_lag_in_frames = 4;
+ cfg_.rc_target_bitrate = 400;
+ init_flags_ = AOM_CODEC_USE_PSNR;
+ }
+
+ virtual bool DoDecode() const { return false; }
+
+ virtual void BeginPassHook(unsigned int) {
+ psnr_ = 0.0;
+ nframes_ = 0;
+ }
+
+ virtual void PSNRPktHook(const aom_codec_cx_pkt_t *pkt) {
+ psnr_ += pkt->data.psnr.psnr[0];
+ nframes_++;
+ }
+
+ double GetAveragePsnr() const {
+ if (nframes_) return psnr_ / nframes_;
+ return 0.0;
+ }
+
+ void SetExternalPartition(bool use_external_partition) {
+ use_external_partition_ = use_external_partition;
+ }
+
+ void SetTestSendFeatures(int test_send_features) {
+ test_send_features_ = test_send_features;
+ }
+
+ virtual void PreEncodeFrameHook(::libaom_test::VideoSource *video,
+ ::libaom_test::Encoder *encoder) {
+ if (video->frame() == 0) {
+ aom_ext_part_funcs_t ext_part_funcs;
+ ext_part_funcs.priv = reinterpret_cast<void *>(&test_data_);
+ if (use_external_partition_) {
+ ext_part_funcs.create_model = ext_part_create_model;
+ ext_part_funcs.send_features = ext_part_send_features;
+ }
+ if (test_send_features_ == 1) {
+ ext_part_funcs.create_model = ext_part_create_model;
+ ext_part_funcs.send_features = ext_part_send_features_test;
+ } else if (test_send_features_ == 0) {
+ ext_part_funcs.create_model = ext_part_create_model_test;
+ ext_part_funcs.send_features = ext_part_send_features;
+ }
+ ext_part_funcs.get_partition_decision = ext_part_get_partition_decision;
+ ext_part_funcs.send_partition_stats = ext_part_send_partition_stats;
+ ext_part_funcs.delete_model = ext_part_delete_model;
+
+ encoder->Control(AOME_SET_CPUUSED, cpu_used_);
+ encoder->Control(AOME_SET_ENABLEAUTOALTREF, 1);
+ if (use_external_partition_) {
+ encoder->Control(AV1E_SET_EXTERNAL_PARTITION, &ext_part_funcs);
+ }
+ }
+ }
+
+ private:
+ libaom_test::TestMode encoding_mode_;
+ int cpu_used_;
+ double psnr_;
+ unsigned int nframes_;
+ bool use_external_partition_ = false;
+ int test_send_features_ = -1;
+ TestData test_data_;
+};
+
+// Encode twice and expect the same psnr value.
+// The first run is the baseline without external partition.
+// The second run is to get partition decisions from the toy model we defined.
+// Here, we let the partition decision return true for all stages.
+// In this case, the external partition doesn't alter the original encoder
+// behavior. So we expect the same encoding results.
+TEST_P(ExternalPartitionTest, EncodeMatch) {
+ ::libaom_test::Y4mVideoSource video("paris_352_288_30.y4m", 0, kFrameNum);
+ SetExternalPartition(false);
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+ const double psnr = GetAveragePsnr();
+
+ SetExternalPartition(true);
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+ const double psnr2 = GetAveragePsnr();
+
+ EXPECT_DOUBLE_EQ(psnr, psnr2);
+}
+
+// Encode twice to compare generated feature files.
+// The first run let the encoder write partition features to file.
+// The second run calls send partition features function to send features to
+// the external model, and we write them to file.
+// The generated files should match each other.
+TEST_P(ExternalPartitionTest, SendFeatures) {
+ ::libaom_test::Y4mVideoSource video("paris_352_288_30.y4m", 0, kFrameNum);
+ SetExternalPartition(true);
+ SetTestSendFeatures(0);
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+
+ SetExternalPartition(true);
+ SetTestSendFeatures(1);
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+
+ // Compare feature files by reading them into strings.
+ for (int i = 0; i < 8; ++i) {
+ std::ifstream base_file(feature_file_names[i]);
+ std::stringstream base_stream;
+ base_stream << base_file.rdbuf();
+ std::string base_string = base_stream.str();
+
+ std::ifstream test_file(test_feature_file_names[i]);
+ std::stringstream test_stream;
+ test_stream << test_file.rdbuf();
+ std::string test_string = test_stream.str();
+
+ EXPECT_STREQ(base_string.c_str(), test_string.c_str());
+ }
+
+ // Remove files.
+ std::string command("rm -f feature_* test_feature_*");
+ system(command.c_str());
+}
+
+AV1_INSTANTIATE_TEST_SUITE(ExternalPartitionTest,
+ ::testing::Values(::libaom_test::kTwoPassGood),
+ ::testing::Values(4)); // cpu_used
+
+} // namespace
+#endif // !CONFIG_REALTIME_ONLY
+#endif // CONFIG_AV1_ENCODER
diff --git a/third_party/libaom/source/libaom/test/av1_fwd_txfm2d_test.cc b/third_party/libaom/source/libaom/test/av1_fwd_txfm2d_test.cc
index 0e7eb09f2a..d124330ff8 100644
--- a/third_party/libaom/source/libaom/test/av1_fwd_txfm2d_test.cc
+++ b/third_party/libaom/source/libaom/test/av1_fwd_txfm2d_test.cc
@@ -362,6 +362,78 @@ TEST_P(AV1FwdTxfm2dTest, match) {
TEST_P(AV1FwdTxfm2dTest, DISABLED_Speed) {
AV1FwdTxfm2dSpeedTest(GET_PARAM(0), GET_PARAM(1));
}
+TEST(AV1FwdTxfm2dTest, DCTScaleTest) {
+ BitDepthInfo bd_info;
+ bd_info.bit_depth = 8;
+ bd_info.use_highbitdepth_buf = 0;
+ DECLARE_ALIGNED(32, int16_t, src_diff[1024]);
+ DECLARE_ALIGNED(32, tran_low_t, coeff[1024]);
+
+ const TX_SIZE tx_size_list[4] = { TX_4X4, TX_8X8, TX_16X16, TX_32X32 };
+ const int stride_list[4] = { 4, 8, 16, 32 };
+ const int ref_scale_list[4] = { 64, 64, 64, 16 };
+
+ for (int i = 0; i < 4; i++) {
+ TX_SIZE tx_size = tx_size_list[i];
+ int stride = stride_list[i];
+ int array_size = stride * stride;
+
+ for (int i = 0; i < array_size; i++) {
+ src_diff[i] = 8;
+ coeff[i] = 0;
+ }
+
+ av1_quick_txfm(/*use_hadamard=*/0, tx_size, bd_info, src_diff, stride,
+ coeff);
+
+ double input_sse = 0;
+ double output_sse = 0;
+ for (int i = 0; i < array_size; i++) {
+ input_sse += pow(src_diff[i], 2);
+ output_sse += pow(coeff[i], 2);
+ }
+
+ double scale = output_sse / input_sse;
+
+ EXPECT_NEAR(scale, ref_scale_list[i], 5);
+ }
+}
+TEST(AV1FwdTxfm2dTest, HadamardScaleTest) {
+ BitDepthInfo bd_info;
+ bd_info.bit_depth = 8;
+ bd_info.use_highbitdepth_buf = 0;
+ DECLARE_ALIGNED(32, int16_t, src_diff[1024]);
+ DECLARE_ALIGNED(32, tran_low_t, coeff[1024]);
+
+ const TX_SIZE tx_size_list[4] = { TX_4X4, TX_8X8, TX_16X16, TX_32X32 };
+ const int stride_list[4] = { 4, 8, 16, 32 };
+ const int ref_scale_list[4] = { 1, 64, 64, 16 };
+
+ for (int i = 0; i < 4; i++) {
+ TX_SIZE tx_size = tx_size_list[i];
+ int stride = stride_list[i];
+ int array_size = stride * stride;
+
+ for (int i = 0; i < array_size; i++) {
+ src_diff[i] = 8;
+ coeff[i] = 0;
+ }
+
+ av1_quick_txfm(/*use_hadamard=*/1, tx_size, bd_info, src_diff, stride,
+ coeff);
+
+ double input_sse = 0;
+ double output_sse = 0;
+ for (int i = 0; i < array_size; i++) {
+ input_sse += pow(src_diff[i], 2);
+ output_sse += pow(coeff[i], 2);
+ }
+
+ double scale = output_sse / input_sse;
+
+ EXPECT_NEAR(scale, ref_scale_list[i], 5);
+ }
+}
using ::testing::Combine;
using ::testing::Values;
using ::testing::ValuesIn;
@@ -580,8 +652,10 @@ using ::testing::ValuesIn;
#if HAVE_SSE4_1
static TX_SIZE Highbd_fwd_txfm_for_sse4_1[] = {
TX_4X4, TX_8X8, TX_16X16, TX_32X32, TX_64X64, TX_4X8, TX_8X4,
- TX_8X16, TX_16X8, TX_16X32, TX_32X16, TX_32X64, TX_64X32, TX_4X16,
- TX_16X4, TX_8X32, TX_32X8, TX_16X64, TX_64X16,
+ TX_8X16, TX_16X8, TX_16X32, TX_32X16, TX_32X64, TX_64X32,
+#if !CONFIG_REALTIME_ONLY
+ TX_4X16, TX_16X4, TX_8X32, TX_32X8, TX_16X64, TX_64X16,
+#endif
};
INSTANTIATE_TEST_SUITE_P(SSE4_1, AV1HighbdFwdTxfm2dTest,
diff --git a/third_party/libaom/source/libaom/test/av1_highbd_iht_test.cc b/third_party/libaom/source/libaom/test/av1_highbd_iht_test.cc
index a576c0ffed..165abc9483 100644
--- a/third_party/libaom/source/libaom/test/av1_highbd_iht_test.cc
+++ b/third_party/libaom/source/libaom/test/av1_highbd_iht_test.cc
@@ -210,6 +210,12 @@ GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(AV1HighbdInvTxfm2d);
void AV1HighbdInvTxfm2d::RunAV1InvTxfm2dTest(TX_TYPE tx_type_, TX_SIZE tx_size_,
int run_times, int bit_depth_,
int gt_int16) {
+#if CONFIG_REALTIME_ONLY
+ if (tx_size_ == TX_4X16 || tx_size_ == TX_16X4 || tx_size_ == TX_8X32 ||
+ tx_size_ == TX_32X8 || tx_size_ == TX_16X64 || tx_size_ == TX_64X16) {
+ return;
+ }
+#endif
FwdTxfm2dFunc fwd_func_ = libaom_test::fwd_txfm_func_ls[tx_size_];
TxfmParam txfm_param;
const int BLK_WIDTH = 64;
diff --git a/third_party/libaom/source/libaom/test/av1_key_value_api_test.cc b/third_party/libaom/source/libaom/test/av1_key_value_api_test.cc
index 3d06d2d6c5..058b8ce443 100644
--- a/third_party/libaom/source/libaom/test/av1_key_value_api_test.cc
+++ b/third_party/libaom/source/libaom/test/av1_key_value_api_test.cc
@@ -29,10 +29,15 @@ class BaseKeyValAPI : public testing::Test {
#if CONFIG_AV1_ENCODER
aom_codec_iface_t *iface_cx = aom_codec_av1_cx();
aom_codec_enc_cfg_t enc_cfg;
-
+#if CONFIG_REALTIME_ONLY
+ const int usage = 1;
+#else
+ const int usage = 0;
+#endif
+ EXPECT_EQ(AOM_CODEC_OK,
+ aom_codec_enc_config_default(iface_cx, &enc_cfg, usage));
EXPECT_EQ(AOM_CODEC_OK,
- aom_codec_enc_config_default(iface_cx, &enc_cfg, 0));
- EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_init(&enc_, iface_cx, &enc_cfg, 0));
+ aom_codec_enc_init(&enc_, iface_cx, &enc_cfg, usage));
#endif
#if CONFIG_AV1_DECODER
aom_codec_iface_t *iface_dx = aom_codec_av1_dx();
diff --git a/third_party/libaom/source/libaom/test/av1_quantize_test.cc b/third_party/libaom/source/libaom/test/av1_quantize_test.cc
index f0882c7099..bfb684effd 100644
--- a/third_party/libaom/source/libaom/test/av1_quantize_test.cc
+++ b/third_party/libaom/source/libaom/test/av1_quantize_test.cc
@@ -19,6 +19,7 @@
#include "test/clear_system_state.h"
#include "test/register_state_check.h"
#include "av1/common/scan.h"
+#include "av1/encoder/av1_quantize.h"
namespace {
@@ -207,6 +208,32 @@ GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(AV1QuantizeTest);
TEST_P(AV1QuantizeTest, BitExactCheck) { RunQuantizeTest(); }
TEST_P(AV1QuantizeTest, EobVerify) { RunEobTest(); }
+TEST(AV1QuantizeTest, QuantizeFpNoQmatrix) {
+ // Here we use a uniform quantizer as an example
+ const int16_t dequant_ptr[2] = { 78, 93 }; // quantize step
+ const int16_t round_ptr[2] = { 39, 46 }; // round ~= dequant / 2
+
+ // quant ~= 2^16 / dequant. This is a 16-bit fixed point representation of the
+ // inverse of quantize step.
+ const int16_t quant_ptr[2] = { 840, 704 };
+ int log_scale = 0;
+ int coeff_count = 4;
+ const tran_low_t coeff_ptr[4] = { -449, 624, -14, 24 };
+ const tran_low_t ref_qcoeff_ptr[4] = { -6, 7, 0, 0 };
+ const tran_low_t ref_dqcoeff_ptr[4] = { -468, 651, 0, 0 };
+ const int16_t scan[4] = { 0, 1, 2, 3 };
+ tran_low_t qcoeff_ptr[4];
+ tran_low_t dqcoeff_ptr[4];
+ int eob = av1_quantize_fp_no_qmatrix(quant_ptr, dequant_ptr, round_ptr,
+ log_scale, scan, coeff_count, coeff_ptr,
+ qcoeff_ptr, dqcoeff_ptr);
+ EXPECT_EQ(eob, 2);
+ for (int i = 0; i < coeff_count; ++i) {
+ EXPECT_EQ(qcoeff_ptr[i], ref_qcoeff_ptr[i]);
+ EXPECT_EQ(dqcoeff_ptr[i], ref_dqcoeff_ptr[i]);
+ }
+}
+
#if HAVE_SSE4_1
const QuantizeFuncParams qfps[4] = {
QuantizeFuncParams(&av1_highbd_quantize_fp_sse4_1, &av1_highbd_quantize_fp_c,
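The expected values in the new QuantizeFpNoQmatrix test follow from the relations stated in its comments (quant ~= 2^16 / dequant, round ~= dequant / 2): each coefficient is quantized as (|coeff| + round) * quant >> 16 and dequantized as qcoeff * dequant, with index 0 using the DC pair and the rest the AC pair. A small sketch that reproduces the expected outputs under that reading (the rule is restated from the test's comments, not copied from av1_quantize.c):

#include <stdio.h>
#include <stdlib.h>

/* Reproduces the expected outputs of the QuantizeFpNoQmatrix test above. */
int main(void) {
  const int quant[2] = { 840, 704 };  /* ~= 65536 / dequant (DC, AC) */
  const int dequant[2] = { 78, 93 };  /* quantize step (DC, AC) */
  const int round[2] = { 39, 46 };    /* ~= dequant / 2 (DC, AC) */
  const int coeff[4] = { -449, 624, -14, 24 };
  for (int i = 0; i < 4; ++i) {
    const int k = (i == 0) ? 0 : 1;  /* DC vs. AC */
    const int abs_q = (abs(coeff[i]) + round[k]) * quant[k] >> 16;
    const int q = coeff[i] < 0 ? -abs_q : abs_q;
    printf("qcoeff=%d dqcoeff=%d\n", q, q * dequant[k]);
    /* prints -6/-468, 7/651, 0/0, 0/0, matching the test's expectations */
  }
  return 0;
}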
diff --git a/third_party/libaom/source/libaom/test/block_test.cc b/third_party/libaom/source/libaom/test/block_test.cc
index 9cf5b020ef..74deee3f54 100644
--- a/third_party/libaom/source/libaom/test/block_test.cc
+++ b/third_party/libaom/source/libaom/test/block_test.cc
@@ -191,9 +191,17 @@ TEST_P(SuperBlockSizeTestLarge, SuperBlockSizeTest) {
<< "Failed for SB size " << superblock_size_;
}
+const ::libaom_test::TestMode kTestModes[] = {
+#if CONFIG_REALTIME_ONLY
+ ::libaom_test::kRealTime
+#else
+ ::libaom_test::kRealTime, ::libaom_test::kOnePassGood,
+ ::libaom_test::kTwoPassGood
+#endif
+};
+
AV1_INSTANTIATE_TEST_SUITE(SuperBlockSizeTestLarge,
- ::testing::Values(::libaom_test::kOnePassGood,
- ::libaom_test::kTwoPassGood),
+ ::testing::ValuesIn(kTestModes),
::testing::Values(AOM_SUPERBLOCK_SIZE_64X64,
AOM_SUPERBLOCK_SIZE_128X128),
::testing::Values(AOM_Q, AOM_VBR, AOM_CBR, AOM_CQ));
diff --git a/third_party/libaom/source/libaom/test/coding_path_sync.cc b/third_party/libaom/source/libaom/test/coding_path_sync.cc
index 4c613dc03b..0eaa9dad8d 100644
--- a/third_party/libaom/source/libaom/test/coding_path_sync.cc
+++ b/third_party/libaom/source/libaom/test/coding_path_sync.cc
@@ -31,7 +31,11 @@ class CompressedSource {
aom_codec_iface_t *algo = aom_codec_av1_cx();
aom_codec_enc_cfg_t cfg;
+#if CONFIG_REALTIME_ONLY
+ aom_codec_enc_config_default(algo, &cfg, 1);
+#else
aom_codec_enc_config_default(algo, &cfg, 0);
+#endif
// force the quantizer, to reduce the sensitivity on encoding choices.
// e.g, we don't want this test to break when the rate control is modified.
diff --git a/third_party/libaom/source/libaom/test/comp_avg_pred_test.h b/third_party/libaom/source/libaom/test/comp_avg_pred_test.h
index 7f73312c4e..f2fee6d434 100644
--- a/third_party/libaom/source/libaom/test/comp_avg_pred_test.h
+++ b/third_party/libaom/source/libaom/test/comp_avg_pred_test.h
@@ -117,8 +117,8 @@ class AV1DISTWTDCOMPAVGTest
for (int ii = 0; ii < 2; ii++) {
for (int jj = 0; jj < 4; jj++) {
- dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[ii][jj][0];
- dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[ii][jj][1];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[jj][ii];
+ dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[jj][1 - ii];
const int offset_r = 3 + rnd_.PseudoUniform(h - in_h - 7);
const int offset_c = 3 + rnd_.PseudoUniform(w - in_w - 7);
@@ -160,8 +160,8 @@ class AV1DISTWTDCOMPAVGTest
DIST_WTD_COMP_PARAMS dist_wtd_comp_params;
dist_wtd_comp_params.use_dist_wtd_comp_avg = 1;
- dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0][0];
- dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][0][1];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0];
+ dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][1];
const int num_loops = 1000000000 / (in_w + in_h);
aom_usec_timer timer;
@@ -226,10 +226,9 @@ class AV1DISTWTDCOMPAVGUPSAMPLEDTest
for (sub_y_q3 = 0; sub_y_q3 < 8; ++sub_y_q3) {
for (int ii = 0; ii < 2; ii++) {
for (int jj = 0; jj < 4; jj++) {
- dist_wtd_comp_params.fwd_offset =
- quant_dist_lookup_table[ii][jj][0];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[jj][ii];
dist_wtd_comp_params.bck_offset =
- quant_dist_lookup_table[ii][jj][1];
+ quant_dist_lookup_table[jj][1 - ii];
const int offset_r = 3 + rnd_.PseudoUniform(h - in_h - 7);
const int offset_c = 3 + rnd_.PseudoUniform(w - in_w - 7);
@@ -282,8 +281,8 @@ class AV1DISTWTDCOMPAVGUPSAMPLEDTest
DIST_WTD_COMP_PARAMS dist_wtd_comp_params;
dist_wtd_comp_params.use_dist_wtd_comp_avg = 1;
- dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0][0];
- dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][0][1];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0];
+ dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][1];
int sub_x_q3 = 0;
int sub_y_q3 = 0;
@@ -351,8 +350,8 @@ class AV1HighBDDISTWTDCOMPAVGTest
for (int ii = 0; ii < 2; ii++) {
for (int jj = 0; jj < 4; jj++) {
- dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[ii][jj][0];
- dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[ii][jj][1];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[jj][ii];
+ dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[jj][1 - ii];
const int offset_r = 3 + rnd_.PseudoUniform(h - in_h - 7);
const int offset_c = 3 + rnd_.PseudoUniform(w - in_w - 7);
@@ -398,8 +397,8 @@ class AV1HighBDDISTWTDCOMPAVGTest
DIST_WTD_COMP_PARAMS dist_wtd_comp_params;
dist_wtd_comp_params.use_dist_wtd_comp_avg = 1;
- dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0][0];
- dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][0][1];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0];
+ dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][1];
const int num_loops = 1000000000 / (in_w + in_h);
aom_usec_timer timer;
@@ -466,10 +465,9 @@ class AV1HighBDDISTWTDCOMPAVGUPSAMPLEDTest
for (sub_y_q3 = 0; sub_y_q3 < 8; ++sub_y_q3) {
for (int ii = 0; ii < 2; ii++) {
for (int jj = 0; jj < 4; jj++) {
- dist_wtd_comp_params.fwd_offset =
- quant_dist_lookup_table[ii][jj][0];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[jj][ii];
dist_wtd_comp_params.bck_offset =
- quant_dist_lookup_table[ii][jj][1];
+ quant_dist_lookup_table[jj][1 - ii];
const int offset_r = 3 + rnd_.PseudoUniform(h - in_h - 7);
const int offset_c = 3 + rnd_.PseudoUniform(w - in_w - 7);
@@ -524,8 +522,8 @@ class AV1HighBDDISTWTDCOMPAVGUPSAMPLEDTest
DIST_WTD_COMP_PARAMS dist_wtd_comp_params;
dist_wtd_comp_params.use_dist_wtd_comp_avg = 1;
- dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0][0];
- dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][0][1];
+ dist_wtd_comp_params.fwd_offset = quant_dist_lookup_table[0][0];
+ dist_wtd_comp_params.bck_offset = quant_dist_lookup_table[0][1];
int sub_x_q3 = 0;
int sub_y_q3 = 0;
const int num_loops = 1000000000 / (in_w + in_h);
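The comp_avg_pred_test.h changes above, together with the matching edits in av1_convolve_scale_test.cc and av1_convolve_test.cc, rewrite quant_dist_lookup_table[i][j][0] / [i][j][1] as quant_dist_lookup_table[j][i] / quant_dist_lookup_table[j][1 - i]: the table appears to have dropped its leading "order" dimension, and the swapped weighting is now read from the same two-entry row in reverse. A sketch of that equivalence with an illustrative table (the values and the helper are ours, not taken from the libaom sources):

/* Old shape: table3d[order][dist][fwd/bck]; new shape: table2d[dist][2].
 * Reading a row backwards (index 1 - order) recovers the old second
 * "order" plane, so only one copy of the data is needed. */
static const int kDistWtdTable[4][2] = { { 9, 7 }, { 11, 5 },
                                         { 12, 4 }, { 13, 3 } };

static void get_dist_wtd_offsets(int order, int dist, int *fwd, int *bck) {
  *fwd = kDistWtdTable[dist][order];
  *bck = kDistWtdTable[dist][1 - order];
}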
diff --git a/third_party/libaom/source/libaom/test/cpu_used_firstpass_test.cc b/third_party/libaom/source/libaom/test/cpu_used_firstpass_test.cc
new file mode 100644
index 0000000000..c970c1977d
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/cpu_used_firstpass_test.cc
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include "test/codec_factory.h"
+#include "test/encode_test_driver.h"
+#include "test/i420_video_source.h"
+#include "test/util.h"
+
+namespace {
+
+const double kPsnrDiffThreshold = 0.1;
+const int kFirstPassCpuUsed[] = { 2, 4, 6 };
+
+class CpuUsedFirstpassTest : public ::libaom_test::CodecTestWithParam<int>,
+ public ::libaom_test::EncoderTest {
+ protected:
+ CpuUsedFirstpassTest()
+ : EncoderTest(GET_PARAM(0)), second_pass_cpu_used_(GET_PARAM(1)) {}
+ virtual ~CpuUsedFirstpassTest() {}
+
+ virtual void SetUp() {
+ InitializeConfig(::libaom_test::kTwoPassGood);
+ const aom_rational timebase = { 1, 30 };
+ cfg_.g_timebase = timebase;
+ cfg_.rc_end_usage = AOM_VBR;
+ cfg_.rc_target_bitrate = 1000;
+ cfg_.g_lag_in_frames = 19;
+ cfg_.g_threads = 0;
+ init_flags_ = AOM_CODEC_USE_PSNR;
+ }
+
+ virtual void BeginPassHook(unsigned int pass) {
+ psnr_ = 0.0;
+ nframes_ = 0;
+
+ if (pass == 0)
+ cpu_used_ = first_pass_cpu_used_;
+ else
+ cpu_used_ = second_pass_cpu_used_;
+ }
+
+ virtual void PSNRPktHook(const aom_codec_cx_pkt_t *pkt) {
+ psnr_ += pkt->data.psnr.psnr[0];
+ nframes_++;
+ }
+
+ virtual void PreEncodeFrameHook(::libaom_test::VideoSource *video,
+ ::libaom_test::Encoder *encoder) {
+ if (video->frame() == 0) {
+ encoder->Control(AOME_SET_CPUUSED, cpu_used_);
+ encoder->Control(AOME_SET_ENABLEAUTOALTREF, 1);
+ encoder->Control(AOME_SET_ARNR_MAXFRAMES, 7);
+ encoder->Control(AOME_SET_ARNR_STRENGTH, 5);
+ }
+ }
+
+ double GetAveragePsnr() const {
+ if (nframes_) return psnr_ / nframes_;
+ return 0.0;
+ }
+
+ double GetPsnrDiffThreshold() { return kPsnrDiffThreshold; }
+
+ void DoTest() {
+ libaom_test::I420VideoSource video("niklas_640_480_30.yuv", 640, 480,
+ cfg_.g_timebase.den, cfg_.g_timebase.num,
+ 0, 30);
+ const int size = sizeof(kFirstPassCpuUsed) / sizeof(kFirstPassCpuUsed[0]);
+ double ref_psnr;
+ double psnr_diff;
+
+ first_pass_cpu_used_ = second_pass_cpu_used_;
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video)); // same preset case ref_psnr
+ ref_psnr = GetAveragePsnr();
+
+ for (int i = 0; i < size; i++) {
+ first_pass_cpu_used_ = kFirstPassCpuUsed[i];
+ if (first_pass_cpu_used_ == second_pass_cpu_used_) continue;
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+ psnr_diff = abs(ref_psnr - GetAveragePsnr());
+ EXPECT_LT(psnr_diff, GetPsnrDiffThreshold())
+ << "first pass cpu used = " << first_pass_cpu_used_
+ << ", second pass cpu used = " << second_pass_cpu_used_;
+ }
+ }
+
+ int cpu_used_;
+ int first_pass_cpu_used_;
+ int second_pass_cpu_used_;
+ unsigned int nframes_;
+ double psnr_;
+};
+
+TEST_P(CpuUsedFirstpassTest, FirstPassTest) { DoTest(); }
+
+class CpuUsedFirstpassTestLarge : public CpuUsedFirstpassTest {};
+
+TEST_P(CpuUsedFirstpassTestLarge, FirstPassTest) { DoTest(); }
+
+AV1_INSTANTIATE_TEST_SUITE(CpuUsedFirstpassTestLarge,
+ ::testing::Values(2)); // cpu_used
+
+AV1_INSTANTIATE_TEST_SUITE(CpuUsedFirstpassTest,
+ ::testing::Values(4, 6)); // cpu_used
+} // namespace
diff --git a/third_party/libaom/source/libaom/test/datarate_test.cc b/third_party/libaom/source/libaom/test/datarate_test.cc
index 2ff074fe8c..71f8b0f37b 100644
--- a/third_party/libaom/source/libaom/test/datarate_test.cc
+++ b/third_party/libaom/source/libaom/test/datarate_test.cc
@@ -57,7 +57,9 @@ class DatarateTestLarge
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(effective_datarate_, cfg_.rc_target_bitrate * 0.7)
<< " The datarate for the file is lower than target by too much!";
- ASSERT_LE(effective_datarate_, cfg_.rc_target_bitrate * 1.4)
+ // FIXME(jingning): Lower this test threshold after vbr mode can render
+ // sufficiently accurate bit rate.
+ ASSERT_LE(effective_datarate_, cfg_.rc_target_bitrate * 1.45)
<< " The datarate for the file is greater than target by too much!";
}
diff --git a/third_party/libaom/source/libaom/test/datarate_test.h b/third_party/libaom/source/libaom/test/datarate_test.h
index 0396034874..1b0d515efa 100644
--- a/third_party/libaom/source/libaom/test/datarate_test.h
+++ b/third_party/libaom/source/libaom/test/datarate_test.h
@@ -63,6 +63,7 @@ class DatarateTest : public ::libaom_test::EncoderTest {
encoder->Control(AV1E_SET_COEFF_COST_UPD_FREQ, 2);
encoder->Control(AV1E_SET_MODE_COST_UPD_FREQ, 2);
encoder->Control(AV1E_SET_MV_COST_UPD_FREQ, 2);
+ encoder->Control(AV1E_SET_DV_COST_UPD_FREQ, 2);
}
}
diff --git a/third_party/libaom/source/libaom/test/encode_api_test.cc b/third_party/libaom/source/libaom/test/encode_api_test.cc
index eb918460ae..70b0612ced 100644
--- a/third_party/libaom/source/libaom/test/encode_api_test.cc
+++ b/third_party/libaom/source/libaom/test/encode_api_test.cc
@@ -20,6 +20,12 @@
namespace {
+#if CONFIG_REALTIME_ONLY
+const int kUsage = 1;
+#else
+const int kUsage = 0;
+#endif
+
TEST(EncodeAPI, InvalidParams) {
uint8_t buf[1] = { 0 };
aom_image_t img;
@@ -45,7 +51,7 @@ TEST(EncodeAPI, InvalidParams) {
EXPECT_EQ(AOM_CODEC_INVALID_PARAM, aom_codec_enc_init(&enc, iface, NULL, 0));
EXPECT_EQ(AOM_CODEC_INVALID_PARAM,
aom_codec_enc_config_default(iface, &cfg, 3));
- EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, 0));
+ EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, kUsage));
EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_init(&enc, iface, &cfg, 0));
EXPECT_EQ(NULL, aom_codec_get_global_headers(NULL));
@@ -63,13 +69,14 @@ TEST(EncodeAPI, InvalidControlId) {
aom_codec_iface_t *iface = aom_codec_av1_cx();
aom_codec_ctx_t enc;
aom_codec_enc_cfg_t cfg;
- EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, 0));
+ EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, kUsage));
EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_init(&enc, iface, &cfg, 0));
EXPECT_EQ(AOM_CODEC_ERROR, aom_codec_control(&enc, -1, 0));
EXPECT_EQ(AOM_CODEC_INVALID_PARAM, aom_codec_control(&enc, 0, 0));
EXPECT_EQ(AOM_CODEC_OK, aom_codec_destroy(&enc));
}
+#if !CONFIG_REALTIME_ONLY
TEST(EncodeAPI, AllIntraMode) {
aom_codec_iface_t *iface = aom_codec_av1_cx();
aom_codec_ctx_t enc;
@@ -93,5 +100,6 @@ TEST(EncodeAPI, AllIntraMode) {
cfg.kf_max_dist = 1;
EXPECT_EQ(AOM_CODEC_INVALID_PARAM, aom_codec_enc_init(&enc, iface, &cfg, 0));
}
+#endif
} // namespace
diff --git a/third_party/libaom/source/libaom/test/encode_small_width_height_test.cc b/third_party/libaom/source/libaom/test/encode_small_width_height_test.cc
index 6f52fd58ef..ad493e5ce0 100644
--- a/third_party/libaom/source/libaom/test/encode_small_width_height_test.cc
+++ b/third_party/libaom/source/libaom/test/encode_small_width_height_test.cc
@@ -19,11 +19,17 @@
#include "aom/aomcx.h"
#include "aom/aom_encoder.h"
+#include "config/aom_config.h"
namespace {
// Dummy buffer of zero samples.
constexpr unsigned char kBuffer[256 * 512 + 2 * 128 * 256] = { 0 };
+#if CONFIG_REALTIME_ONLY
+const int kUsage = 1;
+#else
+const int kUsage = 0;
+#endif
TEST(EncodeSmallWidthHeight, SmallWidthMultiThreaded) {
// The image has only one tile and the tile is two AV1 superblocks wide.
@@ -37,7 +43,7 @@ TEST(EncodeSmallWidthHeight, SmallWidthMultiThreaded) {
aom_codec_iface_t *iface = aom_codec_av1_cx();
aom_codec_enc_cfg_t cfg;
- EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, 0));
+ EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, kUsage));
cfg.g_threads = 2;
cfg.g_w = kWidth;
cfg.g_h = kHeight;
@@ -49,6 +55,7 @@ TEST(EncodeSmallWidthHeight, SmallWidthMultiThreaded) {
EXPECT_EQ(AOM_CODEC_OK, aom_codec_destroy(&enc));
}
+#if !CONFIG_REALTIME_ONLY
TEST(EncodeSmallWidthHeight, SmallWidthMultiThreadedSpeed0) {
// The image has only one tile and the tile is two AV1 superblocks wide.
// For speed 0, superblock size is 128x128 (see av1_select_sb_size()).
@@ -72,6 +79,7 @@ TEST(EncodeSmallWidthHeight, SmallWidthMultiThreadedSpeed0) {
EXPECT_EQ(AOM_CODEC_OK, aom_codec_encode(&enc, NULL, 0, 0, 0));
EXPECT_EQ(AOM_CODEC_OK, aom_codec_destroy(&enc));
}
+#endif
TEST(EncodeSmallWidthHeight, SmallHeightMultiThreaded) {
// The image has only one tile and the tile is one AV1 superblock tall.
@@ -85,7 +93,7 @@ TEST(EncodeSmallWidthHeight, SmallHeightMultiThreaded) {
aom_codec_iface_t *iface = aom_codec_av1_cx();
aom_codec_enc_cfg_t cfg;
- EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, 0));
+ EXPECT_EQ(AOM_CODEC_OK, aom_codec_enc_config_default(iface, &cfg, kUsage));
cfg.g_threads = 2;
cfg.g_w = kWidth;
cfg.g_h = kHeight;
@@ -97,6 +105,7 @@ TEST(EncodeSmallWidthHeight, SmallHeightMultiThreaded) {
EXPECT_EQ(AOM_CODEC_OK, aom_codec_destroy(&enc));
}
+#if !CONFIG_REALTIME_ONLY
TEST(EncodeSmallWidthHeight, SmallHeightMultiThreadedSpeed0) {
// The image has only one tile and the tile is one AV1 superblock tall.
// For speed 0, superblock size is 128x128 (see av1_select_sb_size()).
@@ -120,5 +129,5 @@ TEST(EncodeSmallWidthHeight, SmallHeightMultiThreadedSpeed0) {
EXPECT_EQ(AOM_CODEC_OK, aom_codec_encode(&enc, NULL, 0, 0, 0));
EXPECT_EQ(AOM_CODEC_OK, aom_codec_destroy(&enc));
}
-
+#endif
} // namespace
diff --git a/third_party/libaom/source/libaom/test/encode_test_driver.cc b/third_party/libaom/source/libaom/test/encode_test_driver.cc
index 058e08e5d7..4a8801f06c 100644
--- a/third_party/libaom/source/libaom/test/encode_test_driver.cc
+++ b/third_party/libaom/source/libaom/test/encode_test_driver.cc
@@ -226,18 +226,18 @@ void EncoderTest::RunLoop(VideoSource *video) {
encoder->EncodeFrame(video, frame_flags_);
CxDataIterator iter = encoder->GetCxData();
+ bool has_cxdata = false;
#if CONFIG_AV1_DECODER
- bool has_cxdata = false;
bool has_dxdata = false;
#endif
while (const aom_codec_cx_pkt_t *pkt = iter.Next()) {
pkt = MutateEncoderOutputHook(pkt);
again = true;
switch (pkt->kind) {
- case AOM_CODEC_CX_FRAME_PKT:
-#if CONFIG_AV1_DECODER
+ case AOM_CODEC_CX_FRAME_PKT: //
has_cxdata = true;
+#if CONFIG_AV1_DECODER
if (decoder.get() != NULL && DoDecode()) {
aom_codec_err_t res_dec;
if (DoDecodeInvisible()) {
@@ -267,21 +267,27 @@ void EncoderTest::RunLoop(VideoSource *video) {
default: break;
}
}
-#if CONFIG_AV1_DECODER
- if (has_dxdata && has_cxdata) {
+ if (has_cxdata) {
const aom_image_t *img_enc = encoder->GetPreviewFrame();
- DxDataIterator dec_iter = decoder->GetDxData();
- const aom_image_t *img_dec = dec_iter.Next();
- if (img_enc && img_dec) {
- const bool res =
- compare_img(img_enc, img_dec, NULL, NULL, NULL, NULL, NULL);
- if (!res) { // Mismatch
- MismatchHook(img_enc, img_dec);
+ if (img_enc) {
+ CalculateFrameLevelSSIM(video->img(), img_enc, cfg_.g_bit_depth,
+ cfg_.g_input_bit_depth);
+ }
+#if CONFIG_AV1_DECODER
+ if (has_dxdata) {
+ DxDataIterator dec_iter = decoder->GetDxData();
+ const aom_image_t *img_dec = dec_iter.Next();
+ if (img_enc && img_dec) {
+ const bool res =
+ compare_img(img_enc, img_dec, NULL, NULL, NULL, NULL, NULL);
+ if (!res) { // Mismatch
+ MismatchHook(img_enc, img_dec);
+ }
}
+ if (img_dec) DecompressedFrameHook(*img_dec, video->pts());
}
- if (img_dec) DecompressedFrameHook(*img_dec, video->pts());
- }
#endif
+ }
if (!Continue()) break;
} // Loop over spatial layers
}
diff --git a/third_party/libaom/source/libaom/test/encode_test_driver.h b/third_party/libaom/source/libaom/test/encode_test_driver.h
index 5da3ac5d0b..468a41bef3 100644
--- a/third_party/libaom/source/libaom/test/encode_test_driver.h
+++ b/third_party/libaom/source/libaom/test/encode_test_driver.h
@@ -134,6 +134,11 @@ class Encoder {
ASSERT_EQ(AOM_CODEC_OK, res) << EncoderError();
}
+ void Control(int ctrl_id, struct aom_ext_part_funcs *arg) {
+ const aom_codec_err_t res = aom_codec_control(&encoder_, ctrl_id, arg);
+ ASSERT_EQ(AOM_CODEC_OK, res) << EncoderError();
+ }
+
#if CONFIG_AV1_ENCODER
void Control(int ctrl_id, aom_active_map_t *arg) {
const aom_codec_err_t res = aom_codec_control(&encoder_, ctrl_id, arg);
@@ -216,6 +221,12 @@ class EncoderTest {
// Hook to be called on every first pass stats packet.
virtual void StatsPktHook(const aom_codec_cx_pkt_t * /*pkt*/) {}
+ // Calculates SSIM at frame level.
+ virtual void CalculateFrameLevelSSIM(const aom_image_t * /*img_src*/,
+ const aom_image_t * /*img_enc*/,
+ aom_bit_depth_t /*bit_depth*/,
+ unsigned int /*input_bit_depth*/) {}
+
// Hook to determine whether the encode loop should continue.
virtual bool Continue() const {
return !(::testing::Test::HasFatalFailure() || abort_);
diff --git a/third_party/libaom/source/libaom/test/encodemb_test.cc b/third_party/libaom/source/libaom/test/encodemb_test.cc
new file mode 100644
index 0000000000..4c725c7dea
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/encodemb_test.cc
@@ -0,0 +1,245 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include <stdint.h>
+#include <vector>
+
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+
+#include "av1/encoder/block.h"
+#include "av1/encoder/encodemb.h"
+#include "av1/common/scan.h"
+
+namespace {
+
+// Reorders 'qcoeff_lexico', which is in lexicographic order (row by row), into
+// scan order (zigzag) in 'qcoeff_scan'.
+void ToScanOrder(TX_SIZE tx_size, TX_TYPE tx_type, tran_low_t *qcoeff_lexico,
+ tran_low_t *qcoeff_scan) {
+ const int max_eob = av1_get_max_eob(tx_size);
+ const SCAN_ORDER *const scan_order = get_scan(tx_size, tx_type);
+ for (int i = 0; i < max_eob; ++i) {
+ qcoeff_scan[i] = qcoeff_lexico[scan_order->scan[i]];
+ }
+}
+
+// Reorders 'qcoeff_scan', which is in scan order (zigzag), into lexicographic
+// order (row by row) in 'qcoeff_lexico'.
+void ToLexicoOrder(TX_SIZE tx_size, TX_TYPE tx_type, tran_low_t *qcoeff_scan,
+ tran_low_t *qcoeff_lexico) {
+ const int max_eob = av1_get_max_eob(tx_size);
+ const SCAN_ORDER *const scan_order = get_scan(tx_size, tx_type);
+ for (int i = 0; i < max_eob; ++i) {
+ qcoeff_lexico[scan_order->scan[i]] = qcoeff_scan[i];
+ }
+}
+
+// Runs coefficient dropout on 'qcoeff_scan'.
+void Dropout(TX_SIZE tx_size, TX_TYPE tx_type, int dropout_num_before,
+ int dropout_num_after, tran_low_t *qcoeff_scan) {
+ tran_low_t qcoeff[MAX_TX_SQUARE];
+ // qcoeff_scan is assumed to be in scan order, since tests are easier to
+ // understand this way, but av1_dropout_qcoeff_num expects coeffs in lexico
+ // order, so we convert to lexico first and back to scan afterwards.
+ ToLexicoOrder(tx_size, tx_type, qcoeff_scan, qcoeff);
+
+ const int max_eob = av1_get_max_eob(tx_size);
+ const int kDequantFactor = 10;
+ tran_low_t dqcoeff[MAX_TX_SQUARE];
+ for (int i = 0; i < max_eob; ++i) {
+ dqcoeff[i] = qcoeff[i] * kDequantFactor;
+ }
+
+ uint16_t eob = max_eob;
+ while (eob > 0 && qcoeff_scan[eob - 1] == 0) --eob;
+
+ MACROBLOCK mb;
+ const int kPlane = 0;
+ const int kBlock = 0;
+ memset(&mb, 0, sizeof(mb));
+ uint16_t eobs[] = { eob };
+ mb.plane[kPlane].eobs = eobs;
+ mb.plane[kPlane].qcoeff = qcoeff;
+ mb.plane[kPlane].dqcoeff = dqcoeff;
+ uint8_t txb_entropy_ctx[1];
+ mb.plane[kPlane].txb_entropy_ctx = txb_entropy_ctx;
+
+ av1_dropout_qcoeff_num(&mb, kPlane, kBlock, tx_size, tx_type,
+ dropout_num_before, dropout_num_after);
+
+ ToScanOrder(tx_size, tx_type, qcoeff, qcoeff_scan);
+
+ // Check updated eob value is valid.
+ uint16_t new_eob = max_eob;
+ while (new_eob > 0 && qcoeff_scan[new_eob - 1] == 0) --new_eob;
+ EXPECT_EQ(new_eob, mb.plane[kPlane].eobs[0]);
+
+ // Check dqcoeff is still consistent with qcoeff.
+ for (int i = 0; i < max_eob; ++i) {
+ EXPECT_EQ(qcoeff[i] * kDequantFactor, dqcoeff[i]);
+ }
+}
+
+void ExpectArrayEq(tran_low_t *actual, std::vector<tran_low_t> expected) {
+ for (size_t i = 0; i < expected.size(); ++i) {
+ EXPECT_EQ(expected[i], actual[i]) << "Arrays differ at index " << i;
+ }
+}
+
+static constexpr TX_TYPE kTxType = DCT_DCT;
+
+TEST(DropoutTest, KeepsLargeCoeffs) {
+ const TX_SIZE tx_size = TX_8X4;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 6;
+ // Large isolated coeffs should be preserved.
+ tran_low_t qcoeff_scan[] = { 0, 0, 0, 0, 0, 0, 42, 0, // should be kept
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, -30, // should be kept
+ 0, 0, 0, 0, 0, 0, 0, 0 };
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after, qcoeff_scan);
+ ExpectArrayEq(qcoeff_scan, { 0, 0, 0, 0, 0, 0, 42, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, -30, //
+ 0, 0, 0, 0, 0, 0, 0, 0 });
+}
+
+TEST(DropoutTest, RemovesSmallIsolatedCoeffs) {
+ const TX_SIZE tx_size = TX_8X4;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 6;
+ // Small isolated coeffs should be removed.
+ tran_low_t qcoeff_scan[] = { 0, 0, 0, 0, 1, 0, 0, 0, // should be removed
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, -2, 0, 0, 0, // should be removed
+ 0, 0, 0, 0, 0, 0, 0, 0 };
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after, qcoeff_scan);
+ ExpectArrayEq(qcoeff_scan, { 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0 });
+}
+
+TEST(DropoutTest, KeepsSmallCoeffsAmongLargeOnes) {
+ const TX_SIZE tx_size = TX_8X4;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 6;
+ // Small coeffs that are not isolated (not enough zeros before/after) should
+ // be kept.
+ tran_low_t qcoeff_scan[] = {
+ 1, 0, 0, 0, -5, 0, 0, -1, // should be kept
+ 0, 0, 0, 10, 0, 0, 2, 0, // should be kept
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, -2, 0, 0, 0, 0, 0, 0 // should be removed
+ };
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after, qcoeff_scan);
+ ExpectArrayEq(qcoeff_scan, { 1, 0, 0, 0, -5, 0, 0, -1, //
+ 0, 0, 0, 10, 0, 0, 2, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0 });
+}
+
+TEST(DropoutTest, KeepsSmallCoeffsCloseToStartOrEnd) {
+ const TX_SIZE tx_size = TX_8X4;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 6;
+ // Small coeffs that are too close to the beginning or end of the block
+ // should also be kept (not enough zeroes before/after).
+ tran_low_t qcoeff_scan[] = { 0, 0, -1, 0, 0, 0, 0, 0, // should be kept
+ 0, 0, 0, 10, 0, 0, 0, 0, // should be kept
+ 0, 0, 0, 2, 0, 0, 0, 0, // should be removed
+ 0, 0, 0, 0, 0, 0, -1, 0 }; // should be kept
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after, qcoeff_scan);
+ ExpectArrayEq(qcoeff_scan, { 0, 0, -1, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 10, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, -1, 0 });
+}
+
+TEST(DropoutTest, RemovesSmallClusterOfCoeffs) {
+ const TX_SIZE tx_size = TX_8X4;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 6;
+ // Small clusters (<= kDropoutContinuityMax) of small coeffs should be
+ // removed.
+ tran_low_t qcoeff_scan_two[] = {
+ 0, 0, 0, 0, 1, 0, 0, -1, // should be removed
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 1, 0, // should be removed
+ 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after,
+ qcoeff_scan_two);
+ ExpectArrayEq(qcoeff_scan_two, { 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0 });
+}
+
+TEST(DropoutTest, KeepsLargeClusterOfCoeffs) {
+ const TX_SIZE tx_size = TX_8X4;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 6;
+ // Large clusters (> kDropoutContinuityMax) of small coeffs should be kept.
+ tran_low_t qcoeff_scan[] = { 0, 0, 0, 0, 1, 0, 1, -1, // should be kept
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, -2, 0, 0, // should be removed
+ 0, 0, 0, 0, 0, 0, 0, 0 };
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after, qcoeff_scan);
+ ExpectArrayEq(qcoeff_scan, { 0, 0, 0, 0, 1, 0, 1, -1, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0 });
+}
+
+TEST(DropoutTest, NumBeforeLargerThanNumAfter) {
+ const TX_SIZE tx_size = TX_8X4;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 2;
+ // On its own, the second coeff (-2) does not meet the dropout_num_before
+ // criterion. But once the first coeff (1) is dropped, it does meet the
+ // criterion and should be dropped too.
+ tran_low_t qcoeff_scan[] = { 0, 0, 0, 0, 1, 0, 0, 0, // should be removed
+ -2, 0, 0, 0, 0, 0, 0, 0, // should be removed
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0 };
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after, qcoeff_scan);
+ ExpectArrayEq(qcoeff_scan, { 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0 });
+}
+
+// More complex test combining other test cases.
+TEST(DropoutTest, ComplexTest) {
+ const TX_SIZE tx_size = TX_8X8;
+ const uint32_t dropout_num_before = 4;
+ const uint32_t dropout_num_after = 2;
+ tran_low_t qcoeff_scan[] = { 1, 12, 0, 0, 0, 0, 1, 0, //
+ 0, 0, 0, -12, 0, 0, 0, 1, //
+ 0, 0, -2, 0, 1, 0, 0, 1, //
+ 0, 0, 0, 0, 5, 0, -1, 0, //
+ 0, 0, 0, 1, 0, 0, 0, -1, //
+ 0, 0, 0, 0, 2, 0, 0, 0, //
+ 0, 1, 0, 0, 0, 5, 0, 0, //
+ 0, 0, 1, 1, 0, 0, 0, -2 };
+ Dropout(tx_size, kTxType, dropout_num_before, dropout_num_after, qcoeff_scan);
+ ExpectArrayEq(qcoeff_scan, { 1, 12, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, -12, 0, 0, 0, 1, //
+ 0, 0, -2, 0, 1, 0, 0, 1, //
+ 0, 0, 0, 0, 5, 0, -1, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, 0, //
+ 0, 0, 0, 0, 0, 5, 0, 0, //
+ 0, 0, 0, 0, 0, 0, 0, -2 });
+}
+
+} // namespace
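The ToScanOrder()/ToLexicoOrder() helpers above simply permute coefficients through the codec's scan table. A minimal standalone sketch of that reindexing, assuming a hypothetical 4x4 zigzag table rather than libaom's get_scan():

    #include <array>

    // Hypothetical 4x4 zigzag table (illustrative only, not libaom's):
    // kZigZag4x4[k] is the row-major (lexicographic) position of the k-th
    // coefficient in scan order.
    constexpr std::array<int, 16> kZigZag4x4 = { 0, 1,  4,  8,  5, 2,  3,  6,
                                                 9, 12, 13, 10, 7, 11, 14, 15 };

    // Lexicographic -> scan order, mirroring ToScanOrder() above.
    void LexicoToScan(const int *lexico, int *scan) {
      for (int k = 0; k < 16; ++k) scan[k] = lexico[kZigZag4x4[k]];
    }

    // Scan -> lexicographic order, mirroring ToLexicoOrder() above.
    void ScanToLexico(const int *scan, int *lexico) {
      for (int k = 0; k < 16; ++k) lexico[kZigZag4x4[k]] = scan[k];
    }

Applying one after the other is the identity, which is why Dropout() can round-trip through lexico order before and after calling av1_dropout_qcoeff_num().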
diff --git a/third_party/libaom/source/libaom/test/end_to_end_psnr_test.cc b/third_party/libaom/source/libaom/test/end_to_end_psnr_test.cc
new file mode 100644
index 0000000000..5574c1a909
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/end_to_end_psnr_test.cc
@@ -0,0 +1,220 @@
+/*
+ * Copyright (c) 2016, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include <memory>
+#include <ostream>
+
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+
+#include "test/codec_factory.h"
+#include "test/encode_test_driver.h"
+#include "test/util.h"
+#include "test/y4m_video_source.h"
+#include "test/yuv_video_source.h"
+
+namespace {
+
+const unsigned int kWidth = 160;
+const unsigned int kHeight = 90;
+const unsigned int kFramerate = 50;
+const unsigned int kFrames = 10;
+const int kBitrate = 500;
+const unsigned int kCqLevel = 18;
+// List of psnr thresholds for speed settings 0-8 and 4 encoding modes
+const double kPsnrThreshold[][4] = {
+ { 35.7, 44.4, 39.5, 41.9 }, { 35.7, 44.4, 39.5, 41.9 },
+ { 35.7, 44.4, 39.4, 41.9 }, { 35.7, 44.4, 39.1, 41.8 },
+ { 35.6, 44.4, 39.1, 41.8 }, { 35.0, 44.3, 38.7, 41.8 },
+ { 35.0, 44.3, 38.7, 41.3 }, { 35.0, 44.3, 38.7, 40.8 },
+ { 35.0, 44.3, 38.7, 40.8 }
+};
+
+typedef struct {
+ const char *filename;
+ unsigned int input_bit_depth;
+ aom_img_fmt fmt;
+ aom_bit_depth_t bit_depth;
+ unsigned int profile;
+} TestVideoParam;
+
+std::ostream &operator<<(std::ostream &os, const TestVideoParam &test_arg) {
+ return os << "TestVideoParam { filename:" << test_arg.filename
+ << " input_bit_depth:" << test_arg.input_bit_depth
+ << " fmt:" << test_arg.fmt << " bit_depth:" << test_arg.bit_depth
+ << " profile:" << test_arg.profile << " }";
+}
+
+const TestVideoParam kTestVectors[] = {
+ { "park_joy_90p_8_420.y4m", 8, AOM_IMG_FMT_I420, AOM_BITS_8, 0 },
+ { "park_joy_90p_8_422.y4m", 8, AOM_IMG_FMT_I422, AOM_BITS_8, 2 },
+ { "park_joy_90p_8_444.y4m", 8, AOM_IMG_FMT_I444, AOM_BITS_8, 1 },
+#if CONFIG_AV1_HIGHBITDEPTH
+ { "park_joy_90p_10_420.y4m", 10, AOM_IMG_FMT_I42016, AOM_BITS_10, 0 },
+ { "park_joy_90p_10_422.y4m", 10, AOM_IMG_FMT_I42216, AOM_BITS_10, 2 },
+ { "park_joy_90p_10_444.y4m", 10, AOM_IMG_FMT_I44416, AOM_BITS_10, 1 },
+ { "park_joy_90p_12_420.y4m", 12, AOM_IMG_FMT_I42016, AOM_BITS_12, 2 },
+ { "park_joy_90p_12_422.y4m", 12, AOM_IMG_FMT_I42216, AOM_BITS_12, 2 },
+ { "park_joy_90p_12_444.y4m", 12, AOM_IMG_FMT_I44416, AOM_BITS_12, 2 },
+#endif
+};
+
+// Encoding modes tested
+const libaom_test::TestMode kEncodingModeVectors[] = {
+ ::libaom_test::kTwoPassGood,
+ ::libaom_test::kOnePassGood,
+ ::libaom_test::kRealTime,
+};
+
+// Speed settings tested
+const int kCpuUsedVectors[] = { 1, 2, 3, 5, 6 };
+
+int is_extension_y4m(const char *filename) {
+ const char *dot = strrchr(filename, '.');
+ if (!dot || dot == filename)
+ return 0;
+ else
+ return !strcmp(dot, ".y4m");
+}
+
+class EndToEndTest
+ : public ::libaom_test::CodecTestWith3Params<libaom_test::TestMode,
+ TestVideoParam, int>,
+ public ::libaom_test::EncoderTest {
+ protected:
+ EndToEndTest()
+ : EncoderTest(GET_PARAM(0)), test_video_param_(GET_PARAM(2)),
+ cpu_used_(GET_PARAM(3)), psnr_(0.0), nframes_(0),
+ encoding_mode_(GET_PARAM(1)) {}
+
+ virtual ~EndToEndTest() {}
+
+ virtual void SetUp() {
+ InitializeConfig(encoding_mode_);
+ if (encoding_mode_ == ::libaom_test::kOnePassGood ||
+ encoding_mode_ == ::libaom_test::kTwoPassGood) {
+ cfg_.g_lag_in_frames = 5;
+ } else if (encoding_mode_ == ::libaom_test::kRealTime) {
+ cfg_.rc_buf_sz = 1000;
+ cfg_.rc_buf_initial_sz = 500;
+ cfg_.rc_buf_optimal_sz = 600;
+ }
+ }
+
+ virtual void BeginPassHook(unsigned int) {
+ psnr_ = 0.0;
+ nframes_ = 0;
+ }
+
+ virtual void PSNRPktHook(const aom_codec_cx_pkt_t *pkt) {
+ psnr_ += pkt->data.psnr.psnr[0];
+ nframes_++;
+ }
+
+ virtual void PreEncodeFrameHook(::libaom_test::VideoSource *video,
+ ::libaom_test::Encoder *encoder) {
+ if (video->frame() == 0) {
+ encoder->Control(AV1E_SET_FRAME_PARALLEL_DECODING, 1);
+ encoder->Control(AV1E_SET_TILE_COLUMNS, 4);
+ encoder->Control(AOME_SET_CPUUSED, cpu_used_);
+ // Test screen coding tools when cpu_used == 1 and encoding mode is two-pass.
+ if (cpu_used_ == 1 && encoding_mode_ == ::libaom_test::kTwoPassGood)
+ encoder->Control(AV1E_SET_TUNE_CONTENT, AOM_CONTENT_SCREEN);
+ else
+ encoder->Control(AV1E_SET_TUNE_CONTENT, AOM_CONTENT_DEFAULT);
+ if (encoding_mode_ == ::libaom_test::kOnePassGood ||
+ encoding_mode_ == ::libaom_test::kTwoPassGood) {
+ encoder->Control(AOME_SET_ENABLEAUTOALTREF, 1);
+ encoder->Control(AOME_SET_ARNR_MAXFRAMES, 7);
+ encoder->Control(AOME_SET_ARNR_STRENGTH, 5);
+ } else if (encoding_mode_ == ::libaom_test::kAllIntra) {
+ encoder->Control(AOME_SET_CQ_LEVEL, kCqLevel);
+ }
+ }
+ }
+
+ double GetAveragePsnr() const {
+ if (nframes_) return psnr_ / nframes_;
+ return 0.0;
+ }
+
+ double GetPsnrThreshold() {
+ return kPsnrThreshold[cpu_used_][encoding_mode_];
+ }
+
+ void DoTest() {
+ cfg_.rc_target_bitrate = kBitrate;
+ cfg_.g_error_resilient = 0;
+ cfg_.g_profile = test_video_param_.profile;
+ cfg_.g_input_bit_depth = test_video_param_.input_bit_depth;
+ cfg_.g_bit_depth = test_video_param_.bit_depth;
+ init_flags_ = AOM_CODEC_USE_PSNR;
+ if (cfg_.g_bit_depth > 8) init_flags_ |= AOM_CODEC_USE_HIGHBITDEPTH;
+
+ std::unique_ptr<libaom_test::VideoSource> video;
+ if (is_extension_y4m(test_video_param_.filename)) {
+ video.reset(new libaom_test::Y4mVideoSource(test_video_param_.filename, 0,
+ kFrames));
+ } else {
+ video.reset(new libaom_test::YUVVideoSource(
+ test_video_param_.filename, test_video_param_.fmt, kWidth, kHeight,
+ kFramerate, 1, 0, kFrames));
+ }
+ ASSERT_TRUE(video.get() != NULL);
+
+ ASSERT_NO_FATAL_FAILURE(RunLoop(video.get()));
+ const double psnr = GetAveragePsnr();
+ EXPECT_GT(psnr, GetPsnrThreshold())
+ << "cpu used = " << cpu_used_ << ", encoding mode = " << encoding_mode_;
+ }
+
+ TestVideoParam test_video_param_;
+ int cpu_used_;
+
+ private:
+ double psnr_;
+ unsigned int nframes_;
+ libaom_test::TestMode encoding_mode_;
+};
+
+class EndToEndTestLarge : public EndToEndTest {};
+
+class EndToEndAllIntraTestLarge : public EndToEndTest {};
+
+class EndToEndAllIntraTest : public EndToEndTest {};
+
+TEST_P(EndToEndTestLarge, EndtoEndPSNRTest) { DoTest(); }
+
+TEST_P(EndToEndTest, EndtoEndPSNRTest) { DoTest(); }
+
+TEST_P(EndToEndAllIntraTestLarge, EndtoEndPSNRTest) { DoTest(); }
+
+TEST_P(EndToEndAllIntraTest, EndtoEndPSNRTest) { DoTest(); }
+
+AV1_INSTANTIATE_TEST_SUITE(EndToEndTestLarge,
+ ::testing::ValuesIn(kEncodingModeVectors),
+ ::testing::ValuesIn(kTestVectors),
+ ::testing::ValuesIn(kCpuUsedVectors));
+
+AV1_INSTANTIATE_TEST_SUITE(EndToEndTest,
+ ::testing::Values(::libaom_test::kTwoPassGood),
+ ::testing::Values(kTestVectors[2]), // 444
+ ::testing::Values(3)); // cpu_used
+
+AV1_INSTANTIATE_TEST_SUITE(EndToEndAllIntraTestLarge,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::ValuesIn(kTestVectors),
+ ::testing::Values(2, 4, 6, 8)); // cpu_used
+
+AV1_INSTANTIATE_TEST_SUITE(EndToEndAllIntraTest,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(kTestVectors[0]), // 420
+ ::testing::Values(6)); // cpu_used
+} // namespace
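The pass criterion above reduces to a running mean of per-frame PSNR compared against a threshold looked up by (cpu_used, encoding mode). A self-contained sketch of that bookkeeping, with placeholder threshold values rather than the kPsnrThreshold table:

    // Running-average PSNR tracker, mirroring PSNRPktHook()/GetAveragePsnr().
    struct PsnrTracker {
      double sum = 0.0;
      unsigned int frames = 0;
      void OnFrame(double frame_psnr) { sum += frame_psnr; ++frames; }
      double Average() const { return frames ? sum / frames : 0.0; }
    };

    // Placeholder thresholds indexed as [cpu_used][encoding_mode]; the real
    // table above has one row per speed setting 0-8 and one column per mode.
    constexpr double kDemoPsnrThreshold[2][2] = { { 35.0, 40.0 }, { 34.0, 39.0 } };

    bool MeetsThreshold(const PsnrTracker &t, int cpu_used, int mode) {
      return t.Average() > kDemoPsnrThreshold[cpu_used][mode];
    }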
diff --git a/third_party/libaom/source/libaom/test/end_to_end_ssim_test.cc b/third_party/libaom/source/libaom/test/end_to_end_ssim_test.cc
new file mode 100644
index 0000000000..1e638d7b45
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/end_to_end_ssim_test.cc
@@ -0,0 +1,189 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include "aom_ports/mem.h"
+#include "aom_dsp/ssim.h"
+#include "av1/common/blockd.h"
+#include "test/codec_factory.h"
+#include "test/encode_test_driver.h"
+#include "test/util.h"
+#include "test/y4m_video_source.h"
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+
+namespace {
+
+const unsigned int kFrames = 10;
+const unsigned int kCqLevel = 18;
+// List of ssim thresholds for speed settings 0-8 with all intra encoding mode.
+const double kSsimThreshold[] = { 83.4, 83.4, 83.4, 83.3, 83.3,
+ 83.0, 82.3, 81.1, 81.1 };
+
+typedef struct {
+ const char *filename;
+ unsigned int input_bit_depth;
+ aom_img_fmt fmt;
+ aom_bit_depth_t bit_depth;
+ unsigned int profile;
+} TestVideoParam;
+
+std::ostream &operator<<(std::ostream &os, const TestVideoParam &test_arg) {
+ return os << "TestVideoParam { filename:" << test_arg.filename
+ << " input_bit_depth:" << test_arg.input_bit_depth
+ << " fmt:" << test_arg.fmt << " bit_depth:" << test_arg.bit_depth
+ << " profile:" << test_arg.profile << " }";
+}
+
+const TestVideoParam kTestVectors[] = {
+ { "park_joy_90p_8_420.y4m", 8, AOM_IMG_FMT_I420, AOM_BITS_8, 0 },
+ { "park_joy_90p_8_422.y4m", 8, AOM_IMG_FMT_I422, AOM_BITS_8, 2 },
+ { "park_joy_90p_8_444.y4m", 8, AOM_IMG_FMT_I444, AOM_BITS_8, 1 },
+#if CONFIG_AV1_HIGHBITDEPTH
+ { "park_joy_90p_10_420.y4m", 10, AOM_IMG_FMT_I42016, AOM_BITS_10, 0 },
+ { "park_joy_90p_10_422.y4m", 10, AOM_IMG_FMT_I42216, AOM_BITS_10, 2 },
+ { "park_joy_90p_10_444.y4m", 10, AOM_IMG_FMT_I44416, AOM_BITS_10, 1 },
+ { "park_joy_90p_12_420.y4m", 12, AOM_IMG_FMT_I42016, AOM_BITS_12, 2 },
+ { "park_joy_90p_12_422.y4m", 12, AOM_IMG_FMT_I42216, AOM_BITS_12, 2 },
+ { "park_joy_90p_12_444.y4m", 12, AOM_IMG_FMT_I44416, AOM_BITS_12, 2 },
+#endif
+};
+
+// This class is used to check adherence to given ssim value.
+class EndToEndSSIMTest
+ : public ::libaom_test::CodecTestWith3Params<libaom_test::TestMode,
+ TestVideoParam, int>,
+ public ::libaom_test::EncoderTest {
+ protected:
+ EndToEndSSIMTest()
+ : EncoderTest(GET_PARAM(0)), encoding_mode_(GET_PARAM(1)),
+ test_video_param_(GET_PARAM(2)), cpu_used_(GET_PARAM(3)), nframes_(0),
+ ssim_(0.0) {}
+
+ ~EndToEndSSIMTest() override {}
+
+ void SetUp() override { InitializeConfig(encoding_mode_); }
+
+ void BeginPassHook(unsigned int) override {
+ nframes_ = 0;
+ ssim_ = 0.0;
+ }
+
+ void CalculateFrameLevelSSIM(const aom_image_t *img_src,
+ const aom_image_t *img_enc,
+ aom_bit_depth_t bit_depth,
+ unsigned int input_bit_depth) override {
+ double frame_ssim;
+ double plane_ssim[MAX_MB_PLANE] = { 0.0, 0.0, 0.0 };
+ int crop_widths[PLANE_TYPES];
+ int crop_heights[PLANE_TYPES];
+ crop_widths[PLANE_TYPE_Y] = img_src->d_w;
+ crop_heights[PLANE_TYPE_Y] = img_src->d_h;
+ // Width and height of UV planes calculated based on chroma_shift values.
+ crop_widths[PLANE_TYPE_UV] =
+ img_src->x_chroma_shift == 1 ? (img_src->w + 1) >> 1 : img_src->w;
+ crop_heights[PLANE_TYPE_UV] =
+ img_src->y_chroma_shift == 1 ? (img_src->h + 1) >> 1 : img_src->h;
+ nframes_++;
+
+#if CONFIG_AV1_HIGHBITDEPTH
+ uint8_t is_hbd = bit_depth > AOM_BITS_8;
+ if (is_hbd) {
+ // HBD ssim calculation.
+ uint8_t shift = bit_depth - input_bit_depth;
+ for (int i = AOM_PLANE_Y; i < MAX_MB_PLANE; ++i) {
+ const int is_uv = i > AOM_PLANE_Y;
+ plane_ssim[i] = aom_highbd_ssim2(
+ CONVERT_TO_BYTEPTR(img_src->planes[i]),
+ CONVERT_TO_BYTEPTR(img_enc->planes[i]),
+ img_src->stride[is_uv] >> is_hbd, img_enc->stride[is_uv] >> is_hbd,
+ crop_widths[is_uv], crop_heights[is_uv], input_bit_depth, shift);
+ }
+ frame_ssim = plane_ssim[AOM_PLANE_Y] * .8 +
+ .1 * (plane_ssim[AOM_PLANE_U] + plane_ssim[AOM_PLANE_V]);
+ // Accumulate to find sequence level ssim value.
+ ssim_ += frame_ssim;
+ return;
+ }
+#else
+ (void)bit_depth;
+ (void)input_bit_depth;
+#endif // CONFIG_AV1_HIGHBITDEPTH
+
+ // LBD ssim calculation.
+ for (int i = AOM_PLANE_Y; i < MAX_MB_PLANE; ++i) {
+ const int is_uv = i > AOM_PLANE_Y;
+ plane_ssim[i] = aom_ssim2(img_src->planes[i], img_enc->planes[i],
+ img_src->stride[is_uv], img_enc->stride[is_uv],
+ crop_widths[is_uv], crop_heights[is_uv]);
+ }
+ frame_ssim = plane_ssim[AOM_PLANE_Y] * .8 +
+ .1 * (plane_ssim[AOM_PLANE_U] + plane_ssim[AOM_PLANE_V]);
+ // Accumulate to find sequence level ssim value.
+ ssim_ += frame_ssim;
+ }
+
+ void PreEncodeFrameHook(::libaom_test::VideoSource *video,
+ ::libaom_test::Encoder *encoder) override {
+ if (video->frame() == 0) {
+ encoder->Control(AV1E_SET_FRAME_PARALLEL_DECODING, 1);
+ encoder->Control(AV1E_SET_TILE_COLUMNS, 4);
+ encoder->Control(AOME_SET_CPUUSED, cpu_used_);
+ encoder->Control(AOME_SET_TUNING, AOM_TUNE_SSIM);
+ encoder->Control(AOME_SET_CQ_LEVEL, kCqLevel);
+ }
+ }
+
+ double GetAverageSsim() const {
+ if (nframes_) return 100 * pow(ssim_ / nframes_, 8.0);
+ return 0.0;
+ }
+
+ double GetSsimThreshold() { return kSsimThreshold[cpu_used_]; }
+
+ void DoTest() {
+ cfg_.g_profile = test_video_param_.profile;
+ cfg_.g_input_bit_depth = test_video_param_.input_bit_depth;
+ cfg_.g_bit_depth = test_video_param_.bit_depth;
+ if (cfg_.g_bit_depth > 8) init_flags_ |= AOM_CODEC_USE_HIGHBITDEPTH;
+
+ std::unique_ptr<libaom_test::VideoSource> video(
+ new libaom_test::Y4mVideoSource(test_video_param_.filename, 0,
+ kFrames));
+ ASSERT_TRUE(video.get() != NULL);
+ ASSERT_NO_FATAL_FAILURE(RunLoop(video.get()));
+ const double ssim = GetAverageSsim();
+ EXPECT_GT(ssim, GetSsimThreshold())
+ << "encoding mode = " << encoding_mode_ << ", cpu used = " << cpu_used_;
+ }
+
+ private:
+ const libaom_test::TestMode encoding_mode_;
+ const TestVideoParam test_video_param_;
+ const int cpu_used_;
+ unsigned int nframes_;
+ double ssim_;
+};
+
+class EndToEndSSIMTestLarge : public EndToEndSSIMTest {};
+
+TEST_P(EndToEndSSIMTestLarge, EndtoEndSSIMTest) { DoTest(); }
+
+TEST_P(EndToEndSSIMTest, EndtoEndSSIMTest) { DoTest(); }
+
+AV1_INSTANTIATE_TEST_SUITE(EndToEndSSIMTestLarge,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::ValuesIn(kTestVectors),
+ ::testing::Values(2, 4, 6, 8)); // cpu_used
+
+AV1_INSTANTIATE_TEST_SUITE(EndToEndSSIMTest,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(kTestVectors[0]), // 420
+ ::testing::Values(6)); // cpu_used
+} // namespace
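The SSIM aggregation above boils down to a fixed 0.8/0.1/0.1 plane weighting per frame and a 100 * mean^8 sequence score; a condensed sketch of just that arithmetic (the weights and scaling are taken from the test code itself):

    #include <cmath>

    // Plane-weighted frame SSIM, as in CalculateFrameLevelSSIM() above.
    double FrameSsim(double ssim_y, double ssim_u, double ssim_v) {
      return 0.8 * ssim_y + 0.1 * (ssim_u + ssim_v);
    }

    // Sequence-level score, as in GetAverageSsim() above.
    double SequenceSsimScore(double frame_ssim_sum, unsigned int nframes) {
      return nframes ? 100.0 * std::pow(frame_ssim_sum / nframes, 8.0) : 0.0;
    }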
diff --git a/third_party/libaom/source/libaom/test/error_resilience_test.cc b/third_party/libaom/source/libaom/test/error_resilience_test.cc
index 31906a47d0..3999c9146d 100644
--- a/third_party/libaom/source/libaom/test/error_resilience_test.cc
+++ b/third_party/libaom/source/libaom/test/error_resilience_test.cc
@@ -358,6 +358,10 @@ TEST_P(ErrorResilienceTestLarge, OnVersusOff) {
// if we lose (i.e., drop before decoding) a set of droppable
// frames (i.e., frames that don't update any reference buffers).
TEST_P(ErrorResilienceTestLarge, DropFramesWithoutRecovery) {
+ if (GET_PARAM(1) == ::libaom_test::kOnePassGood && GET_PARAM(2) == 1) {
+ fprintf(stderr, "Skipping test case #1 because of bug aomedia:3002\n");
+ return;
+ }
SetupEncoder(500, 10);
libaom_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
cfg_.g_timebase.den, cfg_.g_timebase.num,
diff --git a/third_party/libaom/source/libaom/test/ethread_test.cc b/third_party/libaom/source/libaom/test/ethread_test.cc
index 5bf8762052..78811b65cf 100644
--- a/third_party/libaom/source/libaom/test/ethread_test.cc
+++ b/third_party/libaom/source/libaom/test/ethread_test.cc
@@ -21,6 +21,9 @@
#include "av1/encoder/firstpass.h"
namespace {
+const unsigned int kCqLevel = 18;
+
+#if !CONFIG_REALTIME_ONLY
const size_t kFirstPassStatsSz = sizeof(FIRSTPASS_STATS);
class AVxFirstPassEncoderThreadTest
: public ::libaom_test::CodecTestWith4Params<libaom_test::TestMode, int,
@@ -196,6 +199,7 @@ TEST_P(AVxFirstPassEncoderThreadTest, FirstPassStatsTest) {
// Comparison 4 (between threads=4 and threads=8).
compare_fp_stats_md5(&firstpass_stats);
}
+#endif // !CONFIG_REALTIME_ONLY
class AVxEncoderThreadTest
: public ::libaom_test::CodecTestWith5Params<libaom_test::TestMode, int,
@@ -227,11 +231,12 @@ class AVxEncoderThreadTest
virtual void SetUp() {
InitializeConfig(encoding_mode_);
- if (encoding_mode_ != ::libaom_test::kRealTime) {
+ if (encoding_mode_ == ::libaom_test::kOnePassGood ||
+ encoding_mode_ == ::libaom_test::kTwoPassGood) {
cfg_.g_lag_in_frames = 6;
cfg_.rc_2pass_vbr_minsection_pct = 5;
cfg_.rc_2pass_vbr_maxsection_pct = 2000;
- } else {
+ } else if (encoding_mode_ == ::libaom_test::kRealTime) {
cfg_.g_error_resilient = 1;
}
cfg_.rc_max_quantizer = 56;
@@ -248,18 +253,22 @@ class AVxEncoderThreadTest
SetTileSize(encoder);
encoder->Control(AOME_SET_CPUUSED, set_cpu_used_);
encoder->Control(AV1E_SET_ROW_MT, row_mt_);
- if (encoding_mode_ != ::libaom_test::kRealTime) {
+ if (encoding_mode_ == ::libaom_test::kOnePassGood ||
+ encoding_mode_ == ::libaom_test::kTwoPassGood) {
encoder->Control(AOME_SET_ENABLEAUTOALTREF, 1);
encoder->Control(AOME_SET_ARNR_MAXFRAMES, 5);
encoder->Control(AOME_SET_ARNR_STRENGTH, 5);
encoder->Control(AV1E_SET_FRAME_PARALLEL_DECODING, 0);
encoder->Control(AV1E_SET_MAX_GF_INTERVAL, 4);
- } else {
+ } else if (encoding_mode_ == ::libaom_test::kRealTime) {
encoder->Control(AOME_SET_ENABLEAUTOALTREF, 0);
encoder->Control(AV1E_SET_AQ_MODE, 3);
encoder->Control(AV1E_SET_COEFF_COST_UPD_FREQ, 2);
encoder->Control(AV1E_SET_MODE_COST_UPD_FREQ, 2);
encoder->Control(AV1E_SET_MV_COST_UPD_FREQ, 3);
+ encoder->Control(AV1E_SET_DV_COST_UPD_FREQ, 3);
+ } else {
+ encoder->Control(AOME_SET_CQ_LEVEL, kCqLevel);
}
encoder_initialized_ = true;
}
@@ -423,31 +432,60 @@ class AVxEncoderThreadTest
std::vector<std::string> md5_dec_;
};
-TEST_P(AVxEncoderThreadTest, EncoderResultTest) {
+class AVxEncoderThreadRTTest : public AVxEncoderThreadTest {};
+
+TEST_P(AVxEncoderThreadRTTest, EncoderResultTest) {
cfg_.large_scale_tile = 0;
decoder_->Control(AV1_SET_TILE_MODE, 0);
DoTest();
}
-class AVxEncoderThreadRTTest : public AVxEncoderThreadTest {};
+// For real-time mode, test speeds 6, 7, 8 and 9.
+AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadRTTest,
+ ::testing::Values(::libaom_test::kRealTime),
+ ::testing::Values(6, 7, 8, 9),
+ ::testing::Values(0, 2), ::testing::Values(0, 2),
+ ::testing::Values(0, 1));
-TEST_P(AVxEncoderThreadRTTest, EncoderResultTest) {
+#if !CONFIG_REALTIME_ONLY
+
+// The AVxEncoderThreadTestLarge takes up ~14% of total run-time of the
+// Valgrind long tests. Exclude it; the smaller tests are still run.
+#if !AOM_VALGRIND_BUILD
+class AVxEncoderThreadTestLarge : public AVxEncoderThreadTest {};
+
+TEST_P(AVxEncoderThreadTestLarge, EncoderResultTest) {
cfg_.large_scale_tile = 0;
decoder_->Control(AV1_SET_TILE_MODE, 0);
DoTest();
}
-class AVxEncoderThreadTestLarge : public AVxEncoderThreadTest {};
+// Test cpu_used 0, 1, 3 and 5.
+AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadTestLarge,
+ ::testing::Values(::libaom_test::kTwoPassGood,
+ ::libaom_test::kOnePassGood),
+ ::testing::Values(0, 1, 3, 5),
+ ::testing::Values(1, 6), ::testing::Values(1, 6),
+ ::testing::Values(0, 1));
+#endif // !AOM_VALGRIND_BUILD
-TEST_P(AVxEncoderThreadTestLarge, EncoderResultTest) {
+TEST_P(AVxEncoderThreadTest, EncoderResultTest) {
cfg_.large_scale_tile = 0;
decoder_->Control(AV1_SET_TILE_MODE, 0);
DoTest();
}
-class AVxEncoderThreadRTTestLarge : public AVxEncoderThreadTest {};
+class AVxEncoderThreadAllIntraTest : public AVxEncoderThreadTest {};
-TEST_P(AVxEncoderThreadRTTestLarge, EncoderResultTest) {
+TEST_P(AVxEncoderThreadAllIntraTest, EncoderResultTest) {
+ cfg_.large_scale_tile = 0;
+ decoder_->Control(AV1_SET_TILE_MODE, 0);
+ DoTest();
+}
+
+class AVxEncoderThreadAllIntraTestLarge : public AVxEncoderThreadTest {};
+
+TEST_P(AVxEncoderThreadAllIntraTestLarge, EncoderResultTest) {
cfg_.large_scale_tile = 0;
decoder_->Control(AV1_SET_TILE_MODE, 0);
DoTest();
@@ -466,26 +504,20 @@ AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadTest,
::testing::Values(2), ::testing::Values(0, 2),
::testing::Values(0, 2), ::testing::Values(0, 1));
-// Test cpu_used 7, 8, 9 here.
-AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadRTTest,
- ::testing::Values(::libaom_test::kRealTime),
- ::testing::Values(7, 8, 9), ::testing::Values(0, 2),
+// For all intra mode, speeds 0, 2, 4, 6 and 8 are covered across the two
+// suites; only cpu_used 6 is tested here.
+AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadAllIntraTest,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(6), ::testing::Values(0, 2),
::testing::Values(0, 2), ::testing::Values(0, 1));
-// Test cpu_used 0, 1, 3 and 5.
-AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadTestLarge,
- ::testing::Values(::libaom_test::kTwoPassGood,
- ::libaom_test::kOnePassGood),
- ::testing::Values(0, 1, 3, 5),
- ::testing::Values(1, 6), ::testing::Values(1, 6),
- ::testing::Values(0, 1));
-
-// Test cpu_used 0, 2, 4 and 6.
-AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadRTTestLarge,
- ::testing::Values(::libaom_test::kRealTime),
- ::testing::Values(0, 2, 4, 6),
+// Test cpu_used 0, 2, 4 and 8.
+AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadAllIntraTestLarge,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(0, 2, 4, 8),
::testing::Values(1, 6), ::testing::Values(1, 6),
::testing::Values(0, 1));
+#endif // !CONFIG_REALTIME_ONLY
class AVxEncoderThreadLSTest : public AVxEncoderThreadTest {
virtual void SetTileSize(libaom_test::Encoder *encoder) {
@@ -512,6 +544,10 @@ TEST_P(AVxEncoderThreadLSTest, EncoderResultTest) {
DoTest();
}
+// AVxEncoderThreadLSTestLarge takes up about 2% of total run-time of
+// the Valgrind long tests. Since we already run AVxEncoderThreadLSTest,
+// skip this one for Valgrind.
+#if !CONFIG_REALTIME_ONLY && !AOM_VALGRIND_BUILD
class AVxEncoderThreadLSTestLarge : public AVxEncoderThreadLSTest {};
TEST_P(AVxEncoderThreadLSTestLarge, EncoderResultTest) {
@@ -526,4 +562,5 @@ AV1_INSTANTIATE_TEST_SUITE(AVxEncoderThreadLSTestLarge,
::libaom_test::kOnePassGood),
::testing::Values(1, 3), ::testing::Values(0, 6),
::testing::Values(0, 6), ::testing::Values(1));
+#endif // !CONFIG_REALTIME_ONLY && !AOM_VALGRIND_BUILD
} // namespace
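The restructuring above replaces the old realtime-vs-everything-else branching with a three-way dispatch on encoding mode. Schematically (a reduced stand-in for the aom_codec_enc_cfg_t fields touched above, not the full test setup):

    enum class Mode { kOnePassGood, kTwoPassGood, kRealTime, kAllIntra };

    struct DemoCfg {  // stand-in for the relevant aom_codec_enc_cfg_t fields
      unsigned int g_lag_in_frames = 0;
      unsigned int g_error_resilient = 0;
    };

    void ConfigureForMode(Mode mode, DemoCfg *cfg) {
      if (mode == Mode::kOnePassGood || mode == Mode::kTwoPassGood) {
        cfg->g_lag_in_frames = 6;        // good-quality modes use lookahead
      } else if (mode == Mode::kRealTime) {
        cfg->g_error_resilient = 1;      // real-time enables error resilience
      }
      // All-intra falls through: it only sets AOME_SET_CQ_LEVEL at encode time.
    }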
diff --git a/third_party/libaom/source/libaom/test/external_frame_buffer_test.cc b/third_party/libaom/source/libaom/test/external_frame_buffer_test.cc
index 5006b5b6cf..b060ee3913 100644
--- a/third_party/libaom/source/libaom/test/external_frame_buffer_test.cc
+++ b/third_party/libaom/source/libaom/test/external_frame_buffer_test.cc
@@ -199,6 +199,7 @@ int do_not_release_aom_frame_buffer(void *user_priv,
#endif // CONFIG_WEBM_IO
+#if !CONFIG_REALTIME_ONLY
// Class for testing passing in external frame buffers to libaom.
class ExternalFrameBufferMD5Test
: public ::libaom_test::DecoderTest,
@@ -298,6 +299,7 @@ class ExternalFrameBufferMD5Test
int num_buffers_;
ExternalFrameBufferList fb_list_;
};
+#endif // !CONFIG_REALTIME_ONLY
#if CONFIG_WEBM_IO
const char kAV1TestFile[] = "av1-1-b8-03-sizeup.mkv";
@@ -395,6 +397,7 @@ class ExternalFrameBufferNonRefTest : public ExternalFrameBufferTest {
};
#endif // CONFIG_WEBM_IO
+#if !CONFIG_REALTIME_ONLY
// This test runs through the set of test vectors, and decodes them.
// Libaom will call into the application to allocate a frame buffer when
// needed. The md5 checksums are computed for each frame in the video file.
@@ -438,6 +441,7 @@ TEST_P(ExternalFrameBufferMD5Test, ExtFBMD5Match) {
// Decode frame, and check the md5 matching.
ASSERT_NO_FATAL_FAILURE(RunLoop(video.get(), cfg));
}
+#endif // !CONFIG_REALTIME_ONLY
#if CONFIG_WEBM_IO
TEST_F(ExternalFrameBufferTest, MinFrameBuffers) {
@@ -447,7 +451,11 @@ TEST_F(ExternalFrameBufferTest, MinFrameBuffers) {
ASSERT_EQ(AOM_CODEC_OK,
SetFrameBufferFunctions(num_buffers, get_aom_frame_buffer,
release_aom_frame_buffer));
+#if CONFIG_REALTIME_ONLY
+ ASSERT_EQ(AOM_CODEC_UNSUP_FEATURE, DecodeRemainingFrames());
+#else
ASSERT_EQ(AOM_CODEC_OK, DecodeRemainingFrames());
+#endif
}
TEST_F(ExternalFrameBufferTest, EightJitterBuffers) {
@@ -459,7 +467,11 @@ TEST_F(ExternalFrameBufferTest, EightJitterBuffers) {
ASSERT_EQ(AOM_CODEC_OK,
SetFrameBufferFunctions(num_buffers, get_aom_frame_buffer,
release_aom_frame_buffer));
+#if CONFIG_REALTIME_ONLY
+ ASSERT_EQ(AOM_CODEC_UNSUP_FEATURE, DecodeRemainingFrames());
+#else
ASSERT_EQ(AOM_CODEC_OK, DecodeRemainingFrames());
+#endif
}
TEST_F(ExternalFrameBufferTest, NotEnoughBuffers) {
@@ -470,10 +482,14 @@ TEST_F(ExternalFrameBufferTest, NotEnoughBuffers) {
ASSERT_EQ(AOM_CODEC_OK,
SetFrameBufferFunctions(num_buffers, get_aom_frame_buffer,
release_aom_frame_buffer));
+#if CONFIG_REALTIME_ONLY
+ ASSERT_EQ(AOM_CODEC_UNSUP_FEATURE, DecodeOneFrame());
+#else
ASSERT_EQ(AOM_CODEC_OK, DecodeOneFrame());
// Only run this on long clips. Decoding a very short clip will return
// AOM_CODEC_OK even with only 2 buffers.
ASSERT_EQ(AOM_CODEC_MEM_ERROR, DecodeRemainingFrames());
+#endif
}
TEST_F(ExternalFrameBufferTest, NoRelease) {
@@ -481,8 +497,12 @@ TEST_F(ExternalFrameBufferTest, NoRelease) {
ASSERT_EQ(AOM_CODEC_OK,
SetFrameBufferFunctions(num_buffers, get_aom_frame_buffer,
do_not_release_aom_frame_buffer));
+#if CONFIG_REALTIME_ONLY
+ ASSERT_EQ(AOM_CODEC_UNSUP_FEATURE, DecodeOneFrame());
+#else
ASSERT_EQ(AOM_CODEC_OK, DecodeOneFrame());
ASSERT_EQ(AOM_CODEC_MEM_ERROR, DecodeRemainingFrames());
+#endif
}
TEST_F(ExternalFrameBufferTest, NullRealloc) {
@@ -515,11 +535,15 @@ TEST_F(ExternalFrameBufferTest, NullReleaseFunction) {
}
TEST_F(ExternalFrameBufferTest, SetAfterDecode) {
+#if CONFIG_REALTIME_ONLY
+ ASSERT_EQ(AOM_CODEC_UNSUP_FEATURE, DecodeOneFrame());
+#else
const int num_buffers = AOM_MAXIMUM_REF_BUFFERS + AOM_MAXIMUM_WORK_BUFFERS;
ASSERT_EQ(AOM_CODEC_OK, DecodeOneFrame());
ASSERT_EQ(AOM_CODEC_ERROR,
SetFrameBufferFunctions(num_buffers, get_aom_frame_buffer,
release_aom_frame_buffer));
+#endif
}
TEST_F(ExternalFrameBufferNonRefTest, ReleaseNonRefFrameBuffer) {
@@ -527,14 +551,20 @@ TEST_F(ExternalFrameBufferNonRefTest, ReleaseNonRefFrameBuffer) {
ASSERT_EQ(AOM_CODEC_OK,
SetFrameBufferFunctions(num_buffers, get_aom_frame_buffer,
release_aom_frame_buffer));
+#if CONFIG_REALTIME_ONLY
+ ASSERT_EQ(AOM_CODEC_UNSUP_FEATURE, DecodeRemainingFrames());
+#else
ASSERT_EQ(AOM_CODEC_OK, DecodeRemainingFrames());
+#endif
CheckFrameBufferRelease();
}
#endif // CONFIG_WEBM_IO
+#if !CONFIG_REALTIME_ONLY
AV1_INSTANTIATE_TEST_SUITE(
ExternalFrameBufferMD5Test,
::testing::ValuesIn(libaom_test::kAV1TestVectors,
libaom_test::kAV1TestVectors +
libaom_test::kNumAV1TestVectors));
+#endif
} // namespace
diff --git a/third_party/libaom/source/libaom/test/film_grain_table_test.cc b/third_party/libaom/source/libaom/test/film_grain_table_test.cc
index 524d67d7bc..31fb908ffa 100644
--- a/third_party/libaom/source/libaom/test/film_grain_table_test.cc
+++ b/third_party/libaom/source/libaom/test/film_grain_table_test.cc
@@ -101,6 +101,20 @@ TEST(FilmGrainTableTest, AddAndLookupSingleSegment) {
aom_film_grain_table_free(&table);
}
+TEST(FilmGrainTableTest, AddSingleSegmentRemoveBiggerSegment) {
+ aom_film_grain_table_t table;
+ aom_film_grain_t grain;
+
+ memset(&table, 0, sizeof(table));
+
+ aom_film_grain_table_append(&table, 0, 1000, film_grain_test_vectors + 0);
+ EXPECT_TRUE(aom_film_grain_table_lookup(&table, 0, 1100, true, &grain));
+
+ EXPECT_EQ(0, table.head);
+ EXPECT_EQ(0, table.tail);
+ aom_film_grain_table_free(&table);
+}
+
TEST(FilmGrainTableTest, SplitSingleSegment) {
aom_film_grain_table_t table;
aom_film_grain_t grain;
diff --git a/third_party/libaom/source/libaom/test/frame_size_tests.cc b/third_party/libaom/source/libaom/test/frame_size_tests.cc
index 38b6a63c3d..2365a20c24 100644
--- a/third_party/libaom/source/libaom/test/frame_size_tests.cc
+++ b/third_party/libaom/source/libaom/test/frame_size_tests.cc
@@ -73,6 +73,7 @@ TEST_F(AV1FrameSizeTests, OneByOneVideo) {
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
+#if !CONFIG_REALTIME_ONLY
typedef struct {
unsigned int width;
unsigned int height;
@@ -129,5 +130,6 @@ TEST_P(AV1LosslessFrameSizeTests, LosslessEncode) {
AV1_INSTANTIATE_TEST_SUITE(AV1LosslessFrameSizeTests,
::testing::ValuesIn(FrameSizeTestParams),
testing::Values(::libaom_test::kAllIntra));
+#endif // !CONFIG_REALTIME_ONLY
} // namespace
diff --git a/third_party/libaom/source/libaom/test/hbd_metrics_test.cc b/third_party/libaom/source/libaom/test/hbd_metrics_test.cc
index 8044b516c1..39c2b4c101 100644
--- a/third_party/libaom/source/libaom/test/hbd_metrics_test.cc
+++ b/third_party/libaom/source/libaom/test/hbd_metrics_test.cc
@@ -88,7 +88,7 @@ double compute_hbd_aomssim(const YV12_BUFFER_CONFIG *source,
double compute_aomssim(const YV12_BUFFER_CONFIG *source,
const YV12_BUFFER_CONFIG *dest) {
double ssim, weight;
- aom_calc_ssim(source, dest, &weight, &ssim);
+ aom_lowbd_calc_ssim(source, dest, &weight, &ssim);
return 100 * pow(ssim / weight, 8.0);
}
diff --git a/third_party/libaom/source/libaom/test/horz_superres_test.cc b/third_party/libaom/source/libaom/test/horz_superres_test.cc
index 9733344111..2f0f3fdb6a 100644
--- a/third_party/libaom/source/libaom/test/horz_superres_test.cc
+++ b/third_party/libaom/source/libaom/test/horz_superres_test.cc
@@ -52,7 +52,7 @@ std::ostream &operator<<(std::ostream &os, const TestVideoParam &test_arg) {
}
const TestVideoParam kTestVideoVectors[] = {
- { "park_joy_90p_8_420.y4m", AOM_IMG_FMT_I420, AOM_BITS_8, 0, 5, 0, 25.5,
+ { "park_joy_90p_8_420.y4m", AOM_IMG_FMT_I420, AOM_BITS_8, 0, 5, 0, 25.4,
45.0 },
#if CONFIG_AV1_HIGHBITDEPTH
{ "park_joy_90p_10_444.y4m", AOM_IMG_FMT_I44416, AOM_BITS_10, 1, 5, 0, 27.0,
diff --git a/third_party/libaom/source/libaom/test/intrabc_test.cc b/third_party/libaom/source/libaom/test/intrabc_test.cc
index b57eb6fab5..2c60596ab8 100644
--- a/third_party/libaom/source/libaom/test/intrabc_test.cc
+++ b/third_party/libaom/source/libaom/test/intrabc_test.cc
@@ -153,8 +153,10 @@ TEST(IntrabcTest, DvValidation) {
xd.plane[2].subsampling_x = 1;
xd.plane[2].subsampling_y = 1;
+ SequenceHeader seq_params = {};
AV1_COMMON cm;
memset(&cm, 0, sizeof(cm));
+ cm.seq_params = &seq_params;
for (const DvTestCase &dv_case : kDvCases) {
const int mi_row = xd.tile.mi_row_start + dv_case.mi_row_offset;
diff --git a/third_party/libaom/source/libaom/test/invalid_file_test.cc b/third_party/libaom/source/libaom/test/invalid_file_test.cc
index 77839fafcd..6ac8d1ac32 100644
--- a/third_party/libaom/source/libaom/test/invalid_file_test.cc
+++ b/third_party/libaom/source/libaom/test/invalid_file_test.cc
@@ -151,6 +151,7 @@ const DecodeParam kAV1InvalidFileTests[] = {
{ 1, "invalid-oss-fuzz-10779.ivf", NULL },
{ 1, "invalid-oss-fuzz-11477.ivf", NULL },
{ 1, "invalid-oss-fuzz-11479.ivf", "invalid-oss-fuzz-11479.ivf.res.2" },
+ { 1, "invalid-oss-fuzz-33030.ivf", NULL },
#endif
};
diff --git a/third_party/libaom/source/libaom/test/kf_test.cc b/third_party/libaom/source/libaom/test/kf_test.cc
index cc2cc89c2b..2d228f2fef 100644
--- a/third_party/libaom/source/libaom/test/kf_test.cc
+++ b/third_party/libaom/source/libaom/test/kf_test.cc
@@ -100,10 +100,36 @@ class KeyFrameIntervalTestLarge
aom_rc_mode end_usage_check_;
};
+// Because valgrind builds take a very long time to run, use a lower
+// resolution video for valgrind runs.
+const char *TestFileName() {
+#if AOM_VALGRIND_BUILD
+ return "hantro_collage_w176h144.yuv";
+#else
+ return "hantro_collage_w352h288.yuv";
+#endif // AOM_VALGRIND_BUILD
+}
+
+int TestFileWidth() {
+#if AOM_VALGRIND_BUILD
+ return 176;
+#else
+ return 352;
+#endif // AOM_VALGRIND_BUILD
+}
+
+int TestFileHeight() {
+#if AOM_VALGRIND_BUILD
+ return 144;
+#else
+ return 288;
+#endif // AOM_VALGRIND_BUILD
+}
+
TEST_P(KeyFrameIntervalTestLarge, KeyFrameIntervalTest) {
- libaom_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
- cfg_.g_timebase.den, cfg_.g_timebase.num,
- 0, 75);
+ libaom_test::I420VideoSource video(TestFileName(), TestFileWidth(),
+ TestFileHeight(), cfg_.g_timebase.den,
+ cfg_.g_timebase.num, 0, 75);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_EQ(is_kf_interval_violated_, false) << kf_dist_param_;
}
@@ -187,9 +213,9 @@ TEST_P(ForcedKeyTestLarge, Frame1IsKey) {
frame_num_ = 0;
cfg_.g_lag_in_frames = lag_values[i];
is_kf_placement_violated_ = false;
- libaom_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
- timebase.den, timebase.num, 0,
- fwd_kf_enabled_ ? 60 : 30);
+ libaom_test::I420VideoSource video(
+ TestFileName(), TestFileWidth(), TestFileHeight(), timebase.den,
+ timebase.num, 0, fwd_kf_enabled_ ? 60 : 30);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_EQ(is_kf_placement_violated_, false)
<< "Frame #" << frame_num_ << " isn't a keyframe!";
@@ -207,9 +233,9 @@ TEST_P(ForcedKeyTestLarge, ForcedFrameIsKey) {
forced_kf_frame_num_ = lag_values[i] - 1;
cfg_.g_lag_in_frames = lag_values[i];
is_kf_placement_violated_ = false;
- libaom_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
- timebase.den, timebase.num, 0,
- fwd_kf_enabled_ ? 60 : 30);
+ libaom_test::I420VideoSource video(
+ TestFileName(), TestFileWidth(), TestFileHeight(), timebase.den,
+ timebase.num, 0, fwd_kf_enabled_ ? 60 : 30);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_EQ(is_kf_placement_violated_, false)
<< "Frame #" << frame_num_ << " isn't a keyframe!";
@@ -237,9 +263,9 @@ TEST_P(ForcedKeyTestLarge, ForcedFrameIsKeyCornerCases) {
forced_kf_frame_num_ = (int)cfg_.kf_max_dist + kf_offsets[i];
forced_kf_frame_num_ = forced_kf_frame_num_ > 0 ? forced_kf_frame_num_ : 1;
is_kf_placement_violated_ = false;
- libaom_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
- timebase.den, timebase.num, 0,
- fwd_kf_enabled_ ? 60 : 30);
+ libaom_test::I420VideoSource video(
+ TestFileName(), TestFileWidth(), TestFileHeight(), timebase.den,
+ timebase.num, 0, fwd_kf_enabled_ ? 60 : 30);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_EQ(is_kf_placement_violated_, false)
<< "Frame #" << frame_num_ << " isn't a keyframe!";
diff --git a/third_party/libaom/source/libaom/test/lossless_test.cc b/third_party/libaom/source/libaom/test/lossless_test.cc
index 92ab299ea9..c14bc06e5e 100644
--- a/third_party/libaom/source/libaom/test/lossless_test.cc
+++ b/third_party/libaom/source/libaom/test/lossless_test.cc
@@ -24,13 +24,14 @@ namespace {
const int kMaxPsnr = 100;
class LosslessTestLarge
- : public ::libaom_test::CodecTestWith2Params<libaom_test::TestMode,
- aom_rc_mode>,
+ : public ::libaom_test::CodecTestWith3Params<libaom_test::TestMode,
+ aom_rc_mode, int>,
public ::libaom_test::EncoderTest {
protected:
LosslessTestLarge()
: EncoderTest(GET_PARAM(0)), psnr_(kMaxPsnr), nframes_(0),
- encoding_mode_(GET_PARAM(1)), rc_end_usage_(GET_PARAM(2)) {}
+ encoding_mode_(GET_PARAM(1)), rc_end_usage_(GET_PARAM(2)),
+ cpu_used_(GET_PARAM(3)) {}
virtual ~LosslessTestLarge() {}
@@ -47,6 +48,7 @@ class LosslessTestLarge
if (cfg_.rc_max_quantizer > 0 || cfg_.rc_min_quantizer > 0) {
encoder->Control(AV1E_SET_LOSSLESS, 1);
}
+ encoder->Control(AOME_SET_CPUUSED, cpu_used_);
}
}
@@ -79,6 +81,7 @@ class LosslessTestLarge
unsigned int nframes_;
libaom_test::TestMode encoding_mode_;
aom_rc_mode rc_end_usage_;
+ int cpu_used_;
int base_qindex_;
};
@@ -136,8 +139,33 @@ TEST_P(LosslessTestLarge, TestLossLessEncodingCtrl) {
EXPECT_GE(psnr_lossless, kMaxPsnr);
}
+class LosslessAllIntraTestLarge : public LosslessTestLarge {};
+
+TEST_P(LosslessAllIntraTestLarge, TestLossLessEncodingCtrl) {
+ const aom_rational timebase = { 33333333, 1000000000 };
+ cfg_.g_timebase = timebase;
+ // Intentionally set Q > 0 to make sure the control can be used to activate
+ // lossless coding.
+ cfg_.rc_min_quantizer = 10;
+ cfg_.rc_max_quantizer = 20;
+
+ init_flags_ = AOM_CODEC_USE_PSNR;
+
+ libaom_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
+ timebase.den, timebase.num, 0, 5);
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+ const double psnr_lossless = GetMinPsnr();
+ EXPECT_GE(psnr_lossless, kMaxPsnr);
+}
+
AV1_INSTANTIATE_TEST_SUITE(LosslessTestLarge,
::testing::Values(::libaom_test::kOnePassGood,
::libaom_test::kTwoPassGood),
- ::testing::Values(AOM_Q, AOM_VBR, AOM_CBR, AOM_CQ));
+ ::testing::Values(AOM_Q, AOM_VBR, AOM_CBR, AOM_CQ),
+ ::testing::Values(0)); // cpu_used
+
+AV1_INSTANTIATE_TEST_SUITE(LosslessAllIntraTestLarge,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(AOM_Q),
+ ::testing::Values(6, 9)); // cpu_used
} // namespace
diff --git a/third_party/libaom/source/libaom/test/metadata_test.cc b/third_party/libaom/source/libaom/test/metadata_test.cc
index fd3d5c4932..b7b7f14f42 100644
--- a/third_party/libaom/source/libaom/test/metadata_test.cc
+++ b/third_party/libaom/source/libaom/test/metadata_test.cc
@@ -34,7 +34,7 @@ const size_t kMetadataPayloadSizeCll = 4;
const uint8_t kMetadataPayloadCll[kMetadataPayloadSizeCll] = { 0xB5, 0x01, 0x02,
0x03 };
-#if CONFIG_AV1_ENCODER
+#if CONFIG_AV1_ENCODER && !CONFIG_REALTIME_ONLY
const size_t kMetadataObuSizeT35 = 28;
const uint8_t kMetadataObuT35[kMetadataObuSizeT35] = {
@@ -193,7 +193,7 @@ TEST_P(MetadataEncodeTest, TestMetadataEncoding) {
AV1_INSTANTIATE_TEST_SUITE(MetadataEncodeTest,
::testing::Values(::libaom_test::kOnePassGood));
-#endif // CONFIG_AV1_ENCODER
+#endif // CONFIG_AV1_ENCODER && !CONFIG_REALTIME_ONLY
} // namespace
TEST(MetadataTest, MetadataAllocation) {
diff --git a/third_party/libaom/source/libaom/test/monochrome_test.cc b/third_party/libaom/source/libaom/test/monochrome_test.cc
index 6395c22caf..a71cc9b3df 100644
--- a/third_party/libaom/source/libaom/test/monochrome_test.cc
+++ b/third_party/libaom/source/libaom/test/monochrome_test.cc
@@ -20,16 +20,45 @@
namespace {
+const unsigned int kCqLevel = 18;
+const double kMaxPsnr = 100.0;
+
+// kPsnrThreshold represents the psnr threshold used to validate the quality of
+// the first frame. The indices 0 and 1 correspond to non-allintra and allintra
+// encoding modes.
+const double kPsnrThreshold[2] = { 29.0, 41.5 };
+
+// kPsnrFluctuation represents the maximum allowed psnr fluctuation w.r.t. the
+// first frame. The indices 0 and 1 correspond to non-allintra and allintra
+// encoding modes.
+const double kPsnrFluctuation[2] = { 2.5, 0.3 };
+
class MonochromeTest
- : public ::libaom_test::CodecTestWithParam<libaom_test::TestMode>,
+ : public ::libaom_test::CodecTestWith3Params<libaom_test::TestMode, int,
+ int>,
public ::libaom_test::EncoderTest {
protected:
- MonochromeTest() : EncoderTest(GET_PARAM(0)), frame0_psnr_y_(0.) {}
+ MonochromeTest()
+ : EncoderTest(GET_PARAM(0)), lossless_(GET_PARAM(2)),
+ frame0_psnr_y_(0.0) {}
virtual ~MonochromeTest() {}
virtual void SetUp() { InitializeConfig(GET_PARAM(1)); }
+ virtual void PreEncodeFrameHook(::libaom_test::VideoSource *video,
+ ::libaom_test::Encoder *encoder) {
+ if (video->frame() == 0) {
+ encoder->Control(AOME_SET_CPUUSED, GET_PARAM(3));
+ if (mode_ == ::libaom_test::kAllIntra) {
+ encoder->Control(AOME_SET_CQ_LEVEL, kCqLevel);
+ }
+ if (lossless_) {
+ encoder->Control(AV1E_SET_LOSSLESS, 1);
+ }
+ }
+ }
+
virtual void DecompressedFrameHook(const aom_image_t &img,
aom_codec_pts_t pts) {
(void)pts;
@@ -68,15 +97,23 @@ class MonochromeTest
}
virtual void PSNRPktHook(const aom_codec_cx_pkt_t *pkt) {
+ // Check the average PSNR value is >= 100 dB in case of lossless encoding.
+ if (lossless_) {
+ EXPECT_GE(pkt->data.psnr.psnr[0], kMaxPsnr);
+ return;
+ }
+ const bool is_allintra = (mode_ == ::libaom_test::kAllIntra);
// Check that the initial Y PSNR value is 'high enough', and check that
// subsequent Y PSNR values are 'close' to this initial value.
- if (frame0_psnr_y_ == 0.) {
+ if (frame0_psnr_y_ == 0.0) {
frame0_psnr_y_ = pkt->data.psnr.psnr[1];
- EXPECT_GT(frame0_psnr_y_, 29.);
+ EXPECT_GT(frame0_psnr_y_, kPsnrThreshold[is_allintra]);
}
- EXPECT_NEAR(pkt->data.psnr.psnr[1], frame0_psnr_y_, 2.5);
+ EXPECT_NEAR(pkt->data.psnr.psnr[1], frame0_psnr_y_,
+ kPsnrFluctuation[is_allintra]);
}
+ int lossless_;
std::vector<int> chroma_value_list_;
double frame0_psnr_y_;
};
@@ -87,9 +124,6 @@ TEST_P(MonochromeTest, TestMonochromeEncoding) {
init_flags_ = AOM_CODEC_USE_PSNR;
- cfg_.g_w = 352;
- cfg_.g_h = 288;
-
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 600;
cfg_.rc_buf_sz = 1000;
@@ -98,13 +132,10 @@ TEST_P(MonochromeTest, TestMonochromeEncoding) {
cfg_.rc_undershoot_pct = 50;
cfg_.rc_overshoot_pct = 50;
cfg_.rc_end_usage = AOM_CBR;
- cfg_.kf_mode = AOM_KF_AUTO;
cfg_.g_lag_in_frames = 1;
cfg_.kf_min_dist = cfg_.kf_max_dist = 3000;
// Enable dropped frames.
cfg_.rc_dropframe_thresh = 1;
- // Disable error_resilience mode.
- cfg_.g_error_resilient = 0;
// Run at low bitrate.
cfg_.rc_target_bitrate = 40;
// Set monochrome encoding flag
@@ -121,8 +152,33 @@ TEST_P(MonochromeTest, TestMonochromeEncoding) {
}
}
+class MonochromeAllIntraTest : public MonochromeTest {};
+
+TEST_P(MonochromeAllIntraTest, TestMonochromeEncoding) {
+ ::libaom_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
+ 30, 1, 0, 5);
+ init_flags_ = AOM_CODEC_USE_PSNR;
+ // Set monochrome encoding flag
+ cfg_.monochrome = 1;
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+
+ // Check that the chroma planes are equal across all frames
+ std::vector<int>::const_iterator iter = chroma_value_list_.begin();
+ int initial_chroma_value = *iter;
+ for (; iter != chroma_value_list_.end(); ++iter) {
+ // Check that all decoded frames have the same constant chroma planes.
+ EXPECT_EQ(*iter, initial_chroma_value);
+ }
+}
+
AV1_INSTANTIATE_TEST_SUITE(MonochromeTest,
::testing::Values(::libaom_test::kOnePassGood,
- ::libaom_test::kTwoPassGood));
-
+ ::libaom_test::kTwoPassGood),
+ ::testing::Values(0), // lossless
+ ::testing::Values(0)); // cpu_used
+
+AV1_INSTANTIATE_TEST_SUITE(MonochromeAllIntraTest,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(0, 1), // lossless
+ ::testing::Values(6, 9)); // cpu_used
} // namespace
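The PSNR checks above follow a frame-0-baseline pattern: the first frame must clear a mode-dependent threshold, and every later frame must stay within a mode-dependent fluctuation band of it (or, for lossless, every frame must reach kMaxPsnr). A compact sketch of that logic with placeholder limits:

    #include <cmath>

    // Frame-0 baseline plus bounded fluctuation, mirroring PSNRPktHook() above.
    // min_first_frame / max_fluctuation stand in for the table entries.
    struct PsnrStabilityCheck {
      double frame0_psnr_y = 0.0;
      bool OnFrame(double psnr_y, double min_first_frame, double max_fluctuation) {
        if (frame0_psnr_y == 0.0) {
          frame0_psnr_y = psnr_y;
          return psnr_y > min_first_frame;  // first frame must be good enough
        }
        // Later frames must stay close to the first one.
        return std::fabs(psnr_y - frame0_psnr_y) <= max_fluctuation;
      }
    };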
diff --git a/third_party/libaom/source/libaom/test/noise_model_test.cc b/third_party/libaom/source/libaom/test/noise_model_test.cc
index aad8905a45..c12c080cac 100644
--- a/third_party/libaom/source/libaom/test/noise_model_test.cc
+++ b/third_party/libaom/source/libaom/test/noise_model_test.cc
@@ -212,6 +212,12 @@ TEST(NoiseStrengthSolver, SimplifiesCurve) {
aom_noise_strength_solver_free(&solver);
}
+TEST(NoiseStrengthLut, LutInitNegativeOrZeroSize) {
+ aom_noise_strength_lut_t lut;
+ ASSERT_FALSE(aom_noise_strength_lut_init(&lut, -1));
+ ASSERT_FALSE(aom_noise_strength_lut_init(&lut, 0));
+}
+
TEST(NoiseStrengthLut, LutEvalSinglePoint) {
aom_noise_strength_lut_t lut;
ASSERT_TRUE(aom_noise_strength_lut_init(&lut, 1));
diff --git a/third_party/libaom/source/libaom/test/quant_test.cc b/third_party/libaom/source/libaom/test/quant_test.cc
index 9fca953922..a042af13eb 100644
--- a/third_party/libaom/source/libaom/test/quant_test.cc
+++ b/third_party/libaom/source/libaom/test/quant_test.cc
@@ -20,6 +20,13 @@
namespace {
+const ::libaom_test::TestMode kTestMode[] =
+#if CONFIG_REALTIME_ONLY
+ { ::libaom_test::kRealTime };
+#else
+ { ::libaom_test::kRealTime, ::libaom_test::kOnePassGood };
+#endif
+
class QMTest
: public ::libaom_test::CodecTestWith2Params<libaom_test::TestMode, int>,
public ::libaom_test::EncoderTest {
@@ -41,6 +48,11 @@ class QMTest
encoder->Control(AV1E_SET_QM_MAX, qm_max_);
encoder->Control(AOME_SET_MAX_INTRA_BITRATE_PCT, 100);
+ if (mode_ == ::libaom_test::kRealTime) {
+ encoder->Control(AV1E_SET_ALLOW_WARPED_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_OBMC, 0);
+ }
}
}
@@ -75,11 +87,10 @@ TEST_P(QMTest, TestNoMisMatchQM2) { DoTest(0, 8); }
// encodes and decodes without a mismatch.
TEST_P(QMTest, TestNoMisMatchQM3) { DoTest(9, 15); }
-AV1_INSTANTIATE_TEST_SUITE(QMTest,
- ::testing::Values(::libaom_test::kRealTime,
- ::libaom_test::kOnePassGood),
+AV1_INSTANTIATE_TEST_SUITE(QMTest, ::testing::ValuesIn(kTestMode),
::testing::Range(5, 9));
+#if !CONFIG_REALTIME_ONLY
typedef struct {
const unsigned int min_q;
const unsigned int max_q;
@@ -173,4 +184,5 @@ AV1_INSTANTIATE_TEST_SUITE(QuantizerBoundsCheckTestLarge,
::libaom_test::kTwoPassGood),
::testing::ValuesIn(QuantTestParams),
::testing::Values(AOM_Q, AOM_VBR, AOM_CBR, AOM_CQ));
+#endif // !CONFIG_REALTIME_ONLY
} // namespace
diff --git a/third_party/libaom/source/libaom/test/quantize_func_test.cc b/third_party/libaom/source/libaom/test/quantize_func_test.cc
index 3d79cf8bd8..3523050844 100644
--- a/third_party/libaom/source/libaom/test/quantize_func_test.cc
+++ b/third_party/libaom/source/libaom/test/quantize_func_test.cc
@@ -589,4 +589,5 @@ INSTANTIATE_TEST_SUITE_P(
static_cast<TX_SIZE>(TX_32X32), TYPE_B, AOM_BITS_8)));
#endif // HAVE_AVX
+
} // namespace
diff --git a/third_party/libaom/source/libaom/test/rd_test.cc b/third_party/libaom/source/libaom/test/rd_test.cc
new file mode 100644
index 0000000000..0c481fcbb6
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/rd_test.cc
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include <math.h>
+#include <vector>
+
+#include "av1/common/quant_common.h"
+#include "av1/encoder/rd.h"
+#include "aom/aom_codec.h"
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+
+namespace {
+
+TEST(RdTest, GetDeltaqOffsetValueTest1) {
+ aom_bit_depth_t bit_depth = AOM_BITS_8;
+ double beta = 4;
+ int q_index = 29;
+ int dc_q_step =
+ av1_dc_quant_QTX(q_index, 0, static_cast<aom_bit_depth_t>(bit_depth));
+ EXPECT_EQ(dc_q_step, 32);
+
+ int ref_new_dc_q_step = static_cast<int>(round(dc_q_step / sqrt(beta)));
+ EXPECT_EQ(ref_new_dc_q_step, 16);
+
+ int delta_q = av1_get_deltaq_offset(bit_depth, q_index, beta);
+ int new_dc_q_step = av1_dc_quant_QTX(q_index, delta_q,
+ static_cast<aom_bit_depth_t>(bit_depth));
+
+ EXPECT_EQ(new_dc_q_step, ref_new_dc_q_step);
+}
+
+TEST(RdTest, GetDeltaqOffsetValueTest2) {
+ aom_bit_depth_t bit_depth = AOM_BITS_8;
+ double beta = 1.0 / 4.0;
+ int q_index = 29;
+ int dc_q_step =
+ av1_dc_quant_QTX(q_index, 0, static_cast<aom_bit_depth_t>(bit_depth));
+ EXPECT_EQ(dc_q_step, 32);
+
+ int ref_new_dc_q_step = static_cast<int>(round(dc_q_step / sqrt(beta)));
+ EXPECT_EQ(ref_new_dc_q_step, 64);
+
+ int delta_q = av1_get_deltaq_offset(bit_depth, q_index, beta);
+ int new_dc_q_step = av1_dc_quant_QTX(q_index, delta_q,
+ static_cast<aom_bit_depth_t>(bit_depth));
+
+ EXPECT_EQ(new_dc_q_step, ref_new_dc_q_step);
+}
+
+TEST(RdTest, GetDeltaqOffsetBoundaryTest1) {
+ aom_bit_depth_t bit_depth = AOM_BITS_8;
+ double beta = 0.000000001;
+ std::vector<int> q_index_ls = { 254, 255 };
+ for (auto q_index : q_index_ls) {
+ int delta_q = av1_get_deltaq_offset(bit_depth, q_index, beta);
+ EXPECT_EQ(q_index + delta_q, 255);
+ }
+}
+
+TEST(RdTest, GetDeltaqOffsetBoundaryTest2) {
+ aom_bit_depth_t bit_depth = AOM_BITS_8;
+ double beta = 100;
+ std::vector<int> q_index_ls = { 1, 0 };
+ for (auto q_index : q_index_ls) {
+ int delta_q = av1_get_deltaq_offset(bit_depth, q_index, beta);
+ EXPECT_EQ(q_index + delta_q, 0);
+ }
+}
+
+TEST(RdTest, GetDeltaqOffsetUnitaryTest1) {
+ aom_bit_depth_t bit_depth = AOM_BITS_8;
+ double beta = 1;
+ for (int q_index = 0; q_index < 255; ++q_index) {
+ int delta_q = av1_get_deltaq_offset(bit_depth, q_index, beta);
+ EXPECT_EQ(delta_q, 0);
+ }
+}
+
+} // namespace
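The GetDeltaqOffsetValue tests above pin down one relation: the delta-q offset chosen for a given beta should scale the DC quantization step by 1/sqrt(beta). The arithmetic being checked, isolated from any libaom calls:

    #include <cmath>

    // Expected DC q-step after applying a delta-q derived from beta, as the
    // tests above compute it (dc_q_step 32 corresponds to q_index 29 at 8-bit).
    int ScaledDcQStep(int dc_q_step, double beta) {
      return static_cast<int>(std::round(dc_q_step / std::sqrt(beta)));
    }
    // ScaledDcQStep(32, 4.0)  == 16  (beta > 1 -> finer quantization)
    // ScaledDcQStep(32, 0.25) == 64  (beta < 1 -> coarser quantization)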
diff --git a/third_party/libaom/source/libaom/test/resize_test.cc b/third_party/libaom/source/libaom/test/resize_test.cc
index cb09a9a193..68d610151d 100644
--- a/third_party/libaom/source/libaom/test/resize_test.cc
+++ b/third_party/libaom/source/libaom/test/resize_test.cc
@@ -203,6 +203,17 @@ class ResizeTest
virtual void SetUp() { InitializeConfig(GET_PARAM(1)); }
+ virtual void PreEncodeFrameHook(libaom_test::VideoSource *video,
+ libaom_test::Encoder *encoder) {
+ if (video->frame() == 0) {
+ if (GET_PARAM(1) == ::libaom_test::kRealTime) {
+ encoder->Control(AV1E_SET_AQ_MODE, 3);
+ encoder->Control(AOME_SET_CPUUSED, 5);
+ encoder->Control(AV1E_SET_FRAME_PARALLEL_DECODING, 1);
+ }
+ }
+ }
+
virtual void DecompressedFrameHook(const aom_image_t &img,
aom_codec_pts_t pts) {
frame_info_list_.push_back(FrameInfo(pts, img.d_w, img.d_h));
@@ -241,6 +252,7 @@ TEST_P(ResizeTest, TestExternalResizeWorks) {
const unsigned int kStepDownFrame = 3;
const unsigned int kStepUpFrame = 6;
+#if !CONFIG_REALTIME_ONLY
class ResizeInternalTestLarge : public ResizeTest {
protected:
#if WRITE_COMPRESSED_STREAM
@@ -362,6 +374,10 @@ TEST_P(ResizeInternalTestLarge, TestInternalResizeChangeConfig) {
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
+AV1_INSTANTIATE_TEST_SUITE(ResizeInternalTestLarge,
+ ::testing::Values(::libaom_test::kOnePassGood));
+#endif
+
class ResizeRealtimeTest
: public ::libaom_test::CodecTestWith2Params<libaom_test::TestMode, int>,
public ::libaom_test::EncoderTest {
@@ -375,6 +391,9 @@ class ResizeRealtimeTest
libaom_test::Encoder *encoder) {
if (video->frame() == 0) {
encoder->Control(AV1E_SET_AQ_MODE, 3);
+ encoder->Control(AV1E_SET_ALLOW_WARPED_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
+ encoder->Control(AV1E_SET_ENABLE_OBMC, 0);
encoder->Control(AOME_SET_CPUUSED, set_cpu_used_);
encoder->Control(AV1E_SET_FRAME_PARALLEL_DECODING, 1);
}
@@ -786,6 +805,7 @@ TEST_P(ResizeCspTest, TestResizeCspWorks) {
}
}
+#if !CONFIG_REALTIME_ONLY
// This class is used to check if there are any fatal
// failures while encoding with resize-mode > 0
class ResizeModeTestLarge
@@ -833,16 +853,6 @@ TEST_P(ResizeModeTestLarge, ResizeModeTest) {
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
-AV1_INSTANTIATE_TEST_SUITE(ResizeTest,
- ::testing::Values(::libaom_test::kRealTime));
-AV1_INSTANTIATE_TEST_SUITE(ResizeInternalTestLarge,
- ::testing::Values(::libaom_test::kOnePassGood));
-AV1_INSTANTIATE_TEST_SUITE(ResizeRealtimeTest,
- ::testing::Values(::libaom_test::kRealTime),
- ::testing::Range(5, 10));
-AV1_INSTANTIATE_TEST_SUITE(ResizeCspTest,
- ::testing::Values(::libaom_test::kRealTime));
-
// TODO(anyone): Enable below test once resize issues are fixed
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(ResizeModeTestLarge);
// AV1_INSTANTIATE_TEST_SUITE(
@@ -851,4 +861,14 @@ GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(ResizeModeTestLarge);
// ::libaom_test::kTwoPassGood),
// ::testing::Values(1, 2), ::testing::Values(8, 12, 16),
// ::testing::Values(8, 12, 16), ::testing::Range(2, 7));
+#endif // !CONFIG_REALTIME_ONLY
+
+AV1_INSTANTIATE_TEST_SUITE(ResizeTest,
+ ::testing::Values(::libaom_test::kRealTime));
+AV1_INSTANTIATE_TEST_SUITE(ResizeRealtimeTest,
+ ::testing::Values(::libaom_test::kRealTime),
+ ::testing::Range(6, 10));
+AV1_INSTANTIATE_TEST_SUITE(ResizeCspTest,
+ ::testing::Values(::libaom_test::kRealTime));
+
} // namespace
diff --git a/third_party/libaom/source/libaom/test/rt_end_to_end_test.cc b/third_party/libaom/source/libaom/test/rt_end_to_end_test.cc
index e8a1a40d87..6d3704dbfc 100644
--- a/third_party/libaom/source/libaom/test/rt_end_to_end_test.cc
+++ b/third_party/libaom/source/libaom/test/rt_end_to_end_test.cc
@@ -42,9 +42,9 @@ std::unordered_map<std::string,
{ 6, { { 0, 36.1 }, { 3, 36.5 } } },
{ 7, { { 0, 35.5 }, { 3, 36.0 } } },
{ 8, { { 0, 36.0 }, { 3, 36.5 } } },
- { 9, { { 0, 35.5 }, { 3, 36.1 } } } } },
+ { 9, { { 0, 35.5 }, { 3, 36.0 } } } } },
{ "niklas_1280_720_30.y4m",
- { { 5, { { 0, 34.4 }, { 3, 34.4 } } },
+ { { 5, { { 0, 34.4 }, { 3, 34.32 } } },
{ 6, { { 0, 34.2 }, { 3, 34.2 } } },
{ 7, { { 0, 33.6 }, { 3, 33.6 } } },
{ 8, { { 0, 33.48 }, { 3, 33.48 } } },
@@ -125,6 +125,7 @@ class RTEndToEndTest
encoder->Control(AV1E_SET_COEFF_COST_UPD_FREQ, 2);
encoder->Control(AV1E_SET_MODE_COST_UPD_FREQ, 2);
encoder->Control(AV1E_SET_MV_COST_UPD_FREQ, 2);
+ encoder->Control(AV1E_SET_DV_COST_UPD_FREQ, 2);
}
}
diff --git a/third_party/libaom/source/libaom/test/sad_test.cc b/third_party/libaom/source/libaom/test/sad_test.cc
index afd84a8ad2..037ed2455f 100644
--- a/third_party/libaom/source/libaom/test/sad_test.cc
+++ b/third_party/libaom/source/libaom/test/sad_test.cc
@@ -564,8 +564,8 @@ class DistWtdCompAvgTest
void CheckCompAvg() {
for (int j = 0; j < 2; ++j) {
for (int i = 0; i < 4; ++i) {
- jcp_param_.fwd_offset = quant_dist_lookup_table[j][i][0];
- jcp_param_.bck_offset = quant_dist_lookup_table[j][i][1];
+ jcp_param_.fwd_offset = quant_dist_lookup_table[i][j];
+ jcp_param_.bck_offset = quant_dist_lookup_table[i][1 - j];
ReferenceDistWtdCompAvg(0);
dist_wtd_comp_avg(0);
@@ -632,8 +632,8 @@ class DistWtdSADavgTest
void CheckSAD() {
for (int j = 0; j < 2; ++j) {
for (int i = 0; i < 4; ++i) {
- jcp_param_.fwd_offset = quant_dist_lookup_table[j][i][0];
- jcp_param_.bck_offset = quant_dist_lookup_table[j][i][1];
+ jcp_param_.fwd_offset = quant_dist_lookup_table[i][j];
+ jcp_param_.bck_offset = quant_dist_lookup_table[i][1 - j];
const unsigned int reference_sad = ReferenceDistWtdSADavg(0);
const unsigned int exp_sad = dist_wtd_SAD_avg(0);
@@ -705,9 +705,7 @@ TEST_P(SADTest, ShortSrc) {
source_stride_ = tmp_stride;
}
-#define SPEED_TEST (0)
-#if SPEED_TEST
-TEST_P(SADTest, Speed) {
+TEST_P(SADTest, DISABLED_Speed) {
const int tmp_stride = source_stride_;
source_stride_ >>= 1;
FillRandom(source_data_, source_stride_);
@@ -715,7 +713,6 @@ TEST_P(SADTest, Speed) {
SpeedSAD();
source_stride_ = tmp_stride;
}
-#endif
TEST_P(SADSkipTest, MaxRef) {
FillConstant(source_data_, source_stride_, 0);
@@ -762,8 +759,7 @@ TEST_P(SADSkipTest, ShortSrc) {
source_stride_ = tmp_stride;
}
-#if SPEED_TEST
-TEST_P(SADSkipTest, Speed) {
+TEST_P(SADSkipTest, DISABLED_Speed) {
const int tmp_stride = source_stride_;
source_stride_ >>= 1;
FillRandom(source_data_, source_stride_);
@@ -771,7 +767,6 @@ TEST_P(SADSkipTest, Speed) {
SpeedSAD();
source_stride_ = tmp_stride;
}
-#endif
TEST_P(SADavgTest, MaxRef) {
FillConstant(source_data_, source_stride_, 0);
@@ -1020,8 +1015,7 @@ TEST_P(SADx4Test, SrcAlignedByWidth) {
source_data_ = tmp_source_data;
}
-#if SPEED_TEST
-TEST_P(SADx4Test, Speed) {
+TEST_P(SADx4Test, DISABLED_Speed) {
FillRandom(source_data_, source_stride_);
FillRandom(GetReference(0), reference_stride_);
FillRandom(GetReference(1), reference_stride_);
@@ -1029,7 +1023,6 @@ TEST_P(SADx4Test, Speed) {
FillRandom(GetReference(3), reference_stride_);
SpeedSAD();
}
-#endif
// SADSkipx4
TEST_P(SADSkipx4Test, MaxRef) {
@@ -1104,8 +1097,7 @@ TEST_P(SADSkipx4Test, SrcAlignedByWidth) {
source_data_ = tmp_source_data;
}
-#if SPEED_TEST
-TEST_P(SADSkipx4Test, Speed) {
+TEST_P(SADSkipx4Test, DISABLED_Speed) {
FillRandom(source_data_, source_stride_);
FillRandom(GetReference(0), reference_stride_);
FillRandom(GetReference(1), reference_stride_);
@@ -1113,12 +1105,10 @@ TEST_P(SADSkipx4Test, Speed) {
FillRandom(GetReference(3), reference_stride_);
SpeedSAD();
}
-#endif
using std::make_tuple;
-#if SPEED_TEST
-TEST_P(SADx4AvgTest, Speed) {
+TEST_P(SADx4AvgTest, DISABLED_Speed) {
int tmp_stride = reference_stride_;
reference_stride_ >>= 1;
FillRandom(source_data_, source_stride_);
@@ -1130,7 +1120,6 @@ TEST_P(SADx4AvgTest, Speed) {
SpeedSAD();
reference_stride_ = tmp_stride;
}
-#endif
TEST_P(SADx4AvgTest, MaxRef) {
FillConstant(source_data_, source_stride_, 0);
diff --git a/third_party/libaom/source/libaom/test/sharpness_test.cc b/third_party/libaom/source/libaom/test/sharpness_test.cc
new file mode 100644
index 0000000000..e74609bd9d
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/sharpness_test.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include <unordered_map>
+
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+
+#include "test/codec_factory.h"
+#include "test/encode_test_driver.h"
+#include "test/util.h"
+#include "test/y4m_video_source.h"
+
+namespace {
+const unsigned int kFrames = 10;
+const int kBitrate = 500;
+const unsigned int kCqLevel = 18;
+
+// List of PSNR thresholds for different test combinations.
+// Keys: test-mode, cpu-used, sharpness.
+const std::unordered_map<
+ int, std::unordered_map<int, std::unordered_map<int, double>>>
+ kPsnrThreshold = { { static_cast<int>(::libaom_test::kTwoPassGood),
+ { { 2, { { 2, 37.6 }, { 5, 37.6 } } },
+ { 4, { { 2, 37.5 }, { 5, 37.5 } } },
+ { 6, { { 2, 37.5 }, { 5, 37.5 } } } } },
+ { static_cast<int>(::libaom_test::kAllIntra),
+ { { 3, { { 2, 42.3 }, { 5, 42.4 } } },
+ { 6, { { 2, 41.8 }, { 4, 41.9 }, { 5, 41.9 } } },
+ { 9, { { 2, 41.4 }, { 5, 41.4 } } } } } };
+
+// This class is used to test the sharpness parameter configured through the
+// AOME_SET_SHARPNESS control call for different encoder configurations.
+class SharpnessTest
+ : public ::libaom_test::CodecTestWith3Params<libaom_test::TestMode, int,
+ int>,
+ public ::libaom_test::EncoderTest {
+ protected:
+ SharpnessTest()
+ : EncoderTest(GET_PARAM(0)), encoding_mode_(GET_PARAM(1)),
+ cpu_used_(GET_PARAM(2)), sharpness_level_(GET_PARAM(3)), psnr_(0.0),
+ nframes_(0) {}
+
+ ~SharpnessTest() override {}
+
+ void SetUp() override {
+ InitializeConfig(encoding_mode_);
+ if (encoding_mode_ == ::libaom_test::kTwoPassGood) {
+ cfg_.rc_target_bitrate = kBitrate;
+ cfg_.g_lag_in_frames = 5;
+ }
+ }
+
+ void BeginPassHook(unsigned int) override {
+ psnr_ = 0.0;
+ nframes_ = 0;
+ }
+
+ void PSNRPktHook(const aom_codec_cx_pkt_t *pkt) override {
+ psnr_ += pkt->data.psnr.psnr[0];
+ nframes_++;
+ }
+
+ void PreEncodeFrameHook(::libaom_test::VideoSource *video,
+ ::libaom_test::Encoder *encoder) override {
+ if (video->frame() == 0) {
+ encoder->Control(AOME_SET_CPUUSED, cpu_used_);
+ encoder->Control(AOME_SET_SHARPNESS, sharpness_level_);
+ if (encoding_mode_ == ::libaom_test::kTwoPassGood) {
+ encoder->Control(AOME_SET_ENABLEAUTOALTREF, 1);
+ encoder->Control(AOME_SET_ARNR_MAXFRAMES, 7);
+ encoder->Control(AOME_SET_ARNR_STRENGTH, 5);
+ } else if (encoding_mode_ == ::libaom_test::kAllIntra) {
+ encoder->Control(AOME_SET_CQ_LEVEL, kCqLevel);
+ }
+ }
+ }
+
+ double GetAveragePsnr() const {
+ if (nframes_) return psnr_ / nframes_;
+ return 0.0;
+ }
+
+ double GetPsnrThreshold() {
+ return kPsnrThreshold.at(encoding_mode_).at(cpu_used_).at(sharpness_level_);
+ }
+
+ void DoTest() {
+ init_flags_ = AOM_CODEC_USE_PSNR;
+
+ std::unique_ptr<libaom_test::VideoSource> video(
+ new libaom_test::Y4mVideoSource("paris_352_288_30.y4m", 0, kFrames));
+ ASSERT_TRUE(video.get() != NULL);
+
+ ASSERT_NO_FATAL_FAILURE(RunLoop(video.get()));
+ const double psnr = GetAveragePsnr();
+ EXPECT_GT(psnr, GetPsnrThreshold())
+ << "encoding mode = " << encoding_mode_ << ", cpu used = " << cpu_used_
+ << ", sharpness level = " << sharpness_level_;
+ }
+
+ private:
+ const libaom_test::TestMode encoding_mode_;
+ const int cpu_used_;
+ const int sharpness_level_;
+ double psnr_;
+ unsigned int nframes_;
+};
+
+class SharpnessTestLarge : public SharpnessTest {};
+
+class SharpnessAllIntraTest : public SharpnessTest {};
+
+class SharpnessAllIntraTestLarge : public SharpnessTest {};
+
+TEST_P(SharpnessTestLarge, SharpnessPSNRTest) { DoTest(); }
+
+TEST_P(SharpnessAllIntraTest, SharpnessPSNRTest) { DoTest(); }
+
+TEST_P(SharpnessAllIntraTestLarge, SharpnessPSNRTest) { DoTest(); }
+
+AV1_INSTANTIATE_TEST_SUITE(SharpnessTestLarge,
+ ::testing::Values(::libaom_test::kTwoPassGood),
+ ::testing::Values(2, 4, 6), // cpu_used
+ ::testing::Values(2, 5)); // sharpness level
+
+AV1_INSTANTIATE_TEST_SUITE(SharpnessAllIntraTest,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(6), // cpu_used
+ ::testing::Values(4)); // sharpness level
+
+AV1_INSTANTIATE_TEST_SUITE(SharpnessAllIntraTestLarge,
+ ::testing::Values(::libaom_test::kAllIntra),
+ ::testing::Values(3, 6, 9), // cpu_used
+ ::testing::Values(2, 5)); // sharpness level
+} // namespace
diff --git a/third_party/libaom/source/libaom/test/svc_datarate_test.cc b/third_party/libaom/source/libaom/test/svc_datarate_test.cc
index 8d7376a554..d2839ccc61 100644
--- a/third_party/libaom/source/libaom/test/svc_datarate_test.cc
+++ b/third_party/libaom/source/libaom/test/svc_datarate_test.cc
@@ -80,6 +80,7 @@ class DatarateTestSVC
mismatch_psnr_ = 0.0;
set_frame_level_er_ = 0;
multi_ref_ = 0;
+ use_fixed_mode_svc_ = 0;
}
virtual void PreEncodeFrameHook(::libaom_test::VideoSource *video,
@@ -89,6 +90,7 @@ class DatarateTestSVC
initialize_svc(number_temporal_layers_, number_spatial_layers_,
&svc_params_);
encoder->Control(AV1E_SET_SVC_PARAMS, &svc_params_);
+ // TODO(aomedia:3032): Configure KSVC in fixed mode.
encoder->Control(AV1E_SET_ENABLE_ORDER_HINT, 0);
encoder->Control(AV1E_SET_ENABLE_TPL_MODEL, 0);
encoder->Control(AV1E_SET_DELTAQ_MODE, 0);
@@ -110,7 +112,11 @@ class DatarateTestSVC
set_layer_pattern(video->frame(), &layer_id_, &ref_frame_config_,
spatial_layer_id, multi_ref_);
encoder->Control(AV1E_SET_SVC_LAYER_ID, &layer_id_);
- encoder->Control(AV1E_SET_SVC_REF_FRAME_CONFIG, &ref_frame_config_);
+ // The SET_SVC_REF_FRAME_CONFIG api is for the flexible SVC mode
+ // (i.e., use_fixed_mode_svc == 0).
+ if (!use_fixed_mode_svc_) {
+ encoder->Control(AV1E_SET_SVC_REF_FRAME_CONFIG, &ref_frame_config_);
+ }
if (set_frame_level_er_) {
int mode =
(layer_id_.spatial_layer_id > 0 || layer_id_.temporal_layer_id > 0);
@@ -170,7 +176,7 @@ class DatarateTestSVC
int lag_index = 0;
int base_count = frame_cnt >> 2;
layer_id->spatial_layer_id = spatial_layer;
- // Set the referende map buffer idx for the 7 references:
+ // Set the reference map buffer idx for the 7 references:
// LAST_FRAME (0), LAST2_FRAME(1), LAST3_FRAME(2), GOLDEN_FRAME(3),
// BWDREF_FRAME(4), ALTREF2_FRAME(5), ALTREF_FRAME(6).
for (int i = 0; i < INTER_REFS_PER_FRAME; i++) {
@@ -689,6 +695,48 @@ class DatarateTestSVC
}
}
+ virtual void BasicRateTargetingFixedModeSVC3TL3SLHDTest() {
+ cfg_.rc_buf_initial_sz = 500;
+ cfg_.rc_buf_optimal_sz = 500;
+ cfg_.rc_buf_sz = 1000;
+ cfg_.rc_dropframe_thresh = 0;
+ cfg_.rc_min_quantizer = 0;
+ cfg_.rc_max_quantizer = 63;
+ cfg_.rc_end_usage = AOM_CBR;
+ cfg_.g_lag_in_frames = 0;
+ cfg_.g_error_resilient = 0;
+
+ ::libaom_test::Y4mVideoSource video("niklas_1280_720_30.y4m", 0, 60);
+ const int bitrate_array[2] = { 600, 1200 };
+ cfg_.rc_target_bitrate = bitrate_array[GET_PARAM(4)];
+ ResetModel();
+ number_temporal_layers_ = 3;
+ number_spatial_layers_ = 3;
+ use_fixed_mode_svc_ = 1;
+ // SL0
+ const int bitrate_sl0 = 1 * cfg_.rc_target_bitrate / 8;
+ target_layer_bitrate_[0] = 50 * bitrate_sl0 / 100;
+ target_layer_bitrate_[1] = 70 * bitrate_sl0 / 100;
+ target_layer_bitrate_[2] = bitrate_sl0;
+ // SL1
+ const int bitrate_sl1 = 3 * cfg_.rc_target_bitrate / 8;
+ target_layer_bitrate_[3] = 50 * bitrate_sl1 / 100;
+ target_layer_bitrate_[4] = 70 * bitrate_sl1 / 100;
+ target_layer_bitrate_[5] = bitrate_sl1;
+ // SL2
+ const int bitrate_sl2 = 4 * cfg_.rc_target_bitrate / 8;
+ target_layer_bitrate_[6] = 50 * bitrate_sl2 / 100;
+ target_layer_bitrate_[7] = 70 * bitrate_sl2 / 100;
+ target_layer_bitrate_[8] = bitrate_sl2;
+ ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
+ for (int i = 0; i < number_temporal_layers_ * number_spatial_layers_; i++) {
+ ASSERT_GE(effective_datarate_tl[i], target_layer_bitrate_[i] * 0.70)
+ << " The datarate for the file is lower than target by too much!";
+ ASSERT_LE(effective_datarate_tl[i], target_layer_bitrate_[i] * 1.45)
+ << " The datarate for the file is greater than target by too much!";
+ }
+ }
+
virtual void BasicRateTargetingSVC3TL3SLHDMT2Test() {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
@@ -1101,6 +1149,7 @@ class DatarateTestSVC
double mismatch_psnr_;
int set_frame_level_er_;
int multi_ref_;
+ int use_fixed_mode_svc_;
};
// Check basic rate targeting for CBR, for 3 temporal layers, 1 spatial.
@@ -1142,6 +1191,12 @@ TEST_P(DatarateTestSVC, BasicRateTargetingSVC3TL3SLHD) {
}
// Check basic rate targeting for CBR, for 3 spatial, 3 temporal layers,
+// for fixed mode SVC.
+TEST_P(DatarateTestSVC, BasicRateTargetingFixedModeSVC3TL3SLHD) {
+ BasicRateTargetingFixedModeSVC3TL3SLHDTest();
+}
+
+// Check basic rate targeting for CBR, for 3 spatial, 3 temporal layers,
// for 2 threads, 2 tile_columns, row-mt enabled.
TEST_P(DatarateTestSVC, BasicRateTargetingSVC3TL3SLHDMT2) {
BasicRateTargetingSVC3TL3SLHDMT2Test();
diff --git a/third_party/libaom/source/libaom/test/tile_config_test.cc b/third_party/libaom/source/libaom/test/tile_config_test.cc
index 0098903aa8..517d54bd94 100644
--- a/third_party/libaom/source/libaom/test/tile_config_test.cc
+++ b/third_party/libaom/source/libaom/test/tile_config_test.cc
@@ -28,6 +28,14 @@ typedef struct {
const unsigned int tile_cols;
} uniformTileConfigParam;
+const libaom_test::TestMode kTestModeParams[] =
+#if CONFIG_REALTIME_ONLY
+ { ::libaom_test::kRealTime };
+#else
+ { ::libaom_test::kRealTime, ::libaom_test::kOnePassGood,
+ ::libaom_test::kTwoPassGood };
+#endif
+
static const uniformTileConfigParam uniformTileConfigParams[] = {
{ 128, 0, 0 }, { 128, 0, 2 }, { 128, 2, 0 }, { 128, 1, 2 }, { 128, 2, 2 },
{ 128, 3, 2 }, { 64, 0, 0 }, { 64, 0, 2 }, { 64, 2, 0 }, { 64, 1, 2 },
@@ -254,14 +262,12 @@ TEST_P(NonUniformTileConfigTestLarge, NonUniformTileConfigTest) {
}
AV1_INSTANTIATE_TEST_SUITE(UniformTileConfigTestLarge,
- ::testing::Values(::libaom_test::kOnePassGood,
- ::libaom_test::kTwoPassGood),
+ ::testing::ValuesIn(kTestModeParams),
::testing::ValuesIn(uniformTileConfigParams),
::testing::Values(AOM_Q, AOM_VBR, AOM_CBR, AOM_CQ));
AV1_INSTANTIATE_TEST_SUITE(NonUniformTileConfigTestLarge,
- ::testing::Values(::libaom_test::kOnePassGood,
- ::libaom_test::kTwoPassGood),
+ ::testing::ValuesIn(kTestModeParams),
::testing::ValuesIn(nonUniformTileConfigParams),
::testing::Values(AOM_Q, AOM_VBR, AOM_CBR, AOM_CQ));
@@ -352,7 +358,6 @@ TEST_P(TileGroupTestLarge, TileGroupCountTest) {
}
AV1_INSTANTIATE_TEST_SUITE(TileGroupTestLarge,
- ::testing::Values(::libaom_test::kOnePassGood,
- ::libaom_test::kTwoPassGood),
+ ::testing::ValuesIn(kTestModeParams),
::testing::ValuesIn(tileGroupTestParams));
} // namespace
diff --git a/third_party/libaom/source/libaom/test/time_stamp_test.cc b/third_party/libaom/source/libaom/test/time_stamp_test.cc
index 205e5ba5bd..baa0dc06db 100644
--- a/third_party/libaom/source/libaom/test/time_stamp_test.cc
+++ b/third_party/libaom/source/libaom/test/time_stamp_test.cc
@@ -95,8 +95,13 @@ TEST_P(TimestampTest, TestAv1Rollover) {
video.set_starting_pts(922337170351ll);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
-
+#if CONFIG_REALTIME_ONLY
+AV1_INSTANTIATE_TEST_SUITE(TimestampTest,
+ ::testing::Values(::libaom_test::kRealTime));
+#else
AV1_INSTANTIATE_TEST_SUITE(TimestampTest,
- ::testing::Values(::libaom_test::kTwoPassGood));
+ ::testing::Values(::libaom_test::kRealTime,
+ ::libaom_test::kTwoPassGood));
+#endif
} // namespace
diff --git a/third_party/libaom/source/libaom/test/tpl_model_test.cc b/third_party/libaom/source/libaom/test/tpl_model_test.cc
new file mode 100644
index 0000000000..83845ee6d7
--- /dev/null
+++ b/third_party/libaom/source/libaom/test/tpl_model_test.cc
@@ -0,0 +1,232 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
+#include <cstdlib>
+#include <vector>
+
+#include "av1/encoder/cost.h"
+#include "av1/encoder/tpl_model.h"
+#include "av1/encoder/encoder.h"
+#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
+
+namespace {
+
+double laplace_prob(double q_step, double b, double zero_bin_ratio,
+ int qcoeff) {
+ int abs_qcoeff = abs(qcoeff);
+ double z0 = fmax(exp(-zero_bin_ratio / 2 * q_step / b), TPL_EPSILON);
+ if (abs_qcoeff == 0) {
+ double p0 = 1 - z0;
+ return p0;
+ } else {
+ assert(abs_qcoeff > 0);
+ double z = fmax(exp(-q_step / b), TPL_EPSILON);
+ double p = z0 / 2 * (1 - z) * pow(z, abs_qcoeff - 1);
+ return p;
+ }
+}
+TEST(TplModelTest, ExponentialEntropyBoundaryTest1) {
+ double b = 0;
+ double q_step = 1;
+ double entropy = av1_exponential_entropy(q_step, b);
+ EXPECT_NEAR(entropy, 0, 0.00001);
+}
+
+TEST(TplModelTest, TransformCoeffEntropyTest1) {
+ // Check the consistency between av1_estimate_coeff_entropy() and
+ // laplace_prob()
+ double b = 1;
+ double q_step = 1;
+ double zero_bin_ratio = 2;
+ for (int qcoeff = -256; qcoeff < 256; ++qcoeff) {
+ double rate = av1_estimate_coeff_entropy(q_step, b, zero_bin_ratio, qcoeff);
+ double prob = laplace_prob(q_step, b, zero_bin_ratio, qcoeff);
+ double ref_rate = -log2(prob);
+ EXPECT_DOUBLE_EQ(rate, ref_rate);
+ }
+}
+
+TEST(TplModelTest, TransformCoeffEntropyTest2) {
+ // Check the consistency between av1_estimate_coeff_entropy(), laplace_prob()
+ // and av1_laplace_entropy()
+ double b = 1;
+ double q_step = 1;
+ double zero_bin_ratio = 2;
+ double est_expected_rate = 0;
+ for (int qcoeff = -20; qcoeff < 20; ++qcoeff) {
+ double rate = av1_estimate_coeff_entropy(q_step, b, zero_bin_ratio, qcoeff);
+ double prob = laplace_prob(q_step, b, zero_bin_ratio, qcoeff);
+ est_expected_rate += prob * rate;
+ }
+ double expected_rate = av1_laplace_entropy(q_step, b, zero_bin_ratio);
+ EXPECT_NEAR(expected_rate, est_expected_rate, 0.001);
+}
+
+TEST(TplModelTest, DeltaRateCostZeroFlow) {
+ // When srcrf_dist equals recrf_dist, av1_delta_rate_cost should return 0.
+ int64_t srcrf_dist = 256;
+ int64_t recrf_dist = 256;
+ int64_t delta_rate = 512;
+ int pixel_num = 256;
+ int64_t rate_cost =
+ av1_delta_rate_cost(delta_rate, recrf_dist, srcrf_dist, pixel_num);
+ EXPECT_EQ(rate_cost, 0);
+}
+
+// A reference function of av1_delta_rate_cost() with delta_rate expressed in
+// bits as the basic unit.
+double ref_delta_rate_cost(int64_t delta_rate, double src_rec_ratio,
+ int pixel_count) {
+ assert(src_rec_ratio <= 1 && src_rec_ratio >= 0);
+ double bits_per_pixel = (double)delta_rate / pixel_count;
+ double p = pow(2, bits_per_pixel);
+ double flow_rate_per_pixel =
+ sqrt(p * p / (src_rec_ratio * p * p + (1 - src_rec_ratio)));
+ double rate_cost = pixel_count * log2(flow_rate_per_pixel);
+ return rate_cost;
+}
+
+TEST(TplModelTest, DeltaRateCostReference) {
+ const int64_t scale = TPL_DEP_COST_SCALE_LOG2 + AV1_PROB_COST_SHIFT;
+ std::vector<int64_t> srcrf_dist_arr = { 256, 257, 312 };
+ std::vector<int64_t> recrf_dist_arr = { 512, 288, 620 };
+ std::vector<int64_t> delta_rate_arr = { 10, 278, 100 };
+ for (size_t t = 0; t < srcrf_dist_arr.size(); ++t) {
+ int64_t srcrf_dist = srcrf_dist_arr[t];
+ int64_t recrf_dist = recrf_dist_arr[t];
+ int64_t delta_rate = delta_rate_arr[t];
+ int64_t scaled_delta_rate = delta_rate << scale;
+ int pixel_count = 256;
+ int64_t rate_cost = av1_delta_rate_cost(scaled_delta_rate, recrf_dist,
+ srcrf_dist, pixel_count);
+ rate_cost >>= scale;
+ double src_rec_ratio = (double)srcrf_dist / recrf_dist;
+ double ref_rate_cost =
+ ref_delta_rate_cost(delta_rate, src_rec_ratio, pixel_count);
+ EXPECT_NEAR((double)rate_cost, ref_rate_cost, 1);
+ }
+}
+
+TEST(TplModelTest, GetOverlapAreaHasOverlap) {
+ // The block a's area is [10, 17) x [18, 24).
+ // The block b's area is [8, 15) x [17, 23).
+ // The overlapping area between block a and block b is [10, 15) x [18, 23).
+ // Therefore, the size of the area is (15 - 10) * (23 - 18) = 25.
+ int row_a = 10;
+ int col_a = 18;
+ int row_b = 8;
+ int col_b = 17;
+ int height = 7;
+ int width = 6;
+ int overlap_area =
+ av1_get_overlap_area(row_a, col_a, row_b, col_b, width, height);
+ EXPECT_EQ(overlap_area, 25);
+}
+
+TEST(TplModelTest, GetOverlapAreaNoOverlap) {
+ // The block a's area is [10, 14) x [18, 22).
+ // The block b's area is [5, 9) x [5, 9).
+ // There is no overlapping area between block a and block b.
+ // Therefore, the return value should be zero.
+ int row_a = 10;
+ int col_a = 18;
+ int row_b = 5;
+ int col_b = 5;
+ int height = 4;
+ int width = 4;
+ int overlap_area =
+ av1_get_overlap_area(row_a, col_a, row_b, col_b, width, height);
+ EXPECT_EQ(overlap_area, 0);
+}
+
+TEST(TPLModelTest, EstimateFrameRateTest) {
+ /*
+ * Transform size: 16x16
+ * Frame count: 16
+ * Transform block count: 20
+ */
+ const int txfm_size = 256; // 16x16
+ const int frame_count = 16;
+ unsigned char q_index_list[16];
+ TplTxfmStats stats_list[16];
+
+ for (int i = 0; i < frame_count; i++) {
+ q_index_list[i] = 1;
+ stats_list[i].txfm_block_count = 8;
+
+ for (int j = 0; j < txfm_size; j++) {
+ stats_list[i].abs_coeff_sum[j] = 0;
+ }
+ }
+
+ double result =
+ av1_estimate_gop_bitrate(q_index_list, frame_count, stats_list);
+ EXPECT_NEAR(result, 0, 0.1);
+}
+
+TEST(TPLModelTest, TxfmStatsInitTest) {
+ TplTxfmStats tpl_txfm_stats;
+ av1_init_tpl_txfm_stats(&tpl_txfm_stats);
+ EXPECT_EQ(tpl_txfm_stats.coeff_num, 256);
+ EXPECT_EQ(tpl_txfm_stats.txfm_block_count, 0);
+ for (int i = 0; i < tpl_txfm_stats.coeff_num; ++i) {
+ EXPECT_DOUBLE_EQ(tpl_txfm_stats.abs_coeff_sum[i], 0);
+ }
+}
+
+TEST(TPLModelTest, TxfmStatsAccumulateTest) {
+ TplTxfmStats sub_stats;
+ av1_init_tpl_txfm_stats(&sub_stats);
+ sub_stats.txfm_block_count = 17;
+ for (int i = 0; i < sub_stats.coeff_num; ++i) {
+ sub_stats.abs_coeff_sum[i] = i;
+ }
+
+ TplTxfmStats accumulated_stats;
+ av1_init_tpl_txfm_stats(&accumulated_stats);
+ accumulated_stats.txfm_block_count = 13;
+ for (int i = 0; i < accumulated_stats.coeff_num; ++i) {
+ accumulated_stats.abs_coeff_sum[i] = 5 * i;
+ }
+
+ av1_accumulate_tpl_txfm_stats(&sub_stats, &accumulated_stats);
+ EXPECT_DOUBLE_EQ(accumulated_stats.txfm_block_count, 30);
+ for (int i = 0; i < accumulated_stats.coeff_num; ++i) {
+ EXPECT_DOUBLE_EQ(accumulated_stats.abs_coeff_sum[i], 6 * i);
+ }
+}
+
+TEST(TPLModelTest, TxfmStatsRecordTest) {
+ TplTxfmStats stats1;
+ TplTxfmStats stats2;
+ av1_init_tpl_txfm_stats(&stats1);
+ av1_init_tpl_txfm_stats(&stats2);
+
+ tran_low_t coeff[256];
+ for (int i = 0; i < 256; ++i) {
+ coeff[i] = i;
+ }
+ av1_record_tpl_txfm_block(&stats1, coeff);
+ EXPECT_EQ(stats1.txfm_block_count, 1);
+
+ // We record the same transform block twice for testing purposes.
+ av1_record_tpl_txfm_block(&stats2, coeff);
+ av1_record_tpl_txfm_block(&stats2, coeff);
+ EXPECT_EQ(stats2.txfm_block_count, 2);
+
+ EXPECT_EQ(stats1.coeff_num, 256);
+ EXPECT_EQ(stats2.coeff_num, 256);
+ for (int i = 0; i < 256; ++i) {
+ EXPECT_DOUBLE_EQ(stats2.abs_coeff_sum[i], 2 * stats1.abs_coeff_sum[i]);
+ }
+}
+
+} // namespace
diff --git a/third_party/libaom/source/libaom/test/variance_test.cc b/third_party/libaom/source/libaom/test/variance_test.cc
index fa90305acd..6bb96ce46f 100644
--- a/third_party/libaom/source/libaom/test/variance_test.cc
+++ b/third_party/libaom/source/libaom/test/variance_test.cc
@@ -1004,8 +1004,8 @@ void SubpelVarianceTest<DistWtdSubpixAvgVarMxNFunc>::RefTest() {
for (int y0 = 0; y0 < 4; ++y0) {
uint32_t sse1, sse2;
uint32_t var1, var2;
- jcp_param_.fwd_offset = quant_dist_lookup_table[x0][y0][0];
- jcp_param_.bck_offset = quant_dist_lookup_table[x0][y0][1];
+ jcp_param_.fwd_offset = quant_dist_lookup_table[y0][x0];
+ jcp_param_.bck_offset = quant_dist_lookup_table[y0][1 - x0];
ASM_REGISTER_STATE_CHECK(var1 = params_.func(ref_, width() + 0, x, y,
src_, width(), &sse1,
sec_, &jcp_param_));
diff --git a/third_party/libaom/source/libaom/test/warp_filter_test_util.cc b/third_party/libaom/source/libaom/test/warp_filter_test_util.cc
index 07a2e3f6e6..0e6e8b1324 100644
--- a/third_party/libaom/source/libaom/test/warp_filter_test_util.cc
+++ b/third_party/libaom/source/libaom/test/warp_filter_test_util.cc
@@ -226,8 +226,8 @@ void AV1WarpFilterTest::RunCheckOutput(warp_affine_func test_impl) {
conv_params.use_dist_wtd_comp_avg = 0;
} else {
conv_params.use_dist_wtd_comp_avg = 1;
- conv_params.fwd_offset = quant_dist_lookup_table[ii][jj][0];
- conv_params.bck_offset = quant_dist_lookup_table[ii][jj][1];
+ conv_params.fwd_offset = quant_dist_lookup_table[jj][ii];
+ conv_params.bck_offset = quant_dist_lookup_table[jj][1 - ii];
}
av1_warp_affine_c(mat, input, w, h, stride, output, 32, 32, out_w,
out_h, out_w, sub_x, sub_y, &conv_params, alpha,
@@ -240,8 +240,8 @@ void AV1WarpFilterTest::RunCheckOutput(warp_affine_func test_impl) {
conv_params.use_dist_wtd_comp_avg = 0;
} else {
conv_params.use_dist_wtd_comp_avg = 1;
- conv_params.fwd_offset = quant_dist_lookup_table[ii][jj][0];
- conv_params.bck_offset = quant_dist_lookup_table[ii][jj][1];
+ conv_params.fwd_offset = quant_dist_lookup_table[jj][ii];
+ conv_params.bck_offset = quant_dist_lookup_table[jj][1 - ii];
}
test_impl(mat, input, w, h, stride, output2, 32, 32, out_w, out_h,
out_w, sub_x, sub_y, &conv_params, alpha, beta, gamma,
@@ -424,8 +424,8 @@ void AV1HighbdWarpFilterTest::RunCheckOutput(
conv_params.use_dist_wtd_comp_avg = 0;
} else {
conv_params.use_dist_wtd_comp_avg = 1;
- conv_params.fwd_offset = quant_dist_lookup_table[ii][jj][0];
- conv_params.bck_offset = quant_dist_lookup_table[ii][jj][1];
+ conv_params.fwd_offset = quant_dist_lookup_table[jj][ii];
+ conv_params.bck_offset = quant_dist_lookup_table[jj][1 - ii];
}
av1_highbd_warp_affine_c(mat, input, w, h, stride, output, 32, 32,
@@ -441,8 +441,8 @@ void AV1HighbdWarpFilterTest::RunCheckOutput(
conv_params.use_dist_wtd_comp_avg = 0;
} else {
conv_params.use_dist_wtd_comp_avg = 1;
- conv_params.fwd_offset = quant_dist_lookup_table[ii][jj][0];
- conv_params.bck_offset = quant_dist_lookup_table[ii][jj][1];
+ conv_params.fwd_offset = quant_dist_lookup_table[jj][ii];
+ conv_params.bck_offset = quant_dist_lookup_table[jj][1 - ii];
}
test_impl(mat, input, w, h, stride, output2, 32, 32, out_w, out_h,
out_w, sub_x, sub_y, bd, &conv_params, alpha, beta,
diff --git a/third_party/libaom/source/libaom/third_party/fastfeat/fast.c b/third_party/libaom/source/libaom/third_party/fastfeat/fast.c
index f29ac8f725..30efde8396 100644
--- a/third_party/libaom/source/libaom/third_party/fastfeat/fast.c
+++ b/third_party/libaom/source/libaom/third_party/fastfeat/fast.c
@@ -1,3 +1,33 @@
+// Copyright (c) 2006, 2008 Edward Rosten
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+//
+// *Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//
+// *Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+//
+// *Neither the name of the University of Cambridge nor the names of
+// its contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+// OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
// clang-format off
#include <stdlib.h>
#include "fast.h"
diff --git a/third_party/libaom/source/libaom/third_party/fastfeat/fast.h b/third_party/libaom/source/libaom/third_party/fastfeat/fast.h
index a65d5a5d17..d7a9617cce 100644
--- a/third_party/libaom/source/libaom/third_party/fastfeat/fast.h
+++ b/third_party/libaom/source/libaom/third_party/fastfeat/fast.h
@@ -1,3 +1,33 @@
+// Copyright (c) 2006, 2008 Edward Rosten
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+//
+// *Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//
+// *Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+//
+// *Neither the name of the University of Cambridge nor the names of
+// its contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+// OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
// clang-format off
#ifndef FAST_H
#define FAST_H
diff --git a/third_party/libaom/source/libaom/third_party/fastfeat/fast_9.c b/third_party/libaom/source/libaom/third_party/fastfeat/fast_9.c
index 61c654c472..c0fdbe26cd 100644
--- a/third_party/libaom/source/libaom/third_party/fastfeat/fast_9.c
+++ b/third_party/libaom/source/libaom/third_party/fastfeat/fast_9.c
@@ -1,3 +1,33 @@
+// Copyright (c) 2006, 2008 Edward Rosten
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+//
+// *Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//
+// *Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+//
+// *Neither the name of the University of Cambridge nor the names of
+// its contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+// OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
// clang-format off
/*This is mechanically generated code*/
#include <stdlib.h>
diff --git a/third_party/libaom/source/libaom/third_party/fastfeat/nonmax.c b/third_party/libaom/source/libaom/third_party/fastfeat/nonmax.c
index 0dbc660cb0..2e048e5460 100644
--- a/third_party/libaom/source/libaom/third_party/fastfeat/nonmax.c
+++ b/third_party/libaom/source/libaom/third_party/fastfeat/nonmax.c
@@ -1,3 +1,33 @@
+// Copyright (c) 2006, 2008 Edward Rosten
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+//
+// *Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//
+// *Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+//
+// *Neither the name of the University of Cambridge nor the names of
+// its contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+// OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
// clang-format off
#include <stdlib.h>
#include "fast.h"
diff --git a/third_party/libaom/source/libaom/third_party/vector/vector.c b/third_party/libaom/source/libaom/third_party/vector/vector.c
index 4b8b9c6fd9..2295b8f080 100644
--- a/third_party/libaom/source/libaom/third_party/vector/vector.c
+++ b/third_party/libaom/source/libaom/third_party/vector/vector.c
@@ -3,7 +3,7 @@ The MIT License(MIT)
Copyright(c) 2016 Peter Goldsborough
Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files(the "Software"), to deal in
+this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
diff --git a/third_party/libaom/source/libaom/third_party/vector/vector.h b/third_party/libaom/source/libaom/third_party/vector/vector.h
index d09eb64c93..acc70fe099 100644
--- a/third_party/libaom/source/libaom/third_party/vector/vector.h
+++ b/third_party/libaom/source/libaom/third_party/vector/vector.h
@@ -3,7 +3,7 @@ The MIT License(MIT)
Copyright(c) 2016 Peter Goldsborough
Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files(the "Software"), to deal in
+this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
diff --git a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/decl_status_code.c b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/decl_status_code.c
index 4c7afbaae5..bd445ab1b5 100644
--- a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/decl_status_code.c
+++ b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/decl_status_code.c
@@ -1,3 +1,14 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
typedef struct S1 {
int x;
} T1;
diff --git a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/func_in_out.c b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/func_in_out.c
index 8c14edc109..67ab58d520 100644
--- a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/func_in_out.c
+++ b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/func_in_out.c
@@ -1,3 +1,14 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
typedef struct XD {
int u;
int v;
diff --git a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/global_variable.c b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/global_variable.c
index 1934e20a75..26d5385e97 100644
--- a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/global_variable.c
+++ b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/global_variable.c
@@ -1,3 +1,14 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
extern const int global_a[13];
const int global_b = 0;
diff --git a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/parse_lvalue.c b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/parse_lvalue.c
index 093ab55ac6..97113efc15 100644
--- a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/parse_lvalue.c
+++ b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/parse_lvalue.c
@@ -1,3 +1,14 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
typedef struct RD {
int u;
int v;
diff --git a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/simple_code.c b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/simple_code.c
index 330fc3a90c..dd89a15621 100644
--- a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/simple_code.c
+++ b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/simple_code.c
@@ -1,3 +1,14 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
typedef struct S {
int x;
int y;
diff --git a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/struct_code.c b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/struct_code.c
index 62b9d7adee..e14372c83e 100644
--- a/third_party/libaom/source/libaom/tools/auto_refactor/c_files/struct_code.c
+++ b/third_party/libaom/source/libaom/tools/auto_refactor/c_files/struct_code.c
@@ -1,3 +1,14 @@
+/*
+ * Copyright (c) 2021, Alliance for Open Media. All rights reserved
+ *
+ * This source code is subject to the terms of the BSD 2 Clause License and
+ * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+ * was not distributed with this source code in the LICENSE file, you can
+ * obtain it at www.aomedia.org/license/software. If the Alliance for Open
+ * Media Patent License 1.0 was not distributed with this source code in the
+ * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+ */
+
typedef struct S1 {
int x;
} T1;
diff --git a/third_party/libyuv/CMakeLists.txt b/third_party/libyuv/CMakeLists.txt
index 60d17338ed..94bdfe0af6 100644
--- a/third_party/libyuv/CMakeLists.txt
+++ b/third_party/libyuv/CMakeLists.txt
@@ -24,3 +24,7 @@ if (NOT WINDOWS_MSVC_X86_64)
target_link_libraries(libyuv_unittest ${ly_lib_name} gtest_main
Threads::Threads)
endif()
+
+if (LINUX_AARCH64)
+ target_compile_definitions(${ly_lib_name} PRIVATE LIBYUV_DISABLE_NEON=1)
+endif()
diff --git a/third_party/libyuv/include/libyuv/compare_row.h b/third_party/libyuv/include/libyuv/compare_row.h
index e95b9d93eb..64115b3a3f 100644
--- a/third_party/libyuv/include/libyuv/compare_row.h
+++ b/third_party/libyuv/include/libyuv/compare_row.h
@@ -55,20 +55,20 @@ extern "C" {
// The following are available for Visual C and clangcl 32 bit:
#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \
+ !defined(__clang__) && \
(defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2))
#define HAS_HASHDJB2_AVX2
#define HAS_SUMSQUAREERROR_AVX2
#endif
-// The following are available for GCC and clangcl 64 bit:
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+// The following are available for GCC and clangcl:
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
#define HAS_HAMMINGDISTANCE_SSSE3
#endif
-// The following are available for GCC and clangcl 64 bit:
+// The following are available for GCC and clangcl:
#if !defined(LIBYUV_DISABLE_X86) && defined(CLANG_HAS_AVX2) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+ (defined(__x86_64__) || defined(__i386__))
#define HAS_HAMMINGDISTANCE_AVX2
#endif
diff --git a/third_party/libyuv/include/libyuv/convert.h b/third_party/libyuv/include/libyuv/convert.h
index 40869ef218..93e7550be8 100644
--- a/third_party/libyuv/include/libyuv/convert.h
+++ b/third_party/libyuv/include/libyuv/convert.h
@@ -693,6 +693,19 @@ int RAWToI420(const uint8_t* src_raw,
int width,
int height);
+// RGB big endian (rgb in memory) to J420.
+LIBYUV_API
+int RAWToJ420(const uint8_t* src_raw,
+ int src_stride_raw,
+ uint8_t* dst_y,
+ int dst_stride_y,
+ uint8_t* dst_u,
+ int dst_stride_u,
+ uint8_t* dst_v,
+ int dst_stride_v,
+ int width,
+ int height);
+
// RGB16 (RGBP fourcc) little endian to I420.
LIBYUV_API
int RGB565ToI420(const uint8_t* src_rgb565,
diff --git a/third_party/libyuv/include/libyuv/convert_argb.h b/third_party/libyuv/include/libyuv/convert_argb.h
index 297de15162..eb4ebd54a8 100644
--- a/third_party/libyuv/include/libyuv/convert_argb.h
+++ b/third_party/libyuv/include/libyuv/convert_argb.h
@@ -54,12 +54,30 @@ LIBYUV_API extern const struct YuvConstants kYvuV2020Constants; // BT.2020 full
NV21ToRGB24Matrix(a, b, c, d, e, f, g##VU, h, i)
#define NV21ToRAWMatrix(a, b, c, d, e, f, g, h, i) \
NV12ToRGB24Matrix(a, b, c, d, e, f, g##VU, h, i)
+#define I010ToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k) \
+ I010ToARGBMatrix(a, b, e, f, c, d, g, h, i##VU, j, k)
+#define I210ToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k) \
+ I210ToARGBMatrix(a, b, e, f, c, d, g, h, i##VU, j, k)
+#define I410ToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k) \
+ I410ToARGBMatrix(a, b, e, f, c, d, g, h, i##VU, j, k)
+#define I010ToAB30Matrix(a, b, c, d, e, f, g, h, i, j, k) \
+ I010ToAR30Matrix(a, b, e, f, c, d, g, h, i##VU, j, k)
+#define I210ToAB30Matrix(a, b, c, d, e, f, g, h, i, j, k) \
+ I210ToAR30Matrix(a, b, e, f, c, d, g, h, i##VU, j, k)
+#define I410ToAB30Matrix(a, b, c, d, e, f, g, h, i, j, k) \
+ I410ToAR30Matrix(a, b, e, f, c, d, g, h, i##VU, j, k)
#define I420AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \
I420AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n)
#define I422AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \
I422AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n)
#define I444AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \
I444AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n)
+#define I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \
+ I010AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n)
+#define I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \
+ I210AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n)
+#define I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \
+ I410AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n)
// Alias.
#define ARGBToARGB ARGBCopy
@@ -125,32 +143,6 @@ int J420ToABGR(const uint8_t* src_y,
int width,
int height);
-// Convert F420 to ARGB. BT.709 full range
-LIBYUV_API
-int F420ToARGB(const uint8_t* src_y,
- int src_stride_y,
- const uint8_t* src_u,
- int src_stride_u,
- const uint8_t* src_v,
- int src_stride_v,
- uint8_t* dst_argb,
- int dst_stride_argb,
- int width,
- int height);
-
-// Convert F420 to ABGR. BT.709 full range
-LIBYUV_API
-int F420ToABGR(const uint8_t* src_y,
- int src_stride_y,
- const uint8_t* src_u,
- int src_stride_u,
- const uint8_t* src_v,
- int src_stride_v,
- uint8_t* dst_abgr,
- int dst_stride_abgr,
- int width,
- int height);
-
// Convert H420 to ARGB.
LIBYUV_API
int H420ToARGB(const uint8_t* src_y,
@@ -814,29 +806,29 @@ int I010ToAR30(const uint16_t* src_y,
int width,
int height);
-// Convert I010 to AB30.
+// Convert H010 to AR30.
LIBYUV_API
-int I010ToAB30(const uint16_t* src_y,
+int H010ToAR30(const uint16_t* src_y,
int src_stride_y,
const uint16_t* src_u,
int src_stride_u,
const uint16_t* src_v,
int src_stride_v,
- uint8_t* dst_ab30,
- int dst_stride_ab30,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
int width,
int height);
-// Convert H010 to AR30.
+// Convert I010 to AB30.
LIBYUV_API
-int H010ToAR30(const uint16_t* src_y,
+int I010ToAB30(const uint16_t* src_y,
int src_stride_y,
const uint16_t* src_u,
int src_stride_u,
const uint16_t* src_v,
int src_stride_v,
- uint8_t* dst_ar30,
- int dst_stride_ar30,
+ uint8_t* dst_ab30,
+ int dst_stride_ab30,
int width,
int height);
@@ -1073,6 +1065,42 @@ int AR30ToAB30(const uint8_t* src_ar30,
int width,
int height);
+// Convert AR64 to ARGB.
+LIBYUV_API
+int AR64ToARGB(const uint16_t* src_ar64,
+ int src_stride_ar64,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height);
+
+// Convert AB64 to ABGR.
+#define AB64ToABGR AR64ToARGB
+
+// Convert AB64 to ARGB.
+LIBYUV_API
+int AB64ToARGB(const uint16_t* src_ab64,
+ int src_stride_ab64,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height);
+
+// Convert AR64 to ABGR.
+#define AR64ToABGR AB64ToARGB
+
+// Convert AR64 To AB64.
+LIBYUV_API
+int AR64ToAB64(const uint16_t* src_ar64,
+ int src_stride_ar64,
+ uint16_t* dst_ab64,
+ int dst_stride_ab64,
+ int width,
+ int height);
+
+// Convert AB64 To AR64.
+#define AB64ToAR64 AR64ToAB64
+
// src_width/height provided by capture
// dst_width/height for clipping determine final size.
LIBYUV_API
@@ -1385,6 +1413,19 @@ int I420ToAR30(const uint8_t* src_y,
int width,
int height);
+// Convert I420 to AB30.
+LIBYUV_API
+int I420ToAB30(const uint8_t* src_y,
+ int src_stride_y,
+ const uint8_t* src_u,
+ int src_stride_u,
+ const uint8_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ab30,
+ int dst_stride_ab30,
+ int width,
+ int height);
+
// Convert H420 to AR30.
LIBYUV_API
int H420ToAR30(const uint8_t* src_y,
@@ -1398,6 +1439,19 @@ int H420ToAR30(const uint8_t* src_y,
int width,
int height);
+// Convert H420 to AB30.
+LIBYUV_API
+int H420ToAB30(const uint8_t* src_y,
+ int src_stride_y,
+ const uint8_t* src_u,
+ int src_stride_u,
+ const uint8_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ab30,
+ int dst_stride_ab30,
+ int width,
+ int height);
+
// Convert I420 to ARGB with matrix.
LIBYUV_API
int I420ToARGBMatrix(const uint8_t* src_y,
@@ -1440,7 +1494,7 @@ int I444ToARGBMatrix(const uint8_t* src_y,
int width,
int height);
-// multiply 10 bit yuv into high bits to allow any number of bits.
+// Convert 10 bit 420 YUV to ARGB with matrix.
LIBYUV_API
int I010ToAR30Matrix(const uint16_t* src_y,
int src_stride_y,
@@ -1454,7 +1508,7 @@ int I010ToAR30Matrix(const uint16_t* src_y,
int width,
int height);
-// multiply 10 bit yuv into high bits to allow any number of bits.
+// Convert 10 bit 420 YUV to ARGB with matrix.
LIBYUV_API
int I210ToAR30Matrix(const uint16_t* src_y,
int src_stride_y,
@@ -1468,6 +1522,20 @@ int I210ToAR30Matrix(const uint16_t* src_y,
int width,
int height);
+// Convert 10 bit 444 YUV to ARGB with matrix.
+LIBYUV_API
+int I410ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
// Convert 10 bit YUV to ARGB with matrix.
LIBYUV_API
int I010ToARGBMatrix(const uint16_t* src_y,
@@ -1482,6 +1550,34 @@ int I010ToARGBMatrix(const uint16_t* src_y,
int width,
int height);
+// multiply 12 bit yuv into high bits to allow any number of bits.
+LIBYUV_API
+int I012ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
+// Convert 12 bit YUV to ARGB with matrix.
+LIBYUV_API
+int I012ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
// Convert 10 bit 422 YUV to ARGB with matrix.
LIBYUV_API
int I210ToARGBMatrix(const uint16_t* src_y,
@@ -1496,6 +1592,87 @@ int I210ToARGBMatrix(const uint16_t* src_y,
int width,
int height);
+// Convert 10 bit 444 YUV to ARGB with matrix.
+LIBYUV_API
+int I410ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
+// Convert P010 to ARGB with matrix.
+LIBYUV_API
+int P010ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
+// Convert P210 to ARGB with matrix.
+LIBYUV_API
+int P210ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
+// Convert P010 to AR30 with matrix.
+LIBYUV_API
+int P010ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
+// Convert P210 to AR30 with matrix.
+LIBYUV_API
+int P210ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height);
+
+// P012 and P010 use most significant bits so the conversion is the same.
+// Convert P012 to ARGB with matrix.
+#define P012ToARGBMatrix P010ToARGBMatrix
+// Convert P012 to AR30 with matrix.
+#define P012ToAR30Matrix P010ToAR30Matrix
+// Convert P212 to ARGB with matrix.
+#define P212ToARGBMatrix P210ToARGBMatrix
+// Convert P212 to AR30 with matrix.
+#define P212ToAR30Matrix P210ToAR30Matrix
+
+// Convert P016 to ARGB with matrix.
+#define P016ToARGBMatrix P010ToARGBMatrix
+// Convert P016 to AR30 with matrix.
+#define P016ToAR30Matrix P010ToAR30Matrix
+// Convert P216 to ARGB with matrix.
+#define P216ToARGBMatrix P210ToARGBMatrix
+// Convert P216 to AR30 with matrix.
+#define P216ToAR30Matrix P210ToAR30Matrix
+
// Convert I420 with Alpha to preattenuated ARGB with matrix.
LIBYUV_API
int I420AlphaToARGBMatrix(const uint8_t* src_y,
@@ -1547,6 +1724,57 @@ int I444AlphaToARGBMatrix(const uint8_t* src_y,
int height,
int attenuate);
+// Convert I010 with Alpha to preattenuated ARGB with matrix.
+LIBYUV_API
+int I010AlphaToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height,
+ int attenuate);
+
+// Convert I210 with Alpha to preattenuated ARGB with matrix.
+LIBYUV_API
+int I210AlphaToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height,
+ int attenuate);
+
+// Convert I410 with Alpha to preattenuated ARGB with matrix.
+LIBYUV_API
+int I410AlphaToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height,
+ int attenuate);
+
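These *AlphaToARGBMatrix variants take a fourth (alpha) plane plus an attenuate flag: a nonzero attenuate premultiplies the color channels by alpha, which is what "preattenuated" means above. A hedged sketch (wrapper name and include path are assumptions; kYuvI601Constants is the exported BT.601 matrix):

#include <stdint.h>
#include "libyuv/convert_argb.h"

// Sketch: convert 10-bit YUV plus a 10-bit alpha plane (I010 + A) to
// premultiplied ARGB. Passing attenuate = 0 instead would leave the color
// channels unmultiplied.
int ConvertI010AlphaToPremultipliedARGB(
    const uint16_t* src_y, int src_stride_y,
    const uint16_t* src_u, int src_stride_u,
    const uint16_t* src_v, int src_stride_v,
    const uint16_t* src_a, int src_stride_a,
    uint8_t* dst_argb, int dst_stride_argb, int width, int height) {
  return I010AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u,
                               src_v, src_stride_v, src_a, src_stride_a,
                               dst_argb, dst_stride_argb, &kYuvI601Constants,
                               width, height, /*attenuate=*/1);
}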
// Convert NV12 to ARGB with matrix.
LIBYUV_API
int NV12ToARGBMatrix(const uint8_t* src_y,
diff --git a/third_party/libyuv/include/libyuv/convert_from_argb.h b/third_party/libyuv/include/libyuv/convert_from_argb.h
index d992363ceb..bf48786041 100644
--- a/third_party/libyuv/include/libyuv/convert_from_argb.h
+++ b/third_party/libyuv/include/libyuv/convert_from_argb.h
@@ -153,6 +153,30 @@ int ARGBToI444(const uint8_t* src_argb,
int width,
int height);
+// Convert ARGB to AR64.
+LIBYUV_API
+int ARGBToAR64(const uint8_t* src_argb,
+ int src_stride_argb,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ int width,
+ int height);
+
+// Convert ABGR to AB64.
+#define ABGRToAB64 ARGBToAR64
+
+// Convert ARGB to AB64.
+LIBYUV_API
+int ARGBToAB64(const uint8_t* src_argb,
+ int src_stride_argb,
+ uint16_t* dst_ab64,
+ int dst_stride_ab64,
+ int width,
+ int height);
+
+// Convert ABGR to AR64.
+#define ABGRToAR64 ARGBToAB64
+
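The crossed aliases (ABGRToAB64 = ARGBToAR64, ABGRToAR64 = ARGBToAB64) follow because swapping R and B in both the source and the destination leaves the byte-for-byte mapping unchanged. A minimal sketch of the call (wrapper name and include path are assumptions):

#include <stdint.h>
#include "libyuv/convert_from_argb.h"

// Sketch: widen 8-bit ARGB to 16-bit-per-channel AR64. Per the #define
// above, the same routine also serves ABGRToAB64.
int WidenArgbToAr64(const uint8_t* src_argb, int src_stride_argb,
                    uint16_t* dst_ar64, int dst_stride_ar64,
                    int width, int height) {
  return ARGBToAR64(src_argb, src_stride_argb, dst_ar64, dst_stride_ar64,
                    width, height);
}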
// Convert ARGB To I422.
LIBYUV_API
int ARGBToI422(const uint8_t* src_argb,
diff --git a/third_party/libyuv/include/libyuv/planar_functions.h b/third_party/libyuv/include/libyuv/planar_functions.h
index ebefb5682f..def773cb44 100644
--- a/third_party/libyuv/include/libyuv/planar_functions.h
+++ b/third_party/libyuv/include/libyuv/planar_functions.h
@@ -229,6 +229,60 @@ void MergeARGBPlane(const uint8_t* src_r,
int width,
int height);
+// Merge separate 'depth'-bit R, G, and B planes stored in the lsb
+// into one interleaved XR30 plane.
+// depth should be in range [10, 16].
+LIBYUV_API
+void MergeXR30Plane(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ int width,
+ int height,
+ int depth);
+
+// Merge separate 'depth'-bit R, G, B, and A planes stored in the lsb
+// into one interleaved AR64 plane.
+// src_a can be NULL, in which case alpha is filled with the opaque value.
+// depth should be in range [1, 16].
+LIBYUV_API
+void MergeAR64Plane(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ int width,
+ int height,
+ int depth);
+
+// Merge separate 'depth'-bit R, G, B, and A planes stored in the lsb
+// into one interleaved ARGB plane.
+// src_a can be NULL, in which case alpha is filled with the opaque value.
+// depth should be in range [8, 16].
+LIBYUV_API
+void MergeARGB16To8Plane(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height,
+ int depth);
+
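All three merge helpers above read depth-bit samples from the low bits of each uint16_t; MergeARGB16To8Plane additionally reduces them to 8 bits for the packed output, and a NULL src_a fills alpha with the opaque value. A hedged sketch (wrapper name and include path are assumptions):

#include <stddef.h>
#include <stdint.h>
#include "libyuv/planar_functions.h"

// Sketch: pack separate 10-bit R, G, and B planes (values in the low 10 bits
// of each uint16_t) into 8-bit ARGB. src_a is NULL, so alpha is filled with
// the opaque value as described above.
void Pack10BitPlanesToArgb(const uint16_t* src_r, const uint16_t* src_g,
                           const uint16_t* src_b, int plane_stride,
                           uint8_t* dst_argb, int dst_stride_argb,
                           int width, int height) {
  MergeARGB16To8Plane(src_r, plane_stride, src_g, plane_stride,
                      src_b, plane_stride, /*src_a=*/NULL, /*src_stride_a=*/0,
                      dst_argb, dst_stride_argb, width, height, /*depth=*/10);
}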
// Copy I400. Supports inverting.
LIBYUV_API
int I400ToI400(const uint8_t* src_y,
@@ -945,7 +999,7 @@ void ARGBAffineRow_SSE2(const uint8_t* src_argb,
int width);
// Shuffle ARGB channel order. e.g. BGRA to ARGB.
-// shuffler is 16 bytes and must be aligned.
+// shuffler is 16 bytes.
LIBYUV_API
int ARGBShuffle(const uint8_t* src_bgra,
int src_stride_bgra,
@@ -955,6 +1009,17 @@ int ARGBShuffle(const uint8_t* src_bgra,
int width,
int height);
+// Shuffle AR64 channel order. e.g. AR64 to AB64.
+// shuffler is 16 bytes.
+LIBYUV_API
+int AR64Shuffle(const uint16_t* src_ar64,
+ int src_stride_ar64,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ const uint8_t* shuffler,
+ int width,
+ int height);
+
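AR64Shuffle mirrors ARGBShuffle but operates on 16-bit channels, and the hunk above also drops the old alignment requirement on the 16-byte shuffler. A sketch of the 8-bit case, assuming the pshufb-style convention (output byte i takes source byte shuffler[i] within each 16-byte group); the mask and wrapper name are illustrative:

#include <stdint.h>
#include "libyuv/planar_functions.h"

// Sketch: swap the R and B bytes of four consecutive 4-byte pixels, i.e.
// copy ARGB as ABGR. The mask repeats the per-pixel pattern {2,1,0,3}
// across all four pixels of a 16-byte group.
static const uint8_t kShuffleSwapRB[16] = {2,  1, 0, 3,  6,  5,  4,  7,
                                           10, 9, 8, 11, 14, 13, 12, 15};

int CopyArgbAsAbgr(const uint8_t* src_argb, int src_stride_argb,
                   uint8_t* dst_abgr, int dst_stride_abgr,
                   int width, int height) {
  return ARGBShuffle(src_argb, src_stride_argb, dst_abgr, dst_stride_abgr,
                     kShuffleSwapRB, width, height);
}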
// Sobel ARGB effect with planar output.
LIBYUV_API
int ARGBSobelToPlane(const uint8_t* src_argb,
diff --git a/third_party/libyuv/include/libyuv/rotate_row.h b/third_party/libyuv/include/libyuv/rotate_row.h
index 022293eef2..f4c701fb4f 100644
--- a/third_party/libyuv/include/libyuv/rotate_row.h
+++ b/third_party/libyuv/include/libyuv/rotate_row.h
@@ -32,8 +32,9 @@ extern "C" {
#define LIBYUV_DISABLE_X86
#endif
#endif
-// The following are available for Visual C and clangcl 32 bit:
-#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER)
+// The following are available for Visual C 32 bit:
+#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \
+ !defined(__clang__)
#define HAS_TRANSPOSEWX8_SSSE3
#define HAS_TRANSPOSEUVWX8_SSE2
#endif
diff --git a/third_party/libyuv/include/libyuv/row.h b/third_party/libyuv/include/libyuv/row.h
index 68fb88b3e7..1444a04786 100644
--- a/third_party/libyuv/include/libyuv/row.h
+++ b/third_party/libyuv/include/libyuv/row.h
@@ -175,8 +175,8 @@ extern "C" {
defined(_MSC_VER)
// TODO(fbarchard): fix build error on android_full_debug=1
// https://code.google.com/p/libyuv/issues/detail?id=517
-#define HAS_I444ALPHATOARGBROW_SSSE3
#define HAS_I422ALPHATOARGBROW_SSSE3
+#define HAS_I444ALPHATOARGBROW_SSSE3
#endif
#endif
@@ -240,15 +240,15 @@ extern "C" {
defined(_MSC_VER)
// TODO(fbarchard): fix build error on android_full_debug=1
// https://code.google.com/p/libyuv/issues/detail?id=517
-#define HAS_I444ALPHATOARGBROW_AVX2
#define HAS_I422ALPHATOARGBROW_AVX2
+#define HAS_I444ALPHATOARGBROW_AVX2
#endif
#endif
-// The following are available for AVX2 Visual C and clangcl 32 bit:
+// The following are available for AVX2 Visual C 32 bit:
// TODO(fbarchard): Port to gcc.
#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \
- (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2))
+ !defined(__clang__) && defined(VISUALC_HAS_AVX2)
#define HAS_ARGB1555TOARGBROW_AVX2
#define HAS_ARGB4444TOARGBROW_AVX2
#define HAS_ARGBTOARGB1555ROW_AVX2
@@ -269,33 +269,54 @@ extern "C" {
// The following are available for gcc/clang x86 platforms:
// TODO(fbarchard): Port to Visual C
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
#define HAS_ABGRTOAR30ROW_SSSE3
#define HAS_ARGBTOAR30ROW_SSSE3
+#define HAS_ARGBTOAR64ROW_SSSE3
+#define HAS_ARGBTOAB64ROW_SSSE3
+#define HAS_AR64TOARGBROW_SSSE3
+#define HAS_AB64TOARGBROW_SSSE3
#define HAS_CONVERT16TO8ROW_SSSE3
#define HAS_CONVERT8TO16ROW_SSE2
#define HAS_HALFMERGEUVROW_SSSE3
#define HAS_I210TOAR30ROW_SSSE3
#define HAS_I210TOARGBROW_SSSE3
+#define HAS_I212TOAR30ROW_SSSE3
+#define HAS_I212TOARGBROW_SSSE3
#define HAS_I400TOARGBROW_SSE2
#define HAS_I422TOAR30ROW_SSSE3
+#define HAS_I410TOAR30ROW_SSSE3
+#define HAS_I410TOARGBROW_SSSE3
#define HAS_MERGEARGBROW_SSE2
+#define HAS_MERGEXRGBROW_SSE2
#define HAS_MERGERGBROW_SSSE3
#define HAS_MIRRORUVROW_SSSE3
+#define HAS_P210TOAR30ROW_SSSE3
+#define HAS_P210TOARGBROW_SSSE3
+#define HAS_P410TOAR30ROW_SSSE3
+#define HAS_P410TOARGBROW_SSSE3
#define HAS_RAWTORGBAROW_SSSE3
#define HAS_RGB24MIRRORROW_SSSE3
#define HAS_RGBATOYJROW_SSSE3
#define HAS_SPLITARGBROW_SSE2
#define HAS_SPLITARGBROW_SSSE3
+#define HAS_SPLITXRGBROW_SSE2
+#define HAS_SPLITXRGBROW_SSSE3
#define HAS_SPLITRGBROW_SSSE3
#define HAS_SWAPUVROW_SSSE3
+
+#if defined(__x86_64__) || !defined(__pic__)
+// TODO(fbarchard): fix build error on android_full_debug=1
+// https://code.google.com/p/libyuv/issues/detail?id=517
+#define HAS_I210ALPHATOARGBROW_SSSE3
+#define HAS_I410ALPHATOARGBROW_SSSE3
+#endif
#endif
// The following are available for AVX2 gcc/clang x86 platforms:
// TODO(fbarchard): Port to Visual C
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) && \
+#if !defined(LIBYUV_DISABLE_X86) && \
+ (defined(__x86_64__) || defined(__i386__)) && \
(defined(CLANG_HAS_AVX2) || defined(GCC_HAS_AVX2))
#define HAS_ABGRTOAR30ROW_AVX2
#define HAS_ABGRTOUVROW_AVX2
@@ -303,14 +324,32 @@ extern "C" {
#define HAS_ARGBTOAR30ROW_AVX2
#define HAS_ARGBTORAWROW_AVX2
#define HAS_ARGBTORGB24ROW_AVX2
+#define HAS_ARGBTOAR64ROW_AVX2
+#define HAS_ARGBTOAB64ROW_AVX2
+#define HAS_AR64TOARGBROW_AVX2
+#define HAS_AB64TOARGBROW_AVX2
#define HAS_CONVERT16TO8ROW_AVX2
#define HAS_CONVERT8TO16ROW_AVX2
#define HAS_DIVIDEROW_16_AVX2
#define HAS_HALFMERGEUVROW_AVX2
+#define HAS_MERGEAR64ROW_AVX2
+#define HAS_MERGEARGB16TO8ROW_AVX2
#define HAS_MERGEARGBROW_AVX2
+#define HAS_MERGEXR30ROW_AVX2
+#define HAS_MERGEXR64ROW_AVX2
+#define HAS_MERGEXRGB16TO8ROW_AVX2
+#define HAS_MERGEXRGBROW_AVX2
#define HAS_I210TOAR30ROW_AVX2
#define HAS_I210TOARGBROW_AVX2
+#define HAS_I212TOAR30ROW_AVX2
+#define HAS_I212TOARGBROW_AVX2
#define HAS_I400TOARGBROW_AVX2
+#define HAS_I410TOAR30ROW_AVX2
+#define HAS_I410TOARGBROW_AVX2
+#define HAS_P210TOAR30ROW_AVX2
+#define HAS_P210TOARGBROW_AVX2
+#define HAS_P410TOAR30ROW_AVX2
+#define HAS_P410TOARGBROW_AVX2
#define HAS_I422TOAR30ROW_AVX2
#define HAS_I422TOUYVYROW_AVX2
#define HAS_I422TOYUY2ROW_AVX2
@@ -319,18 +358,25 @@ extern "C" {
#define HAS_MULTIPLYROW_16_AVX2
#define HAS_RGBATOYJROW_AVX2
#define HAS_SPLITARGBROW_AVX2
+#define HAS_SPLITXRGBROW_AVX2
#define HAS_SPLITUVROW_16_AVX2
#define HAS_SWAPUVROW_AVX2
// TODO(fbarchard): Fix AVX2 version of YUV24
// #define HAS_NV21TOYUV24ROW_AVX2
+
+#if defined(__x86_64__) || !defined(__pic__)
+// TODO(fbarchard): fix build error on android_full_debug=1
+// https://code.google.com/p/libyuv/issues/detail?id=517
+#define HAS_I210ALPHATOARGBROW_AVX2
+#define HAS_I410ALPHATOARGBROW_AVX2
+#endif
#endif
// The following are available for AVX512 clang x86 platforms:
// TODO(fbarchard): Port to GCC and Visual C
// TODO(fbarchard): re-enable HAS_ARGBTORGB24ROW_AVX512VBMI. Issue libyuv:789
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) && \
- (defined(CLANG_HAS_AVX512))
+#if !defined(LIBYUV_DISABLE_X86) && \
+ (defined(__x86_64__) || defined(__i386__)) && (defined(CLANG_HAS_AVX512))
#define HAS_ARGBTORGB24ROW_AVX512VBMI
#endif
@@ -353,6 +399,10 @@ extern "C" {
#define HAS_ARGBTORGB24ROW_NEON
#define HAS_ARGBTORGB565DITHERROW_NEON
#define HAS_ARGBTORGB565ROW_NEON
+#define HAS_ARGBTOAR64ROW_NEON
+#define HAS_ARGBTOAB64ROW_NEON
+#define HAS_AR64TOARGBROW_NEON
+#define HAS_AB64TOARGBROW_NEON
#define HAS_ARGBTOUV444ROW_NEON
#define HAS_ARGBTOUVJROW_NEON
#define HAS_ARGBTOUVROW_NEON
@@ -381,7 +431,13 @@ extern "C" {
#define HAS_I422TOYUY2ROW_NEON
#define HAS_I444TOARGBROW_NEON
#define HAS_J400TOARGBROW_NEON
+#define HAS_MERGEAR64ROW_NEON
+#define HAS_MERGEARGB16TO8ROW_NEON
#define HAS_MERGEARGBROW_NEON
+#define HAS_MERGEXR30ROW_NEON
+#define HAS_MERGEXR64ROW_NEON
+#define HAS_MERGEXRGB16TO8ROW_NEON
+#define HAS_MERGEXRGBROW_NEON
#define HAS_MERGEUVROW_NEON
#define HAS_MERGEUVROW_16_NEON
#define HAS_MIRRORROW_NEON
@@ -412,6 +468,7 @@ extern "C" {
#define HAS_RGBATOYROW_NEON
#define HAS_SETROW_NEON
#define HAS_SPLITARGBROW_NEON
+#define HAS_SPLITXRGBROW_NEON
#define HAS_SPLITRGBROW_NEON
#define HAS_SPLITUVROW_NEON
#define HAS_SPLITUVROW_16_NEON
@@ -490,24 +547,14 @@ extern "C" {
#define HAS_BGRATOYROW_MSA
#define HAS_HALFFLOATROW_MSA
#define HAS_I400TOARGBROW_MSA
-#define HAS_I422ALPHATOARGBROW_MSA
-#define HAS_I422TOARGBROW_MSA
-#define HAS_I422TORGB24ROW_MSA
-#define HAS_I422TORGBAROW_MSA
#define HAS_I422TOUYVYROW_MSA
#define HAS_I422TOYUY2ROW_MSA
-#define HAS_I444TOARGBROW_MSA
-#define HAS_I422TOARGB1555ROW_MSA
-#define HAS_I422TORGB565ROW_MSA
#define HAS_INTERPOLATEROW_MSA
#define HAS_J400TOARGBROW_MSA
#define HAS_MERGEUVROW_MSA
#define HAS_MIRRORROW_MSA
#define HAS_MIRRORUVROW_MSA
#define HAS_MIRRORSPLITUVROW_MSA
-#define HAS_NV12TOARGBROW_MSA
-#define HAS_NV12TORGB565ROW_MSA
-#define HAS_NV21TOARGBROW_MSA
#define HAS_RAWTOARGBROW_MSA
#define HAS_RAWTORGB24ROW_MSA
#define HAS_RAWTOUVROW_MSA
@@ -527,10 +574,8 @@ extern "C" {
#define HAS_SOBELXYROW_MSA
#define HAS_SOBELYROW_MSA
#define HAS_SPLITUVROW_MSA
-#define HAS_UYVYTOARGBROW_MSA
#define HAS_UYVYTOUVROW_MSA
#define HAS_UYVYTOYROW_MSA
-#define HAS_YUY2TOARGBROW_MSA
#define HAS_YUY2TOUV422ROW_MSA
#define HAS_YUY2TOUVROW_MSA
#define HAS_YUY2TOYROW_MSA
@@ -580,8 +625,6 @@ extern "C" {
#define HAS_I400TOARGBROW_MMI
#define HAS_I422TOUYVYROW_MMI
#define HAS_I422TOYUY2ROW_MMI
-#define HAS_I422TOARGBROW_MMI
-#define HAS_I444TOARGBROW_MMI
#define HAS_INTERPOLATEROW_MMI
#define HAS_J400TOARGBROW_MMI
#define HAS_MERGERGBROW_MMI
@@ -612,20 +655,6 @@ extern "C" {
#define HAS_YUY2TOUV422ROW_MMI
#define HAS_YUY2TOUVROW_MMI
#define HAS_YUY2TOYROW_MMI
-#define HAS_I210TOARGBROW_MMI
-#define HAS_I422TOARGB4444ROW_MMI
-#define HAS_I422TOARGB1555ROW_MMI
-#define HAS_I422TORGB565ROW_MMI
-#define HAS_NV21TORGB24ROW_MMI
-#define HAS_NV12TORGB24ROW_MMI
-#define HAS_I422ALPHATOARGBROW_MMI
-#define HAS_I422TORGB24ROW_MMI
-#define HAS_NV12TOARGBROW_MMI
-#define HAS_NV21TOARGBROW_MMI
-#define HAS_NV12TORGB565ROW_MMI
-#define HAS_YUY2TOARGBROW_MMI
-#define HAS_UYVYTOARGBROW_MMI
-#define HAS_I422TORGBAROW_MMI
#endif
#if defined(_MSC_VER) && !defined(__CLR_VER) && !defined(__clang__)
@@ -634,6 +663,7 @@ extern "C" {
#else
#define SIMD_ALIGNED(var) __declspec(align(16)) var
#endif
+#define LIBYUV_NOINLINE __declspec(noinline)
typedef __declspec(align(16)) int16_t vec16[8];
typedef __declspec(align(16)) int32_t vec32[4];
typedef __declspec(align(16)) float vecf32[4];
@@ -654,6 +684,7 @@ typedef __declspec(align(32)) uint8_t ulvec8[32];
#else
#define SIMD_ALIGNED(var) var __attribute__((aligned(16)))
#endif
+#define LIBYUV_NOINLINE __attribute__((noinline))
typedef int16_t __attribute__((vector_size(16))) vec16;
typedef int32_t __attribute__((vector_size(16))) vec32;
typedef float __attribute__((vector_size(16))) vecf32;
@@ -669,6 +700,7 @@ typedef uint32_t __attribute__((vector_size(32))) ulvec32;
typedef uint8_t __attribute__((vector_size(32))) ulvec8;
#else
#define SIMD_ALIGNED(var) var
+#define LIBYUV_NOINLINE
typedef int16_t vec16[8];
typedef int32_t vec32[4];
typedef float vecf32[4];
@@ -684,33 +716,18 @@ typedef uint32_t ulvec32[8];
typedef uint8_t ulvec8[32];
#endif
-#if defined(__aarch64__)
-// This struct is for Arm64 color conversion.
-struct YuvConstants {
- uvec16 kUVToRB;
- uvec16 kUVToRB2;
- uvec16 kUVToG;
- uvec16 kUVToG2;
- vec16 kUVBiasBGR;
- vec32 kYToRgb;
-};
-#elif defined(__arm__)
-// This struct is for ArmV7 color conversion.
+#if defined(__aarch64__) || defined(__arm__)
+// This struct is for ARM color conversion.
struct YuvConstants {
- uvec8 kUVToRB;
- uvec8 kUVToG;
- vec16 kUVBiasBGR;
- vec32 kYToRgb;
+ uvec8 kUVCoeff;
+ vec16 kRGBCoeffBias;
};
#else
// This struct is for Intel color conversion.
struct YuvConstants {
- int8_t kUVToB[32];
- int8_t kUVToG[32];
- int8_t kUVToR[32];
- int16_t kUVBiasB[16];
- int16_t kUVBiasG[16];
- int16_t kUVBiasR[16];
+ uint8_t kUVToB[32];
+ uint8_t kUVToG[32];
+ uint8_t kUVToR[32];
int16_t kYToRgb[16];
int16_t kYBiasToRgb[16];
};
@@ -719,11 +736,8 @@ struct YuvConstants {
#define KUVTOB 0
#define KUVTOG 32
#define KUVTOR 64
-#define KUVBIASB 96
-#define KUVBIASG 128
-#define KUVBIASR 160
-#define KYTORGB 192
-#define KYBIASTORGB 224
+#define KYTORGB 96
+#define KYBIASTORGB 128
#endif
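The new offsets follow from the Intel-layout struct above: three 32-byte uint8_t tables at 0, 32, and 64, then two 16-element int16_t tables at 96 and 128, which is also why the KUVBIASB/KUVBIASG/KUVBIASR constants disappear along with the bias fields. A hypothetical build-time check, not part of the header, that would keep the constants in sync with the struct (C11 _Static_assert; a C++ translation unit would use static_assert):

#include <stddef.h>

// Sketch, valid only for the Intel (non-ARM) branch of YuvConstants.
_Static_assert(offsetof(struct YuvConstants, kUVToB) == KUVTOB, "kUVToB");
_Static_assert(offsetof(struct YuvConstants, kUVToG) == KUVTOG, "kUVToG");
_Static_assert(offsetof(struct YuvConstants, kUVToR) == KUVTOR, "kUVToR");
_Static_assert(offsetof(struct YuvConstants, kYToRgb) == KYTORGB, "kYToRgb");
_Static_assert(offsetof(struct YuvConstants, kYBiasToRgb) == KYBIASTORGB,
               "kYBiasToRgb");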
@@ -995,11 +1009,11 @@ void BGRAToYRow_SSSE3(const uint8_t* src_bgra, uint8_t* dst_y, int width);
void ABGRToYRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width);
void RGBAToYRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width);
void RGB24ToYRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width);
-void RGB24ToYJRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width);
+void RGB24ToYJRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_yj, int width);
void RAWToYRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width);
-void RAWToYJRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width);
-void RGB24ToYJRow_AVX2(const uint8_t* src_rgb24, uint8_t* dst_y, int width);
-void RAWToYJRow_AVX2(const uint8_t* src_raw, uint8_t* dst_y, int width);
+void RAWToYJRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_yj, int width);
+void RGB24ToYJRow_AVX2(const uint8_t* src_rgb24, uint8_t* dst_yj, int width);
+void RAWToYJRow_AVX2(const uint8_t* src_raw, uint8_t* dst_yj, int width);
void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width);
void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width);
void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width);
@@ -1194,16 +1208,16 @@ void RGB565ToYRow_MMI(const uint8_t* src_rgb565, uint8_t* dst_y, int width);
void ARGB1555ToYRow_MMI(const uint8_t* src_argb1555, uint8_t* dst_y, int width);
void ARGB4444ToYRow_MMI(const uint8_t* src_argb4444, uint8_t* dst_y, int width);
-void ARGBToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width);
-void ARGBToYJRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width);
-void RGBAToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width);
-void BGRAToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width);
-void ABGRToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width);
-void RGBAToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width);
-void RGB24ToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width);
-void RGB24ToYJRow_C(const uint8_t* src_argb, uint8_t* dst_yj, int width);
-void RAWToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width);
-void RAWToYJRow_C(const uint8_t* src_argb, uint8_t* dst_yj, int width);
+void ARGBToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void ARGBToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void RGBAToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void BGRAToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void ABGRToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void RGBAToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void RGB24ToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void RGB24ToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void RAWToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
+void RAWToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width);
void RGB565ToYRow_C(const uint8_t* src_rgb565, uint8_t* dst_y, int width);
void ARGB1555ToYRow_C(const uint8_t* src_argb1555, uint8_t* dst_y, int width);
void ARGB4444ToYRow_C(const uint8_t* src_argb4444, uint8_t* dst_y, int width);
@@ -1305,42 +1319,42 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba,
uint8_t* dst_v,
int width);
void ARGBToUVRow_Any_AVX2(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ABGRToUVRow_Any_AVX2(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ARGBToUVJRow_Any_AVX2(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ARGBToUVRow_Any_SSSE3(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ARGBToUVJRow_Any_SSSE3(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void BGRAToUVRow_Any_SSSE3(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ABGRToUVRow_Any_SSSE3(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void RGBAToUVRow_Any_SSSE3(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -1349,7 +1363,7 @@ void ARGBToUV444Row_Any_NEON(const uint8_t* src_ptr,
uint8_t* dst_v,
int width);
void ARGBToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -1372,47 +1386,47 @@ void ARGBToUVRow_Any_MMI(const uint8_t* src_ptr,
uint8_t* dst_v,
int width);
void ARGBToUVJRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void BGRAToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ABGRToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void RGBAToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void RGB24ToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void RAWToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void RGB565ToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ARGB1555ToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
void ARGB4444ToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -1621,7 +1635,7 @@ void MirrorSplitUVRow_C(const uint8_t* src_uv,
void ARGBMirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width);
void ARGBMirrorRow_SSE2(const uint8_t* src, uint8_t* dst, int width);
-void ARGBMirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width);
+void ARGBMirrorRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width);
void ARGBMirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width);
void ARGBMirrorRow_MMI(const uint8_t* src, uint8_t* dst, int width);
void ARGBMirrorRow_C(const uint8_t* src, uint8_t* dst, int width);
@@ -1637,9 +1651,13 @@ void ARGBMirrorRow_Any_NEON(const uint8_t* src_ptr,
void ARGBMirrorRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void ARGBMirrorRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
-void RGB24MirrorRow_SSSE3(const uint8_t* src, uint8_t* dst, int width);
-void RGB24MirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width);
-void RGB24MirrorRow_C(const uint8_t* src, uint8_t* dst, int width);
+void RGB24MirrorRow_SSSE3(const uint8_t* src_rgb24,
+ uint8_t* dst_rgb24,
+ int width);
+void RGB24MirrorRow_NEON(const uint8_t* src_rgb24,
+ uint8_t* dst_rgb24,
+ int width);
+void RGB24MirrorRow_C(const uint8_t* src_rgb24, uint8_t* dst_rgb24, int width);
void RGB24MirrorRow_Any_SSSE3(const uint8_t* src_ptr,
uint8_t* dst_ptr,
int width);
@@ -1860,23 +1878,23 @@ void MergeARGBRow_NEON(const uint8_t* src_r,
const uint8_t* src_a,
uint8_t* dst_argb,
int width);
-void MergeARGBRow_Any_SSE2(const uint8_t* src_r,
- const uint8_t* src_g,
- const uint8_t* src_b,
- const uint8_t* src_a,
- uint8_t* dst_argb,
+void MergeARGBRow_Any_SSE2(const uint8_t* y_buf,
+ const uint8_t* u_buf,
+ const uint8_t* v_buf,
+ const uint8_t* a_buf,
+ uint8_t* dst_ptr,
int width);
-void MergeARGBRow_Any_AVX2(const uint8_t* src_r,
- const uint8_t* src_g,
- const uint8_t* src_b,
- const uint8_t* src_a,
- uint8_t* dst_argb,
+void MergeARGBRow_Any_AVX2(const uint8_t* y_buf,
+ const uint8_t* u_buf,
+ const uint8_t* v_buf,
+ const uint8_t* a_buf,
+ uint8_t* dst_ptr,
int width);
-void MergeARGBRow_Any_NEON(const uint8_t* src_r,
- const uint8_t* src_g,
- const uint8_t* src_b,
- const uint8_t* src_a,
- uint8_t* dst_argb,
+void MergeARGBRow_Any_NEON(const uint8_t* y_buf,
+ const uint8_t* u_buf,
+ const uint8_t* v_buf,
+ const uint8_t* a_buf,
+ uint8_t* dst_ptr,
int width);
void SplitARGBRow_C(const uint8_t* src_argb,
uint8_t* dst_r,
@@ -1902,31 +1920,31 @@ void SplitARGBRow_AVX2(const uint8_t* src_argb,
uint8_t* dst_b,
uint8_t* dst_a,
int width);
-void SplitARGBRow_NEON(const uint8_t* src_argb,
+void SplitARGBRow_NEON(const uint8_t* src_rgba,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
uint8_t* dst_a,
int width);
-void SplitARGBRow_Any_SSE2(const uint8_t* src_argb,
+void SplitARGBRow_Any_SSE2(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
uint8_t* dst_a,
int width);
-void SplitARGBRow_Any_SSSE3(const uint8_t* src_argb,
+void SplitARGBRow_Any_SSSE3(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
uint8_t* dst_a,
int width);
-void SplitARGBRow_Any_AVX2(const uint8_t* src_argb,
+void SplitARGBRow_Any_AVX2(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
uint8_t* dst_a,
int width);
-void SplitARGBRow_Any_NEON(const uint8_t* src_argb,
+void SplitARGBRow_Any_NEON(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
@@ -1952,20 +1970,20 @@ void MergeXRGBRow_NEON(const uint8_t* src_r,
const uint8_t* src_b,
uint8_t* dst_argb,
int width);
-void MergeXRGBRow_Any_SSE2(const uint8_t* src_r,
- const uint8_t* src_g,
- const uint8_t* src_b,
- uint8_t* dst_argb,
+void MergeXRGBRow_Any_SSE2(const uint8_t* y_buf,
+ const uint8_t* u_buf,
+ const uint8_t* v_buf,
+ uint8_t* dst_ptr,
int width);
-void MergeXRGBRow_Any_AVX2(const uint8_t* src_r,
- const uint8_t* src_g,
- const uint8_t* src_b,
- uint8_t* dst_argb,
+void MergeXRGBRow_Any_AVX2(const uint8_t* y_buf,
+ const uint8_t* u_buf,
+ const uint8_t* v_buf,
+ uint8_t* dst_ptr,
int width);
-void MergeXRGBRow_Any_NEON(const uint8_t* src_r,
- const uint8_t* src_g,
- const uint8_t* src_b,
- uint8_t* dst_argb,
+void MergeXRGBRow_Any_NEON(const uint8_t* y_buf,
+ const uint8_t* u_buf,
+ const uint8_t* v_buf,
+ uint8_t* dst_ptr,
int width);
void SplitXRGBRow_C(const uint8_t* src_argb,
uint8_t* dst_r,
@@ -1987,32 +2005,205 @@ void SplitXRGBRow_AVX2(const uint8_t* src_argb,
uint8_t* dst_g,
uint8_t* dst_b,
int width);
-void SplitXRGBRow_NEON(const uint8_t* src_argb,
+void SplitXRGBRow_NEON(const uint8_t* src_rgba,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
int width);
-void SplitXRGBRow_Any_SSE2(const uint8_t* src_argb,
+void SplitXRGBRow_Any_SSE2(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
int width);
-void SplitXRGBRow_Any_SSSE3(const uint8_t* src_argb,
+void SplitXRGBRow_Any_SSSE3(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
int width);
-void SplitXRGBRow_Any_AVX2(const uint8_t* src_argb,
+void SplitXRGBRow_Any_AVX2(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
int width);
-void SplitXRGBRow_Any_NEON(const uint8_t* src_argb,
+void SplitXRGBRow_Any_NEON(const uint8_t* src_ptr,
uint8_t* dst_r,
uint8_t* dst_g,
uint8_t* dst_b,
int width);
+void MergeXR30Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int depth,
+ int width);
+void MergeAR64Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint16_t* dst_ar64,
+ int depth,
+ int width);
+void MergeARGB16To8Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint8_t* dst_argb,
+ int depth,
+ int width);
+void MergeXR64Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint16_t* dst_ar64,
+ int depth,
+ int width);
+void MergeXRGB16To8Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_argb,
+ int depth,
+ int width);
+void MergeXR30Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int depth,
+ int width);
+void MergeAR64Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint16_t* dst_ar64,
+ int depth,
+ int width);
+void MergeARGB16To8Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint8_t* dst_argb,
+ int depth,
+ int width);
+void MergeXR64Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint16_t* dst_ar64,
+ int depth,
+ int width);
+void MergeXRGB16To8Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_argb,
+ int depth,
+ int width);
+void MergeXR30Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int depth,
+ int width);
+void MergeXR30Row_10_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int /* depth */,
+ int width);
+void MergeAR64Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint16_t* dst_ar64,
+ int depth,
+ int width);
+void MergeARGB16To8Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint8_t* dst_argb,
+ int depth,
+ int width);
+void MergeXR64Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint16_t* dst_ar64,
+ int depth,
+ int width);
+void MergeXRGB16To8Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_argb,
+ int depth,
+ int width);
+void MergeXR30Row_Any_AVX2(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ uint8_t* dst_ptr,
+ int depth,
+ int width);
+void MergeAR64Row_Any_AVX2(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ const uint16_t* a_buf,
+ uint16_t* dst_ptr,
+ int depth,
+ int width);
+void MergeXR64Row_Any_AVX2(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ uint16_t* dst_ptr,
+ int depth,
+ int width);
+void MergeARGB16To8Row_Any_AVX2(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_ptr,
+ int depth,
+ int width);
+void MergeXRGB16To8Row_Any_AVX2(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ uint8_t* dst_ptr,
+ int depth,
+ int width);
+void MergeXR30Row_Any_NEON(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ uint8_t* dst_ptr,
+ int depth,
+ int width);
+void MergeXR30Row_10_Any_NEON(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ uint8_t* dst_ptr,
+ int depth,
+ int width);
+void MergeAR64Row_Any_NEON(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ const uint16_t* a_buf,
+ uint16_t* dst_ptr,
+ int depth,
+ int width);
+void MergeARGB16To8Row_Any_NEON(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_ptr,
+ int depth,
+ int width);
+void MergeXR64Row_Any_NEON(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ uint16_t* dst_ptr,
+ int depth,
+ int width);
+void MergeXRGB16To8Row_Any_NEON(const uint16_t* r_buf,
+ const uint16_t* g_buf,
+ const uint16_t* b_buf,
+ uint8_t* dst_ptr,
+ int depth,
+ int width);
+
void MergeUVRow_16_C(const uint16_t* src_u,
const uint16_t* src_v,
uint16_t* dst_uv,
@@ -2024,10 +2215,10 @@ void MergeUVRow_16_AVX2(const uint16_t* src_u,
int depth,
int width);
void MergeUVRow_16_Any_AVX2(const uint16_t* src_u,
- const uint16_t* src_v,
- uint16_t* dst_uv,
- int depth,
- int width);
+ const uint16_t* src_v,
+ uint16_t* dst_uv,
+ int depth,
+ int width);
void MergeUVRow_16_NEON(const uint16_t* src_u,
const uint16_t* src_v,
uint16_t* dst_uv,
@@ -2073,16 +2264,16 @@ void MultiplyRow_16_AVX2(const uint16_t* src_y,
uint16_t* dst_y,
int scale,
int width);
-void MultiplyRow_16_Any_AVX2(const uint16_t* src_y,
- uint16_t* dst_y,
+void MultiplyRow_16_Any_AVX2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
int scale,
int width);
void MultiplyRow_16_NEON(const uint16_t* src_y,
uint16_t* dst_y,
int scale,
int width);
-void MultiplyRow_16_Any_NEON(const uint16_t* src_y,
- uint16_t* dst_y,
+void MultiplyRow_16_Any_NEON(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
int scale,
int width);
@@ -2094,16 +2285,16 @@ void DivideRow_16_AVX2(const uint16_t* src_y,
uint16_t* dst_y,
int scale,
int width);
-void DivideRow_16_Any_AVX2(const uint16_t* src_y,
- uint16_t* dst_y,
+void DivideRow_16_Any_AVX2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
int scale,
int width);
void DivideRow_16_NEON(const uint16_t* src_y,
uint16_t* dst_y,
int scale,
int width);
-void DivideRow_16_Any_NEON(const uint16_t* src_y,
- uint16_t* dst_y,
+void DivideRow_16_Any_NEON(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
int scale,
int width);
@@ -2527,6 +2718,71 @@ void ARGBToARGB4444Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width);
void ABGRToAR30Row_C(const uint8_t* src_abgr, uint8_t* dst_ar30, int width);
void ARGBToAR30Row_C(const uint8_t* src_argb, uint8_t* dst_ar30, int width);
+void ARGBToAR64Row_C(const uint8_t* src_argb, uint16_t* dst_ar64, int width);
+void ARGBToAB64Row_C(const uint8_t* src_argb, uint16_t* dst_ab64, int width);
+void AR64ToARGBRow_C(const uint16_t* src_ar64, uint8_t* dst_argb, int width);
+void AB64ToARGBRow_C(const uint16_t* src_ab64, uint8_t* dst_argb, int width);
+void AR64ShuffleRow_C(const uint8_t* src_ar64,
+ uint8_t* dst_ar64,
+ const uint8_t* shuffler,
+ int width);
+void ARGBToAR64Row_SSSE3(const uint8_t* src_argb,
+ uint16_t* dst_ar64,
+ int width);
+void ARGBToAB64Row_SSSE3(const uint8_t* src_argb,
+ uint16_t* dst_ab64,
+ int width);
+void AR64ToARGBRow_SSSE3(const uint16_t* src_ar64,
+ uint8_t* dst_argb,
+ int width);
+void AB64ToARGBRow_SSSE3(const uint16_t* src_ab64,
+ uint8_t* dst_argb,
+ int width);
+void ARGBToAR64Row_AVX2(const uint8_t* src_argb, uint16_t* dst_ar64, int width);
+void ARGBToAB64Row_AVX2(const uint8_t* src_argb, uint16_t* dst_ab64, int width);
+void AR64ToARGBRow_AVX2(const uint16_t* src_ar64, uint8_t* dst_argb, int width);
+void AB64ToARGBRow_AVX2(const uint16_t* src_ab64, uint8_t* dst_argb, int width);
+void ARGBToAR64Row_NEON(const uint8_t* src_argb, uint16_t* dst_ar64, int width);
+void ARGBToAB64Row_NEON(const uint8_t* src_argb, uint16_t* dst_ab64, int width);
+void AR64ToARGBRow_NEON(const uint16_t* src_ar64, uint8_t* dst_argb, int width);
+void AB64ToARGBRow_NEON(const uint16_t* src_ab64, uint8_t* dst_argb, int width);
+void ARGBToAR64Row_Any_SSSE3(const uint8_t* src_ptr,
+ uint16_t* dst_ptr,
+ int width);
+void ARGBToAB64Row_Any_SSSE3(const uint8_t* src_ptr,
+ uint16_t* dst_ptr,
+ int width);
+void AR64ToARGBRow_Any_SSSE3(const uint16_t* src_ptr,
+ uint8_t* dst_ptr,
+ int width);
+void AB64ToARGBRow_Any_SSSE3(const uint16_t* src_ptr,
+ uint8_t* dst_ptr,
+ int width);
+void ARGBToAR64Row_Any_AVX2(const uint8_t* src_ptr,
+ uint16_t* dst_ptr,
+ int width);
+void ARGBToAB64Row_Any_AVX2(const uint8_t* src_ptr,
+ uint16_t* dst_ptr,
+ int width);
+void AR64ToARGBRow_Any_AVX2(const uint16_t* src_ptr,
+ uint8_t* dst_ptr,
+ int width);
+void AB64ToARGBRow_Any_AVX2(const uint16_t* src_ptr,
+ uint8_t* dst_ptr,
+ int width);
+void ARGBToAR64Row_Any_NEON(const uint8_t* src_ptr,
+ uint16_t* dst_ptr,
+ int width);
+void ARGBToAB64Row_Any_NEON(const uint8_t* src_ptr,
+ uint16_t* dst_ptr,
+ int width);
+void AR64ToARGBRow_Any_NEON(const uint16_t* src_ptr,
+ uint8_t* dst_ptr,
+ int width);
+void AB64ToARGBRow_Any_NEON(const uint16_t* src_ptr,
+ uint8_t* dst_ptr,
+ int width);
+
void J400ToARGBRow_SSE2(const uint8_t* src_y, uint8_t* dst_argb, int width);
void J400ToARGBRow_AVX2(const uint8_t* src_y, uint8_t* dst_argb, int width);
void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width);
@@ -2575,6 +2831,44 @@ void I210ToARGBRow_C(const uint16_t* src_y,
uint8_t* rgb_buf,
const struct YuvConstants* yuvconstants,
int width);
+void I212ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I212ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I210AlphaToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ const uint16_t* src_a,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410AlphaToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ const uint16_t* src_a,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width);
void I444AlphaToARGBRow_C(const uint8_t* src_y,
const uint8_t* src_u,
const uint8_t* src_v,
@@ -2626,6 +2920,27 @@ void UYVYToARGBRow_C(const uint8_t* src_uyvy,
uint8_t* rgb_buf,
const struct YuvConstants* yuvconstants,
int width);
+void P210ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+
void I422ToRGBARow_C(const uint8_t* src_y,
const uint8_t* src_u,
const uint8_t* src_v,
@@ -2705,6 +3020,44 @@ void I210ToARGBRow_SSSE3(const uint16_t* y_buf,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width);
+void I212ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I212ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I210AlphaToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410AlphaToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
void I422ToAR30Row_AVX2(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -2723,6 +3076,44 @@ void I210ToAR30Row_AVX2(const uint16_t* y_buf,
uint8_t* dst_ar30,
const struct YuvConstants* yuvconstants,
int width);
+void I212ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I212ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I210AlphaToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410AlphaToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
void I444AlphaToARGBRow_SSSE3(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -2821,6 +3212,48 @@ void UYVYToARGBRow_AVX2(const uint8_t* uyvy_buf,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width);
+
+void P210ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+
void I422ToRGBARow_SSSE3(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -2923,6 +3356,44 @@ void I210ToARGBRow_Any_SSSE3(const uint16_t* y_buf,
uint8_t* dst_ptr,
const struct YuvConstants* yuvconstants,
int width);
+void I212ToAR30Row_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I212ToARGBRow_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToAR30Row_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToARGBRow_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I210AlphaToARGBRow_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410AlphaToARGBRow_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
void I422ToAR30Row_Any_AVX2(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -2941,6 +3412,44 @@ void I210ToAR30Row_Any_AVX2(const uint16_t* y_buf,
uint8_t* dst_ptr,
const struct YuvConstants* yuvconstants,
int width);
+void I212ToARGBRow_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I212ToAR30Row_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToAR30Row_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410ToARGBRow_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I210AlphaToARGBRow_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void I410AlphaToARGBRow_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
void I444AlphaToARGBRow_Any_SSSE3(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -3039,6 +3548,46 @@ void UYVYToARGBRow_Any_AVX2(const uint8_t* src_ptr,
uint8_t* dst_ptr,
const struct YuvConstants* yuvconstants,
int width);
+void P210ToARGBRow_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToARGBRow_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToAR30Row_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToAR30Row_Any_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToARGBRow_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToARGBRow_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToAR30Row_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToAR30Row_Any_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ptr,
+ const struct YuvConstants* yuvconstants,
+ int width);
void I422ToRGBARow_Any_SSSE3(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -3120,15 +3669,15 @@ void I400ToARGBRow_MMI(const uint8_t* src_y,
int width);
void I400ToARGBRow_Any_SSE2(const uint8_t* src_ptr,
uint8_t* dst_ptr,
- const struct YuvConstants* yuvconstants,
+ const struct YuvConstants* param,
int width);
void I400ToARGBRow_Any_AVX2(const uint8_t* src_ptr,
uint8_t* dst_ptr,
- const struct YuvConstants* yuvconstants,
+ const struct YuvConstants* param,
int width);
void I400ToARGBRow_Any_NEON(const uint8_t* src_ptr,
uint8_t* dst_ptr,
- const struct YuvConstants* yuvconstants,
+ const struct YuvConstants* param,
int width);
void I400ToARGBRow_Any_MSA(const uint8_t* src_ptr,
uint8_t* dst_ptr,
@@ -3140,11 +3689,11 @@ void I400ToARGBRow_Any_MMI(const uint8_t* src_ptr,
int width);
// ARGB preattenuated alpha blend.
-void ARGBBlendRow_SSSE3(const uint8_t* src_argb0,
+void ARGBBlendRow_SSSE3(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
-void ARGBBlendRow_NEON(const uint8_t* src_argb0,
+void ARGBBlendRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3156,7 +3705,7 @@ void ARGBBlendRow_MMI(const uint8_t* src_argb0,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
-void ARGBBlendRow_C(const uint8_t* src_argb0,
+void ARGBBlendRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3200,11 +3749,11 @@ void BlendPlaneRow_C(const uint8_t* src0,
// ARGB multiply images. Same API as Blend, but these require
// pointer and width alignment for SSE2.
-void ARGBMultiplyRow_C(const uint8_t* src_argb0,
+void ARGBMultiplyRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
-void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0,
+void ARGBMultiplyRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3212,7 +3761,7 @@ void ARGBMultiplyRow_Any_SSE2(const uint8_t* y_buf,
const uint8_t* uv_buf,
uint8_t* dst_ptr,
int width);
-void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0,
+void ARGBMultiplyRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3220,7 +3769,7 @@ void ARGBMultiplyRow_Any_AVX2(const uint8_t* y_buf,
const uint8_t* uv_buf,
uint8_t* dst_ptr,
int width);
-void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
+void ARGBMultiplyRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3246,11 +3795,11 @@ void ARGBMultiplyRow_Any_MMI(const uint8_t* y_buf,
int width);
// ARGB add images.
-void ARGBAddRow_C(const uint8_t* src_argb0,
+void ARGBAddRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
-void ARGBAddRow_SSE2(const uint8_t* src_argb0,
+void ARGBAddRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3258,7 +3807,7 @@ void ARGBAddRow_Any_SSE2(const uint8_t* y_buf,
const uint8_t* uv_buf,
uint8_t* dst_ptr,
int width);
-void ARGBAddRow_AVX2(const uint8_t* src_argb0,
+void ARGBAddRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3266,7 +3815,7 @@ void ARGBAddRow_Any_AVX2(const uint8_t* y_buf,
const uint8_t* uv_buf,
uint8_t* dst_ptr,
int width);
-void ARGBAddRow_NEON(const uint8_t* src_argb0,
+void ARGBAddRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3293,11 +3842,11 @@ void ARGBAddRow_Any_MMI(const uint8_t* y_buf,
// ARGB subtract images. Same API as Blend, but these require
// pointer and width alignment for SSE2.
-void ARGBSubtractRow_C(const uint8_t* src_argb0,
+void ARGBSubtractRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
-void ARGBSubtractRow_SSE2(const uint8_t* src_argb0,
+void ARGBSubtractRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3305,7 +3854,7 @@ void ARGBSubtractRow_Any_SSE2(const uint8_t* y_buf,
const uint8_t* uv_buf,
uint8_t* dst_ptr,
int width);
-void ARGBSubtractRow_AVX2(const uint8_t* src_argb0,
+void ARGBSubtractRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3313,7 +3862,7 @@ void ARGBSubtractRow_Any_AVX2(const uint8_t* y_buf,
const uint8_t* uv_buf,
uint8_t* dst_ptr,
int width);
-void ARGBSubtractRow_NEON(const uint8_t* src_argb0,
+void ARGBSubtractRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width);
@@ -3520,9 +4069,9 @@ void NV21ToRGB24Row_Any_NEON(const uint8_t* y_buf,
uint8_t* dst_ptr,
const struct YuvConstants* yuvconstants,
int width);
-void NV21ToYUV24Row_Any_NEON(const uint8_t* src_y,
- const uint8_t* src_vu,
- uint8_t* dst_yuv24,
+void NV21ToYUV24Row_Any_NEON(const uint8_t* y_buf,
+ const uint8_t* uv_buf,
+ uint8_t* dst_ptr,
int width);
void NV12ToRGB565Row_Any_NEON(const uint8_t* y_buf,
const uint8_t* uv_buf,
@@ -3537,6 +4086,46 @@ void UYVYToARGBRow_Any_NEON(const uint8_t* src_ptr,
uint8_t* dst_ptr,
const struct YuvConstants* yuvconstants,
int width);
+void P210ToARGBRow_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToARGBRow_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToAR30Row_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToAR30Row_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToARGBRow_Any_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToARGBRow_Any_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P210ToAR30Row_Any_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
+void P410ToAR30Row_Any_NEON(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width);
void I444ToARGBRow_Any_MSA(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -3684,7 +4273,7 @@ void YUY2ToUV422Row_C(const uint8_t* src_yuy2,
int width);
void YUY2ToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void YUY2ToUVRow_Any_AVX2(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -3694,7 +4283,7 @@ void YUY2ToUV422Row_Any_AVX2(const uint8_t* src_ptr,
int width);
void YUY2ToYRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void YUY2ToUVRow_Any_SSE2(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -3704,7 +4293,7 @@ void YUY2ToUV422Row_Any_SSE2(const uint8_t* src_ptr,
int width);
void YUY2ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void YUY2ToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -3805,7 +4394,7 @@ void UYVYToUV422Row_C(const uint8_t* src_uyvy,
int width);
void UYVYToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void UYVYToUVRow_Any_AVX2(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -3815,7 +4404,7 @@ void UYVYToUV422Row_Any_AVX2(const uint8_t* src_ptr,
int width);
void UYVYToYRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void UYVYToUVRow_Any_SSE2(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -3825,7 +4414,7 @@ void UYVYToUV422Row_Any_SSE2(const uint8_t* src_ptr,
int width);
void UYVYToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void UYVYToUVRow_Any_NEON(const uint8_t* src_ptr,
- int src_stride_ptr,
+ int src_stride,
uint8_t* dst_u,
uint8_t* dst_v,
int width);
@@ -3862,29 +4451,29 @@ void SwapUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_vu, int width);
void SwapUVRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
void AYUVToYRow_C(const uint8_t* src_ayuv, uint8_t* dst_y, int width);
void AYUVToUVRow_C(const uint8_t* src_ayuv,
- int stride_ayuv,
+ int src_stride_ayuv,
uint8_t* dst_uv,
int width);
void AYUVToVURow_C(const uint8_t* src_ayuv,
- int stride_ayuv,
+ int src_stride_ayuv,
uint8_t* dst_vu,
int width);
void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width);
void AYUVToUVRow_NEON(const uint8_t* src_ayuv,
- int stride_ayuv,
+ int src_stride_ayuv,
uint8_t* dst_uv,
int width);
void AYUVToVURow_NEON(const uint8_t* src_ayuv,
- int stride_ayuv,
+ int src_stride_ayuv,
uint8_t* dst_vu,
int width);
-void AYUVToYRow_Any_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width);
-void AYUVToUVRow_Any_NEON(const uint8_t* src_ayuv,
- int stride_ayuv,
- uint8_t* dst_uv,
+void AYUVToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width);
+void AYUVToUVRow_Any_NEON(const uint8_t* src_ptr,
+ int src_stride,
+ uint8_t* dst_vu,
int width);
-void AYUVToVURow_Any_NEON(const uint8_t* src_ayuv,
- int stride_ayuv,
+void AYUVToVURow_Any_NEON(const uint8_t* src_ptr,
+ int src_stride,
uint8_t* dst_vu,
int width);
diff --git a/third_party/libyuv/include/libyuv/scale_row.h b/third_party/libyuv/include/libyuv/scale_row.h
index 18ffb546a3..461ac36f33 100644
--- a/third_party/libyuv/include/libyuv/scale_row.h
+++ b/third_party/libyuv/include/libyuv/scale_row.h
@@ -74,15 +74,16 @@ extern "C" {
// The following are available for gcc/clang x86 platforms:
// TODO(fbarchard): Port to Visual C
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
#define HAS_SCALEUVROWDOWN2BOX_SSSE3
#define HAS_SCALEROWUP2LINEAR_SSE2
#define HAS_SCALEROWUP2LINEAR_SSSE3
#define HAS_SCALEROWUP2BILINEAR_SSE2
#define HAS_SCALEROWUP2BILINEAR_SSSE3
-#define HAS_SCALEROWUP2LINEAR_16_SSSE3
-#define HAS_SCALEROWUP2BILINEAR_16_SSSE3
+#define HAS_SCALEROWUP2LINEAR_12_SSSE3
+#define HAS_SCALEROWUP2BILINEAR_12_SSSE3
+#define HAS_SCALEROWUP2LINEAR_16_SSE2
+#define HAS_SCALEROWUP2BILINEAR_16_SSE2
#define HAS_SCALEUVROWUP2LINEAR_SSSE3
#define HAS_SCALEUVROWUP2BILINEAR_SSSE3
#define HAS_SCALEUVROWUP2LINEAR_16_SSE2
@@ -92,12 +93,14 @@ extern "C" {
// The following are available for gcc/clang x86 platforms, but
// require clang 3.4 or gcc 4.7.
// TODO(fbarchard): Port to Visual C
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || defined(__i386__)) && !defined(_MSC_VER) && \
+#if !defined(LIBYUV_DISABLE_X86) && \
+ (defined(__x86_64__) || defined(__i386__)) && \
(defined(CLANG_HAS_AVX2) || defined(GCC_HAS_AVX2))
#define HAS_SCALEUVROWDOWN2BOX_AVX2
#define HAS_SCALEROWUP2LINEAR_AVX2
#define HAS_SCALEROWUP2BILINEAR_AVX2
+#define HAS_SCALEROWUP2LINEAR_12_AVX2
+#define HAS_SCALEROWUP2BILINEAR_12_AVX2
#define HAS_SCALEROWUP2LINEAR_16_AVX2
#define HAS_SCALEROWUP2BILINEAR_16_AVX2
#define HAS_SCALEUVROWUP2LINEAR_AVX2
@@ -134,6 +137,8 @@ extern "C" {
#define HAS_SCALEUVROWDOWNEVEN_NEON
#define HAS_SCALEROWUP2LINEAR_NEON
#define HAS_SCALEROWUP2BILINEAR_NEON
+#define HAS_SCALEROWUP2LINEAR_12_NEON
+#define HAS_SCALEROWUP2BILINEAR_12_NEON
#define HAS_SCALEROWUP2LINEAR_16_NEON
#define HAS_SCALEROWUP2BILINEAR_16_NEON
#define HAS_SCALEUVROWUP2LINEAR_NEON
@@ -611,14 +616,22 @@ void ScaleRowUp2_Bilinear_SSE2(const uint8_t* src_ptr,
uint8_t* dst_ptr,
ptrdiff_t dst_stride,
int dst_width);
-void ScaleRowUp2_Linear_16_SSSE3(const uint16_t* src_ptr,
+void ScaleRowUp2_Linear_12_SSSE3(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width);
-void ScaleRowUp2_Bilinear_16_SSSE3(const uint16_t* src_ptr,
+void ScaleRowUp2_Bilinear_12_SSSE3(const uint16_t* src_ptr,
ptrdiff_t src_stride,
uint16_t* dst_ptr,
ptrdiff_t dst_stride,
int dst_width);
+void ScaleRowUp2_Linear_16_SSE2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width);
+void ScaleRowUp2_Bilinear_16_SSE2(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width);
void ScaleRowUp2_Linear_SSSE3(const uint8_t* src_ptr,
uint8_t* dst_ptr,
int dst_width);
@@ -635,6 +648,14 @@ void ScaleRowUp2_Bilinear_AVX2(const uint8_t* src_ptr,
uint8_t* dst_ptr,
ptrdiff_t dst_stride,
int dst_width);
+void ScaleRowUp2_Linear_12_AVX2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width);
+void ScaleRowUp2_Bilinear_12_AVX2(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width);
void ScaleRowUp2_Linear_16_AVX2(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width);
@@ -651,9 +672,17 @@ void ScaleRowUp2_Bilinear_Any_SSE2(const uint8_t* src_ptr,
uint8_t* dst_ptr,
ptrdiff_t dst_stride,
int dst_width);
-void ScaleRowUp2_Linear_16_Any_SSSE3(const uint16_t* src_ptr,
+void ScaleRowUp2_Linear_12_Any_SSSE3(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width);
+void ScaleRowUp2_Bilinear_12_Any_SSSE3(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width);
+void ScaleRowUp2_Linear_16_Any_SSE2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width);
void ScaleRowUp2_Bilinear_16_Any_SSSE3(const uint16_t* src_ptr,
ptrdiff_t src_stride,
uint16_t* dst_ptr,
@@ -675,6 +704,14 @@ void ScaleRowUp2_Bilinear_Any_AVX2(const uint8_t* src_ptr,
uint8_t* dst_ptr,
ptrdiff_t dst_stride,
int dst_width);
+void ScaleRowUp2_Linear_12_Any_AVX2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width);
+void ScaleRowUp2_Bilinear_12_Any_AVX2(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width);
void ScaleRowUp2_Linear_16_Any_AVX2(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width);
@@ -1424,6 +1461,14 @@ void ScaleRowUp2_Bilinear_NEON(const uint8_t* src_ptr,
uint8_t* dst_ptr,
ptrdiff_t dst_stride,
int dst_width);
+void ScaleRowUp2_Linear_12_NEON(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width);
+void ScaleRowUp2_Bilinear_12_NEON(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width);
void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width);
@@ -1440,6 +1485,14 @@ void ScaleRowUp2_Bilinear_Any_NEON(const uint8_t* src_ptr,
uint8_t* dst_ptr,
ptrdiff_t dst_stride,
int dst_width);
+void ScaleRowUp2_Linear_12_Any_NEON(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width);
+void ScaleRowUp2_Bilinear_12_Any_NEON(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width);
void ScaleRowUp2_Linear_16_Any_NEON(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width);
diff --git a/third_party/libyuv/include/libyuv/version.h b/third_party/libyuv/include/libyuv/version.h
index e59b316a60..f713c47704 100644
--- a/third_party/libyuv/include/libyuv/version.h
+++ b/third_party/libyuv/include/libyuv/version.h
@@ -11,6 +11,6 @@
#ifndef INCLUDE_LIBYUV_VERSION_H_
#define INCLUDE_LIBYUV_VERSION_H_
-#define LIBYUV_VERSION 1779
+#define LIBYUV_VERSION 1788
#endif // INCLUDE_LIBYUV_VERSION_H_
diff --git a/third_party/libyuv/include/libyuv/video_common.h b/third_party/libyuv/include/libyuv/video_common.h
index 0da3fb5544..32b8a5210b 100644
--- a/third_party/libyuv/include/libyuv/video_common.h
+++ b/third_party/libyuv/include/libyuv/video_common.h
@@ -65,12 +65,14 @@ enum FourCC {
// 1 Secondary YUV format: row biplanar. deprecated.
FOURCC_M420 = FOURCC('M', '4', '2', '0'),
- // 11 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp, 1 10 bpc
+ // 13 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp, 1 10 bpc 2 64 bpp
FOURCC_ARGB = FOURCC('A', 'R', 'G', 'B'),
FOURCC_BGRA = FOURCC('B', 'G', 'R', 'A'),
FOURCC_ABGR = FOURCC('A', 'B', 'G', 'R'),
FOURCC_AR30 = FOURCC('A', 'R', '3', '0'), // 10 bit per channel. 2101010.
FOURCC_AB30 = FOURCC('A', 'B', '3', '0'), // ABGR version of 10 bit
+ FOURCC_AR64 = FOURCC('A', 'R', '6', '4'), // 16 bit per channel.
+ FOURCC_AB64 = FOURCC('A', 'B', '6', '4'), // ABGR version of 16 bit
FOURCC_24BG = FOURCC('2', '4', 'B', 'G'),
FOURCC_RAW = FOURCC('r', 'a', 'w', ' '),
FOURCC_RGBA = FOURCC('R', 'G', 'B', 'A'),
@@ -163,6 +165,8 @@ enum FourCCBpp {
FOURCC_BPP_RGBA = 32,
FOURCC_BPP_AR30 = 32,
FOURCC_BPP_AB30 = 32,
+ FOURCC_BPP_AR64 = 64,
+ FOURCC_BPP_AB64 = 64,
FOURCC_BPP_24BG = 24,
FOURCC_BPP_RAW = 24,
FOURCC_BPP_RGBP = 16,
diff --git a/third_party/libyuv/source/compare_common.cc b/third_party/libyuv/source/compare_common.cc
index d4b170ad98..d1cab8d2b4 100644
--- a/third_party/libyuv/source/compare_common.cc
+++ b/third_party/libyuv/source/compare_common.cc
@@ -17,36 +17,6 @@ namespace libyuv {
extern "C" {
#endif
-#if ORIGINAL_OPT
-uint32_t HammingDistance_C1(const uint8_t* src_a,
- const uint8_t* src_b,
- int count) {
- uint32_t diff = 0u;
-
- int i;
- for (i = 0; i < count; ++i) {
- int x = src_a[i] ^ src_b[i];
- if (x & 1)
- ++diff;
- if (x & 2)
- ++diff;
- if (x & 4)
- ++diff;
- if (x & 8)
- ++diff;
- if (x & 16)
- ++diff;
- if (x & 32)
- ++diff;
- if (x & 64)
- ++diff;
- if (x & 128)
- ++diff;
- }
- return diff;
-}
-#endif
-
// Hakmem method for hamming distance.
uint32_t HammingDistance_C(const uint8_t* src_a,
const uint8_t* src_b,
diff --git a/third_party/libyuv/source/compare_gcc.cc b/third_party/libyuv/source/compare_gcc.cc
index 6700f9697e..b834b42ac4 100644
--- a/third_party/libyuv/source/compare_gcc.cc
+++ b/third_party/libyuv/source/compare_gcc.cc
@@ -19,8 +19,7 @@ extern "C" {
#endif
// This module is for GCC x86 and x64.
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
#if defined(__x86_64__)
uint32_t HammingDistance_SSE42(const uint8_t* src_a,
diff --git a/third_party/libyuv/source/compare_win.cc b/third_party/libyuv/source/compare_win.cc
index d57d3d9d1c..9bb27f1dd1 100644
--- a/third_party/libyuv/source/compare_win.cc
+++ b/third_party/libyuv/source/compare_win.cc
@@ -22,8 +22,9 @@ namespace libyuv {
extern "C" {
#endif
-// This module is for 32 bit Visual C x86 and clangcl
-#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER)
+// This module is for 32 bit Visual C x86
+#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \
+ !defined(__clang__) && defined(_M_IX86)
uint32_t HammingDistance_SSE42(const uint8_t* src_a,
const uint8_t* src_b,
@@ -77,8 +78,7 @@ __declspec(naked) uint32_t
}
}
-// Visual C 2012 required for AVX2.
-#if _MSC_VER >= 1700
+#ifdef HAS_SUMSQUAREERROR_AVX2
// C4752: found Intel(R) Advanced Vector Extensions; consider using /arch:AVX.
#pragma warning(disable : 4752)
__declspec(naked) uint32_t
@@ -118,7 +118,7 @@ __declspec(naked) uint32_t
ret
}
}
-#endif // _MSC_VER >= 1700
+#endif // HAS_SUMSQUAREERROR_AVX2
uvec32 kHash16x33 = {0x92d9e201, 0, 0, 0}; // 33 ^ 16
uvec32 kHashMul0 = {
@@ -196,7 +196,7 @@ __declspec(naked) uint32_t
}
// Visual C 2012 required for AVX2.
-#if _MSC_VER >= 1700
+#ifdef HAS_HASHDJB2_AVX2
__declspec(naked) uint32_t
HashDjb2_AVX2(const uint8_t* src, int count, uint32_t seed) {
__asm {
@@ -231,7 +231,7 @@ __declspec(naked) uint32_t
ret
}
}
-#endif // _MSC_VER >= 1700
+#endif // HAS_HASHDJB2_AVX2
#endif // !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86)
diff --git a/third_party/libyuv/source/convert.cc b/third_party/libyuv/source/convert.cc
index 1bd596599b..69f7fb6e01 100644
--- a/third_party/libyuv/source/convert.cc
+++ b/third_party/libyuv/source/convert.cc
@@ -400,7 +400,7 @@ int I210ToI010(const uint16_t* src_y,
}
// Any I[420]1[02] to P[420]1[02] format with mirroring.
-static int Ix1xToPx1x(const uint16_t* src_y,
+static int IxxxToPxxx(const uint16_t* src_y,
int src_stride_y,
const uint16_t* src_u,
int src_stride_u,
@@ -441,7 +441,7 @@ int I010ToP010(const uint16_t* src_y,
int dst_stride_uv,
int width,
int height) {
- return Ix1xToPx1x(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v,
src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
width, height, 1, 1, 10);
}
@@ -459,7 +459,7 @@ int I210ToP210(const uint16_t* src_y,
int dst_stride_uv,
int width,
int height) {
- return Ix1xToPx1x(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v,
src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
width, height, 1, 0, 10);
}
@@ -477,7 +477,7 @@ int I012ToP012(const uint16_t* src_y,
int dst_stride_uv,
int width,
int height) {
- return Ix1xToPx1x(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v,
src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
width, height, 1, 1, 12);
}
@@ -495,7 +495,7 @@ int I212ToP212(const uint16_t* src_y,
int dst_stride_uv,
int width,
int height) {
- return Ix1xToPx1x(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v,
src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
width, height, 1, 0, 12);
}
@@ -1368,6 +1368,18 @@ int ARGBToI420(const uint8_t* src_argb,
src_argb = src_argb + (height - 1) * src_stride_argb;
src_stride_argb = -src_stride_argb;
}
+#if defined(HAS_ARGBTOYROW_NEON) && defined(HAS_ARGBTOUVROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBToYRow = ARGBToYRow_Any_NEON;
+ ARGBToUVRow = ARGBToUVRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToYRow = ARGBToYRow_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ ARGBToUVRow = ARGBToUVRow_NEON;
+ }
+ }
+ }
+#endif
#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3)
if (TestCpuFlag(kCpuHasSSSE3)) {
ARGBToUVRow = ARGBToUVRow_Any_SSSE3;
@@ -1388,22 +1400,6 @@ int ARGBToI420(const uint8_t* src_argb,
}
}
#endif
-#if defined(HAS_ARGBTOYROW_NEON)
- if (TestCpuFlag(kCpuHasNEON)) {
- ARGBToYRow = ARGBToYRow_Any_NEON;
- if (IS_ALIGNED(width, 8)) {
- ARGBToYRow = ARGBToYRow_NEON;
- }
- }
-#endif
-#if defined(HAS_ARGBTOUVROW_NEON)
- if (TestCpuFlag(kCpuHasNEON)) {
- ARGBToUVRow = ARGBToUVRow_Any_NEON;
- if (IS_ALIGNED(width, 16)) {
- ARGBToUVRow = ARGBToUVRow_NEON;
- }
- }
-#endif
#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI)
if (TestCpuFlag(kCpuHasMMI)) {
ARGBToYRow = ARGBToYRow_Any_MMI;
@@ -1771,7 +1767,7 @@ int RGB24ToI420(const uint8_t* src_rgb24,
}
// Neon version does direct RGB24 to YUV.
-#if defined(HAS_RGB24TOYROW_NEON)
+#if defined(HAS_RGB24TOYROW_NEON) && defined(HAS_RGB24TOUVROW_NEON)
if (TestCpuFlag(kCpuHasNEON)) {
RGB24ToUVRow = RGB24ToUVRow_Any_NEON;
RGB24ToYRow = RGB24ToYRow_Any_NEON;
@@ -1808,6 +1804,14 @@ int RGB24ToI420(const uint8_t* src_rgb24,
#endif
// Other platforms do intermediate conversion from RGB24 to ARGB.
#else
+#if defined(HAS_RGB24TOARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ RGB24ToARGBRow = RGB24ToARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ RGB24ToARGBRow = RGB24ToARGBRow_NEON;
+ }
+ }
+#endif
#if defined(HAS_RGB24TOARGBROW_SSSE3)
if (TestCpuFlag(kCpuHasSSSE3)) {
RGB24ToARGBRow = RGB24ToARGBRow_Any_SSSE3;
@@ -1816,6 +1820,18 @@ int RGB24ToI420(const uint8_t* src_rgb24,
}
}
#endif
+#if defined(HAS_ARGBTOYROW_NEON) && defined(HAS_ARGBTOUVROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBToUVRow = ARGBToUVRow_Any_NEON;
+ ARGBToYRow = ARGBToYRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToYRow = ARGBToYRow_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ ARGBToUVRow = ARGBToUVRow_NEON;
+ }
+ }
+ }
+#endif
#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3)
if (TestCpuFlag(kCpuHasSSSE3)) {
ARGBToUVRow = ARGBToUVRow_Any_SSSE3;
@@ -1960,6 +1976,14 @@ int RGB24ToJ420(const uint8_t* src_rgb24,
}
#endif
#else
+#if defined(HAS_RGB24TOARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ RGB24ToARGBRow = RGB24ToARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ RGB24ToARGBRow = RGB24ToARGBRow_NEON;
+ }
+ }
+#endif
#if defined(HAS_RGB24TOARGBROW_SSSE3)
if (TestCpuFlag(kCpuHasSSSE3)) {
RGB24ToARGBRow = RGB24ToARGBRow_Any_SSSE3;
@@ -1968,6 +1992,18 @@ int RGB24ToJ420(const uint8_t* src_rgb24,
}
}
#endif
+#if defined(HAS_ARGBTOYJROW_NEON) && defined(HAS_ARGBTOUVJROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBToUVJRow = ARGBToUVJRow_Any_NEON;
+ ARGBToYJRow = ARGBToYJRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToYJRow = ARGBToYJRow_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ ARGBToUVJRow = ARGBToUVJRow_NEON;
+ }
+ }
+ }
+#endif
#if defined(HAS_ARGBTOYJROW_SSSE3) && defined(HAS_ARGBTOUVJROW_SSSE3)
if (TestCpuFlag(kCpuHasSSSE3)) {
ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3;
@@ -2111,6 +2147,26 @@ int RAWToI420(const uint8_t* src_raw,
#endif
// Other platforms do intermediate conversion from RAW to ARGB.
#else
+#if defined(HAS_RAWTOARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ RAWToARGBRow = RAWToARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ RAWToARGBRow = RAWToARGBRow_NEON;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOYROW_NEON) && defined(HAS_ARGBTOUVROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBToUVRow = ARGBToUVRow_Any_NEON;
+ ARGBToYRow = ARGBToYRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToYRow = ARGBToYRow_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ ARGBToUVRow = ARGBToUVRow_NEON;
+ }
+ }
+ }
+#endif
#if defined(HAS_RAWTOARGBROW_SSSE3)
if (TestCpuFlag(kCpuHasSSSE3)) {
RAWToARGBRow = RAWToARGBRow_Any_SSSE3;
@@ -2186,6 +2242,178 @@ int RAWToI420(const uint8_t* src_raw,
return 0;
}
+// TODO(fbarchard): Use Matrix version to implement I420 and J420.
+// Convert RAW to J420.
+LIBYUV_API
+int RAWToJ420(const uint8_t* src_raw,
+ int src_stride_raw,
+ uint8_t* dst_y,
+ int dst_stride_y,
+ uint8_t* dst_u,
+ int dst_stride_u,
+ uint8_t* dst_v,
+ int dst_stride_v,
+ int width,
+ int height) {
+ int y;
+#if (defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON)) || \
+ defined(HAS_RAWTOYJROW_MSA) || defined(HAS_RAWTOYJROW_MMI)
+ void (*RAWToUVJRow)(const uint8_t* src_raw, int src_stride_raw,
+ uint8_t* dst_u, uint8_t* dst_v, int width) =
+ RAWToUVJRow_C;
+ void (*RAWToYJRow)(const uint8_t* src_raw, uint8_t* dst_y, int width) =
+ RAWToYJRow_C;
+#else
+ void (*RAWToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) =
+ RAWToARGBRow_C;
+ void (*ARGBToUVJRow)(const uint8_t* src_argb0, int src_stride_argb,
+ uint8_t* dst_u, uint8_t* dst_v, int width) =
+ ARGBToUVJRow_C;
+ void (*ARGBToYJRow)(const uint8_t* src_argb, uint8_t* dst_y, int width) =
+ ARGBToYJRow_C;
+#endif
+ if (!src_raw || !dst_y || !dst_u || !dst_v || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ src_raw = src_raw + (height - 1) * src_stride_raw;
+ src_stride_raw = -src_stride_raw;
+ }
+
+// Neon version does direct RAW to YUV.
+#if defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ RAWToUVJRow = RAWToUVJRow_Any_NEON;
+ RAWToYJRow = RAWToYJRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ RAWToYJRow = RAWToYJRow_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ RAWToUVJRow = RAWToUVJRow_NEON;
+ }
+ }
+ }
+// MMI and MSA version does direct RAW to YUV.
+#elif (defined(HAS_RAWTOYJROW_MMI) || defined(HAS_RAWTOYJROW_MSA))
+#if defined(HAS_RAWTOYJROW_MMI) && defined(HAS_RAWTOUVJROW_MMI)
+ if (TestCpuFlag(kCpuHasMMI)) {
+ RAWToUVJRow = RAWToUVJRow_Any_MMI;
+ RAWToYJRow = RAWToYJRow_Any_MMI;
+ if (IS_ALIGNED(width, 8)) {
+ RAWToYJRow = RAWToYJRow_MMI;
+ if (IS_ALIGNED(width, 16)) {
+ RAWToUVJRow = RAWToUVJRow_MMI;
+ }
+ }
+ }
+#endif
+#if defined(HAS_RAWTOYJROW_MSA) && defined(HAS_RAWTOUVJROW_MSA)
+ if (TestCpuFlag(kCpuHasMSA)) {
+ RAWToUVJRow = RAWToUVJRow_Any_MSA;
+ RAWToYJRow = RAWToYJRow_Any_MSA;
+ if (IS_ALIGNED(width, 16)) {
+ RAWToYJRow = RAWToYJRow_MSA;
+ RAWToUVJRow = RAWToUVJRow_MSA;
+ }
+ }
+#endif
+#else
+#if defined(HAS_RAWTOARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ RAWToARGBRow = RAWToARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ RAWToARGBRow = RAWToARGBRow_NEON;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOYJROW_NEON) && defined(HAS_ARGBTOUVJROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBToUVJRow = ARGBToUVJRow_Any_NEON;
+ ARGBToYJRow = ARGBToYJRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToYJRow = ARGBToYJRow_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ ARGBToUVJRow = ARGBToUVJRow_NEON;
+ }
+ }
+ }
+#endif
+#if defined(HAS_RAWTOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ RAWToARGBRow = RAWToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 16)) {
+ RAWToARGBRow = RAWToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOYJROW_SSSE3) && defined(HAS_ARGBTOUVJROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3;
+ ARGBToYJRow = ARGBToYJRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 16)) {
+ ARGBToUVJRow = ARGBToUVJRow_SSSE3;
+ ARGBToYJRow = ARGBToYJRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOYJROW_AVX2) && defined(HAS_ARGBTOUVJROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ ARGBToUVJRow = ARGBToUVJRow_Any_AVX2;
+ ARGBToYJRow = ARGBToYJRow_Any_AVX2;
+ if (IS_ALIGNED(width, 32)) {
+ ARGBToUVJRow = ARGBToUVJRow_AVX2;
+ ARGBToYJRow = ARGBToYJRow_AVX2;
+ }
+ }
+#endif
+#endif
+
+ {
+#if !((defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON)) || \
+ defined(HAS_RAWTOYJROW_MSA) || defined(HAS_RAWTOYJROW_MMI))
+ // Allocate 2 rows of ARGB.
+ const int kRowSize = (width * 4 + 31) & ~31;
+ align_buffer_64(row, kRowSize * 2);
+#endif
+
+ for (y = 0; y < height - 1; y += 2) {
+#if ((defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON)) || \
+ defined(HAS_RAWTOYJROW_MSA) || defined(HAS_RAWTOYJROW_MMI))
+ RAWToUVJRow(src_raw, src_stride_raw, dst_u, dst_v, width);
+ RAWToYJRow(src_raw, dst_y, width);
+ RAWToYJRow(src_raw + src_stride_raw, dst_y + dst_stride_y, width);
+#else
+ RAWToARGBRow(src_raw, row, width);
+ RAWToARGBRow(src_raw + src_stride_raw, row + kRowSize, width);
+ ARGBToUVJRow(row, kRowSize, dst_u, dst_v, width);
+ ARGBToYJRow(row, dst_y, width);
+ ARGBToYJRow(row + kRowSize, dst_y + dst_stride_y, width);
+#endif
+ src_raw += src_stride_raw * 2;
+ dst_y += dst_stride_y * 2;
+ dst_u += dst_stride_u;
+ dst_v += dst_stride_v;
+ }
+ if (height & 1) {
+#if ((defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON)) || \
+ defined(HAS_RAWTOYJROW_MSA) || defined(HAS_RAWTOYJROW_MMI))
+ RAWToUVJRow(src_raw, 0, dst_u, dst_v, width);
+ RAWToYJRow(src_raw, dst_y, width);
+#else
+ RAWToARGBRow(src_raw, row, width);
+ ARGBToUVJRow(row, 0, dst_u, dst_v, width);
+ ARGBToYJRow(row, dst_y, width);
+#endif
+ }
+#if !((defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON)) || \
+ defined(HAS_RAWTOYJROW_MSA) || defined(HAS_RAWTOYJROW_MMI))
+ free_aligned_buffer_64(row);
+#endif
+ }
+ return 0;
+}
+
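For context, a minimal caller for the new RAWToJ420 entry point could look like the sketch below. The wrapper name and buffer sizes are illustrative, and the assumption that the declaration sits next to RAWToI420 in libyuv/convert.h is just that, an assumption.

#include <stdint.h>
#include <stdlib.h>
#include "libyuv/convert.h"  // assumed to declare RAWToJ420 alongside RAWToI420

// Convert one packed 24-bit RAW frame to full-range J420 planes.
static int ConvertRawFrameToJ420(const uint8_t* raw, int width, int height) {
  int src_stride_raw = width * 3;      // 3 bytes per RAW pixel, no padding
  int dst_stride_y = width;
  int dst_stride_u = (width + 1) / 2;  // 4:2:0 chroma is half width
  int dst_stride_v = (width + 1) / 2;
  int chroma_height = (height + 1) / 2;
  uint8_t* dst_y = malloc((size_t)dst_stride_y * height);
  uint8_t* dst_u = malloc((size_t)dst_stride_u * chroma_height);
  uint8_t* dst_v = malloc((size_t)dst_stride_v * chroma_height);
  // RAWToJ420 returns 0 on success and -1 on bad arguments (including the
  // NULL buffers a failed malloc would produce).
  int ret = RAWToJ420(raw, src_stride_raw, dst_y, dst_stride_y,
                      dst_u, dst_stride_u, dst_v, dst_stride_v,
                      width, height);
  free(dst_y);
  free(dst_u);
  free(dst_v);
  return ret;
}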
// Convert RGB565 to I420.
LIBYUV_API
int RGB565ToI420(const uint8_t* src_rgb565,
diff --git a/third_party/libyuv/source/convert_argb.cc b/third_party/libyuv/source/convert_argb.cc
index 87d7d73250..d8f7b27738 100644
--- a/third_party/libyuv/source/convert_argb.cc
+++ b/third_party/libyuv/source/convert_argb.cc
@@ -888,6 +888,63 @@ int U010ToAB30(const uint16_t* src_y,
&kYuv2020Constants, width, height);
}
+// Convert 12 bit YUV to ARGB with matrix.
+// TODO(fbarchard): Consider passing scale multiplier to I212ToARGB to
+// multiply 12 bit yuv into high bits to allow any number of bits.
+LIBYUV_API
+int I012ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*I212ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf,
+ const uint16_t* v_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) =
+ I212ToAR30Row_C;
+ if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30;
+ dst_stride_ar30 = -dst_stride_ar30;
+ }
+#if defined(HAS_I212TOAR30ROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ I212ToAR30Row = I212ToAR30Row_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ I212ToAR30Row = I212ToAR30Row_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_I212TOAR30ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ I212ToAR30Row = I212ToAR30Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ I212ToAR30Row = I212ToAR30Row_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ I212ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width);
+ dst_ar30 += dst_stride_ar30;
+ src_y += src_stride_y;
+ if (y & 1) {
+ src_u += src_stride_u;
+ src_v += src_stride_v;
+ }
+ }
+ return 0;
+}
+
// Convert 10 bit YUV to ARGB with matrix.
// TODO(fbarchard): Consider passing scale multiplier to I210ToARGB to
// multiply 10 bit yuv into high bits to allow any number of bits.
@@ -1045,6 +1102,58 @@ int U210ToAB30(const uint16_t* src_y,
&kYuv2020Constants, width, height);
}
+LIBYUV_API
+int I410ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*I410ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf,
+ const uint16_t* v_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) =
+ I410ToAR30Row_C;
+ if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30;
+ dst_stride_ar30 = -dst_stride_ar30;
+ }
+#if defined(HAS_I410TOAR30ROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ I410ToAR30Row = I410ToAR30Row_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ I410ToAR30Row = I410ToAR30Row_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_I410TOAR30ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ I410ToAR30Row = I410ToAR30Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ I410ToAR30Row = I410ToAR30Row_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ I410ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width);
+ dst_ar30 += dst_stride_ar30;
+ src_y += src_stride_y;
+ src_u += src_stride_u;
+ src_v += src_stride_v;
+ }
+ return 0;
+}
+
// Convert 10 bit YUV to ARGB with matrix.
LIBYUV_API
int I010ToARGBMatrix(const uint16_t* src_y,
@@ -1088,14 +1197,6 @@ int I010ToARGBMatrix(const uint16_t* src_y,
}
}
#endif
-#if defined(HAS_I210TOARGBROW_MMI)
- if (TestCpuFlag(kCpuHasMMI)) {
- I210ToARGBRow = I210ToARGBRow_Any_MMI;
- if (IS_ALIGNED(width, 4)) {
- I210ToARGBRow = I210ToARGBRow_MMI;
- }
- }
-#endif
for (y = 0; y < height; ++y) {
I210ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width);
dst_argb += dst_stride_argb;
@@ -1216,6 +1317,61 @@ int U010ToABGR(const uint16_t* src_y,
width, height);
}
+// Convert 12 bit YUV to ARGB with matrix.
+LIBYUV_API
+int I012ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*I212ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf,
+ const uint16_t* v_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) =
+ I212ToARGBRow_C;
+ if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+#if defined(HAS_I212TOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ I212ToARGBRow = I212ToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ I212ToARGBRow = I212ToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_I212TOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ I212ToARGBRow = I212ToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ I212ToARGBRow = I212ToARGBRow_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ I212ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width);
+ dst_argb += dst_stride_argb;
+ src_y += src_stride_y;
+ if (y & 1) {
+ src_u += src_stride_u;
+ src_v += src_stride_v;
+ }
+ }
+ return 0;
+}
+
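The 12-bit converter mirrors the 10-bit one: the I212 row functions consume values held in each uint16_t, and the chroma pointers advance every other line, so the source is 4:2:0. A hedged usage sketch follows; the wrapper name is made up, and the include paths and the kYuvI601Constants extern location are assumptions about where the library declares them.

#include <stdint.h>
#include "libyuv/convert_argb.h"  // assumed to declare I012ToARGBMatrix
#include "libyuv/row.h"           // assumed to declare kYuvI601Constants

// 12-bit 4:2:0 planes to 8-bit ARGB with the BT.601 limited-range matrix.
// Plane strides are in uint16_t elements and the ARGB stride is in bytes,
// matching the pointer arithmetic in the loop above.
static int I012FrameToARGB(const uint16_t* y, int y_stride,
                           const uint16_t* u, int u_stride,
                           const uint16_t* v, int v_stride,
                           uint8_t* argb, int argb_stride,
                           int width, int height) {
  return I012ToARGBMatrix(y, y_stride, u, u_stride, v, v_stride,
                          argb, argb_stride, &kYuvI601Constants,
                          width, height);
}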
// Convert 10 bit 422 YUV to ARGB with matrix.
LIBYUV_API
int I210ToARGBMatrix(const uint16_t* src_y,
@@ -1259,14 +1415,6 @@ int I210ToARGBMatrix(const uint16_t* src_y,
}
}
#endif
-#if defined(HAS_I210TOARGBROW_MMI)
- if (TestCpuFlag(kCpuHasMMI)) {
- I210ToARGBRow = I210ToARGBRow_Any_MMI;
- if (IS_ALIGNED(width, 4)) {
- I210ToARGBRow = I210ToARGBRow_MMI;
- }
- }
-#endif
for (y = 0; y < height; ++y) {
I210ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width);
dst_argb += dst_stride_argb;
@@ -1385,6 +1533,254 @@ int U210ToABGR(const uint16_t* src_y,
width, height);
}
+LIBYUV_API
+int I410ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*I410ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf,
+ const uint16_t* v_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) =
+ I410ToARGBRow_C;
+ if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+#if defined(HAS_I410TOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ I410ToARGBRow = I410ToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ I410ToARGBRow = I410ToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_I410TOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ I410ToARGBRow = I410ToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ I410ToARGBRow = I410ToARGBRow_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ I410ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width);
+ dst_argb += dst_stride_argb;
+ src_y += src_stride_y;
+ src_u += src_stride_u;
+ src_v += src_stride_v;
+ }
+ return 0;
+}
+
+LIBYUV_API
+int P010ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*P210ToARGBRow)(
+ const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) = P210ToARGBRow_C;
+ if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+#if defined(HAS_P210TOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ P210ToARGBRow = P210ToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ P210ToARGBRow = P210ToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_P210TOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ P210ToARGBRow = P210ToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ P210ToARGBRow = P210ToARGBRow_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ P210ToARGBRow(src_y, src_uv, dst_argb, yuvconstants, width);
+ dst_argb += dst_stride_argb;
+ src_y += src_stride_y;
+ if (y & 1) {
+ src_uv += src_stride_uv;
+ }
+ }
+ return 0;
+}
+
+LIBYUV_API
+int P210ToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*P210ToARGBRow)(
+ const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) = P210ToARGBRow_C;
+ if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+#if defined(HAS_P210TOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ P210ToARGBRow = P210ToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ P210ToARGBRow = P210ToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_P210TOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ P210ToARGBRow = P210ToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ P210ToARGBRow = P210ToARGBRow_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ P210ToARGBRow(src_y, src_uv, dst_argb, yuvconstants, width);
+ dst_argb += dst_stride_argb;
+ src_y += src_stride_y;
+ src_uv += src_stride_uv;
+ }
+ return 0;
+}
+
+LIBYUV_API
+int P010ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*P210ToAR30Row)(
+ const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) = P210ToAR30Row_C;
+ if (!src_y || !src_uv || !dst_ar30 || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30;
+ dst_stride_ar30 = -dst_stride_ar30;
+ }
+#if defined(HAS_P210TOAR30ROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ P210ToAR30Row = P210ToAR30Row_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ P210ToAR30Row = P210ToAR30Row_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_P210TOAR30ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ P210ToAR30Row = P210ToAR30Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ P210ToAR30Row = P210ToAR30Row_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ P210ToAR30Row(src_y, src_uv, dst_ar30, yuvconstants, width);
+ dst_ar30 += dst_stride_ar30;
+ src_y += src_stride_y;
+ if (y & 1) {
+ src_uv += src_stride_uv;
+ }
+ }
+ return 0;
+}
+
+LIBYUV_API
+int P210ToAR30Matrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_uv,
+ int src_stride_uv,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height) {
+ int y;
+ void (*P210ToAR30Row)(
+ const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants, int width) = P210ToAR30Row_C;
+ if (!src_y || !src_uv || !dst_ar30 || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30;
+ dst_stride_ar30 = -dst_stride_ar30;
+ }
+#if defined(HAS_P210TOAR30ROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ P210ToAR30Row = P210ToAR30Row_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ P210ToAR30Row = P210ToAR30Row_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_P210TOAR30ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ P210ToAR30Row = P210ToAR30Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ P210ToAR30Row = P210ToAR30Row_AVX2;
+ }
+ }
+#endif
+ for (y = 0; y < height; ++y) {
+ P210ToAR30Row(src_y, src_uv, dst_ar30, yuvconstants, width);
+ dst_ar30 += dst_stride_ar30;
+ src_y += src_stride_y;
+ src_uv += src_stride_uv;
+ }
+ return 0;
+}
+
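P010ToARGBMatrix, P210ToARGBMatrix, P010ToAR30Matrix and P210ToAR30Matrix all reuse the P210 row functions; the P010 variants simply advance the interleaved UV plane every other line. A usage sketch for the AR30 path is below; the wrapper name is invented and the header locations for the function and for kYuv2020Constants are assumptions.

#include <stdint.h>
#include "libyuv/convert_argb.h"  // assumed to declare P010ToAR30Matrix
#include "libyuv/row.h"           // assumed to declare kYuv2020Constants

// P010 (10-bit samples in the high bits of uint16_t, interleaved UV at half
// resolution) to 2:10:10:10 AR30 with the BT.2020 matrix. Source strides are
// in uint16_t elements, the AR30 stride is in bytes.
static int P010FrameToAR30(const uint16_t* y, int y_stride,
                           const uint16_t* uv, int uv_stride,
                           uint8_t* ar30, int ar30_stride,
                           int width, int height) {
  return P010ToAR30Matrix(y, y_stride, uv, uv_stride,
                          ar30, ar30_stride, &kYuv2020Constants,
                          width, height);
}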
// Convert I420 with Alpha to preattenuated ARGB with matrix.
LIBYUV_API
int I420AlphaToARGBMatrix(const uint8_t* src_y,
@@ -1903,6 +2299,323 @@ int I444AlphaToABGR(const uint8_t* src_y,
width, height, attenuate);
}
+// Convert I010 with Alpha to preattenuated ARGB with matrix.
+LIBYUV_API
+int I010AlphaToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height,
+ int attenuate) {
+ int y;
+ void (*I210AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf,
+ const uint16_t* v_buf, const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) = I210AlphaToARGBRow_C;
+ void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb,
+ int width) = ARGBAttenuateRow_C;
+ if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+#if defined(HAS_I210ALPHATOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_I210ALPHATOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 4)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_NEON;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_MMI)
+ if (TestCpuFlag(kCpuHasMMI)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_MMI;
+ if (IS_ALIGNED(width, 2)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_MMI;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_MSA)
+ if (TestCpuFlag(kCpuHasMSA)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_MSA;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ I210AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants,
+ width);
+ if (attenuate) {
+ ARGBAttenuateRow(dst_argb, dst_argb, width);
+ }
+ dst_argb += dst_stride_argb;
+ src_a += src_stride_a;
+ src_y += src_stride_y;
+ if (y & 1) {
+ src_u += src_stride_u;
+ src_v += src_stride_v;
+ }
+ }
+ return 0;
+}
+
+// Convert I210 with Alpha to preattenuated ARGB with matrix.
+LIBYUV_API
+int I210AlphaToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height,
+ int attenuate) {
+ int y;
+ void (*I210AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf,
+ const uint16_t* v_buf, const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) = I210AlphaToARGBRow_C;
+ void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb,
+ int width) = ARGBAttenuateRow_C;
+ if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+#if defined(HAS_I210ALPHATOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_I210ALPHATOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ I210AlphaToARGBRow = I210AlphaToARGBRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 4)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_NEON;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_MMI)
+ if (TestCpuFlag(kCpuHasMMI)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_MMI;
+ if (IS_ALIGNED(width, 2)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_MMI;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_MSA)
+ if (TestCpuFlag(kCpuHasMSA)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_MSA;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ I210AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants,
+ width);
+ if (attenuate) {
+ ARGBAttenuateRow(dst_argb, dst_argb, width);
+ }
+ dst_argb += dst_stride_argb;
+ src_a += src_stride_a;
+ src_y += src_stride_y;
+ src_u += src_stride_u;
+ src_v += src_stride_v;
+ }
+ return 0;
+}
+
+// Convert I410 with Alpha to preattenuated ARGB with matrix.
+LIBYUV_API
+int I410AlphaToARGBMatrix(const uint16_t* src_y,
+ int src_stride_y,
+ const uint16_t* src_u,
+ int src_stride_u,
+ const uint16_t* src_v,
+ int src_stride_v,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ const struct YuvConstants* yuvconstants,
+ int width,
+ int height,
+ int attenuate) {
+ int y;
+ void (*I410AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf,
+ const uint16_t* v_buf, const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) = I410AlphaToARGBRow_C;
+ void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb,
+ int width) = ARGBAttenuateRow_C;
+ if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+#if defined(HAS_I410ALPHATOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ I410AlphaToARGBRow = I410AlphaToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ I410AlphaToARGBRow = I410AlphaToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_I410ALPHATOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ I410AlphaToARGBRow = I410AlphaToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ I410AlphaToARGBRow = I410AlphaToARGBRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 4)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_NEON;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_MMI)
+ if (TestCpuFlag(kCpuHasMMI)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_MMI;
+ if (IS_ALIGNED(width, 2)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_MMI;
+ }
+ }
+#endif
+#if defined(HAS_ARGBATTENUATEROW_MSA)
+ if (TestCpuFlag(kCpuHasMSA)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBAttenuateRow = ARGBAttenuateRow_MSA;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ I410AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants,
+ width);
+ if (attenuate) {
+ ARGBAttenuateRow(dst_argb, dst_argb, width);
+ }
+ dst_argb += dst_stride_argb;
+ src_a += src_stride_a;
+ src_y += src_stride_y;
+ src_u += src_stride_u;
+ src_v += src_stride_v;
+ }
+ return 0;
+}
+
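The alpha-aware variants add a fourth 16-bit plane and an attenuate flag; when the flag is nonzero each converted row is premultiplied in place with ARGBAttenuateRow. A sketch of the 10-bit 4:2:0 case follows; the wrapper name and header locations are assumptions, and kYuvH709Constants is taken to be the BT.709 matrix the library already ships.

#include <stdint.h>
#include "libyuv/convert_argb.h"  // assumed to declare I010AlphaToARGBMatrix
#include "libyuv/row.h"           // assumed to declare kYuvH709Constants

// 10-bit 4:2:0 Y/U/V/A planes to premultiplied ARGB using the BT.709 matrix.
static int I010AlphaFrameToARGB(const uint16_t* y, int y_stride,
                                const uint16_t* u, int u_stride,
                                const uint16_t* v, int v_stride,
                                const uint16_t* a, int a_stride,
                                uint8_t* argb, int argb_stride,
                                int width, int height) {
  const int attenuate = 1;  // premultiply RGB by alpha after conversion
  return I010AlphaToARGBMatrix(y, y_stride, u, u_stride, v, v_stride,
                               a, a_stride, argb, argb_stride,
                               &kYuvH709Constants, width, height, attenuate);
}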
// Convert I400 to ARGB with matrix.
LIBYUV_API
int I400ToARGBMatrix(const uint8_t* src_y,
@@ -2078,6 +2791,10 @@ static const uvec8 kShuffleMaskABGRToARGB = {
static const uvec8 kShuffleMaskRGBAToARGB = {
1u, 2u, 3u, 0u, 5u, 6u, 7u, 4u, 9u, 10u, 11u, 8u, 13u, 14u, 15u, 12u};
+// Shuffle table for converting AR64 to AB64.
+static const uvec8 kShuffleMaskAR64ToAB64 = {
+ 4u, 5u, 2u, 3u, 0u, 1u, 6u, 7u, 12u, 13u, 10u, 11u, 8u, 9u, 14u, 15u};
+
// Convert BGRA to ARGB.
LIBYUV_API
int BGRAToARGB(const uint8_t* src_bgra,
@@ -2087,7 +2804,7 @@ int BGRAToARGB(const uint8_t* src_bgra,
int width,
int height) {
return ARGBShuffle(src_bgra, src_stride_bgra, dst_argb, dst_stride_argb,
- (const uint8_t*)(&kShuffleMaskBGRAToARGB), width, height);
+ (const uint8_t*)&kShuffleMaskBGRAToARGB, width, height);
}
// Convert ARGB to BGRA (same as BGRAToARGB).
@@ -2099,7 +2816,7 @@ int ARGBToBGRA(const uint8_t* src_bgra,
int width,
int height) {
return ARGBShuffle(src_bgra, src_stride_bgra, dst_argb, dst_stride_argb,
- (const uint8_t*)(&kShuffleMaskBGRAToARGB), width, height);
+ (const uint8_t*)&kShuffleMaskBGRAToARGB, width, height);
}
// Convert ABGR to ARGB.
@@ -2111,7 +2828,7 @@ int ABGRToARGB(const uint8_t* src_abgr,
int width,
int height) {
return ARGBShuffle(src_abgr, src_stride_abgr, dst_argb, dst_stride_argb,
- (const uint8_t*)(&kShuffleMaskABGRToARGB), width, height);
+ (const uint8_t*)&kShuffleMaskABGRToARGB, width, height);
}
// Convert ARGB to ABGR to (same as ABGRToARGB).
@@ -2123,7 +2840,7 @@ int ARGBToABGR(const uint8_t* src_abgr,
int width,
int height) {
return ARGBShuffle(src_abgr, src_stride_abgr, dst_argb, dst_stride_argb,
- (const uint8_t*)(&kShuffleMaskABGRToARGB), width, height);
+ (const uint8_t*)&kShuffleMaskABGRToARGB, width, height);
}
// Convert RGBA to ARGB.
@@ -2135,7 +2852,19 @@ int RGBAToARGB(const uint8_t* src_rgba,
int width,
int height) {
return ARGBShuffle(src_rgba, src_stride_rgba, dst_argb, dst_stride_argb,
- (const uint8_t*)(&kShuffleMaskRGBAToARGB), width, height);
+ (const uint8_t*)&kShuffleMaskRGBAToARGB, width, height);
+}
+
+// Convert AR64 To AB64.
+LIBYUV_API
+int AR64ToAB64(const uint16_t* src_ar64,
+ int src_stride_ar64,
+ uint16_t* dst_ab64,
+ int dst_stride_ab64,
+ int width,
+ int height) {
+ return AR64Shuffle(src_ar64, src_stride_ar64, dst_ab64, dst_stride_ab64,
+ (const uint8_t*)&kShuffleMaskAR64ToAB64, width, height);
}
// Convert RGB24 to ARGB.
@@ -2644,6 +3373,124 @@ int AR30ToAB30(const uint8_t* src_ar30,
return 0;
}
+// Convert AR64 to ARGB.
+LIBYUV_API
+int AR64ToARGB(const uint16_t* src_ar64,
+ int src_stride_ar64,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height) {
+ int y;
+ void (*AR64ToARGBRow)(const uint16_t* src_ar64, uint8_t* dst_argb,
+ int width) = AR64ToARGBRow_C;
+ if (!src_ar64 || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ src_ar64 = src_ar64 + (height - 1) * src_stride_ar64;
+ src_stride_ar64 = -src_stride_ar64;
+ }
+ // Coalesce rows.
+ if (src_stride_ar64 == width * 4 && dst_stride_argb == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_ar64 = dst_stride_argb = 0;
+ }
+#if defined(HAS_AR64TOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ AR64ToARGBRow = AR64ToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 4)) {
+ AR64ToARGBRow = AR64ToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_AR64TOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ AR64ToARGBRow = AR64ToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 8)) {
+ AR64ToARGBRow = AR64ToARGBRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_AR64TOARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ AR64ToARGBRow = AR64ToARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ AR64ToARGBRow = AR64ToARGBRow_NEON;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ AR64ToARGBRow(src_ar64, dst_argb, width);
+ src_ar64 += src_stride_ar64;
+ dst_argb += dst_stride_argb;
+ }
+ return 0;
+}
+
+// Convert AB64 to ARGB.
+LIBYUV_API
+int AB64ToARGB(const uint16_t* src_ab64,
+ int src_stride_ab64,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height) {
+ int y;
+ void (*AB64ToARGBRow)(const uint16_t* src_ar64, uint8_t* dst_argb,
+ int width) = AB64ToARGBRow_C;
+ if (!src_ab64 || !dst_argb || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ src_ab64 = src_ab64 + (height - 1) * src_stride_ab64;
+ src_stride_ab64 = -src_stride_ab64;
+ }
+ // Coalesce rows.
+ if (src_stride_ab64 == width * 4 && dst_stride_argb == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_ab64 = dst_stride_argb = 0;
+ }
+#if defined(HAS_AB64TOARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ AB64ToARGBRow = AB64ToARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 4)) {
+ AB64ToARGBRow = AB64ToARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_AB64TOARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ AB64ToARGBRow = AB64ToARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 8)) {
+ AB64ToARGBRow = AB64ToARGBRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_AB64TOARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ AB64ToARGBRow = AB64ToARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ AB64ToARGBRow = AB64ToARGBRow_NEON;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ AB64ToARGBRow(src_ab64, dst_argb, width);
+ src_ab64 += src_stride_ab64;
+ dst_argb += dst_stride_argb;
+ }
+ return 0;
+}
+
// Convert NV12 to ARGB with matrix.
LIBYUV_API
int NV12ToARGBMatrix(const uint8_t* src_y,
@@ -4463,6 +5310,40 @@ int H420ToAR30(const uint8_t* src_y,
&kYvuH709Constants, width, height);
}
+// Convert I420 to AB30.
+LIBYUV_API
+int I420ToAB30(const uint8_t* src_y,
+ int src_stride_y,
+ const uint8_t* src_u,
+ int src_stride_u,
+ const uint8_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ab30,
+ int dst_stride_ab30,
+ int width,
+ int height) {
+ return I420ToAR30Matrix(src_y, src_stride_y, src_v, src_stride_v, src_u,
+ src_stride_u, dst_ab30, dst_stride_ab30,
+ &kYvuI601Constants, width, height);
+}
+
+// Convert H420 to AB30.
+LIBYUV_API
+int H420ToAB30(const uint8_t* src_y,
+ int src_stride_y,
+ const uint8_t* src_u,
+ int src_stride_u,
+ const uint8_t* src_v,
+ int src_stride_v,
+ uint8_t* dst_ab30,
+ int dst_stride_ab30,
+ int width,
+ int height) {
+ return I420ToAR30Matrix(src_y, src_stride_y, src_v, src_stride_v, src_u,
+ src_stride_u, dst_ab30, dst_stride_ab30,
+ &kYvuH709Constants, width, height);
+}
+
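I420ToAB30 and H420ToAB30 are thin wrappers: they hand the V plane to the U argument of I420ToAR30Matrix and use the YVU form of the matrix, so the existing AR30 row code emits ABGR channel order without any new kernels. A call through the public wrapper might look like this (the outer function name and include path are illustrative):

#include <stdint.h>
#include "libyuv/convert_argb.h"  // assumed to declare I420ToAB30

// 8-bit I420 to 2:10:10:10 AB30 with the default BT.601 matrix.
static int I420FrameToAB30(const uint8_t* y, int y_stride,
                           const uint8_t* u, int u_stride,
                           const uint8_t* v, int v_stride,
                           uint8_t* ab30, int ab30_stride,
                           int width, int height) {
  return I420ToAB30(y, y_stride, u, u_stride, v, v_stride,
                    ab30, ab30_stride, width, height);
}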
#ifdef __cplusplus
} // extern "C"
} // namespace libyuv
diff --git a/third_party/libyuv/source/convert_from_argb.cc b/third_party/libyuv/source/convert_from_argb.cc
index 4ba4bb5e0f..e14615847d 100644
--- a/third_party/libyuv/source/convert_from_argb.cc
+++ b/third_party/libyuv/source/convert_from_argb.cc
@@ -2009,6 +2009,124 @@ int ARGBToJ422(const uint8_t* src_argb,
return 0;
}
+// Convert ARGB to AR64.
+LIBYUV_API
+int ARGBToAR64(const uint8_t* src_argb,
+ int src_stride_argb,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ int width,
+ int height) {
+ int y;
+ void (*ARGBToAR64Row)(const uint8_t* src_argb, uint16_t* dst_ar64,
+ int width) = ARGBToAR64Row_C;
+ if (!src_argb || !dst_ar64 || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ src_argb = src_argb + (height - 1) * src_stride_argb;
+ src_stride_argb = -src_stride_argb;
+ }
+ // Coalesce rows.
+ if (src_stride_argb == width * 4 && dst_stride_ar64 == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_argb = dst_stride_ar64 = 0;
+ }
+#if defined(HAS_ARGBTOAR64ROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ ARGBToAR64Row = ARGBToAR64Row_Any_SSSE3;
+ if (IS_ALIGNED(width, 4)) {
+ ARGBToAR64Row = ARGBToAR64Row_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOAR64ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ ARGBToAR64Row = ARGBToAR64Row_Any_AVX2;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToAR64Row = ARGBToAR64Row_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOAR64ROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBToAR64Row = ARGBToAR64Row_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToAR64Row = ARGBToAR64Row_NEON;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ ARGBToAR64Row(src_argb, dst_ar64, width);
+ src_argb += src_stride_argb;
+ dst_ar64 += dst_stride_ar64;
+ }
+ return 0;
+}
+
+// Convert ARGB to AB64.
+LIBYUV_API
+int ARGBToAB64(const uint8_t* src_argb,
+ int src_stride_argb,
+ uint16_t* dst_ab64,
+ int dst_stride_ab64,
+ int width,
+ int height) {
+ int y;
+ void (*ARGBToAB64Row)(const uint8_t* src_argb, uint16_t* dst_ar64,
+ int width) = ARGBToAB64Row_C;
+ if (!src_argb || !dst_ab64 || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ src_argb = src_argb + (height - 1) * src_stride_argb;
+ src_stride_argb = -src_stride_argb;
+ }
+ // Coalesce rows.
+ if (src_stride_argb == width * 4 && dst_stride_ab64 == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_argb = dst_stride_ab64 = 0;
+ }
+#if defined(HAS_ARGBTOAB64ROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ ARGBToAB64Row = ARGBToAB64Row_Any_SSSE3;
+ if (IS_ALIGNED(width, 4)) {
+ ARGBToAB64Row = ARGBToAB64Row_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOAB64ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ ARGBToAB64Row = ARGBToAB64Row_Any_AVX2;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToAB64Row = ARGBToAB64Row_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBTOAB64ROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ARGBToAB64Row = ARGBToAB64Row_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ ARGBToAB64Row = ARGBToAB64Row_NEON;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ ARGBToAB64Row(src_argb, dst_ab64, width);
+ src_argb += src_stride_argb;
+ dst_ab64 += dst_stride_ab64;
+ }
+ return 0;
+}
+
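The new 16-bit-per-channel paths compose: ARGBToAR64 widens 8-bit ARGB, AR64ToAB64 swaps the R and B channels with a byte shuffle, and AB64ToARGB narrows back. The round-trip sketch below is illustrative only; the wrapper name and the include paths are assumptions.

#include <stdint.h>
#include <stdlib.h>
#include "libyuv/convert_argb.h"       // assumed to declare AR64ToAB64, AB64ToARGB
#include "libyuv/convert_from_argb.h"  // assumed to declare ARGBToAR64

// ARGB -> AR64 -> AB64 -> ARGB. The 8-bit strides are in bytes; the 16-bit
// strides are in uint16_t elements (4 per pixel), matching the coalesce
// checks and pointer arithmetic in the functions above.
static int RoundTripThroughAR64(const uint8_t* argb, int argb_stride,
                                uint8_t* out_argb, int out_stride,
                                int width, int height) {
  int stride64 = width * 4;  // elements per row, no padding
  uint16_t* ar64 = malloc((size_t)stride64 * height * sizeof(uint16_t));
  uint16_t* ab64 = malloc((size_t)stride64 * height * sizeof(uint16_t));
  int ret = -1;
  if (ar64 && ab64) {
    ret = ARGBToAR64(argb, argb_stride, ar64, stride64, width, height);
    if (!ret) ret = AR64ToAB64(ar64, stride64, ab64, stride64, width, height);
    if (!ret) ret = AB64ToARGB(ab64, stride64, out_argb, out_stride,
                               width, height);
  }
  free(ar64);
  free(ab64);
  return ret;
}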
// Convert ARGB to J400.
LIBYUV_API
int ARGBToJ400(const uint8_t* src_argb,
diff --git a/third_party/libyuv/source/planar_functions.cc b/third_party/libyuv/source/planar_functions.cc
index 219c216509..7cea06c8d7 100644
--- a/third_party/libyuv/source/planar_functions.cc
+++ b/third_party/libyuv/source/planar_functions.cc
@@ -10,6 +10,7 @@
#include "libyuv/planar_functions.h"
+#include <assert.h>
#include <string.h> // for memset()
#include "libyuv/cpu_id.h"
@@ -563,9 +564,9 @@ void SplitUVPlane_16(const uint16_t* src_uv,
int height,
int depth) {
int y;
- int scale = 1 << depth;
- void (*SplitUVRow)(const uint16_t* src_uv, uint16_t* dst_u, uint16_t* dst_v,
- int scale, int width) = SplitUVRow_16_C;
+ void (*SplitUVRow_16)(const uint16_t* src_uv, uint16_t* dst_u,
+ uint16_t* dst_v, int depth, int width) =
+ SplitUVRow_16_C;
// Negative height means invert the image.
if (height < 0) {
height = -height;
@@ -583,24 +584,24 @@ void SplitUVPlane_16(const uint16_t* src_uv,
}
#if defined(HAS_SPLITUVROW_16_AVX2)
if (TestCpuFlag(kCpuHasAVX2)) {
- SplitUVRow = SplitUVRow_16_Any_AVX2;
+ SplitUVRow_16 = SplitUVRow_16_Any_AVX2;
if (IS_ALIGNED(width, 16)) {
- SplitUVRow = SplitUVRow_16_AVX2;
+ SplitUVRow_16 = SplitUVRow_16_AVX2;
}
}
#endif
#if defined(HAS_SPLITUVROW_16_NEON)
if (TestCpuFlag(kCpuHasNEON)) {
- SplitUVRow = SplitUVRow_16_Any_NEON;
+ SplitUVRow_16 = SplitUVRow_16_Any_NEON;
if (IS_ALIGNED(width, 8)) {
- SplitUVRow = SplitUVRow_16_NEON;
+ SplitUVRow_16 = SplitUVRow_16_NEON;
}
}
#endif
for (y = 0; y < height; ++y) {
// Copy a row of UV.
- SplitUVRow(src_uv, dst_u, dst_v, scale, width);
+ SplitUVRow_16(src_uv, dst_u, dst_v, depth, width);
dst_u += dst_stride_u;
dst_v += dst_stride_v;
src_uv += src_stride_uv;
@@ -618,9 +619,11 @@ void MergeUVPlane_16(const uint16_t* src_u,
int height,
int depth) {
int y;
- int scale = 1 << (16 - depth);
- void (*MergeUVRow)(const uint16_t* src_u, const uint16_t* src_v,
- uint16_t* dst_uv, int scale, int width) = MergeUVRow_16_C;
+ void (*MergeUVRow_16)(const uint16_t* src_u, const uint16_t* src_v,
+ uint16_t* dst_uv, int depth, int width) =
+ MergeUVRow_16_C;
+ assert(depth >= 8);
+ assert(depth <= 16);
// Negative height means invert the image.
if (height < 0) {
height = -height;
@@ -636,24 +639,24 @@ void MergeUVPlane_16(const uint16_t* src_u,
}
#if defined(HAS_MERGEUVROW_16_AVX2)
if (TestCpuFlag(kCpuHasAVX2)) {
- MergeUVRow = MergeUVRow_16_Any_AVX2;
+ MergeUVRow_16 = MergeUVRow_16_Any_AVX2;
if (IS_ALIGNED(width, 16)) {
- MergeUVRow = MergeUVRow_16_AVX2;
+ MergeUVRow_16 = MergeUVRow_16_AVX2;
}
}
#endif
#if defined(HAS_MERGEUVROW_16_NEON)
if (TestCpuFlag(kCpuHasNEON)) {
- MergeUVRow = MergeUVRow_16_Any_NEON;
+ MergeUVRow_16 = MergeUVRow_16_Any_NEON;
if (IS_ALIGNED(width, 8)) {
- MergeUVRow = MergeUVRow_16_NEON;
+ MergeUVRow_16 = MergeUVRow_16_NEON;
}
}
#endif
for (y = 0; y < height; ++y) {
// Merge a row of U and V into a row of UV.
- MergeUVRow(src_u, src_v, dst_uv, scale, width);
+ MergeUVRow_16(src_u, src_v, dst_uv, depth, width);
src_u += src_stride_u;
src_v += src_stride_v;
dst_uv += dst_stride_uv;
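SplitUVPlane_16 and MergeUVPlane_16 now hand the bit depth straight to the 16-bit row functions instead of precomputing a shift count (the old code passed scale = 1 << (16 - depth)), and MergeUVPlane_16 asserts 8 <= depth <= 16. A hedged sketch of building a P010-style interleaved UV plane from 10-bit planes is below; the parameter order follows the usual libyuv (pointer, stride) convention, and the helper name and include path are assumptions.

#include <stdint.h>
#include "libyuv/planar_functions.h"  // assumed to declare MergeUVPlane_16

// Interleave 10-bit U and V planes (values in the low bits) into one UV
// plane. With depth = 10 the row functions scale the samples up toward the
// most-significant bits, as the removed 1 << (16 - depth) factor used to.
// All strides are in uint16_t elements.
static void BuildP010StyleUVPlane(const uint16_t* u, int u_stride,
                                  const uint16_t* v, int v_stride,
                                  uint16_t* uv, int uv_stride,
                                  int chroma_width, int chroma_height) {
  MergeUVPlane_16(u, u_stride, v, v_stride, uv, uv_stride,
                  chroma_width, chroma_height, /*depth=*/10);
}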
@@ -671,8 +674,8 @@ void ConvertToMSBPlane_16(const uint16_t* src_y,
int depth) {
int y;
int scale = 1 << (16 - depth);
- void (*MultiplyRow)(const uint16_t* src_y, uint16_t* dst_y, int scale,
- int width) = MultiplyRow_16_C;
+ void (*MultiplyRow_16)(const uint16_t* src_y, uint16_t* dst_y, int scale,
+ int width) = MultiplyRow_16_C;
// Negative height means invert the image.
if (height < 0) {
height = -height;
@@ -688,23 +691,23 @@ void ConvertToMSBPlane_16(const uint16_t* src_y,
#if defined(HAS_MULTIPLYROW_16_AVX2)
if (TestCpuFlag(kCpuHasAVX2)) {
- MultiplyRow = MultiplyRow_16_Any_AVX2;
+ MultiplyRow_16 = MultiplyRow_16_Any_AVX2;
if (IS_ALIGNED(width, 32)) {
- MultiplyRow = MultiplyRow_16_AVX2;
+ MultiplyRow_16 = MultiplyRow_16_AVX2;
}
}
#endif
#if defined(HAS_MULTIPLYROW_16_NEON)
if (TestCpuFlag(kCpuHasNEON)) {
- MultiplyRow = MultiplyRow_16_Any_NEON;
+ MultiplyRow_16 = MultiplyRow_16_Any_NEON;
if (IS_ALIGNED(width, 16)) {
- MultiplyRow = MultiplyRow_16_NEON;
+ MultiplyRow_16 = MultiplyRow_16_NEON;
}
}
#endif
for (y = 0; y < height; ++y) {
- MultiplyRow(src_y, dst_y, scale, width);
+ MultiplyRow_16(src_y, dst_y, scale, width);
src_y += src_stride_y;
dst_y += dst_stride_y;
}
@@ -982,6 +985,142 @@ void MergeRGBPlane(const uint8_t* src_r,
}
}
+LIBYUV_NOINLINE
+void SplitARGBPlaneAlpha(const uint8_t* src_argb,
+ int src_stride_argb,
+ uint8_t* dst_r,
+ int dst_stride_r,
+ uint8_t* dst_g,
+ int dst_stride_g,
+ uint8_t* dst_b,
+ int dst_stride_b,
+ uint8_t* dst_a,
+ int dst_stride_a,
+ int width,
+ int height) {
+ int y;
+ void (*SplitARGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g,
+ uint8_t* dst_b, uint8_t* dst_a, int width) =
+ SplitARGBRow_C;
+
+ assert(height > 0);
+
+ if (src_stride_argb == width * 4 && dst_stride_r == width &&
+ dst_stride_g == width && dst_stride_b == width && dst_stride_a == width) {
+ width *= height;
+ height = 1;
+ src_stride_argb = dst_stride_r = dst_stride_g = dst_stride_b =
+ dst_stride_a = 0;
+ }
+
+#if defined(HAS_SPLITARGBROW_SSE2)
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ SplitARGBRow = SplitARGBRow_Any_SSE2;
+ if (IS_ALIGNED(width, 8)) {
+ SplitARGBRow = SplitARGBRow_SSE2;
+ }
+ }
+#endif
+#if defined(HAS_SPLITARGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ SplitARGBRow = SplitARGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ SplitARGBRow = SplitARGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_SPLITARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ SplitARGBRow = SplitARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ SplitARGBRow = SplitARGBRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_SPLITARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ SplitARGBRow = SplitARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ SplitARGBRow = SplitARGBRow_NEON;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ SplitARGBRow(src_argb, dst_r, dst_g, dst_b, dst_a, width);
+ dst_r += dst_stride_r;
+ dst_g += dst_stride_g;
+ dst_b += dst_stride_b;
+ dst_a += dst_stride_a;
+ src_argb += src_stride_argb;
+ }
+}
+
+LIBYUV_NOINLINE
+void SplitARGBPlaneOpaque(const uint8_t* src_argb,
+ int src_stride_argb,
+ uint8_t* dst_r,
+ int dst_stride_r,
+ uint8_t* dst_g,
+ int dst_stride_g,
+ uint8_t* dst_b,
+ int dst_stride_b,
+ int width,
+ int height) {
+ int y;
+ void (*SplitXRGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g,
+ uint8_t* dst_b, int width) = SplitXRGBRow_C;
+ assert(height > 0);
+
+ if (src_stride_argb == width * 4 && dst_stride_r == width &&
+ dst_stride_g == width && dst_stride_b == width) {
+ width *= height;
+ height = 1;
+ src_stride_argb = dst_stride_r = dst_stride_g = dst_stride_b = 0;
+ }
+
+#if defined(HAS_SPLITXRGBROW_SSE2)
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ SplitXRGBRow = SplitXRGBRow_Any_SSE2;
+ if (IS_ALIGNED(width, 8)) {
+ SplitXRGBRow = SplitXRGBRow_SSE2;
+ }
+ }
+#endif
+#if defined(HAS_SPLITXRGBROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ SplitXRGBRow = SplitXRGBRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ SplitXRGBRow = SplitXRGBRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_SPLITXRGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ SplitXRGBRow = SplitXRGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ SplitXRGBRow = SplitXRGBRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_SPLITXRGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ SplitXRGBRow = SplitXRGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ SplitXRGBRow = SplitXRGBRow_NEON;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ SplitXRGBRow(src_argb, dst_r, dst_g, dst_b, width);
+ dst_r += dst_stride_r;
+ dst_g += dst_stride_g;
+ dst_b += dst_stride_b;
+ src_argb += src_stride_argb;
+ }
+}
+
LIBYUV_API
void SplitARGBPlane(const uint8_t* src_argb,
int src_stride_argb,
@@ -995,137 +1134,146 @@ void SplitARGBPlane(const uint8_t* src_argb,
int dst_stride_a,
int width,
int height) {
- int y;
- void (*SplitARGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g,
- uint8_t* dst_b, uint8_t* dst_a, int width) =
- SplitARGBRow_C;
- void (*SplitXRGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g,
- uint8_t* dst_b, int width) = SplitXRGBRow_C;
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_r = dst_r + (height - 1) * dst_stride_r;
+ dst_g = dst_g + (height - 1) * dst_stride_g;
+ dst_b = dst_b + (height - 1) * dst_stride_b;
+ dst_a = dst_a + (height - 1) * dst_stride_a;
+ dst_stride_r = -dst_stride_r;
+ dst_stride_g = -dst_stride_g;
+ dst_stride_b = -dst_stride_b;
+ dst_stride_a = -dst_stride_a;
+ }
if (dst_a == NULL) {
- // Negative height means invert the image.
- if (height < 0) {
- height = -height;
- dst_r = dst_r + (height - 1) * dst_stride_r;
- dst_g = dst_g + (height - 1) * dst_stride_g;
- dst_b = dst_b + (height - 1) * dst_stride_b;
- dst_stride_r = -dst_stride_r;
- dst_stride_g = -dst_stride_g;
- dst_stride_b = -dst_stride_b;
- }
-
- // Coalesce rows.
- if (src_stride_argb == width * 4 && dst_stride_r == width &&
- dst_stride_g == width && dst_stride_b == width) {
- width *= height;
- height = 1;
- src_stride_argb = dst_stride_r = dst_stride_g = dst_stride_b =
- dst_stride_a = 0;
- }
+ SplitARGBPlaneOpaque(src_argb, src_stride_argb, dst_r, dst_stride_r, dst_g,
+ dst_stride_g, dst_b, dst_stride_b, width, height);
+ } else {
+ SplitARGBPlaneAlpha(src_argb, src_stride_argb, dst_r, dst_stride_r, dst_g,
+ dst_stride_g, dst_b, dst_stride_b, dst_a, dst_stride_a,
+ width, height);
+ }
+}
-#if defined(HAS_SPLITARGBROW_SSE2)
- if (TestCpuFlag(kCpuHasSSE2)) {
- SplitXRGBRow = SplitXRGBRow_Any_SSE2;
- if (IS_ALIGNED(width, 8)) {
- SplitXRGBRow = SplitXRGBRow_SSE2;
- }
- }
-#endif
-#if defined(HAS_SPLITARGBROW_SSSE3)
- if (TestCpuFlag(kCpuHasSSSE3)) {
- SplitXRGBRow = SplitXRGBRow_Any_SSSE3;
- if (IS_ALIGNED(width, 8)) {
- SplitXRGBRow = SplitXRGBRow_SSSE3;
- }
+LIBYUV_NOINLINE
+void MergeARGBPlaneAlpha(const uint8_t* src_r,
+ int src_stride_r,
+ const uint8_t* src_g,
+ int src_stride_g,
+ const uint8_t* src_b,
+ int src_stride_b,
+ const uint8_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height) {
+ int y;
+ void (*MergeARGBRow)(const uint8_t* src_r, const uint8_t* src_g,
+ const uint8_t* src_b, const uint8_t* src_a,
+ uint8_t* dst_argb, int width) = MergeARGBRow_C;
+
+ assert(height > 0);
+
+ if (src_stride_r == width && src_stride_g == width && src_stride_b == width &&
+ src_stride_a == width && dst_stride_argb == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_r = src_stride_g = src_stride_b = src_stride_a =
+ dst_stride_argb = 0;
+ }
+#if defined(HAS_MERGEARGBROW_SSE2)
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ MergeARGBRow = MergeARGBRow_Any_SSE2;
+ if (IS_ALIGNED(width, 8)) {
+ MergeARGBRow = MergeARGBRow_SSE2;
}
+ }
#endif
-#if defined(HAS_SPLITARGBROW_AVX2)
- if (TestCpuFlag(kCpuHasAVX2)) {
- SplitXRGBRow = SplitXRGBRow_Any_AVX2;
- if (IS_ALIGNED(width, 16)) {
- SplitXRGBRow = SplitXRGBRow_AVX2;
- }
+#if defined(HAS_MERGEARGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ MergeARGBRow = MergeARGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ MergeARGBRow = MergeARGBRow_AVX2;
}
+ }
#endif
-#if defined(HAS_SPLITRGBROW_NEON)
- if (TestCpuFlag(kCpuHasNEON)) {
- SplitXRGBRow = SplitXRGBRow_Any_NEON;
- if (IS_ALIGNED(width, 16)) {
- SplitXRGBRow = SplitXRGBRow_NEON;
- }
+#if defined(HAS_MERGEARGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ MergeARGBRow = MergeARGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ MergeARGBRow = MergeARGBRow_NEON;
}
+ }
#endif
- for (y = 0; y < height; ++y) {
- SplitXRGBRow(src_argb, dst_r, dst_g, dst_b, width);
- dst_r += dst_stride_r;
- dst_g += dst_stride_g;
- dst_b += dst_stride_b;
- src_argb += src_stride_argb;
- }
- } else {
- if (height < 0) {
- height = -height;
- dst_r = dst_r + (height - 1) * dst_stride_r;
- dst_g = dst_g + (height - 1) * dst_stride_g;
- dst_b = dst_b + (height - 1) * dst_stride_b;
- dst_a = dst_a + (height - 1) * dst_stride_a;
- dst_stride_r = -dst_stride_r;
- dst_stride_g = -dst_stride_g;
- dst_stride_b = -dst_stride_b;
- dst_stride_a = -dst_stride_a;
- }
-
- if (src_stride_argb == width * 4 && dst_stride_r == width &&
- dst_stride_g == width && dst_stride_b == width &&
- dst_stride_a == width) {
- width *= height;
- height = 1;
- src_stride_argb = dst_stride_r = dst_stride_g = dst_stride_b =
- dst_stride_a = 0;
- }
+ for (y = 0; y < height; ++y) {
+ MergeARGBRow(src_r, src_g, src_b, src_a, dst_argb, width);
+ src_r += src_stride_r;
+ src_g += src_stride_g;
+ src_b += src_stride_b;
+ src_a += src_stride_a;
+ dst_argb += dst_stride_argb;
+ }
+}
-#if defined(HAS_SPLITARGBROW_SSE2)
- if (TestCpuFlag(kCpuHasSSE2)) {
- SplitARGBRow = SplitARGBRow_Any_SSE2;
- if (IS_ALIGNED(width, 8)) {
- SplitARGBRow = SplitARGBRow_SSE2;
- }
- }
-#endif
-#if defined(HAS_SPLITARGBROW_SSSE3)
- if (TestCpuFlag(kCpuHasSSSE3)) {
- SplitARGBRow = SplitARGBRow_Any_SSSE3;
- if (IS_ALIGNED(width, 8)) {
- SplitARGBRow = SplitARGBRow_SSSE3;
- }
+LIBYUV_NOINLINE
+void MergeARGBPlaneOpaque(const uint8_t* src_r,
+ int src_stride_r,
+ const uint8_t* src_g,
+ int src_stride_g,
+ const uint8_t* src_b,
+ int src_stride_b,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height) {
+ int y;
+ void (*MergeXRGBRow)(const uint8_t* src_r, const uint8_t* src_g,
+ const uint8_t* src_b, uint8_t* dst_argb, int width) =
+ MergeXRGBRow_C;
+
+ assert(height > 0);
+
+ if (src_stride_r == width && src_stride_g == width && src_stride_b == width &&
+ dst_stride_argb == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_r = src_stride_g = src_stride_b = dst_stride_argb = 0;
+ }
+#if defined(HAS_MERGEXRGBROW_SSE2)
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ MergeXRGBRow = MergeXRGBRow_Any_SSE2;
+ if (IS_ALIGNED(width, 8)) {
+ MergeXRGBRow = MergeXRGBRow_SSE2;
}
+ }
#endif
-#if defined(HAS_SPLITARGBROW_AVX2)
- if (TestCpuFlag(kCpuHasAVX2)) {
- SplitARGBRow = SplitARGBRow_Any_AVX2;
- if (IS_ALIGNED(width, 16)) {
- SplitARGBRow = SplitARGBRow_AVX2;
- }
+#if defined(HAS_MERGEXRGBROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ MergeXRGBRow = MergeXRGBRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ MergeXRGBRow = MergeXRGBRow_AVX2;
}
+ }
#endif
-#if defined(HAS_SPLITRGBROW_NEON)
- if (TestCpuFlag(kCpuHasNEON)) {
- SplitARGBRow = SplitARGBRow_Any_NEON;
- if (IS_ALIGNED(width, 16)) {
- SplitARGBRow = SplitARGBRow_NEON;
- }
+#if defined(HAS_MERGEXRGBROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ MergeXRGBRow = MergeXRGBRow_Any_NEON;
+ if (IS_ALIGNED(width, 16)) {
+ MergeXRGBRow = MergeXRGBRow_NEON;
}
+ }
#endif
- for (y = 0; y < height; ++y) {
- SplitARGBRow(src_argb, dst_r, dst_g, dst_b, dst_a, width);
- dst_r += dst_stride_r;
- dst_g += dst_stride_g;
- dst_b += dst_stride_b;
- dst_a += dst_stride_a;
- src_argb += src_stride_argb;
- }
+ for (y = 0; y < height; ++y) {
+ MergeXRGBRow(src_r, src_g, src_b, dst_argb, width);
+ src_r += src_stride_r;
+ src_g += src_stride_g;
+ src_b += src_stride_b;
+ dst_argb += dst_stride_argb;
}
}
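
The repeated `width *= height; height = 1;` blocks in these helpers are libyuv's usual row-coalescing optimization: when every stride equals the packed row width, the plane is contiguous in memory and can be processed as one long row, so the per-row loop body runs only once. A minimal sketch of the idea with a hypothetical copy helper:

#include <stdint.h>
#include <string.h>

// When both planes are tightly packed (stride == width in bytes), the 2-D
// loop collapses to a single row of width * height pixels.
static void CopyPlaneSketch(const uint8_t* src, int src_stride, uint8_t* dst,
                            int dst_stride, int width, int height) {
  if (src_stride == width && dst_stride == width) {
    width *= height;
    height = 1;
    src_stride = dst_stride = 0;
  }
  for (int y = 0; y < height; ++y) {
    memcpy(dst, src, width);
    src += src_stride;
    dst += dst_stride;
  }
}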
@@ -1142,107 +1290,357 @@ void MergeARGBPlane(const uint8_t* src_r,
int dst_stride_argb,
int width,
int height) {
- int y;
- void (*MergeARGBRow)(const uint8_t* src_r, const uint8_t* src_g,
- const uint8_t* src_b, const uint8_t* src_a,
- uint8_t* dst_argb, int width) = MergeARGBRow_C;
- void (*MergeXRGBRow)(const uint8_t* src_r, const uint8_t* src_g,
- const uint8_t* src_b, uint8_t* dst_argb, int width) =
- MergeXRGBRow_C;
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
if (src_a == NULL) {
- // Negative height means invert the image.
- if (height < 0) {
- height = -height;
- dst_argb = dst_argb + (height - 1) * dst_stride_argb;
- dst_stride_argb = -dst_stride_argb;
- }
- // Coalesce rows.
- if (src_stride_r == width && src_stride_g == width &&
- src_stride_b == width && dst_stride_argb == width * 4) {
- width *= height;
- height = 1;
- src_stride_r = src_stride_g = src_stride_b = dst_stride_argb = 0;
+ MergeARGBPlaneOpaque(src_r, src_stride_r, src_g, src_stride_g, src_b,
+ src_stride_b, dst_argb, dst_stride_argb, width,
+ height);
+ } else {
+ MergeARGBPlaneAlpha(src_r, src_stride_r, src_g, src_stride_g, src_b,
+ src_stride_b, src_a, src_stride_a, dst_argb,
+ dst_stride_argb, width, height);
+ }
+}
+
+// TODO(yuan): Support 2 bit alpha channel.
+LIBYUV_API
+void MergeXR30Plane(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ uint8_t* dst_ar30,
+ int dst_stride_ar30,
+ int width,
+ int height,
+ int depth) {
+ int y;
+ void (*MergeXR30Row)(const uint16_t* src_r, const uint16_t* src_g,
+ const uint16_t* src_b, uint8_t* dst_ar30, int depth,
+ int width) = MergeXR30Row_C;
+
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30;
+ dst_stride_ar30 = -dst_stride_ar30;
+ }
+ // Coalesce rows.
+ if (src_stride_r == width && src_stride_g == width && src_stride_b == width &&
+ dst_stride_ar30 == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_r = src_stride_g = src_stride_b = dst_stride_ar30 = 0;
+ }
+#if defined(HAS_MERGEXR30ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ MergeXR30Row = MergeXR30Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ MergeXR30Row = MergeXR30Row_AVX2;
}
-#if defined(HAS_MERGEARGBROW_SSE2)
- if (TestCpuFlag(kCpuHasSSE2)) {
- MergeXRGBRow = MergeXRGBRow_Any_SSE2;
+ }
+#endif
+#if defined(HAS_MERGEXR30ROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ if (depth == 10) {
+ MergeXR30Row = MergeXR30Row_10_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ MergeXR30Row = MergeXR30Row_10_NEON;
+ }
+ } else {
+ MergeXR30Row = MergeXR30Row_Any_NEON;
if (IS_ALIGNED(width, 8)) {
- MergeXRGBRow = MergeXRGBRow_SSE2;
+ MergeXR30Row = MergeXR30Row_NEON;
}
}
+ }
#endif
-#if defined(HAS_MERGEARGBROW_AVX2)
- if (TestCpuFlag(kCpuHasAVX2)) {
- MergeXRGBRow = MergeXRGBRow_Any_AVX2;
- if (IS_ALIGNED(width, 16)) {
- MergeXRGBRow = MergeXRGBRow_AVX2;
- }
+
+ for (y = 0; y < height; ++y) {
+ MergeXR30Row(src_r, src_g, src_b, dst_ar30, depth, width);
+ src_r += src_stride_r;
+ src_g += src_stride_g;
+ src_b += src_stride_b;
+ dst_ar30 += dst_stride_ar30;
+ }
+}
+
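
MergeXR30Row packs three high-bit-depth planes into the 2:10:10:10 AR30 layout; the row implementations themselves are not part of this hunk, so the following is only a hedged sketch, assuming little-endian AR30 with B in the low 10 bits, the 2-bit alpha forced to opaque, and inputs reduced to 10 bits by a right shift of (depth - 10). The real MergeXR30Row_C may clamp or round differently.

#include <stdint.h>
#include <string.h>

static void MergeXR30RowSketch(const uint16_t* src_r, const uint16_t* src_g,
                               const uint16_t* src_b, uint8_t* dst_ar30,
                               int depth, int width) {
  int shift = depth - 10;  // 0 for 10-bit, 2 for 12-bit, 6 for 16-bit input
  for (int x = 0; x < width; ++x) {
    uint32_t b = src_b[x] >> shift;
    uint32_t g = src_g[x] >> shift;
    uint32_t r = src_r[x] >> shift;
    uint32_t pixel = b | (g << 10) | (r << 20) | (0x3u << 30);  // opaque A
    memcpy(dst_ar30 + x * 4, &pixel, 4);  // little-endian store
  }
}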
+LIBYUV_NOINLINE
+static void MergeAR64PlaneAlpha(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ int width,
+ int height,
+ int depth) {
+ int y;
+ void (*MergeAR64Row)(const uint16_t* src_r, const uint16_t* src_g,
+ const uint16_t* src_b, const uint16_t* src_a,
+ uint16_t* dst_argb, int depth, int width) =
+ MergeAR64Row_C;
+
+ if (src_stride_r == width && src_stride_g == width && src_stride_b == width &&
+ src_stride_a == width && dst_stride_ar64 == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_r = src_stride_g = src_stride_b = src_stride_a =
+ dst_stride_ar64 = 0;
+ }
+#if defined(HAS_MERGEAR64ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ MergeAR64Row = MergeAR64Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ MergeAR64Row = MergeAR64Row_AVX2;
}
+ }
#endif
-#if defined(HAS_MERGERGBROW_NEON)
- if (TestCpuFlag(kCpuHasNEON)) {
- MergeXRGBRow = MergeXRGBRow_Any_NEON;
- if (IS_ALIGNED(width, 16)) {
- MergeXRGBRow = MergeXRGBRow_NEON;
- }
+#if defined(HAS_MERGEAR64ROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ MergeAR64Row = MergeAR64Row_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ MergeAR64Row = MergeAR64Row_NEON;
}
+ }
#endif
- for (y = 0; y < height; ++y) {
- MergeXRGBRow(src_r, src_g, src_b, dst_argb, width);
- src_r += src_stride_r;
- src_g += src_stride_g;
- src_b += src_stride_b;
- dst_argb += dst_stride_argb;
+ for (y = 0; y < height; ++y) {
+ MergeAR64Row(src_r, src_g, src_b, src_a, dst_ar64, depth, width);
+ src_r += src_stride_r;
+ src_g += src_stride_g;
+ src_b += src_stride_b;
+ src_a += src_stride_a;
+ dst_ar64 += dst_stride_ar64;
+ }
+}
+
+LIBYUV_NOINLINE
+static void MergeAR64PlaneOpaque(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ int width,
+ int height,
+ int depth) {
+ int y;
+ void (*MergeXR64Row)(const uint16_t* src_r, const uint16_t* src_g,
+ const uint16_t* src_b, uint16_t* dst_argb, int depth,
+ int width) = MergeXR64Row_C;
+
+ // Coalesce rows.
+ if (src_stride_r == width && src_stride_g == width && src_stride_b == width &&
+ dst_stride_ar64 == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_r = src_stride_g = src_stride_b = dst_stride_ar64 = 0;
+ }
+#if defined(HAS_MERGEXR64ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ MergeXR64Row = MergeXR64Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ MergeXR64Row = MergeXR64Row_AVX2;
}
- } else {
- if (height < 0) {
- height = -height;
- dst_argb = dst_argb + (height - 1) * dst_stride_argb;
- dst_stride_argb = -dst_stride_argb;
+ }
+#endif
+#if defined(HAS_MERGEXR64ROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ MergeXR64Row = MergeXR64Row_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ MergeXR64Row = MergeXR64Row_NEON;
}
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ MergeXR64Row(src_r, src_g, src_b, dst_ar64, depth, width);
+ src_r += src_stride_r;
+ src_g += src_stride_g;
+ src_b += src_stride_b;
+ dst_ar64 += dst_stride_ar64;
+ }
+}
+
+LIBYUV_API
+void MergeAR64Plane(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ int width,
+ int height,
+ int depth) {
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_ar64 = dst_ar64 + (height - 1) * dst_stride_ar64;
+ dst_stride_ar64 = -dst_stride_ar64;
+ }
+
+ if (src_a == NULL) {
+ MergeAR64PlaneOpaque(src_r, src_stride_r, src_g, src_stride_g, src_b,
+ src_stride_b, dst_ar64, dst_stride_ar64, width, height,
+ depth);
+ } else {
+ MergeAR64PlaneAlpha(src_r, src_stride_r, src_g, src_stride_g, src_b,
+ src_stride_b, src_a, src_stride_a, dst_ar64,
+ dst_stride_ar64, width, height, depth);
+ }
+}
- if (src_stride_r == width && src_stride_g == width &&
- src_stride_b == width && src_stride_a == width &&
- dst_stride_argb == width * 4) {
- width *= height;
- height = 1;
- src_stride_r = src_stride_g = src_stride_b = src_stride_a =
- dst_stride_argb = 0;
+LIBYUV_NOINLINE
+static void MergeARGB16To8PlaneAlpha(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height,
+ int depth) {
+ int y;
+ void (*MergeARGB16To8Row)(const uint16_t* src_r, const uint16_t* src_g,
+ const uint16_t* src_b, const uint16_t* src_a,
+ uint8_t* dst_argb, int depth, int width) =
+ MergeARGB16To8Row_C;
+
+ if (src_stride_r == width && src_stride_g == width && src_stride_b == width &&
+ src_stride_a == width && dst_stride_argb == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_r = src_stride_g = src_stride_b = src_stride_a =
+ dst_stride_argb = 0;
+ }
+#if defined(HAS_MERGEARGB16TO8ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ MergeARGB16To8Row = MergeARGB16To8Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ MergeARGB16To8Row = MergeARGB16To8Row_AVX2;
}
-#if defined(HAS_MERGEARGBROW_SSE2)
- if (TestCpuFlag(kCpuHasSSE2)) {
- MergeARGBRow = MergeARGBRow_Any_SSE2;
- if (IS_ALIGNED(width, 8)) {
- MergeARGBRow = MergeARGBRow_SSE2;
- }
+ }
+#endif
+#if defined(HAS_MERGEARGB16TO8ROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ MergeARGB16To8Row = MergeARGB16To8Row_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ MergeARGB16To8Row = MergeARGB16To8Row_NEON;
}
+ }
#endif
-#if defined(HAS_MERGEARGBROW_AVX2)
- if (TestCpuFlag(kCpuHasAVX2)) {
- MergeARGBRow = MergeARGBRow_Any_AVX2;
- if (IS_ALIGNED(width, 16)) {
- MergeARGBRow = MergeARGBRow_AVX2;
- }
+
+ for (y = 0; y < height; ++y) {
+ MergeARGB16To8Row(src_r, src_g, src_b, src_a, dst_argb, depth, width);
+ src_r += src_stride_r;
+ src_g += src_stride_g;
+ src_b += src_stride_b;
+ src_a += src_stride_a;
+ dst_argb += dst_stride_argb;
+ }
+}
+
+LIBYUV_NOINLINE
+static void MergeARGB16To8PlaneOpaque(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height,
+ int depth) {
+ int y;
+ void (*MergeXRGB16To8Row)(const uint16_t* src_r, const uint16_t* src_g,
+ const uint16_t* src_b, uint8_t* dst_argb, int depth,
+ int width) = MergeXRGB16To8Row_C;
+
+ // Coalesce rows.
+ if (src_stride_r == width && src_stride_g == width && src_stride_b == width &&
+ dst_stride_argb == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_r = src_stride_g = src_stride_b = dst_stride_argb = 0;
+ }
+#if defined(HAS_MERGEXRGB16TO8ROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ MergeXRGB16To8Row = MergeXRGB16To8Row_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ MergeXRGB16To8Row = MergeXRGB16To8Row_AVX2;
}
+ }
#endif
-#if defined(HAS_MERGERGBROW_NEON)
- if (TestCpuFlag(kCpuHasNEON)) {
- MergeARGBRow = MergeARGBRow_Any_NEON;
- if (IS_ALIGNED(width, 16)) {
- MergeARGBRow = MergeARGBRow_NEON;
- }
+#if defined(HAS_MERGEXRGB16TO8ROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ MergeXRGB16To8Row = MergeXRGB16To8Row_Any_NEON;
+ if (IS_ALIGNED(width, 8)) {
+ MergeXRGB16To8Row = MergeXRGB16To8Row_NEON;
}
+ }
#endif
- for (y = 0; y < height; ++y) {
- MergeARGBRow(src_r, src_g, src_b, src_a, dst_argb, width);
- src_r += src_stride_r;
- src_g += src_stride_g;
- src_b += src_stride_b;
- dst_argb += dst_stride_argb;
- }
+ for (y = 0; y < height; ++y) {
+ MergeXRGB16To8Row(src_r, src_g, src_b, dst_argb, depth, width);
+ src_r += src_stride_r;
+ src_g += src_stride_g;
+ src_b += src_stride_b;
+ dst_argb += dst_stride_argb;
+ }
+}
+
+LIBYUV_API
+void MergeARGB16To8Plane(const uint16_t* src_r,
+ int src_stride_r,
+ const uint16_t* src_g,
+ int src_stride_g,
+ const uint16_t* src_b,
+ int src_stride_b,
+ const uint16_t* src_a,
+ int src_stride_a,
+ uint8_t* dst_argb,
+ int dst_stride_argb,
+ int width,
+ int height,
+ int depth) {
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_argb = dst_argb + (height - 1) * dst_stride_argb;
+ dst_stride_argb = -dst_stride_argb;
+ }
+
+ if (src_a == NULL) {
+ MergeARGB16To8PlaneOpaque(src_r, src_stride_r, src_g, src_stride_g, src_b,
+ src_stride_b, dst_argb, dst_stride_argb, width,
+ height, depth);
+ } else {
+ MergeARGB16To8PlaneAlpha(src_r, src_stride_r, src_g, src_stride_g, src_b,
+ src_stride_b, src_a, src_stride_a, dst_argb,
+ dst_stride_argb, width, height, depth);
}
}
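
As with SplitARGBPlane and MergeARGBPlane above, the new 16-bit entry points treat a NULL alpha plane as a request for the opaque path. A hedged usage sketch, assuming the declarations live in libyuv/planar_functions.h alongside the other planar helpers; source strides are in 16-bit elements and the destination stride is in bytes, matching the signatures shown above:

#include <stdint.h>
#include "libyuv/planar_functions.h"

// Merge three tightly packed 10-bit planes (no alpha) into 8-bit ARGB.
static void Merge10BitPlanesToARGB(const uint16_t* src_r,
                                   const uint16_t* src_g,
                                   const uint16_t* src_b, uint8_t* dst_argb,
                                   int width, int height) {
  MergeARGB16To8Plane(src_r, width, src_g, width, src_b, width,
                      NULL, 0,             // NULL alpha -> opaque path
                      dst_argb, width * 4,
                      width, height, 10);  // depth in bits per channel
}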
@@ -2244,12 +2642,12 @@ int ARGBAdd(const uint8_t* src_argb0,
height = 1;
src_stride_argb0 = src_stride_argb1 = dst_stride_argb = 0;
}
-#if defined(HAS_ARGBADDROW_SSE2) && (defined(_MSC_VER) && !defined(__clang__))
+#if defined(HAS_ARGBADDROW_SSE2)
if (TestCpuFlag(kCpuHasSSE2)) {
ARGBAddRow = ARGBAddRow_SSE2;
}
#endif
-#if defined(HAS_ARGBADDROW_SSE2) && !(defined(_MSC_VER) && !defined(__clang__))
+#if defined(HAS_ARGBADDROW_SSE2)
if (TestCpuFlag(kCpuHasSSE2)) {
ARGBAddRow = ARGBAddRow_Any_SSE2;
if (IS_ALIGNED(width, 4)) {
@@ -3527,6 +3925,76 @@ int ARGBShuffle(const uint8_t* src_bgra,
return 0;
}
+// Shuffle AR64 channel order. e.g. AR64 to AB64.
+LIBYUV_API
+int AR64Shuffle(const uint16_t* src_ar64,
+ int src_stride_ar64,
+ uint16_t* dst_ar64,
+ int dst_stride_ar64,
+ const uint8_t* shuffler,
+ int width,
+ int height) {
+ int y;
+ void (*AR64ShuffleRow)(const uint8_t* src_ar64, uint8_t* dst_ar64,
+ const uint8_t* shuffler, int width) = AR64ShuffleRow_C;
+ if (!src_ar64 || !dst_ar64 || width <= 0 || height == 0) {
+ return -1;
+ }
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ src_ar64 = src_ar64 + (height - 1) * src_stride_ar64;
+ src_stride_ar64 = -src_stride_ar64;
+ }
+ // Coalesce rows.
+ if (src_stride_ar64 == width * 4 && dst_stride_ar64 == width * 4) {
+ width *= height;
+ height = 1;
+ src_stride_ar64 = dst_stride_ar64 = 0;
+ }
+ // The ARGBShuffleRow assembly can be reused because it is a plain byte shuffle.
+#if defined(HAS_ARGBSHUFFLEROW_SSSE3)
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ AR64ShuffleRow = ARGBShuffleRow_Any_SSSE3;
+ if (IS_ALIGNED(width, 8)) {
+ AR64ShuffleRow = ARGBShuffleRow_SSSE3;
+ }
+ }
+#endif
+#if defined(HAS_ARGBSHUFFLEROW_AVX2)
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ AR64ShuffleRow = ARGBShuffleRow_Any_AVX2;
+ if (IS_ALIGNED(width, 16)) {
+ AR64ShuffleRow = ARGBShuffleRow_AVX2;
+ }
+ }
+#endif
+#if defined(HAS_ARGBSHUFFLEROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ AR64ShuffleRow = ARGBShuffleRow_Any_NEON;
+ if (IS_ALIGNED(width, 4)) {
+ AR64ShuffleRow = ARGBShuffleRow_NEON;
+ }
+ }
+#endif
+#if defined(HAS_ARGBSHUFFLEROW_MMI)
+ if (TestCpuFlag(kCpuHasMMI)) {
+ AR64ShuffleRow = ARGBShuffleRow_Any_MMI;
+ if (IS_ALIGNED(width, 2)) {
+ AR64ShuffleRow = ARGBShuffleRow_MMI;
+ }
+ }
+#endif
+
+ for (y = 0; y < height; ++y) {
+ AR64ShuffleRow((uint8_t*)(src_ar64), (uint8_t*)(dst_ar64), shuffler,
+ width * 2);
+ src_ar64 += src_stride_ar64;
+ dst_ar64 += dst_stride_ar64;
+ }
+ return 0;
+}
+
// Gauss blur a float plane using Gaussian 5x5 filter with
// coefficients of 1, 4, 6, 4, 1.
// Each destination pixel is a blur of the 5x5
diff --git a/third_party/libyuv/source/rotate_gcc.cc b/third_party/libyuv/source/rotate_gcc.cc
index fd359d4ae6..1a3f8cbbda 100644
--- a/third_party/libyuv/source/rotate_gcc.cc
+++ b/third_party/libyuv/source/rotate_gcc.cc
@@ -17,8 +17,7 @@ extern "C" {
#endif
// This module is for GCC x86 and x64.
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
// Transpose 8x8. 32 or 64 bit, but not NaCL for 64 bit.
#if defined(HAS_TRANSPOSEWX8_SSSE3)
diff --git a/third_party/libyuv/source/rotate_win.cc b/third_party/libyuv/source/rotate_win.cc
index e887dd525c..a78873f843 100644
--- a/third_party/libyuv/source/rotate_win.cc
+++ b/third_party/libyuv/source/rotate_win.cc
@@ -16,8 +16,9 @@ namespace libyuv {
extern "C" {
#endif
-// This module is for 32 bit Visual C x86 and clangcl
-#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER)
+// This module is for 32 bit Visual C x86
+#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \
+ !defined(__clang__) && defined(_M_IX86)
__declspec(naked) void TransposeWx8_SSSE3(const uint8_t* src,
int src_stride,
diff --git a/third_party/libyuv/source/row_any.cc b/third_party/libyuv/source/row_any.cc
index 08ae1d2af7..c9a402eda2 100644
--- a/third_party/libyuv/source/row_any.cc
+++ b/third_party/libyuv/source/row_any.cc
@@ -61,6 +61,8 @@ ANY41(MergeARGBRow_Any_AVX2, MergeARGBRow_AVX2, 0, 0, 4, 15)
ANY41(MergeARGBRow_Any_NEON, MergeARGBRow_NEON, 0, 0, 4, 15)
#endif
+// Note that the odd-width replication below also applies to 444 formats,
+// because the ARM implementation subsamples 444 to 422 internally.
// Any 4 planes to 1 with yuvconstants
#define ANY41C(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, BPP, MASK) \
void NAMEANY(const uint8_t* y_buf, const uint8_t* u_buf, \
@@ -77,6 +79,10 @@ ANY41(MergeARGBRow_Any_NEON, MergeARGBRow_NEON, 0, 0, 4, 15)
memcpy(temp + 64, u_buf + (n >> UVSHIFT), SS(r, UVSHIFT)); \
memcpy(temp + 128, v_buf + (n >> UVSHIFT), SS(r, UVSHIFT)); \
memcpy(temp + 192, a_buf + n, r); \
+ if (width & 1) { \
+ temp[64 + SS(r, UVSHIFT)] = temp[64 + SS(r, UVSHIFT) - 1]; \
+ temp[128 + SS(r, UVSHIFT)] = temp[128 + SS(r, UVSHIFT) - 1]; \
+ } \
ANY_SIMD(temp, temp + 64, temp + 128, temp + 192, temp + 256, \
yuvconstants, MASK + 1); \
memcpy(dst_ptr + (n >> DUVSHIFT) * BPP, temp + 256, \
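
The new `if (width & 1)` block above duplicates the last U and V samples in the scratch row so that a subsampling SIMD kernel never reads an undefined chroma value when the width is odd. A standalone sketch of the same tail handling (names are illustrative):

#include <stdint.h>
#include <string.h>

// Copy the leftover r pixels' worth of chroma into a scratch row and, for
// odd widths, repeat the last sample so the padded row is well defined.
static void CopyChromaTail(const uint8_t* u_src, uint8_t* temp_u, int r,
                           int width) {
  int chroma = (r + 1) >> 1;  // SS(r, 1): samples for 4:2:x subsampling
  memcpy(temp_u, u_src, chroma);
  if (width & 1) {
    temp_u[chroma] = temp_u[chroma - 1];  // repeat last pixel for subsample
  }
}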
@@ -115,6 +121,124 @@ ANY41C(I422AlphaToARGBRow_Any_MMI, I422AlphaToARGBRow_MMI, 1, 0, 4, 7)
#endif
#undef ANY41C
+// Any 4 planes to 1 plane of 8 bit with yuvconstants
+#define ANY41CT(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, T, SBPP, BPP, MASK) \
+ void NAMEANY(const T* y_buf, const T* u_buf, const T* v_buf, const T* a_buf, \
+ uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, \
+ int width) { \
+ SIMD_ALIGNED(T temp[16 * 4]); \
+ SIMD_ALIGNED(uint8_t out[64]); \
+ memset(temp, 0, 16 * 4 * SBPP); /* for YUY2 and msan */ \
+ int r = width & MASK; \
+ int n = width & ~MASK; \
+ if (n > 0) { \
+ ANY_SIMD(y_buf, u_buf, v_buf, a_buf, dst_ptr, yuvconstants, n); \
+ } \
+ memcpy(temp, y_buf + n, r * SBPP); \
+ memcpy(temp + 16, u_buf + (n >> UVSHIFT), SS(r, UVSHIFT) * SBPP); \
+ memcpy(temp + 32, v_buf + (n >> UVSHIFT), SS(r, UVSHIFT) * SBPP); \
+ memcpy(temp + 48, a_buf + n, r * SBPP); \
+ ANY_SIMD(temp, temp + 16, temp + 32, temp + 48, out, yuvconstants, \
+ MASK + 1); \
+ memcpy(dst_ptr + (n >> DUVSHIFT) * BPP, out, SS(r, DUVSHIFT) * BPP); \
+ }
+
+#ifdef HAS_I210ALPHATOARGBROW_SSSE3
+ANY41CT(I210AlphaToARGBRow_Any_SSSE3,
+ I210AlphaToARGBRow_SSSE3,
+ 1,
+ 0,
+ uint16_t,
+ 2,
+ 4,
+ 7)
+#endif
+
+#ifdef HAS_I210ALPHATOARGBROW_AVX2
+ANY41CT(I210AlphaToARGBRow_Any_AVX2,
+ I210AlphaToARGBRow_AVX2,
+ 1,
+ 0,
+ uint16_t,
+ 2,
+ 4,
+ 15)
+#endif
+
+#ifdef HAS_I410ALPHATOARGBROW_SSSE3
+ANY41CT(I410AlphaToARGBRow_Any_SSSE3,
+ I410AlphaToARGBRow_SSSE3,
+ 0,
+ 0,
+ uint16_t,
+ 2,
+ 4,
+ 7)
+#endif
+
+#ifdef HAS_I410ALPHATOARGBROW_AVX2
+ANY41CT(I410AlphaToARGBRow_Any_AVX2,
+ I410AlphaToARGBRow_AVX2,
+ 0,
+ 0,
+ uint16_t,
+ 2,
+ 4,
+ 15)
+#endif
+
+#undef ANY41CT
+
+// Any 4 planes to 1 plane with parameter
+#define ANY41PT(NAMEANY, ANY_SIMD, STYPE, SBPP, DTYPE, BPP, MASK) \
+ void NAMEANY(const STYPE* r_buf, const STYPE* g_buf, const STYPE* b_buf, \
+ const STYPE* a_buf, DTYPE* dst_ptr, int depth, int width) { \
+ SIMD_ALIGNED(STYPE temp[16 * 4]); \
+ SIMD_ALIGNED(DTYPE out[64]); \
+ memset(temp, 0, 16 * 4 * SBPP); /* for YUY2 and msan */ \
+ int r = width & MASK; \
+ int n = width & ~MASK; \
+ if (n > 0) { \
+ ANY_SIMD(r_buf, g_buf, b_buf, a_buf, dst_ptr, depth, n); \
+ } \
+ memcpy(temp, r_buf + n, r * SBPP); \
+ memcpy(temp + 16, g_buf + n, r * SBPP); \
+ memcpy(temp + 32, b_buf + n, r * SBPP); \
+ memcpy(temp + 48, a_buf + n, r * SBPP); \
+ ANY_SIMD(temp, temp + 16, temp + 32, temp + 48, out, depth, MASK + 1); \
+ memcpy((uint8_t*)dst_ptr + n * BPP, out, r * BPP); \
+ }
+
+#ifdef HAS_MERGEAR64ROW_AVX2
+ANY41PT(MergeAR64Row_Any_AVX2, MergeAR64Row_AVX2, uint16_t, 2, uint16_t, 8, 15)
+#endif
+
+#ifdef HAS_MERGEAR64ROW_NEON
+ANY41PT(MergeAR64Row_Any_NEON, MergeAR64Row_NEON, uint16_t, 2, uint16_t, 8, 7)
+#endif
+
+#ifdef HAS_MERGEARGB16TO8ROW_AVX2
+ANY41PT(MergeARGB16To8Row_Any_AVX2,
+ MergeARGB16To8Row_AVX2,
+ uint16_t,
+ 2,
+ uint8_t,
+ 4,
+ 15)
+#endif
+
+#ifdef HAS_MERGEARGB16TO8ROW_NEON
+ANY41PT(MergeARGB16To8Row_Any_NEON,
+ MergeARGB16To8Row_NEON,
+ uint16_t,
+ 2,
+ uint8_t,
+ 4,
+ 7)
+#endif
+
+#undef ANY41PT
+
// Any 3 planes to 1.
#define ANY31(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, BPP, MASK) \
void NAMEANY(const uint8_t* y_buf, const uint8_t* u_buf, \
@@ -144,13 +268,13 @@ ANY31(MergeRGBRow_Any_NEON, MergeRGBRow_NEON, 0, 0, 3, 15)
#ifdef HAS_MERGERGBROW_MMI
ANY31(MergeRGBRow_Any_MMI, MergeRGBRow_MMI, 0, 0, 3, 7)
#endif
-#ifdef HAS_MERGEARGBROW_SSE2
+#ifdef HAS_MERGEXRGBROW_SSE2
ANY31(MergeXRGBRow_Any_SSE2, MergeXRGBRow_SSE2, 0, 0, 4, 7)
#endif
-#ifdef HAS_MERGEARGBROW_AVX2
+#ifdef HAS_MERGEXRGBROW_AVX2
ANY31(MergeXRGBRow_Any_AVX2, MergeXRGBRow_AVX2, 0, 0, 4, 15)
#endif
-#ifdef HAS_MERGEARGBROW_NEON
+#ifdef HAS_MERGEXRGBROW_NEON
ANY31(MergeXRGBRow_Any_NEON, MergeXRGBRow_NEON, 0, 0, 4, 15)
#endif
#ifdef HAS_I422TOYUY2ROW_SSE2
@@ -327,11 +451,99 @@ ANY31CT(I210ToARGBRow_Any_AVX2, I210ToARGBRow_AVX2, 1, 0, uint16_t, 2, 4, 15)
#ifdef HAS_I210TOAR30ROW_AVX2
ANY31CT(I210ToAR30Row_Any_AVX2, I210ToAR30Row_AVX2, 1, 0, uint16_t, 2, 4, 15)
#endif
+#ifdef HAS_I410TOAR30ROW_SSSE3
+ANY31CT(I410ToAR30Row_Any_SSSE3, I410ToAR30Row_SSSE3, 0, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_I410TOARGBROW_SSSE3
+ANY31CT(I410ToARGBRow_Any_SSSE3, I410ToARGBRow_SSSE3, 0, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_I410TOARGBROW_AVX2
+ANY31CT(I410ToARGBRow_Any_AVX2, I410ToARGBRow_AVX2, 0, 0, uint16_t, 2, 4, 15)
+#endif
+#ifdef HAS_I410TOAR30ROW_AVX2
+ANY31CT(I410ToAR30Row_Any_AVX2, I410ToAR30Row_AVX2, 0, 0, uint16_t, 2, 4, 15)
+#endif
#ifdef HAS_I210TOARGBROW_MMI
ANY31CT(I210ToARGBRow_Any_MMI, I210ToARGBRow_MMI, 1, 0, uint16_t, 2, 4, 7)
#endif
+#ifdef HAS_I212TOAR30ROW_SSSE3
+ANY31CT(I212ToAR30Row_Any_SSSE3, I212ToAR30Row_SSSE3, 1, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_I212TOARGBROW_SSSE3
+ANY31CT(I212ToARGBRow_Any_SSSE3, I212ToARGBRow_SSSE3, 1, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_I212TOARGBROW_AVX2
+ANY31CT(I212ToARGBRow_Any_AVX2, I212ToARGBRow_AVX2, 1, 0, uint16_t, 2, 4, 15)
+#endif
+#ifdef HAS_I212TOAR30ROW_AVX2
+ANY31CT(I212ToAR30Row_Any_AVX2, I212ToAR30Row_AVX2, 1, 0, uint16_t, 2, 4, 15)
+#endif
#undef ANY31CT
+// Any 3 planes to 1 plane with parameter
+#define ANY31PT(NAMEANY, ANY_SIMD, STYPE, SBPP, DTYPE, BPP, MASK) \
+ void NAMEANY(const STYPE* r_buf, const STYPE* g_buf, const STYPE* b_buf, \
+ DTYPE* dst_ptr, int depth, int width) { \
+ SIMD_ALIGNED(STYPE temp[16 * 3]); \
+ SIMD_ALIGNED(DTYPE out[64]); \
+ memset(temp, 0, 16 * 3 * SBPP); /* for YUY2 and msan */ \
+ int r = width & MASK; \
+ int n = width & ~MASK; \
+ if (n > 0) { \
+ ANY_SIMD(r_buf, g_buf, b_buf, dst_ptr, depth, n); \
+ } \
+ memcpy(temp, r_buf + n, r * SBPP); \
+ memcpy(temp + 16, g_buf + n, r * SBPP); \
+ memcpy(temp + 32, b_buf + n, r * SBPP); \
+ ANY_SIMD(temp, temp + 16, temp + 32, out, depth, MASK + 1); \
+ memcpy((uint8_t*)dst_ptr + n * BPP, out, r * BPP); \
+ }
+
+#ifdef HAS_MERGEXR30ROW_AVX2
+ANY31PT(MergeXR30Row_Any_AVX2, MergeXR30Row_AVX2, uint16_t, 2, uint8_t, 4, 15)
+#endif
+
+#ifdef HAS_MERGEXR30ROW_NEON
+ANY31PT(MergeXR30Row_Any_NEON, MergeXR30Row_NEON, uint16_t, 2, uint8_t, 4, 3)
+ANY31PT(MergeXR30Row_10_Any_NEON,
+ MergeXR30Row_10_NEON,
+ uint16_t,
+ 2,
+ uint8_t,
+ 4,
+ 3)
+#endif
+
+#ifdef HAS_MERGEXR64ROW_AVX2
+ANY31PT(MergeXR64Row_Any_AVX2, MergeXR64Row_AVX2, uint16_t, 2, uint16_t, 8, 15)
+#endif
+
+#ifdef HAS_MERGEXR64ROW_NEON
+ANY31PT(MergeXR64Row_Any_NEON, MergeXR64Row_NEON, uint16_t, 2, uint16_t, 8, 7)
+#endif
+
+#ifdef HAS_MERGEXRGB16TO8ROW_AVX2
+ANY31PT(MergeXRGB16To8Row_Any_AVX2,
+ MergeXRGB16To8Row_AVX2,
+ uint16_t,
+ 2,
+ uint8_t,
+ 4,
+ 15)
+#endif
+
+#ifdef HAS_MERGEXRGB16TO8ROW_NEON
+ANY31PT(MergeXRGB16To8Row_Any_NEON,
+ MergeXRGB16To8Row_NEON,
+ uint16_t,
+ 2,
+ uint8_t,
+ 4,
+ 7)
+#endif
+
+#undef ANY31PT
+
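
All of the ANY*PT wrappers above follow the same shape: run the SIMD kernel over the largest multiple of its vector width, then copy the leftover pixels into zero-initialised scratch buffers and run the kernel once more at full vector width. A hedged sketch of that pattern written out as a plain function rather than a macro (the kernel typedef and buffer sizes mirror ANY31PT with MASK = 15 and a 4-byte output pixel):

#include <stdint.h>
#include <string.h>

typedef void (*MergeRow16)(const uint16_t* r, const uint16_t* g,
                           const uint16_t* b, uint8_t* dst, int depth,
                           int width);

static void AnyWidthRowSketch(MergeRow16 simd, const uint16_t* r_buf,
                              const uint16_t* g_buf, const uint16_t* b_buf,
                              uint8_t* dst, int depth, int width, int mask) {
  uint16_t temp[16 * 3] = {0};  // zero padded, as the macros do for msan
  uint8_t out[64];
  int r = width & mask;         // leftover pixels
  int n = width & ~mask;        // SIMD-aligned pixel count
  if (n > 0) {
    simd(r_buf, g_buf, b_buf, dst, depth, n);
  }
  memcpy(temp, r_buf + n, r * sizeof(uint16_t));
  memcpy(temp + 16, g_buf + n, r * sizeof(uint16_t));
  memcpy(temp + 32, b_buf + n, r * sizeof(uint16_t));
  simd(temp, temp + 16, temp + 32, out, depth, mask + 1);
  memcpy(dst + n * 4, out, r * 4);
}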
// Any 2 planes to 1.
#define ANY21(NAMEANY, ANY_SIMD, UVSHIFT, SBPP, SBPP2, BPP, MASK) \
void NAMEANY(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, \
@@ -546,12 +758,57 @@ ANY21C(NV12ToRGB565Row_Any_MMI, NV12ToRGB565Row_MMI, 1, 1, 2, 2, 7)
#endif
#undef ANY21C
+// Any 2 planes of 16 bit to 1 with yuvconstants
+#define ANY21CT(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, T, SBPP, BPP, MASK) \
+ void NAMEANY(const T* y_buf, const T* uv_buf, uint8_t* dst_ptr, \
+ const struct YuvConstants* yuvconstants, int width) { \
+ SIMD_ALIGNED(T temp[16 * 3]); \
+ SIMD_ALIGNED(uint8_t out[64]); \
+ memset(temp, 0, 16 * 3 * SBPP); /* for YUY2 and msan */ \
+ int r = width & MASK; \
+ int n = width & ~MASK; \
+ if (n > 0) { \
+ ANY_SIMD(y_buf, uv_buf, dst_ptr, yuvconstants, n); \
+ } \
+ memcpy(temp, y_buf + n, r * SBPP); \
+ memcpy(temp + 16, uv_buf + 2 * (n >> UVSHIFT), SS(r, UVSHIFT) * SBPP * 2); \
+ ANY_SIMD(temp, temp + 16, out, yuvconstants, MASK + 1); \
+ memcpy(dst_ptr + (n >> DUVSHIFT) * BPP, out, SS(r, DUVSHIFT) * BPP); \
+ }
+
+#ifdef HAS_P210TOAR30ROW_SSSE3
+ANY21CT(P210ToAR30Row_Any_SSSE3, P210ToAR30Row_SSSE3, 1, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_P210TOARGBROW_SSSE3
+ANY21CT(P210ToARGBRow_Any_SSSE3, P210ToARGBRow_SSSE3, 1, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_P210TOARGBROW_AVX2
+ANY21CT(P210ToARGBRow_Any_AVX2, P210ToARGBRow_AVX2, 1, 0, uint16_t, 2, 4, 15)
+#endif
+#ifdef HAS_P210TOAR30ROW_AVX2
+ANY21CT(P210ToAR30Row_Any_AVX2, P210ToAR30Row_AVX2, 1, 0, uint16_t, 2, 4, 15)
+#endif
+#ifdef HAS_P410TOAR30ROW_SSSE3
+ANY21CT(P410ToAR30Row_Any_SSSE3, P410ToAR30Row_SSSE3, 0, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_P410TOARGBROW_SSSE3
+ANY21CT(P410ToARGBRow_Any_SSSE3, P410ToARGBRow_SSSE3, 0, 0, uint16_t, 2, 4, 7)
+#endif
+#ifdef HAS_P410TOARGBROW_AVX2
+ANY21CT(P410ToARGBRow_Any_AVX2, P410ToARGBRow_AVX2, 0, 0, uint16_t, 2, 4, 15)
+#endif
+#ifdef HAS_P410TOAR30ROW_AVX2
+ANY21CT(P410ToAR30Row_Any_AVX2, P410ToAR30Row_AVX2, 0, 0, uint16_t, 2, 4, 15)
+#endif
+
+#undef ANY21CT
+
// Any 2 16 bit planes with parameter to 1
#define ANY21PT(NAMEANY, ANY_SIMD, T, BPP, MASK) \
void NAMEANY(const T* src_u, const T* src_v, T* dst_uv, int depth, \
int width) { \
SIMD_ALIGNED(T temp[16 * 4]); \
- memset(temp, 0, 16 * 4); /* for msan */ \
+ memset(temp, 0, 16 * 4 * BPP); /* for msan */ \
int r = width & MASK; \
int n = width & ~MASK; \
if (n > 0) { \
@@ -1100,6 +1357,72 @@ ANY11P(ARGBShuffleRow_Any_MMI, ARGBShuffleRow_MMI, const uint8_t*, 4, 4, 1)
#undef ANY11P
#undef ANY11P
+// Any 1 to 1 with type
+#define ANY11T(NAMEANY, ANY_SIMD, SBPP, BPP, STYPE, DTYPE, MASK) \
+ void NAMEANY(const STYPE* src_ptr, DTYPE* dst_ptr, int width) { \
+ SIMD_ALIGNED(uint8_t temp[(MASK + 1) * SBPP]); \
+ SIMD_ALIGNED(uint8_t out[(MASK + 1) * BPP]); \
+ memset(temp, 0, (MASK + 1) * SBPP); /* for msan */ \
+ int r = width & MASK; \
+ int n = width & ~MASK; \
+ if (n > 0) { \
+ ANY_SIMD(src_ptr, dst_ptr, n); \
+ } \
+ memcpy(temp, (uint8_t*)(src_ptr) + n * SBPP, r * SBPP); \
+ ANY_SIMD((STYPE*)temp, (DTYPE*)out, MASK + 1); \
+ memcpy((uint8_t*)(dst_ptr) + n * BPP, out, r * BPP); \
+ }
+
+#ifdef HAS_ARGBTOAR64ROW_SSSE3
+ANY11T(ARGBToAR64Row_Any_SSSE3, ARGBToAR64Row_SSSE3, 4, 8, uint8_t, uint16_t, 3)
+#endif
+
+#ifdef HAS_ARGBTOAB64ROW_SSSE3
+ANY11T(ARGBToAB64Row_Any_SSSE3, ARGBToAB64Row_SSSE3, 4, 8, uint8_t, uint16_t, 3)
+#endif
+
+#ifdef HAS_AR64TOARGBROW_SSSE3
+ANY11T(AR64ToARGBRow_Any_SSSE3, AR64ToARGBRow_SSSE3, 8, 4, uint16_t, uint8_t, 3)
+#endif
+
+#ifdef HAS_ARGBTOAR64ROW_SSSE3
+ANY11T(AB64ToARGBRow_Any_SSSE3, AB64ToARGBRow_SSSE3, 8, 4, uint16_t, uint8_t, 3)
+#endif
+
+#ifdef HAS_ARGBTOAR64ROW_AVX2
+ANY11T(ARGBToAR64Row_Any_AVX2, ARGBToAR64Row_AVX2, 4, 8, uint8_t, uint16_t, 7)
+#endif
+
+#ifdef HAS_ARGBTOAB64ROW_AVX2
+ANY11T(ARGBToAB64Row_Any_AVX2, ARGBToAB64Row_AVX2, 4, 8, uint8_t, uint16_t, 7)
+#endif
+
+#ifdef HAS_AR64TOARGBROW_AVX2
+ANY11T(AR64ToARGBRow_Any_AVX2, AR64ToARGBRow_AVX2, 8, 4, uint16_t, uint8_t, 7)
+#endif
+
+#ifdef HAS_ARGBTOAR64ROW_AVX2
+ANY11T(AB64ToARGBRow_Any_AVX2, AB64ToARGBRow_AVX2, 8, 4, uint16_t, uint8_t, 7)
+#endif
+
+#ifdef HAS_ARGBTOAR64ROW_NEON
+ANY11T(ARGBToAR64Row_Any_NEON, ARGBToAR64Row_NEON, 4, 8, uint8_t, uint16_t, 7)
+#endif
+
+#ifdef HAS_ARGBTOAB64ROW_NEON
+ANY11T(ARGBToAB64Row_Any_NEON, ARGBToAB64Row_NEON, 4, 8, uint8_t, uint16_t, 7)
+#endif
+
+#ifdef HAS_AR64TOARGBROW_NEON
+ANY11T(AR64ToARGBRow_Any_NEON, AR64ToARGBRow_NEON, 8, 4, uint16_t, uint8_t, 7)
+#endif
+
+#ifdef HAS_ARGBTOAR64ROW_NEON
+ANY11T(AB64ToARGBRow_Any_NEON, AB64ToARGBRow_NEON, 8, 4, uint16_t, uint8_t, 7)
+#endif
+
+#undef ANY11T
+
// Any 1 to 1 with parameter and shorts. BPP measures in shorts.
#define ANY11C(NAMEANY, ANY_SIMD, SBPP, BPP, STYPE, DTYPE, MASK) \
void NAMEANY(const STYPE* src_ptr, DTYPE* dst_ptr, int scale, int width) { \
@@ -1266,38 +1589,38 @@ ANY11C(UYVYToARGBRow_Any_MMI, UYVYToARGBRow_MMI, 1, 4, 4, 7)
#undef ANY11C
// Any 1 to 1 interpolate. Takes 2 rows of source via stride.
-#define ANY11T(NAMEANY, ANY_SIMD, SBPP, BPP, MASK) \
- void NAMEANY(uint8_t* dst_ptr, const uint8_t* src_ptr, \
- ptrdiff_t src_stride_ptr, int width, int source_y_fraction) { \
- SIMD_ALIGNED(uint8_t temp[64 * 3]); \
- memset(temp, 0, 64 * 2); /* for msan */ \
- int r = width & MASK; \
- int n = width & ~MASK; \
- if (n > 0) { \
- ANY_SIMD(dst_ptr, src_ptr, src_stride_ptr, n, source_y_fraction); \
- } \
- memcpy(temp, src_ptr + n * SBPP, r * SBPP); \
- memcpy(temp + 64, src_ptr + src_stride_ptr + n * SBPP, r * SBPP); \
- ANY_SIMD(temp + 128, temp, 64, MASK + 1, source_y_fraction); \
- memcpy(dst_ptr + n * BPP, temp + 128, r * BPP); \
+#define ANY11I(NAMEANY, ANY_SIMD, SBPP, BPP, MASK) \
+ void NAMEANY(uint8_t* dst_ptr, const uint8_t* src_ptr, ptrdiff_t src_stride, \
+ int width, int source_y_fraction) { \
+ SIMD_ALIGNED(uint8_t temp[64 * 3]); \
+ memset(temp, 0, 64 * 2); /* for msan */ \
+ int r = width & MASK; \
+ int n = width & ~MASK; \
+ if (n > 0) { \
+ ANY_SIMD(dst_ptr, src_ptr, src_stride, n, source_y_fraction); \
+ } \
+ memcpy(temp, src_ptr + n * SBPP, r * SBPP); \
+ memcpy(temp + 64, src_ptr + src_stride + n * SBPP, r * SBPP); \
+ ANY_SIMD(temp + 128, temp, 64, MASK + 1, source_y_fraction); \
+ memcpy(dst_ptr + n * BPP, temp + 128, r * BPP); \
}
#ifdef HAS_INTERPOLATEROW_AVX2
-ANY11T(InterpolateRow_Any_AVX2, InterpolateRow_AVX2, 1, 1, 31)
+ANY11I(InterpolateRow_Any_AVX2, InterpolateRow_AVX2, 1, 1, 31)
#endif
#ifdef HAS_INTERPOLATEROW_SSSE3
-ANY11T(InterpolateRow_Any_SSSE3, InterpolateRow_SSSE3, 1, 1, 15)
+ANY11I(InterpolateRow_Any_SSSE3, InterpolateRow_SSSE3, 1, 1, 15)
#endif
#ifdef HAS_INTERPOLATEROW_NEON
-ANY11T(InterpolateRow_Any_NEON, InterpolateRow_NEON, 1, 1, 15)
+ANY11I(InterpolateRow_Any_NEON, InterpolateRow_NEON, 1, 1, 15)
#endif
#ifdef HAS_INTERPOLATEROW_MSA
-ANY11T(InterpolateRow_Any_MSA, InterpolateRow_MSA, 1, 1, 31)
+ANY11I(InterpolateRow_Any_MSA, InterpolateRow_MSA, 1, 1, 31)
#endif
#ifdef HAS_INTERPOLATEROW_MMI
-ANY11T(InterpolateRow_Any_MMI, InterpolateRow_MMI, 1, 1, 7)
+ANY11I(InterpolateRow_Any_MMI, InterpolateRow_MMI, 1, 1, 7)
#endif
-#undef ANY11T
+#undef ANY11I
// Any 1 to 1 mirror.
#define ANY11M(NAMEANY, ANY_SIMD, BPP, MASK) \
@@ -1508,16 +1831,16 @@ ANY13(SplitRGBRow_Any_NEON, SplitRGBRow_NEON, 3, 15)
#ifdef HAS_SPLITRGBROW_MMI
ANY13(SplitRGBRow_Any_MMI, SplitRGBRow_MMI, 3, 3)
#endif
-#ifdef HAS_SPLITARGBROW_SSE2
+#ifdef HAS_SPLITXRGBROW_SSE2
ANY13(SplitXRGBRow_Any_SSE2, SplitXRGBRow_SSE2, 4, 7)
#endif
-#ifdef HAS_SPLITARGBROW_SSSE3
+#ifdef HAS_SPLITXRGBROW_SSSE3
ANY13(SplitXRGBRow_Any_SSSE3, SplitXRGBRow_SSSE3, 4, 7)
#endif
-#ifdef HAS_SPLITARGBROW_AVX2
+#ifdef HAS_SPLITXRGBROW_AVX2
ANY13(SplitXRGBRow_Any_AVX2, SplitXRGBRow_AVX2, 4, 15)
#endif
-#ifdef HAS_SPLITARGBROW_NEON
+#ifdef HAS_SPLITXRGBROW_NEON
ANY13(SplitXRGBRow_Any_NEON, SplitXRGBRow_NEON, 4, 15)
#endif
@@ -1557,17 +1880,17 @@ ANY14(SplitARGBRow_Any_NEON, SplitARGBRow_NEON, 4, 15)
// Any 1 to 2 with source stride (2 rows of source). Outputs UV planes.
// 128 byte row allows for 32 avx ARGB pixels.
#define ANY12S(NAMEANY, ANY_SIMD, UVSHIFT, BPP, MASK) \
- void NAMEANY(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, \
+ void NAMEANY(const uint8_t* src_ptr, int src_stride, uint8_t* dst_u, \
uint8_t* dst_v, int width) { \
SIMD_ALIGNED(uint8_t temp[128 * 4]); \
memset(temp, 0, 128 * 2); /* for msan */ \
int r = width & MASK; \
int n = width & ~MASK; \
if (n > 0) { \
- ANY_SIMD(src_ptr, src_stride_ptr, dst_u, dst_v, n); \
+ ANY_SIMD(src_ptr, src_stride, dst_u, dst_v, n); \
} \
memcpy(temp, src_ptr + (n >> UVSHIFT) * BPP, SS(r, UVSHIFT) * BPP); \
- memcpy(temp + 128, src_ptr + src_stride_ptr + (n >> UVSHIFT) * BPP, \
+ memcpy(temp + 128, src_ptr + src_stride + (n >> UVSHIFT) * BPP, \
SS(r, UVSHIFT) * BPP); \
if ((width & 1) && UVSHIFT == 0) { /* repeat last pixel for subsample */ \
memcpy(temp + SS(r, UVSHIFT) * BPP, temp + SS(r, UVSHIFT) * BPP - BPP, \
@@ -1714,17 +2037,17 @@ ANY12S(UYVYToUVRow_Any_MMI, UYVYToUVRow_MMI, 1, 4, 15)
// Any 1 to 1 with source stride (2 rows of source). Outputs UV plane.
// 128 byte row allows for 32 avx ARGB pixels.
#define ANY11S(NAMEANY, ANY_SIMD, UVSHIFT, BPP, MASK) \
- void NAMEANY(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_vu, \
+ void NAMEANY(const uint8_t* src_ptr, int src_stride, uint8_t* dst_vu, \
int width) { \
SIMD_ALIGNED(uint8_t temp[128 * 3]); \
memset(temp, 0, 128 * 2); /* for msan */ \
int r = width & MASK; \
int n = width & ~MASK; \
if (n > 0) { \
- ANY_SIMD(src_ptr, src_stride_ptr, dst_vu, n); \
+ ANY_SIMD(src_ptr, src_stride, dst_vu, n); \
} \
memcpy(temp, src_ptr + (n >> UVSHIFT) * BPP, SS(r, UVSHIFT) * BPP); \
- memcpy(temp + 128, src_ptr + src_stride_ptr + (n >> UVSHIFT) * BPP, \
+ memcpy(temp + 128, src_ptr + src_stride + (n >> UVSHIFT) * BPP, \
SS(r, UVSHIFT) * BPP); \
if ((width & 1) && UVSHIFT == 0) { /* repeat last pixel for subsample */ \
memcpy(temp + SS(r, UVSHIFT) * BPP, temp + SS(r, UVSHIFT) * BPP - BPP, \
diff --git a/third_party/libyuv/source/row_common.cc b/third_party/libyuv/source/row_common.cc
index a941c3f5fc..c6e412414e 100644
--- a/third_party/libyuv/source/row_common.cc
+++ b/third_party/libyuv/source/row_common.cc
@@ -10,6 +10,7 @@
#include "libyuv/row.h"
+#include <assert.h>
#include <stdio.h>
#include <string.h> // For memcpy and memset.
@@ -21,10 +22,14 @@ namespace libyuv {
extern "C" {
#endif
-// The following ifdef from row_win makes the C code match the row_win code,
-// which is 7 bit fixed point.
+// Defining LIBYUV_UNLIMITED_DATA makes YUV to RGB use unsigned math, which
+// extends the range of the YUV to RGB coefficients from 0..2 to 0..4 for
+// more accuracy on B.
+
+// The following macro from row_win makes the C code match the row_win code,
+// which is 7 bit fixed point for ARGBToI420:
#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \
- (defined(_M_IX86) || (defined(_M_X64) && !defined(__clang__)))
+ !defined(__clang__) && (defined(_M_IX86) || defined(_M_X64))
#define LIBYUV_RGB7 1
#endif
@@ -50,6 +55,11 @@ static __inline int32_t clamp1023(int32_t v) {
return (-(v >= 1023) | v) & 1023;
}
+// clamp to max
+static __inline int32_t ClampMax(int32_t v, int32_t max) {
+ return (-(v >= max) | v) & max;
+}
+
static __inline uint32_t Abs(int32_t v) {
int m = -(v < 0);
return (v + m) ^ m;
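
Both definitions of ClampMax reuse the branchless pattern of clamp255 and clamp1023; it is only correct when max has the form 2^n - 1, because -(v >= max) ORs in an all-ones mask that the final AND truncates back to max. A small worked check:

#include <assert.h>
#include <stdint.h>

static int32_t ClampMaxSketch(int32_t v, int32_t max) {
  return (-(v >= max) | v) & max;  // requires max == 2^n - 1, e.g. 1023
}

static void ClampMaxExamples(void) {
  assert(ClampMaxSketch(500, 1023) == 500);    // below max: unchanged
  assert(ClampMaxSketch(1500, 1023) == 1023);  // above max: -1 & 1023 == 1023
  assert(ClampMaxSketch(1023, 1023) == 1023);  // v >= max still clamps to max
}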
@@ -67,6 +77,10 @@ static __inline int32_t clamp1023(int32_t v) {
return (v > 1023) ? 1023 : v;
}
+static __inline int32_t ClampMax(int32_t v, int32_t max) {
+ return (v > max) ? max : v;
+}
+
static __inline uint32_t Abs(int32_t v) {
return (v < 0) ? -v : v;
}
@@ -413,6 +427,82 @@ void ARGBToAR30Row_C(const uint8_t* src_argb, uint8_t* dst_ar30, int width) {
}
}
+void ARGBToAR64Row_C(const uint8_t* src_argb, uint16_t* dst_ar64, int width) {
+ int x;
+ for (x = 0; x < width; ++x) {
+ dst_ar64[0] = src_argb[0] * 0x0101;
+ dst_ar64[1] = src_argb[1] * 0x0101;
+ dst_ar64[2] = src_argb[2] * 0x0101;
+ dst_ar64[3] = src_argb[3] * 0x0101;
+ dst_ar64 += 4;
+ src_argb += 4;
+ }
+}
+
+void ARGBToAB64Row_C(const uint8_t* src_argb, uint16_t* dst_ab64, int width) {
+ int x;
+ for (x = 0; x < width; ++x) {
+ dst_ab64[0] = src_argb[2] * 0x0101;
+ dst_ab64[1] = src_argb[1] * 0x0101;
+ dst_ab64[2] = src_argb[0] * 0x0101;
+ dst_ab64[3] = src_argb[3] * 0x0101;
+ dst_ab64 += 4;
+ src_argb += 4;
+ }
+}
+
+void AR64ToARGBRow_C(const uint16_t* src_ar64, uint8_t* dst_argb, int width) {
+ int x;
+ for (x = 0; x < width; ++x) {
+ dst_argb[0] = src_ar64[0] >> 8;
+ dst_argb[1] = src_ar64[1] >> 8;
+ dst_argb[2] = src_ar64[2] >> 8;
+ dst_argb[3] = src_ar64[3] >> 8;
+ dst_argb += 4;
+ src_ar64 += 4;
+ }
+}
+
+void AB64ToARGBRow_C(const uint16_t* src_ab64, uint8_t* dst_argb, int width) {
+ int x;
+ for (x = 0; x < width; ++x) {
+ dst_argb[0] = src_ab64[2] >> 8;
+ dst_argb[1] = src_ab64[1] >> 8;
+ dst_argb[2] = src_ab64[0] >> 8;
+ dst_argb[3] = src_ab64[3] >> 8;
+ dst_argb += 4;
+ src_ab64 += 4;
+ }
+}
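
The * 0x0101 in the 8-to-16-bit converters above replicates each byte into both halves of the 16-bit result, so 0 maps to 0 and 255 maps to 65535 exactly, and the 16-to-8-bit direction only has to keep the high byte. A tiny worked check:

#include <assert.h>
#include <stdint.h>

static void Widen8To16Examples(void) {
  assert((uint16_t)(0x00 * 0x0101) == 0x0000);  // 8 -> 16: replicate the byte
  assert((uint16_t)(0x80 * 0x0101) == 0x8080);
  assert((uint16_t)(0xFF * 0x0101) == 0xFFFF);  // full range maps to full range
  assert(((uint16_t)(0x8080) >> 8) == 0x80);    // 16 -> 8: keep the high byte
}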
+
+// TODO(fbarchard): Make shuffle compatible with SIMD versions
+void AR64ShuffleRow_C(const uint8_t* src_ar64,
+ uint8_t* dst_ar64,
+ const uint8_t* shuffler,
+ int width) {
+ const uint16_t* src_ar64_16 = (const uint16_t*)src_ar64;
+ uint16_t* dst_ar64_16 = (uint16_t*)dst_ar64;
+ int index0 = shuffler[0] / 2;
+ int index1 = shuffler[2] / 2;
+ int index2 = shuffler[4] / 2;
+ int index3 = shuffler[6] / 2;
+ // Shuffle a row of AR64.
+ int x;
+ for (x = 0; x < width / 2; ++x) {
+ // Read all four channels before writing, to support in-place conversion.
+ uint16_t b = src_ar64_16[index0];
+ uint16_t g = src_ar64_16[index1];
+ uint16_t r = src_ar64_16[index2];
+ uint16_t a = src_ar64_16[index3];
+ dst_ar64_16[0] = b;
+ dst_ar64_16[1] = g;
+ dst_ar64_16[2] = r;
+ dst_ar64_16[3] = a;
+ src_ar64_16 += 4;
+ dst_ar64_16 += 4;
+ }
+}
+
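
Because each AR64 channel is two bytes, the C fallback above reuses ARGB-style byte shufflers by reading every other byte and halving it to obtain a 16-bit lane index. A hedged example of such a mask (a hypothetical constant; no mask name is part of this change): bytes {4,5, 2,3, 0,1, 6,7} swap lanes 0 and 2, so AR64ShuffleRow_C reads shuffler[0,2,4,6] = {4,2,0,6} and produces lane order 2,1,0,3, i.e. AR64 to AB64. The second eight bytes cover the second pixel for the reused 16-byte SSSE3/AVX2 shuffles.

#include <stdint.h>

static const uint8_t kShuffleAR64ToAB64Sketch[16] = {
    4u,  5u,  2u,  3u,  0u, 1u, 6u,  7u,    // pixel 0: swap 16-bit lanes 0, 2
    12u, 13u, 10u, 11u, 8u, 9u, 14u, 15u};  // pixel 1: same swap, offset by 8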
#ifdef LIBYUV_RGB7
// Old 7 bit math for compatibility on unsupported platforms.
static __inline int RGBToY(uint8_t r, uint8_t g, uint8_t b) {
@@ -462,80 +552,80 @@ static __inline int RGB2xToV(uint16_t r, uint16_t g, uint16_t b) {
// Intel version mimic SSE/AVX which does 2 pavgb
#if LIBYUV_ARGBTOUV_PAVGB
-#define MAKEROWY(NAME, R, G, B, BPP) \
- void NAME##ToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \
- int x; \
- for (x = 0; x < width; ++x) { \
- dst_y[0] = RGBToY(src_argb0[R], src_argb0[G], src_argb0[B]); \
- src_argb0 += BPP; \
- dst_y += 1; \
- } \
- } \
- void NAME##ToUVRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \
- uint8_t* dst_u, uint8_t* dst_v, int width) { \
- const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \
- int x; \
- for (x = 0; x < width - 1; x += 2) { \
- uint8_t ab = AVGB(AVGB(src_rgb0[B], src_rgb1[B]), \
- AVGB(src_rgb0[B + BPP], src_rgb1[B + BPP])); \
- uint8_t ag = AVGB(AVGB(src_rgb0[G], src_rgb1[G]), \
- AVGB(src_rgb0[G + BPP], src_rgb1[G + BPP])); \
- uint8_t ar = AVGB(AVGB(src_rgb0[R], src_rgb1[R]), \
- AVGB(src_rgb0[R + BPP], src_rgb1[R + BPP])); \
- dst_u[0] = RGBToU(ar, ag, ab); \
- dst_v[0] = RGBToV(ar, ag, ab); \
- src_rgb0 += BPP * 2; \
- src_rgb1 += BPP * 2; \
- dst_u += 1; \
- dst_v += 1; \
- } \
- if (width & 1) { \
- uint8_t ab = AVGB(src_rgb0[B], src_rgb1[B]); \
- uint8_t ag = AVGB(src_rgb0[G], src_rgb1[G]); \
- uint8_t ar = AVGB(src_rgb0[R], src_rgb1[R]); \
- dst_u[0] = RGBToU(ar, ag, ab); \
- dst_v[0] = RGBToV(ar, ag, ab); \
- } \
+#define MAKEROWY(NAME, R, G, B, BPP) \
+ void NAME##ToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \
+ int x; \
+ for (x = 0; x < width; ++x) { \
+ dst_y[0] = RGBToY(src_rgb[R], src_rgb[G], src_rgb[B]); \
+ src_rgb += BPP; \
+ dst_y += 1; \
+ } \
+ } \
+ void NAME##ToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, \
+ uint8_t* dst_u, uint8_t* dst_v, int width) { \
+ const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \
+ int x; \
+ for (x = 0; x < width - 1; x += 2) { \
+ uint8_t ab = AVGB(AVGB(src_rgb[B], src_rgb1[B]), \
+ AVGB(src_rgb[B + BPP], src_rgb1[B + BPP])); \
+ uint8_t ag = AVGB(AVGB(src_rgb[G], src_rgb1[G]), \
+ AVGB(src_rgb[G + BPP], src_rgb1[G + BPP])); \
+ uint8_t ar = AVGB(AVGB(src_rgb[R], src_rgb1[R]), \
+ AVGB(src_rgb[R + BPP], src_rgb1[R + BPP])); \
+ dst_u[0] = RGBToU(ar, ag, ab); \
+ dst_v[0] = RGBToV(ar, ag, ab); \
+ src_rgb += BPP * 2; \
+ src_rgb1 += BPP * 2; \
+ dst_u += 1; \
+ dst_v += 1; \
+ } \
+ if (width & 1) { \
+ uint8_t ab = AVGB(src_rgb[B], src_rgb1[B]); \
+ uint8_t ag = AVGB(src_rgb[G], src_rgb1[G]); \
+ uint8_t ar = AVGB(src_rgb[R], src_rgb1[R]); \
+ dst_u[0] = RGBToU(ar, ag, ab); \
+ dst_v[0] = RGBToV(ar, ag, ab); \
+ } \
}
#else
// ARM version does sum / 2 then multiply by 2x smaller coefficients
-#define MAKEROWY(NAME, R, G, B, BPP) \
- void NAME##ToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \
- int x; \
- for (x = 0; x < width; ++x) { \
- dst_y[0] = RGBToY(src_argb0[R], src_argb0[G], src_argb0[B]); \
- src_argb0 += BPP; \
- dst_y += 1; \
- } \
- } \
- void NAME##ToUVRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \
- uint8_t* dst_u, uint8_t* dst_v, int width) { \
- const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \
- int x; \
- for (x = 0; x < width - 1; x += 2) { \
- uint16_t ab = (src_rgb0[B] + src_rgb0[B + BPP] + src_rgb1[B] + \
- src_rgb1[B + BPP] + 1) >> \
- 1; \
- uint16_t ag = (src_rgb0[G] + src_rgb0[G + BPP] + src_rgb1[G] + \
- src_rgb1[G + BPP] + 1) >> \
- 1; \
- uint16_t ar = (src_rgb0[R] + src_rgb0[R + BPP] + src_rgb1[R] + \
- src_rgb1[R + BPP] + 1) >> \
- 1; \
- dst_u[0] = RGB2xToU(ar, ag, ab); \
- dst_v[0] = RGB2xToV(ar, ag, ab); \
- src_rgb0 += BPP * 2; \
- src_rgb1 += BPP * 2; \
- dst_u += 1; \
- dst_v += 1; \
- } \
- if (width & 1) { \
- uint16_t ab = src_rgb0[B] + src_rgb1[B]; \
- uint16_t ag = src_rgb0[G] + src_rgb1[G]; \
- uint16_t ar = src_rgb0[R] + src_rgb1[R]; \
- dst_u[0] = RGB2xToU(ar, ag, ab); \
- dst_v[0] = RGB2xToV(ar, ag, ab); \
- } \
+#define MAKEROWY(NAME, R, G, B, BPP) \
+ void NAME##ToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \
+ int x; \
+ for (x = 0; x < width; ++x) { \
+ dst_y[0] = RGBToY(src_rgb[R], src_rgb[G], src_rgb[B]); \
+ src_rgb += BPP; \
+ dst_y += 1; \
+ } \
+ } \
+ void NAME##ToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, \
+ uint8_t* dst_u, uint8_t* dst_v, int width) { \
+ const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \
+ int x; \
+ for (x = 0; x < width - 1; x += 2) { \
+ uint16_t ab = (src_rgb[B] + src_rgb[B + BPP] + src_rgb1[B] + \
+ src_rgb1[B + BPP] + 1) >> \
+ 1; \
+ uint16_t ag = (src_rgb[G] + src_rgb[G + BPP] + src_rgb1[G] + \
+ src_rgb1[G + BPP] + 1) >> \
+ 1; \
+ uint16_t ar = (src_rgb[R] + src_rgb[R + BPP] + src_rgb1[R] + \
+ src_rgb1[R + BPP] + 1) >> \
+ 1; \
+ dst_u[0] = RGB2xToU(ar, ag, ab); \
+ dst_v[0] = RGB2xToV(ar, ag, ab); \
+ src_rgb += BPP * 2; \
+ src_rgb1 += BPP * 2; \
+ dst_u += 1; \
+ dst_v += 1; \
+ } \
+ if (width & 1) { \
+ uint16_t ab = src_rgb[B] + src_rgb1[B]; \
+ uint16_t ag = src_rgb[G] + src_rgb1[G]; \
+ uint16_t ar = src_rgb[R] + src_rgb1[R]; \
+ dst_u[0] = RGB2xToU(ar, ag, ab); \
+ dst_v[0] = RGB2xToV(ar, ag, ab); \
+ } \
}
#endif
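
The Intel and ARM variants of these MAKEROWY/MAKEROWYJ macros differ only in how the 2x2 chroma average is formed: the Intel path mimics pavgb by nesting rounding averages, while the ARM path keeps a sum that is roughly twice the average and compensates with RGB2xTo* coefficients that are half as large. A small worked comparison with 8-bit inputs:

#include <assert.h>
#include <stdint.h>

#define AVGB(a, b) (((a) + (b) + 1) >> 1)

static void SubsampleAverageExamples(void) {
  uint8_t a = 10, b = 11, c = 12, d = 13;
  // Intel/pavgb style: average of rounded averages.
  uint8_t avg_pavgb = AVGB(AVGB(a, c), AVGB(b, d));  // (11 + 12 + 1) >> 1 == 12
  // ARM style: keep roughly 2x the average; the halved coefficients in
  // RGB2xToU/V absorb the extra factor of two.
  uint16_t sum_2x = (a + b + c + d + 1) >> 1;  // 23, about twice 11.5
  assert(avg_pavgb == 12);
  assert(sum_2x == 23);
}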
@@ -603,80 +693,80 @@ static __inline int RGB2xToVJ(uint16_t r, uint16_t g, uint16_t b) {
// ARGBToYJ_C and ARGBToUVJ_C
// Intel version mimic SSE/AVX which does 2 pavgb
#if LIBYUV_ARGBTOUV_PAVGB
-#define MAKEROWYJ(NAME, R, G, B, BPP) \
- void NAME##ToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \
- int x; \
- for (x = 0; x < width; ++x) { \
- dst_y[0] = RGBToYJ(src_argb0[R], src_argb0[G], src_argb0[B]); \
- src_argb0 += BPP; \
- dst_y += 1; \
- } \
- } \
- void NAME##ToUVJRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \
- uint8_t* dst_u, uint8_t* dst_v, int width) { \
- const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \
- int x; \
- for (x = 0; x < width - 1; x += 2) { \
- uint8_t ab = AVGB(AVGB(src_rgb0[B], src_rgb1[B]), \
- AVGB(src_rgb0[B + BPP], src_rgb1[B + BPP])); \
- uint8_t ag = AVGB(AVGB(src_rgb0[G], src_rgb1[G]), \
- AVGB(src_rgb0[G + BPP], src_rgb1[G + BPP])); \
- uint8_t ar = AVGB(AVGB(src_rgb0[R], src_rgb1[R]), \
- AVGB(src_rgb0[R + BPP], src_rgb1[R + BPP])); \
- dst_u[0] = RGBToUJ(ar, ag, ab); \
- dst_v[0] = RGBToVJ(ar, ag, ab); \
- src_rgb0 += BPP * 2; \
- src_rgb1 += BPP * 2; \
- dst_u += 1; \
- dst_v += 1; \
- } \
- if (width & 1) { \
- uint8_t ab = AVGB(src_rgb0[B], src_rgb1[B]); \
- uint8_t ag = AVGB(src_rgb0[G], src_rgb1[G]); \
- uint8_t ar = AVGB(src_rgb0[R], src_rgb1[R]); \
- dst_u[0] = RGBToUJ(ar, ag, ab); \
- dst_v[0] = RGBToVJ(ar, ag, ab); \
- } \
+#define MAKEROWYJ(NAME, R, G, B, BPP) \
+ void NAME##ToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \
+ int x; \
+ for (x = 0; x < width; ++x) { \
+ dst_y[0] = RGBToYJ(src_rgb[R], src_rgb[G], src_rgb[B]); \
+ src_rgb += BPP; \
+ dst_y += 1; \
+ } \
+ } \
+ void NAME##ToUVJRow_C(const uint8_t* src_rgb, int src_stride_rgb, \
+ uint8_t* dst_u, uint8_t* dst_v, int width) { \
+ const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \
+ int x; \
+ for (x = 0; x < width - 1; x += 2) { \
+ uint8_t ab = AVGB(AVGB(src_rgb[B], src_rgb1[B]), \
+ AVGB(src_rgb[B + BPP], src_rgb1[B + BPP])); \
+ uint8_t ag = AVGB(AVGB(src_rgb[G], src_rgb1[G]), \
+ AVGB(src_rgb[G + BPP], src_rgb1[G + BPP])); \
+ uint8_t ar = AVGB(AVGB(src_rgb[R], src_rgb1[R]), \
+ AVGB(src_rgb[R + BPP], src_rgb1[R + BPP])); \
+ dst_u[0] = RGBToUJ(ar, ag, ab); \
+ dst_v[0] = RGBToVJ(ar, ag, ab); \
+ src_rgb += BPP * 2; \
+ src_rgb1 += BPP * 2; \
+ dst_u += 1; \
+ dst_v += 1; \
+ } \
+ if (width & 1) { \
+ uint8_t ab = AVGB(src_rgb[B], src_rgb1[B]); \
+ uint8_t ag = AVGB(src_rgb[G], src_rgb1[G]); \
+ uint8_t ar = AVGB(src_rgb[R], src_rgb1[R]); \
+ dst_u[0] = RGBToUJ(ar, ag, ab); \
+ dst_v[0] = RGBToVJ(ar, ag, ab); \
+ } \
}
#else
// ARM version does sum / 2 then multiply by 2x smaller coefficients
-#define MAKEROWYJ(NAME, R, G, B, BPP) \
- void NAME##ToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \
- int x; \
- for (x = 0; x < width; ++x) { \
- dst_y[0] = RGBToYJ(src_argb0[R], src_argb0[G], src_argb0[B]); \
- src_argb0 += BPP; \
- dst_y += 1; \
- } \
- } \
- void NAME##ToUVJRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \
- uint8_t* dst_u, uint8_t* dst_v, int width) { \
- const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \
- int x; \
- for (x = 0; x < width - 1; x += 2) { \
- uint16_t ab = (src_rgb0[B] + src_rgb0[B + BPP] + src_rgb1[B] + \
- src_rgb1[B + BPP] + 1) >> \
- 1; \
- uint16_t ag = (src_rgb0[G] + src_rgb0[G + BPP] + src_rgb1[G] + \
- src_rgb1[G + BPP] + 1) >> \
- 1; \
- uint16_t ar = (src_rgb0[R] + src_rgb0[R + BPP] + src_rgb1[R] + \
- src_rgb1[R + BPP] + 1) >> \
- 1; \
- dst_u[0] = RGB2xToUJ(ar, ag, ab); \
- dst_v[0] = RGB2xToVJ(ar, ag, ab); \
- src_rgb0 += BPP * 2; \
- src_rgb1 += BPP * 2; \
- dst_u += 1; \
- dst_v += 1; \
- } \
- if (width & 1) { \
- uint16_t ab = (src_rgb0[B] + src_rgb1[B]); \
- uint16_t ag = (src_rgb0[G] + src_rgb1[G]); \
- uint16_t ar = (src_rgb0[R] + src_rgb1[R]); \
- dst_u[0] = RGB2xToUJ(ar, ag, ab); \
- dst_v[0] = RGB2xToVJ(ar, ag, ab); \
- } \
+#define MAKEROWYJ(NAME, R, G, B, BPP) \
+ void NAME##ToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \
+ int x; \
+ for (x = 0; x < width; ++x) { \
+ dst_y[0] = RGBToYJ(src_rgb[R], src_rgb[G], src_rgb[B]); \
+ src_rgb += BPP; \
+ dst_y += 1; \
+ } \
+ } \
+ void NAME##ToUVJRow_C(const uint8_t* src_rgb, int src_stride_rgb, \
+ uint8_t* dst_u, uint8_t* dst_v, int width) { \
+ const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \
+ int x; \
+ for (x = 0; x < width - 1; x += 2) { \
+ uint16_t ab = (src_rgb[B] + src_rgb[B + BPP] + src_rgb1[B] + \
+ src_rgb1[B + BPP] + 1) >> \
+ 1; \
+ uint16_t ag = (src_rgb[G] + src_rgb[G + BPP] + src_rgb1[G] + \
+ src_rgb1[G + BPP] + 1) >> \
+ 1; \
+ uint16_t ar = (src_rgb[R] + src_rgb[R + BPP] + src_rgb1[R] + \
+ src_rgb1[R + BPP] + 1) >> \
+ 1; \
+ dst_u[0] = RGB2xToUJ(ar, ag, ab); \
+ dst_v[0] = RGB2xToVJ(ar, ag, ab); \
+ src_rgb += BPP * 2; \
+ src_rgb1 += BPP * 2; \
+ dst_u += 1; \
+ dst_v += 1; \
+ } \
+ if (width & 1) { \
+ uint16_t ab = (src_rgb[B] + src_rgb1[B]); \
+ uint16_t ag = (src_rgb[G] + src_rgb1[G]); \
+ uint16_t ar = (src_rgb[R] + src_rgb1[R]); \
+ dst_u[0] = RGB2xToUJ(ar, ag, ab); \
+ dst_v[0] = RGB2xToVJ(ar, ag, ab); \
+ } \
}
#endif
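Editor's note: the two MAKEROWYJ variants above differ only in how the 2x2 chroma average is formed. A minimal standalone sketch of the two strategies (function names are illustrative only, not libyuv API):

static uint8_t AverageOfAverages(uint8_t a, uint8_t b, uint8_t c, uint8_t d) {
  /* Non-ARM path: rounding average of rounding averages, stays in 8 bits
     and feeds the full-size RGBToUJ/RGBToVJ coefficients. */
  uint8_t ab = (uint8_t)((a + b + 1) >> 1);
  uint8_t cd = (uint8_t)((c + d + 1) >> 1);
  return (uint8_t)((ab + cd + 1) >> 1);
}

static uint16_t TwiceTheAverage(uint8_t a, uint8_t b, uint8_t c, uint8_t d) {
  /* ARM path: keep sum / 2 (twice the true average) and compensate in
     RGB2xToUJ/RGB2xToVJ, whose coefficients are 2x smaller. */
  return (uint16_t)((a + b + c + d + 1) >> 1);
}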
@@ -1146,16 +1236,16 @@ void ARGBShadeRow_C(const uint8_t* src_argb,
#define REPEAT8(v) (v) | ((v) << 8)
#define SHADE(f, v) v* f >> 16
-void ARGBMultiplyRow_C(const uint8_t* src_argb0,
+void ARGBMultiplyRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
int i;
for (i = 0; i < width; ++i) {
- const uint32_t b = REPEAT8(src_argb0[0]);
- const uint32_t g = REPEAT8(src_argb0[1]);
- const uint32_t r = REPEAT8(src_argb0[2]);
- const uint32_t a = REPEAT8(src_argb0[3]);
+ const uint32_t b = REPEAT8(src_argb[0]);
+ const uint32_t g = REPEAT8(src_argb[1]);
+ const uint32_t r = REPEAT8(src_argb[2]);
+ const uint32_t a = REPEAT8(src_argb[3]);
const uint32_t b_scale = src_argb1[0];
const uint32_t g_scale = src_argb1[1];
const uint32_t r_scale = src_argb1[2];
@@ -1164,7 +1254,7 @@ void ARGBMultiplyRow_C(const uint8_t* src_argb0,
dst_argb[1] = SHADE(g, g_scale);
dst_argb[2] = SHADE(r, r_scale);
dst_argb[3] = SHADE(a, a_scale);
- src_argb0 += 4;
+ src_argb += 4;
src_argb1 += 4;
dst_argb += 4;
}
@@ -1174,16 +1264,16 @@ void ARGBMultiplyRow_C(const uint8_t* src_argb0,
#define SHADE(f, v) clamp255(v + f)
-void ARGBAddRow_C(const uint8_t* src_argb0,
+void ARGBAddRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
int i;
for (i = 0; i < width; ++i) {
- const int b = src_argb0[0];
- const int g = src_argb0[1];
- const int r = src_argb0[2];
- const int a = src_argb0[3];
+ const int b = src_argb[0];
+ const int g = src_argb[1];
+ const int r = src_argb[2];
+ const int a = src_argb[3];
const int b_add = src_argb1[0];
const int g_add = src_argb1[1];
const int r_add = src_argb1[2];
@@ -1192,7 +1282,7 @@ void ARGBAddRow_C(const uint8_t* src_argb0,
dst_argb[1] = SHADE(g, g_add);
dst_argb[2] = SHADE(r, r_add);
dst_argb[3] = SHADE(a, a_add);
- src_argb0 += 4;
+ src_argb += 4;
src_argb1 += 4;
dst_argb += 4;
}
@@ -1201,16 +1291,16 @@ void ARGBAddRow_C(const uint8_t* src_argb0,
#define SHADE(f, v) clamp0(f - v)
-void ARGBSubtractRow_C(const uint8_t* src_argb0,
+void ARGBSubtractRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
int i;
for (i = 0; i < width; ++i) {
- const int b = src_argb0[0];
- const int g = src_argb0[1];
- const int r = src_argb0[2];
- const int a = src_argb0[3];
+ const int b = src_argb[0];
+ const int g = src_argb[1];
+ const int r = src_argb[2];
+ const int a = src_argb[3];
const int b_sub = src_argb1[0];
const int g_sub = src_argb1[1];
const int r_sub = src_argb1[2];
@@ -1219,7 +1309,7 @@ void ARGBSubtractRow_C(const uint8_t* src_argb0,
dst_argb[1] = SHADE(g, g_sub);
dst_argb[2] = SHADE(r, r_sub);
dst_argb[3] = SHADE(a, a_sub);
- src_argb0 += 4;
+ src_argb += 4;
src_argb1 += 4;
dst_argb += 4;
}
@@ -1329,64 +1419,36 @@ void J400ToARGBRow_C(const uint8_t* src_y, uint8_t* dst_argb, int width) {
// Macros to create SIMD specific yuv to rgb conversion constants.
-#if defined(__aarch64__)
-#define MAKEYUVCONSTANTS(name, YG, YB, UB, UG, VG, VR, BB, BG, BR) \
- const struct YuvConstants SIMD_ALIGNED(kYuv##name##Constants) = { \
- {UB, VR, UB, VR, UB, VR, UB, VR}, {UB, VR, UB, VR, UB, VR, UB, VR}, \
- {UG, VG, UG, VG, UG, VG, UG, VG}, {UG, VG, UG, VG, UG, VG, UG, VG}, \
- {BB, BG, BR, YB, 0, 0, 0, 0}, {0x0101 * YG, YG, 0, 0}}; \
- const struct YuvConstants SIMD_ALIGNED(kYvu##name##Constants) = { \
- {VR, UB, VR, UB, VR, UB, VR, UB}, {VR, UB, VR, UB, VR, UB, VR, UB}, \
- {VG, UG, VG, UG, VG, UG, VG, UG}, {VG, UG, VG, UG, VG, UG, VG, UG}, \
- {BR, BG, BB, YB, 0, 0, 0, 0}, {0x0101 * YG, YG, 0, 0}};
-
-#elif defined(__arm__)
-#define MAKEYUVCONSTANTS(name, YG, YB, UB, UG, VG, VR, BB, BG, BR) \
- const struct YuvConstants SIMD_ALIGNED(kYuv##name##Constants) = { \
- {UB, UB, UB, UB, VR, VR, VR, VR, 0, 0, 0, 0, 0, 0, 0, 0}, \
- {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, \
- {BB, BG, BR, YB, 0, 0, 0, 0}, \
- {0x0101 * YG, YG, 0, 0}}; \
- const struct YuvConstants SIMD_ALIGNED(kYvu##name##Constants) = { \
- {VR, VR, VR, VR, UB, UB, UB, UB, 0, 0, 0, 0, 0, 0, 0, 0}, \
- {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, \
- {BR, BG, BB, YB, 0, 0, 0, 0}, \
- {0x0101 * YG, YG, 0, 0}};
+// clang-format off
+#if defined(__aarch64__) || defined(__arm__)
+// Bias values include subtracting 128 from U and V, the Y bias and rounding.
+// For B and R the bias is negative; for G it is positive.
+#define YUVCONSTANTSBODY(YG, YB, UB, UG, VG, VR) \
+ {{UB, VR, UG, VG, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, \
+ {YG, (UB * 128 - YB), (UG * 128 + VG * 128 + YB), (VR * 128 - YB), YB, 0, \
+ 0, 0}}
#else
-#define MAKEYUVCONSTANTS(name, YG, YB, UB, UG, VG, VR, BB, BG, BR) \
- const struct YuvConstants SIMD_ALIGNED(kYuv##name##Constants) = { \
- {-UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, \
- -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0}, \
- {UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, \
- UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG}, \
- {0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, \
- 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR}, \
- {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, \
- {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, \
- {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, \
- {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, \
- {YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB}}; \
- const struct YuvConstants SIMD_ALIGNED(kYvu##name##Constants) = { \
- {-VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, \
- -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0, -VR, 0}, \
- {VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, \
- VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG}, \
- {0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, \
- 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB, 0, -UB}, \
- {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, \
- {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, \
- {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, \
- {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, \
- {YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB}};
+#define YUVCONSTANTSBODY(YG, YB, UB, UG, VG, VR) \
+ {{UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, \
+ UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0}, \
+ {UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, \
+ UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG}, \
+ {0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, \
+ 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR}, \
+ {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, \
+ {YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB}}
#endif
-// TODO(fbarchard): Generate SIMD structures from float matrix.
+// clang-format on
-// Bias values to round, and subtract 128 from U and V.
-#define BB (-UB * 128 + YB)
-#define BG (UG * 128 + VG * 128 + YB)
-#define BR (-VR * 128 + YB)
+#define MAKEYUVCONSTANTS(name, YG, YB, UB, UG, VG, VR) \
+ const struct YuvConstants SIMD_ALIGNED(kYuv##name##Constants) = \
+ YUVCONSTANTSBODY(YG, YB, UB, UG, VG, VR); \
+ const struct YuvConstants SIMD_ALIGNED(kYvu##name##Constants) = \
+ YUVCONSTANTSBODY(YG, YB, VR, VG, UG, UB);
+
+// TODO(fbarchard): Generate SIMD structures from float matrix.
// BT.601 limited range YUV to RGB reference
// R = (Y - 16) * 1.164 + V * 1.596
@@ -1395,7 +1457,11 @@ void J400ToARGBRow_C(const uint8_t* src_y, uint8_t* dst_argb, int width) {
// KR = 0.299; KB = 0.114
// U and V contributions to R,G,B.
+#ifdef LIBYUV_UNLIMITED_DATA
+#define UB 129 /* round(2.018 * 64) */
+#else
#define UB 128 /* max(128, round(2.018 * 64)) */
+#endif
#define UG 25 /* round(0.391 * 64) */
#define VG 52 /* round(0.813 * 64) */
#define VR 102 /* round(1.596 * 64) */
@@ -1404,7 +1470,7 @@ void J400ToARGBRow_C(const uint8_t* src_y, uint8_t* dst_argb, int width) {
#define YG 18997 /* round(1.164 * 64 * 256 * 256 / 257) */
#define YB -1160 /* 1.164 * 64 * -16 + 64 / 2 */
-MAKEYUVCONSTANTS(I601, YG, YB, UB, UG, VG, VR, BB, BG, BR)
+MAKEYUVCONSTANTS(I601, YG, YB, UB, UG, VG, VR)
#undef YG
#undef YB
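Editor's note: a scalar illustration (not part of the patch) of how the I601 fixed point constants above map back to the float reference R = (Y - 16) * 1.164 + (V - 128) * 1.596, shown for the red channel with the clamp written out:

static uint8_t I601RedFromYV(uint8_t y, uint8_t v) {
  int y16 = (int)(((uint32_t)(y * 0x0101) * 18997) >> 16) - 1160;  /* YG, YB */
  int r16 = y16 + (v - 128) * 102;                                 /* VR */
  int r = r16 >> 6;  /* drop the 6 fraction bits */
  return (uint8_t)(r < 0 ? 0 : (r > 255 ? 255 : r));
}

For Y = 16, V = 128 this yields 0, and for Y = 235, V = 128 it yields 255, matching the limited range mapping described in the comments.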
@@ -1429,7 +1495,7 @@ MAKEYUVCONSTANTS(I601, YG, YB, UB, UG, VG, VR, BB, BG, BR)
#define YG 16320 /* round(1.000 * 64 * 256 * 256 / 257) */
#define YB 32 /* 64 / 2 */
-MAKEYUVCONSTANTS(JPEG, YG, YB, UB, UG, VG, VR, BB, BG, BR)
+MAKEYUVCONSTANTS(JPEG, YG, YB, UB, UG, VG, VR)
#undef YG
#undef YB
@@ -1444,9 +1510,12 @@ MAKEYUVCONSTANTS(JPEG, YG, YB, UB, UG, VG, VR, BB, BG, BR)
// B = (Y - 16) * 1.164 + U * 2.112
// KR = 0.2126, KB = 0.0722
-// TODO(fbarchard): Find way to express 2.112 instead of 2.0.
// U and V contributions to R,G,B.
+#ifdef LIBYUV_UNLIMITED_DATA
+#define UB 135 /* round(2.112 * 64) */
+#else
#define UB 128 /* max(128, round(2.112 * 64)) */
+#endif
#define UG 14 /* round(0.213 * 64) */
#define VG 34 /* round(0.533 * 64) */
#define VR 115 /* round(1.793 * 64) */
@@ -1455,7 +1524,7 @@ MAKEYUVCONSTANTS(JPEG, YG, YB, UB, UG, VG, VR, BB, BG, BR)
#define YG 18997 /* round(1.164 * 64 * 256 * 256 / 257) */
#define YB -1160 /* 1.164 * 64 * -16 + 64 / 2 */
-MAKEYUVCONSTANTS(H709, YG, YB, UB, UG, VG, VR, BB, BG, BR)
+MAKEYUVCONSTANTS(H709, YG, YB, UB, UG, VG, VR)
#undef YG
#undef YB
@@ -1480,7 +1549,7 @@ MAKEYUVCONSTANTS(H709, YG, YB, UB, UG, VG, VR, BB, BG, BR)
#define YG 16320 /* round(1 * 64 * 256 * 256 / 257) */
#define YB 32 /* 64 / 2 */
-MAKEYUVCONSTANTS(F709, YG, YB, UB, UG, VG, VR, BB, BG, BR)
+MAKEYUVCONSTANTS(F709, YG, YB, UB, UG, VG, VR)
#undef YG
#undef YB
@@ -1495,9 +1564,12 @@ MAKEYUVCONSTANTS(F709, YG, YB, UB, UG, VG, VR, BB, BG, BR)
// B = (Y - 16) * 1.164384 + U * 2.14177
// KR = 0.2627; KB = 0.0593
-// TODO(fbarchard): Improve accuracy; the B channel is off by 7%.
// U and V contributions to R,G,B.
+#ifdef LIBYUV_UNLIMITED_DATA
+#define UB 137 /* round(2.142 * 64) */
+#else
#define UB 128 /* max(128, round(2.142 * 64)) */
+#endif
#define UG 12 /* round(0.187326 * 64) */
#define VG 42 /* round(0.65042 * 64) */
#define VR 107 /* round(1.67867 * 64) */
@@ -1506,7 +1578,7 @@ MAKEYUVCONSTANTS(F709, YG, YB, UB, UG, VG, VR, BB, BG, BR)
#define YG 19003 /* round(1.164384 * 64 * 256 * 256 / 257) */
#define YB -1160 /* 1.164384 * 64 * -16 + 64 / 2 */
-MAKEYUVCONSTANTS(2020, YG, YB, UB, UG, VG, VR, BB, BG, BR)
+MAKEYUVCONSTANTS(2020, YG, YB, UB, UG, VG, VR)
#undef YG
#undef YB
@@ -1530,7 +1602,7 @@ MAKEYUVCONSTANTS(2020, YG, YB, UB, UG, VG, VR, BB, BG, BR)
#define YG 16320 /* round(1 * 64 * 256 * 256 / 257) */
#define YB 32 /* 64 / 2 */
-MAKEYUVCONSTANTS(V2020, YG, YB, UB, UG, VG, VR, BB, BG, BR)
+MAKEYUVCONSTANTS(V2020, YG, YB, UB, UG, VG, VR)
#undef YG
#undef YB
@@ -1545,6 +1617,42 @@ MAKEYUVCONSTANTS(V2020, YG, YB, UB, UG, VG, VR, BB, BG, BR)
#undef MAKEYUVCONSTANTS
+#if defined(__aarch64__) || defined(__arm__)
+#define LOAD_YUV_CONSTANTS \
+ int ub = yuvconstants->kUVCoeff[0]; \
+ int vr = yuvconstants->kUVCoeff[1]; \
+ int ug = yuvconstants->kUVCoeff[2]; \
+ int vg = yuvconstants->kUVCoeff[3]; \
+ int yg = yuvconstants->kRGBCoeffBias[0]; \
+ int bb = yuvconstants->kRGBCoeffBias[1]; \
+ int bg = yuvconstants->kRGBCoeffBias[2]; \
+ int br = yuvconstants->kRGBCoeffBias[3]
+
+#define CALC_RGB16 \
+ int32_t y1 = (uint32_t)(y32 * yg) >> 16; \
+ int b16 = y1 + (u * ub) - bb; \
+ int g16 = y1 + bg - (u * ug + v * vg); \
+ int r16 = y1 + (v * vr) - br
+#else
+#define LOAD_YUV_CONSTANTS \
+ int ub = yuvconstants->kUVToB[0]; \
+ int ug = yuvconstants->kUVToG[0]; \
+ int vg = yuvconstants->kUVToG[1]; \
+ int vr = yuvconstants->kUVToR[1]; \
+ int yg = yuvconstants->kYToRgb[0]; \
+ int yb = yuvconstants->kYBiasToRgb[0]
+
+#define CALC_RGB16 \
+ int32_t y1 = ((uint32_t)(y32 * yg) >> 16) + yb; \
+ int8_t ui = u; \
+ int8_t vi = v; \
+ ui -= 0x80; \
+ vi -= 0x80; \
+ int b16 = y1 + (ui * ub); \
+ int g16 = y1 - (ui * ug + vi * vg); \
+ int r16 = y1 + (vi * vr)
+#endif
+
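Editor's note: the ARM and non-ARM paths above produce the same result; the ARM kRGBCoeffBias entries simply fold the U/V offset of 128 and the Y bias YB into single constants. For the blue channel, for example:

  ARM:     b16 = y1 + u * UB - (UB * 128 - YB)   with y1 = (y32 * YG) >> 16
  non-ARM: b16 = y1 + (u - 128) * UB             with y1 = ((y32 * YG) >> 16) + YB

and both expand to ((y32 * YG) >> 16) + YB + (u - 128) * UB.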
// C reference code that mimics the YUV assembly.
// Reads 8 bit YUV and leaves result as 16 bit.
static __inline void YuvPixel(uint8_t y,
@@ -1554,39 +1662,12 @@ static __inline void YuvPixel(uint8_t y,
uint8_t* g,
uint8_t* r,
const struct YuvConstants* yuvconstants) {
-#if defined(__aarch64__)
- int ub = -yuvconstants->kUVToRB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[1];
- int vr = -yuvconstants->kUVToRB[1];
- int bb = yuvconstants->kUVBiasBGR[0];
- int bg = yuvconstants->kUVBiasBGR[1];
- int br = yuvconstants->kUVBiasBGR[2];
- int yg = yuvconstants->kYToRgb[1];
-#elif defined(__arm__)
- int ub = -yuvconstants->kUVToRB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[4];
- int vr = -yuvconstants->kUVToRB[4];
- int bb = yuvconstants->kUVBiasBGR[0];
- int bg = yuvconstants->kUVBiasBGR[1];
- int br = yuvconstants->kUVBiasBGR[2];
- int yg = yuvconstants->kYToRgb[1];
-#else
- int ub = yuvconstants->kUVToB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[1];
- int vr = yuvconstants->kUVToR[1];
- int bb = yuvconstants->kUVBiasB[0];
- int bg = yuvconstants->kUVBiasG[0];
- int br = yuvconstants->kUVBiasR[0];
- int yg = yuvconstants->kYToRgb[0];
-#endif
-
- uint32_t y1 = (uint32_t)(y * 0x0101 * yg) >> 16;
- *b = Clamp((int32_t)(y1 + -(u * ub) + bb) >> 6);
- *g = Clamp((int32_t)(y1 + -(u * ug + v * vg) + bg) >> 6);
- *r = Clamp((int32_t)(y1 + -(v * vr) + br) >> 6);
+ LOAD_YUV_CONSTANTS;
+ uint32_t y32 = y * 0x0101;
+ CALC_RGB16;
+ *b = Clamp((int32_t)(b16) >> 6);
+ *g = Clamp((int32_t)(g16) >> 6);
+ *r = Clamp((int32_t)(r16) >> 6);
}
// Reads 8 bit YUV and leaves result as 16 bit.
@@ -1597,85 +1678,50 @@ static __inline void YuvPixel8_16(uint8_t y,
int* g,
int* r,
const struct YuvConstants* yuvconstants) {
-#if defined(__aarch64__)
- int ub = -yuvconstants->kUVToRB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[1];
- int vr = -yuvconstants->kUVToRB[1];
- int bb = yuvconstants->kUVBiasBGR[0];
- int bg = yuvconstants->kUVBiasBGR[1];
- int br = yuvconstants->kUVBiasBGR[2];
- int yg = yuvconstants->kYToRgb[1];
-#elif defined(__arm__)
- int ub = -yuvconstants->kUVToRB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[4];
- int vr = -yuvconstants->kUVToRB[4];
- int bb = yuvconstants->kUVBiasBGR[0];
- int bg = yuvconstants->kUVBiasBGR[1];
- int br = yuvconstants->kUVBiasBGR[2];
- int yg = yuvconstants->kYToRgb[1];
-#else
- int ub = yuvconstants->kUVToB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[1];
- int vr = yuvconstants->kUVToR[1];
- int bb = yuvconstants->kUVBiasB[0];
- int bg = yuvconstants->kUVBiasG[0];
- int br = yuvconstants->kUVBiasR[0];
- int yg = yuvconstants->kYToRgb[0];
-#endif
-
- uint32_t y1 = (uint32_t)(y * 0x0101 * yg) >> 16;
- *b = (int)(-(u * ub) + y1 + bb);
- *g = (int)(-(u * ug + v * vg) + y1 + bg);
- *r = (int)(-(v * vr) + y1 + br);
+ LOAD_YUV_CONSTANTS;
+ uint32_t y32 = y * 0x0101;
+ CALC_RGB16;
+ *b = b16;
+ *g = g16;
+ *r = r16;
}
// C reference code that mimics the YUV 16 bit assembly.
// Reads 10 bit YUV and leaves result as 16 bit.
-static __inline void YuvPixel16(int16_t y,
- int16_t u,
- int16_t v,
- int* b,
- int* g,
- int* r,
- const struct YuvConstants* yuvconstants) {
-#if defined(__aarch64__)
- int ub = -yuvconstants->kUVToRB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[1];
- int vr = -yuvconstants->kUVToRB[1];
- int bb = yuvconstants->kUVBiasBGR[0];
- int bg = yuvconstants->kUVBiasBGR[1];
- int br = yuvconstants->kUVBiasBGR[2];
- int yg = yuvconstants->kYToRgb[1];
-#elif defined(__arm__)
- int ub = -yuvconstants->kUVToRB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[4];
- int vr = -yuvconstants->kUVToRB[4];
- int bb = yuvconstants->kUVBiasBGR[0];
- int bg = yuvconstants->kUVBiasBGR[1];
- int br = yuvconstants->kUVBiasBGR[2];
- int yg = yuvconstants->kYToRgb[1];
-#else
- int ub = yuvconstants->kUVToB[0];
- int ug = yuvconstants->kUVToG[0];
- int vg = yuvconstants->kUVToG[1];
- int vr = yuvconstants->kUVToR[1];
- int bb = yuvconstants->kUVBiasB[0];
- int bg = yuvconstants->kUVBiasG[0];
- int br = yuvconstants->kUVBiasR[0];
- int yg = yuvconstants->kYToRgb[0];
-#endif
-
- uint32_t y1 = (uint32_t)((y << 6) * yg) >> 16;
+static __inline void YuvPixel10_16(uint16_t y,
+ uint16_t u,
+ uint16_t v,
+ int* b,
+ int* g,
+ int* r,
+ const struct YuvConstants* yuvconstants) {
+ LOAD_YUV_CONSTANTS;
+ uint32_t y32 = y << 6;
u = clamp255(u >> 2);
v = clamp255(v >> 2);
- *b = (int)(-(u * ub) + y1 + bb);
- *g = (int)(-(u * ug + v * vg) + y1 + bg);
- *r = (int)(-(v * vr) + y1 + br);
+ CALC_RGB16;
+ *b = b16;
+ *g = g16;
+ *r = r16;
+}
+
+// C reference code that mimics the YUV 16 bit assembly.
+// Reads 12 bit YUV and leaves result as 16 bit.
+static __inline void YuvPixel12_16(int16_t y,
+ int16_t u,
+ int16_t v,
+ int* b,
+ int* g,
+ int* r,
+ const struct YuvConstants* yuvconstants) {
+ LOAD_YUV_CONSTANTS;
+ uint32_t y32 = y << 4;
+ u = clamp255(u >> 4);
+ v = clamp255(v >> 4);
+ CALC_RGB16;
+ *b = b16;
+ *g = g16;
+ *r = r16;
}
// C reference code that mimics the YUV 10 bit assembly.
@@ -1690,22 +1736,78 @@ static __inline void YuvPixel10(uint16_t y,
int b16;
int g16;
int r16;
- YuvPixel16(y, u, v, &b16, &g16, &r16, yuvconstants);
+ YuvPixel10_16(y, u, v, &b16, &g16, &r16, yuvconstants);
+ *b = Clamp(b16 >> 6);
+ *g = Clamp(g16 >> 6);
+ *r = Clamp(r16 >> 6);
+}
+
+// C reference code that mimics the YUV 12 bit assembly.
+// Reads 12 bit YUV and clamps down to 8 bit RGB.
+static __inline void YuvPixel12(uint16_t y,
+ uint16_t u,
+ uint16_t v,
+ uint8_t* b,
+ uint8_t* g,
+ uint8_t* r,
+ const struct YuvConstants* yuvconstants) {
+ int b16;
+ int g16;
+ int r16;
+ YuvPixel12_16(y, u, v, &b16, &g16, &r16, yuvconstants);
*b = Clamp(b16 >> 6);
*g = Clamp(g16 >> 6);
*r = Clamp(r16 >> 6);
}
+// C reference code that mimics the YUV 16 bit assembly.
+// Reads 16 bit YUV and leaves result as 8 bit.
+static __inline void YuvPixel16_8(uint16_t y,
+ uint16_t u,
+ uint16_t v,
+ uint8_t* b,
+ uint8_t* g,
+ uint8_t* r,
+ const struct YuvConstants* yuvconstants) {
+ LOAD_YUV_CONSTANTS;
+ uint32_t y32 = y;
+ u = clamp255(u >> 8);
+ v = clamp255(v >> 8);
+ CALC_RGB16;
+ *b = Clamp((int32_t)(b16) >> 6);
+ *g = Clamp((int32_t)(g16) >> 6);
+ *r = Clamp((int32_t)(r16) >> 6);
+}
+
+// C reference code that mimics the YUV 16 bit assembly.
+// Reads 16 bit YUV and leaves result as 16 bit.
+static __inline void YuvPixel16_16(uint16_t y,
+ uint16_t u,
+ uint16_t v,
+ int* b,
+ int* g,
+ int* r,
+ const struct YuvConstants* yuvconstants) {
+ LOAD_YUV_CONSTANTS;
+ uint32_t y32 = y;
+ u = clamp255(u >> 8);
+ v = clamp255(v >> 8);
+ CALC_RGB16;
+ *b = b16;
+ *g = g16;
+ *r = r16;
+}
+
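Editor's note: the YuvPixel* helpers above differ only in how the inputs are normalised before the shared CALC_RGB16 step. A compact restatement (illustration only, not a libyuv function):

static uint32_t YTo16Bit(uint16_t y, int bit_depth) {
  switch (bit_depth) {
    case 8:  return (uint32_t)y * 0x0101;  /* replicate the byte into 16 bits */
    case 10: return (uint32_t)y << 6;
    case 12: return (uint32_t)y << 4;
    default: return y;                     /* already 16 bit */
  }
}

U and V are reduced to 8 bits the complementary way (>> 2, >> 4 and >> 8), so every depth reuses the same (y32 * yg) >> 16 multiply and the UB/UG/VG/VR coefficients.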
// C reference code that mimics the YUV assembly.
-// Reads 8 bit YUV and leaves result as 16 bit.
+// Reads 8 bit YUV and leaves result as 8 bit.
static __inline void YPixel(uint8_t y,
uint8_t* b,
uint8_t* g,
uint8_t* r,
const struct YuvConstants* yuvconstants) {
#if defined(__aarch64__) || defined(__arm__)
- int ygb = yuvconstants->kUVBiasBGR[3];
- int yg = yuvconstants->kYToRgb[1];
+ int yg = yuvconstants->kRGBCoeffBias[0];
+ int ygb = yuvconstants->kRGBCoeffBias[4];
#else
int ygb = yuvconstants->kYBiasToRgb[0];
int yg = yuvconstants->kYToRgb[0];
@@ -1716,38 +1818,6 @@ static __inline void YPixel(uint8_t y,
*r = Clamp(((int32_t)(y1) + ygb) >> 6);
}
-#if !defined(LIBYUV_DISABLE_NEON) && \
- (defined(__ARM_NEON__) || defined(__aarch64__) || defined(LIBYUV_NEON))
-// C mimic assembly.
-// TODO(fbarchard): Remove subsampling from Neon.
-void I444ToARGBRow_C(const uint8_t* src_y,
- const uint8_t* src_u,
- const uint8_t* src_v,
- uint8_t* rgb_buf,
- const struct YuvConstants* yuvconstants,
- int width) {
- int x;
- for (x = 0; x < width - 1; x += 2) {
- uint8_t u = (src_u[0] + src_u[1] + 1) >> 1;
- uint8_t v = (src_v[0] + src_v[1] + 1) >> 1;
- YuvPixel(src_y[0], u, v, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2,
- yuvconstants);
- rgb_buf[3] = 255;
- YuvPixel(src_y[1], u, v, rgb_buf + 4, rgb_buf + 5, rgb_buf + 6,
- yuvconstants);
- rgb_buf[7] = 255;
- src_y += 2;
- src_u += 2;
- src_v += 2;
- rgb_buf += 8; // Advance 2 pixels.
- }
- if (width & 1) {
- YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
- rgb_buf + 2, yuvconstants);
- rgb_buf[3] = 255;
- }
-}
-#else
void I444ToARGBRow_C(const uint8_t* src_y,
const uint8_t* src_u,
const uint8_t* src_v,
@@ -1765,7 +1835,6 @@ void I444ToARGBRow_C(const uint8_t* src_y,
rgb_buf += 4; // Advance 1 pixel.
}
}
-#endif
// Also used for 420
void I422ToARGBRow_C(const uint8_t* src_y,
@@ -1821,9 +1890,102 @@ void I210ToARGBRow_C(const uint16_t* src_y,
}
}
+void I410ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ for (x = 0; x < width; ++x) {
+ YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
+ rgb_buf + 2, yuvconstants);
+ rgb_buf[3] = 255;
+ src_y += 1;
+ src_u += 1;
+ src_v += 1;
+ rgb_buf += 4; // Advance 1 pixel.
+ }
+}
+
+void I210AlphaToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ const uint16_t* src_a,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ for (x = 0; x < width - 1; x += 2) {
+ YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
+ rgb_buf + 2, yuvconstants);
+ rgb_buf[3] = clamp255(src_a[0] >> 2);
+ YuvPixel10(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5,
+ rgb_buf + 6, yuvconstants);
+ rgb_buf[7] = clamp255(src_a[1] >> 2);
+ src_y += 2;
+ src_u += 1;
+ src_v += 1;
+ src_a += 2;
+ rgb_buf += 8; // Advance 2 pixels.
+ }
+ if (width & 1) {
+ YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
+ rgb_buf + 2, yuvconstants);
+ rgb_buf[3] = clamp255(src_a[0] >> 2);
+ }
+}
+
+void I410AlphaToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ const uint16_t* src_a,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ for (x = 0; x < width; ++x) {
+ YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
+ rgb_buf + 2, yuvconstants);
+ rgb_buf[3] = clamp255(src_a[0] >> 2);
+ src_y += 1;
+ src_u += 1;
+ src_v += 1;
+ src_a += 1;
+ rgb_buf += 4; // Advance 1 pixel.
+ }
+}
+
+// 12 bit YUV to ARGB
+void I212ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ for (x = 0; x < width - 1; x += 2) {
+ YuvPixel12(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
+ rgb_buf + 2, yuvconstants);
+ rgb_buf[3] = 255;
+ YuvPixel12(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5,
+ rgb_buf + 6, yuvconstants);
+ rgb_buf[7] = 255;
+ src_y += 2;
+ src_u += 1;
+ src_v += 1;
+ rgb_buf += 8; // Advance 2 pixels.
+ }
+ if (width & 1) {
+ YuvPixel12(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
+ rgb_buf + 2, yuvconstants);
+ rgb_buf[3] = 255;
+ }
+}
+
static void StoreAR30(uint8_t* rgb_buf, int b, int g, int r) {
uint32_t ar30;
- b = b >> 4; // convert 10.6 to 10 bit.
+ b = b >> 4; // convert 8 bit value in 10.6 fixed point to 10 bit.
g = g >> 4;
r = r >> 4;
b = Clamp10(b);
@@ -1845,9 +2007,9 @@ void I210ToAR30Row_C(const uint16_t* src_y,
int g;
int r;
for (x = 0; x < width - 1; x += 2) {
- YuvPixel16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ YuvPixel10_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
StoreAR30(rgb_buf, b, g, r);
- YuvPixel16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ YuvPixel10_16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
StoreAR30(rgb_buf + 4, b, g, r);
src_y += 2;
src_u += 1;
@@ -1855,16 +2017,15 @@ void I210ToAR30Row_C(const uint16_t* src_y,
rgb_buf += 8; // Advance 2 pixels.
}
if (width & 1) {
- YuvPixel16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ YuvPixel10_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
StoreAR30(rgb_buf, b, g, r);
}
}
-// 8 bit YUV to 10 bit AR30
-// Uses same code as 10 bit YUV bit shifts the 8 bit values up to 10 bits.
-void I422ToAR30Row_C(const uint8_t* src_y,
- const uint8_t* src_u,
- const uint8_t* src_v,
+// 12 bit YUV to 10 bit AR30
+void I212ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
uint8_t* rgb_buf,
const struct YuvConstants* yuvconstants,
int width) {
@@ -1873,9 +2034,9 @@ void I422ToAR30Row_C(const uint8_t* src_y,
int g;
int r;
for (x = 0; x < width - 1; x += 2) {
- YuvPixel8_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ YuvPixel12_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
StoreAR30(rgb_buf, b, g, r);
- YuvPixel8_16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ YuvPixel12_16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
StoreAR30(rgb_buf + 4, b, g, r);
src_y += 2;
src_u += 1;
@@ -1883,45 +2044,142 @@ void I422ToAR30Row_C(const uint8_t* src_y,
rgb_buf += 8; // Advance 2 pixels.
}
if (width & 1) {
- YuvPixel8_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ YuvPixel12_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
StoreAR30(rgb_buf, b, g, r);
}
}
-#if !defined(LIBYUV_DISABLE_NEON) && \
- (defined(__ARM_NEON__) || defined(__aarch64__) || defined(LIBYUV_NEON))
-// C mimic assembly.
-// TODO(fbarchard): Remove subsampling from Neon.
-void I444AlphaToARGBRow_C(const uint8_t* src_y,
- const uint8_t* src_u,
- const uint8_t* src_v,
- const uint8_t* src_a,
- uint8_t* rgb_buf,
- const struct YuvConstants* yuvconstants,
- int width) {
+void I410ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_u,
+ const uint16_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ int b;
+ int g;
+ int r;
+ for (x = 0; x < width; ++x) {
+ YuvPixel10_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ StoreAR30(rgb_buf, b, g, r);
+ src_y += 1;
+ src_u += 1;
+ src_v += 1;
+ rgb_buf += 4; // Advance 1 pixel.
+ }
+}
+
+// P210 has 10 bits in msb of 16 bit NV12 style layout.
+void P210ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
int x;
for (x = 0; x < width - 1; x += 2) {
- uint8_t u = (src_u[0] + src_u[1] + 1) >> 1;
- uint8_t v = (src_v[0] + src_v[1] + 1) >> 1;
- YuvPixel(src_y[0], u, v, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2,
- yuvconstants);
- rgb_buf[3] = src_a[0];
- YuvPixel(src_y[1], u, v, rgb_buf + 4, rgb_buf + 5, rgb_buf + 6,
- yuvconstants);
- rgb_buf[7] = src_a[1];
+ YuvPixel16_8(src_y[0], src_uv[0], src_uv[1], dst_argb + 0, dst_argb + 1,
+ dst_argb + 2, yuvconstants);
+ dst_argb[3] = 255;
+ YuvPixel16_8(src_y[1], src_uv[0], src_uv[1], dst_argb + 4, dst_argb + 5,
+ dst_argb + 6, yuvconstants);
+ dst_argb[7] = 255;
src_y += 2;
- src_u += 2;
- src_v += 2;
- src_a += 2;
+ src_uv += 2;
+ dst_argb += 8; // Advance 2 pixels.
+ }
+ if (width & 1) {
+ YuvPixel16_8(src_y[0], src_uv[0], src_uv[1], dst_argb + 0, dst_argb + 1,
+ dst_argb + 2, yuvconstants);
+ dst_argb[3] = 255;
+ }
+}
+
+void P410ToARGBRow_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ for (x = 0; x < width; ++x) {
+ YuvPixel16_8(src_y[0], src_uv[0], src_uv[1], dst_argb + 0, dst_argb + 1,
+ dst_argb + 2, yuvconstants);
+ dst_argb[3] = 255;
+ src_y += 1;
+ src_uv += 2;
+ dst_argb += 4; // Advance 1 pixel.
+ }
+}
+
+void P210ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ int b;
+ int g;
+ int r;
+ for (x = 0; x < width - 1; x += 2) {
+ YuvPixel16_16(src_y[0], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants);
+ StoreAR30(dst_ar30, b, g, r);
+ YuvPixel16_16(src_y[1], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants);
+ StoreAR30(dst_ar30 + 4, b, g, r);
+ src_y += 2;
+ src_uv += 2;
+ dst_ar30 += 8; // Advance 2 pixels.
+ }
+ if (width & 1) {
+ YuvPixel16_16(src_y[0], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants);
+ StoreAR30(dst_ar30, b, g, r);
+ }
+}
+
+void P410ToAR30Row_C(const uint16_t* src_y,
+ const uint16_t* src_uv,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ int b;
+ int g;
+ int r;
+ for (x = 0; x < width; ++x) {
+ YuvPixel16_16(src_y[0], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants);
+ StoreAR30(dst_ar30, b, g, r);
+ src_y += 1;
+ src_uv += 2;
+ dst_ar30 += 4; // Advance 1 pixel.
+ }
+}
+
+// 8 bit YUV to 10 bit AR30
+// Uses the same code as 10 bit YUV but shifts the 8 bit values up to 10 bits.
+void I422ToAR30Row_C(const uint8_t* src_y,
+ const uint8_t* src_u,
+ const uint8_t* src_v,
+ uint8_t* rgb_buf,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ int x;
+ int b;
+ int g;
+ int r;
+ for (x = 0; x < width - 1; x += 2) {
+ YuvPixel8_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ StoreAR30(rgb_buf, b, g, r);
+ YuvPixel8_16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ StoreAR30(rgb_buf + 4, b, g, r);
+ src_y += 2;
+ src_u += 1;
+ src_v += 1;
rgb_buf += 8; // Advance 2 pixels.
}
if (width & 1) {
- YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1,
- rgb_buf + 2, yuvconstants);
- rgb_buf[3] = src_a[0];
+ YuvPixel8_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants);
+ StoreAR30(rgb_buf, b, g, r);
}
}
-#else
+
void I444AlphaToARGBRow_C(const uint8_t* src_y,
const uint8_t* src_u,
const uint8_t* src_v,
@@ -1941,7 +2199,6 @@ void I444AlphaToARGBRow_C(const uint8_t* src_y,
rgb_buf += 4; // Advance 1 pixel.
}
}
-#endif
void I422AlphaToARGBRow_C(const uint8_t* src_y,
const uint8_t* src_u,
@@ -2492,6 +2749,105 @@ void MergeARGBRow_C(const uint8_t* src_r,
}
}
+void MergeXR30Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int depth,
+ int width) {
+ assert(depth >= 10);
+ assert(depth <= 16);
+ int x;
+ int shift = depth - 10;
+ uint32_t* dst_ar30_32 = (uint32_t*)dst_ar30;
+ for (x = 0; x < width; ++x) {
+ uint32_t r = clamp1023(src_r[x] >> shift);
+ uint32_t g = clamp1023(src_g[x] >> shift);
+ uint32_t b = clamp1023(src_b[x] >> shift);
+ dst_ar30_32[x] = b | (g << 10) | (r << 20) | 0xc0000000;
+ }
+}
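Editor's note: for reference, the AR30 word written above packs 10 bit channels little endian as B | G << 10 | R << 20 with a 2 bit alpha on top (0xc0000000 is opaque). A small unpack helper, for illustration only:

static void UnpackAR30(uint32_t ar30, int* b, int* g, int* r, int* a) {
  *b = (int)(ar30 & 0x3ff);
  *g = (int)((ar30 >> 10) & 0x3ff);
  *r = (int)((ar30 >> 20) & 0x3ff);
  *a = (int)(ar30 >> 30);
}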
+
+void MergeAR64Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ assert(depth >= 1);
+ assert(depth <= 16);
+ int x;
+ int shift = 16 - depth;
+ int max = (1 << depth) - 1;
+ for (x = 0; x < width; ++x) {
+ dst_ar64[0] = ClampMax(src_b[x], max) << shift;
+ dst_ar64[1] = ClampMax(src_g[x], max) << shift;
+ dst_ar64[2] = ClampMax(src_r[x], max) << shift;
+ dst_ar64[3] = ClampMax(src_a[x], max) << shift;
+ dst_ar64 += 4;
+ }
+}
+
+void MergeARGB16To8Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ assert(depth >= 8);
+ assert(depth <= 16);
+ int x;
+ int shift = depth - 8;
+ for (x = 0; x < width; ++x) {
+ dst_argb[0] = clamp255(src_b[x] >> shift);
+ dst_argb[1] = clamp255(src_g[x] >> shift);
+ dst_argb[2] = clamp255(src_r[x] >> shift);
+ dst_argb[3] = clamp255(src_a[x] >> shift);
+ dst_argb += 4;
+ }
+}
+
+void MergeXR64Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ assert(depth >= 1);
+ assert(depth <= 16);
+ int x;
+ int shift = 16 - depth;
+ int max = (1 << depth) - 1;
+ for (x = 0; x < width; ++x) {
+ dst_ar64[0] = ClampMax(src_b[x], max) << shift;
+ dst_ar64[1] = ClampMax(src_g[x], max) << shift;
+ dst_ar64[2] = ClampMax(src_r[x], max) << shift;
+ dst_ar64[3] = 0xffff;
+ dst_ar64 += 4;
+ }
+}
+
+void MergeXRGB16To8Row_C(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ assert(depth >= 8);
+ assert(depth <= 16);
+ int x;
+ int shift = depth - 8;
+ for (x = 0; x < width; ++x) {
+ dst_argb[0] = clamp255(src_b[x] >> shift);
+ dst_argb[1] = clamp255(src_g[x] >> shift);
+ dst_argb[2] = clamp255(src_r[x] >> shift);
+ dst_argb[3] = 0xff;
+ dst_argb += 4;
+ }
+}
+
void SplitXRGBRow_C(const uint8_t* src_argb,
uint8_t* dst_r,
uint8_t* dst_g,
@@ -2528,6 +2884,8 @@ void MergeUVRow_16_C(const uint16_t* src_u,
int depth,
int width) {
int shift = 16 - depth;
+ assert(depth >= 8);
+ assert(depth <= 16);
int x;
for (x = 0; x < width; ++x) {
dst_uv[0] = src_u[x] << shift;
@@ -2544,6 +2902,8 @@ void SplitUVRow_16_C(const uint16_t* src_uv,
int width) {
int shift = 16 - depth;
int x;
+ assert(depth >= 8);
+ assert(depth <= 16);
for (x = 0; x < width; ++x) {
dst_u[x] = src_uv[0] >> shift;
dst_v[x] = src_uv[1] >> shift;
@@ -2581,6 +2941,9 @@ void Convert16To8Row_C(const uint16_t* src_y,
int scale,
int width) {
int x;
+ assert(scale >= 256);
+ assert(scale <= 32768);
+
for (x = 0; x < width; ++x) {
dst_y[x] = clamp255((src_y[x] * scale) >> 16);
}
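Editor's note: the asserts added above pin scale to the useful range of dst = (src * scale) >> 16, where scale acts as the effective right shift: 32768 halves the input, 16384 maps 10 bit to 8 bit (>> 2) and 256 maps 16 bit to 8 bit (>> 8). A hypothetical helper, not part of libyuv, that derives the scale from the source depth:

static int Convert16To8Scale(int src_bit_depth) {
  /* Depths 16 down to 9 give scales 256 up to 32768, the asserted range. */
  return 65536 >> (src_bit_depth - 8);
}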
@@ -2714,19 +3077,19 @@ void UYVYToYRow_C(const uint8_t* src_uyvy, uint8_t* dst_y, int width) {
#define BLEND(f, b, a) clamp255((((256 - a) * b) >> 8) + f)
-// Blend src_argb0 over src_argb1 and store to dst_argb.
-// dst_argb may be src_argb0 or src_argb1.
+// Blend src_argb over src_argb1 and store to dst_argb.
+// dst_argb may be src_argb or src_argb1.
// This code mimics the SSSE3 version for better testability.
-void ARGBBlendRow_C(const uint8_t* src_argb0,
+void ARGBBlendRow_C(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
int x;
for (x = 0; x < width - 1; x += 2) {
- uint32_t fb = src_argb0[0];
- uint32_t fg = src_argb0[1];
- uint32_t fr = src_argb0[2];
- uint32_t a = src_argb0[3];
+ uint32_t fb = src_argb[0];
+ uint32_t fg = src_argb[1];
+ uint32_t fr = src_argb[2];
+ uint32_t a = src_argb[3];
uint32_t bb = src_argb1[0];
uint32_t bg = src_argb1[1];
uint32_t br = src_argb1[2];
@@ -2735,10 +3098,10 @@ void ARGBBlendRow_C(const uint8_t* src_argb0,
dst_argb[2] = BLEND(fr, br, a);
dst_argb[3] = 255u;
- fb = src_argb0[4 + 0];
- fg = src_argb0[4 + 1];
- fr = src_argb0[4 + 2];
- a = src_argb0[4 + 3];
+ fb = src_argb[4 + 0];
+ fg = src_argb[4 + 1];
+ fr = src_argb[4 + 2];
+ a = src_argb[4 + 3];
bb = src_argb1[4 + 0];
bg = src_argb1[4 + 1];
br = src_argb1[4 + 2];
@@ -2746,16 +3109,16 @@ void ARGBBlendRow_C(const uint8_t* src_argb0,
dst_argb[4 + 1] = BLEND(fg, bg, a);
dst_argb[4 + 2] = BLEND(fr, br, a);
dst_argb[4 + 3] = 255u;
- src_argb0 += 8;
+ src_argb += 8;
src_argb1 += 8;
dst_argb += 8;
}
if (width & 1) {
- uint32_t fb = src_argb0[0];
- uint32_t fg = src_argb0[1];
- uint32_t fr = src_argb0[2];
- uint32_t a = src_argb0[3];
+ uint32_t fb = src_argb[0];
+ uint32_t fg = src_argb[1];
+ uint32_t fr = src_argb[2];
+ uint32_t a = src_argb[3];
uint32_t bb = src_argb1[0];
uint32_t bg = src_argb1[1];
uint32_t br = src_argb1[2];
@@ -3280,7 +3643,7 @@ void ARGBCopyYToAlphaRow_C(const uint8_t* src, uint8_t* dst, int width) {
// Maximum temporary width for wrappers to process at a time, in pixels.
#define MAXTWIDTH 2048
-#if !(defined(_MSC_VER) && defined(_M_IX86)) && \
+#if !(defined(_MSC_VER) && !defined(__clang__) && defined(_M_IX86)) && \
defined(HAS_I422TORGB565ROW_SSSE3)
// row_win.cc has asm version, but GCC uses 2 step wrapper.
void I422ToRGB565Row_SSSE3(const uint8_t* src_y,
@@ -3747,13 +4110,14 @@ void NV21ToYUV24Row_C(const uint8_t* src_y,
}
// Filter 2 rows of AYUV UV's (444) into UV (420).
+// AYUV is VUYA in memory. UV for NV12 is UV order in memory.
void AYUVToUVRow_C(const uint8_t* src_ayuv,
int src_stride_ayuv,
uint8_t* dst_uv,
int width) {
// Output a row of UV values, filtering 2x2 rows of AYUV.
int x;
- for (x = 0; x < width; x += 2) {
+ for (x = 0; x < width - 1; x += 2) {
dst_uv[0] = (src_ayuv[1] + src_ayuv[5] + src_ayuv[src_stride_ayuv + 1] +
src_ayuv[src_stride_ayuv + 5] + 2) >>
2;
@@ -3764,12 +4128,8 @@ void AYUVToUVRow_C(const uint8_t* src_ayuv,
dst_uv += 2;
}
if (width & 1) {
- dst_uv[0] = (src_ayuv[0] + src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] +
- src_ayuv[src_stride_ayuv + 0] + 2) >>
- 2;
- dst_uv[1] = (src_ayuv[1] + src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] +
- src_ayuv[src_stride_ayuv + 1] + 2) >>
- 2;
+ dst_uv[0] = (src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] + 1) >> 1;
+ dst_uv[1] = (src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] + 1) >> 1;
}
}
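Editor's note: a scalar restatement (illustration only) of the byte order AYUVToUVRow_C relies on, since AYUV stores V, U, Y, A per pixel while dst_uv is written U then V:

static void Ayuv2x2ToUV(const uint8_t* row0, const uint8_t* row1,
                        uint8_t* u, uint8_t* v) {
  /* Bytes 1 and 5 hold the U samples of two adjacent pixels, 0 and 4 hold V. */
  *u = (uint8_t)((row0[1] + row0[5] + row1[1] + row1[5] + 2) >> 2);
  *v = (uint8_t)((row0[0] + row0[4] + row1[0] + row1[4] + 2) >> 2);
}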
@@ -3780,7 +4140,7 @@ void AYUVToVURow_C(const uint8_t* src_ayuv,
int width) {
// Output a row of VU values, filtering 2x2 rows of AYUV.
int x;
- for (x = 0; x < width; x += 2) {
+ for (x = 0; x < width - 1; x += 2) {
dst_vu[0] = (src_ayuv[0] + src_ayuv[4] + src_ayuv[src_stride_ayuv + 0] +
src_ayuv[src_stride_ayuv + 4] + 2) >>
2;
@@ -3791,12 +4151,8 @@ void AYUVToVURow_C(const uint8_t* src_ayuv,
dst_vu += 2;
}
if (width & 1) {
- dst_vu[0] = (src_ayuv[0] + src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] +
- src_ayuv[src_stride_ayuv + 0] + 2) >>
- 2;
- dst_vu[1] = (src_ayuv[1] + src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] +
- src_ayuv[src_stride_ayuv + 1] + 2) >>
- 2;
+ dst_vu[0] = (src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] + 1) >> 1;
+ dst_vu[1] = (src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] + 1) >> 1;
}
}
diff --git a/third_party/libyuv/source/row_gcc.cc b/third_party/libyuv/source/row_gcc.cc
index faf0fc9104..001c353dbe 100644
--- a/third_party/libyuv/source/row_gcc.cc
+++ b/third_party/libyuv/source/row_gcc.cc
@@ -16,8 +16,7 @@ extern "C" {
#endif
// This module is for GCC x86 and x64.
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
#if defined(HAS_ARGBTOYROW_SSSE3) || defined(HAS_ARGBGRAYROW_SSSE3)
@@ -1078,6 +1077,222 @@ void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) {
}
#endif
+static const uvec8 kShuffleARGBToABGR = {2, 1, 0, 3, 6, 5, 4, 7,
+ 10, 9, 8, 11, 14, 13, 12, 15};
+
+static const uvec8 kShuffleARGBToAB64Lo = {2, 2, 1, 1, 0, 0, 3, 3,
+ 6, 6, 5, 5, 4, 4, 7, 7};
+static const uvec8 kShuffleARGBToAB64Hi = {10, 10, 9, 9, 8, 8, 11, 11,
+ 14, 14, 13, 13, 12, 12, 15, 15};
+
+void ARGBToAR64Row_SSSE3(const uint8_t* src_argb,
+ uint16_t* dst_ar64,
+ int width) {
+ asm volatile(
+
+ LABELALIGN
+ "1: \n"
+ "movdqu (%0),%%xmm0 \n"
+ "movdqa %%xmm0,%%xmm1 \n"
+ "punpcklbw %%xmm0,%%xmm0 \n"
+ "punpckhbw %%xmm1,%%xmm1 \n"
+ "movdqu %%xmm0,(%1) \n"
+ "movdqu %%xmm1,0x10(%1) \n"
+ "lea 0x10(%0),%0 \n"
+ "lea 0x20(%1),%1 \n"
+ "sub $0x4,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ar64), // %1
+ "+r"(width) // %2
+ :
+ : "memory", "cc", "xmm0", "xmm1");
+}
+
+void ARGBToAB64Row_SSSE3(const uint8_t* src_argb,
+ uint16_t* dst_ab64,
+ int width) {
+ asm volatile(
+
+ "movdqa %3,%%xmm2 \n"
+ "movdqa %4,%%xmm3 \n" LABELALIGN
+ "1: \n"
+ "movdqu (%0),%%xmm0 \n"
+ "movdqa %%xmm0,%%xmm1 \n"
+ "pshufb %%xmm2,%%xmm0 \n"
+ "pshufb %%xmm3,%%xmm1 \n"
+ "movdqu %%xmm0,(%1) \n"
+ "movdqu %%xmm1,0x10(%1) \n"
+ "lea 0x10(%0),%0 \n"
+ "lea 0x20(%1),%1 \n"
+ "sub $0x4,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ab64), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleARGBToAB64Lo), // %3
+ "m"(kShuffleARGBToAB64Hi) // %4
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3");
+}
+
+void AR64ToARGBRow_SSSE3(const uint16_t* src_ar64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+
+ LABELALIGN
+ "1: \n"
+ "movdqu (%0),%%xmm0 \n"
+ "movdqu 0x10(%0),%%xmm1 \n"
+ "psrlw $8,%%xmm0 \n"
+ "psrlw $8,%%xmm1 \n"
+ "packuswb %%xmm1,%%xmm0 \n"
+ "movdqu %%xmm0,(%1) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x10(%1),%1 \n"
+ "sub $0x4,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_ar64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ :
+ : "memory", "cc", "xmm0", "xmm1");
+}
+
+void AB64ToARGBRow_SSSE3(const uint16_t* src_ab64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+
+ "movdqa %3,%%xmm2 \n" LABELALIGN
+ "1: \n"
+ "movdqu (%0),%%xmm0 \n"
+ "movdqu 0x10(%0),%%xmm1 \n"
+ "psrlw $8,%%xmm0 \n"
+ "psrlw $8,%%xmm1 \n"
+ "packuswb %%xmm1,%%xmm0 \n"
+ "pshufb %%xmm2,%%xmm0 \n"
+ "movdqu %%xmm0,(%1) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x10(%1),%1 \n"
+ "sub $0x4,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_ab64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleARGBToABGR) // %3
+ : "memory", "cc", "xmm0", "xmm1", "xmm2");
+}
+
+#ifdef HAS_ARGBTOAR64ROW_AVX2
+void ARGBToAR64Row_AVX2(const uint8_t* src_argb,
+ uint16_t* dst_ar64,
+ int width) {
+ asm volatile(
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vpunpckhbw %%ymm0,%%ymm0,%%ymm1 \n"
+ "vpunpcklbw %%ymm0,%%ymm0,%%ymm0 \n"
+ "vmovdqu %%ymm0,(%1) \n"
+ "vmovdqu %%ymm1,0x20(%1) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x40(%1),%1 \n"
+ "sub $0x8,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ar64), // %1
+ "+r"(width) // %2
+ :
+ : "memory", "cc", "xmm0", "xmm1");
+}
+#endif
+
+#ifdef HAS_ARGBTOAB64ROW_AVX2
+void ARGBToAB64Row_AVX2(const uint8_t* src_argb,
+ uint16_t* dst_ab64,
+ int width) {
+ asm volatile(
+
+ "vbroadcastf128 %3,%%ymm2 \n"
+ "vbroadcastf128 %4,%%ymm3 \n" LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vpshufb %%ymm3,%%ymm0,%%ymm1 \n"
+ "vpshufb %%ymm2,%%ymm0,%%ymm0 \n"
+ "vmovdqu %%ymm0,(%1) \n"
+ "vmovdqu %%ymm1,0x20(%1) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x40(%1),%1 \n"
+ "sub $0x8,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ab64), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleARGBToAB64Lo), // %3
+ "m"(kShuffleARGBToAB64Hi) // %4
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3");
+}
+#endif
+
+#ifdef HAS_AR64TOARGBROW_AVX2
+void AR64ToARGBRow_AVX2(const uint16_t* src_ar64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n"
+ "vmovdqu 0x20(%0),%%ymm1 \n"
+ "vpsrlw $8,%%ymm0,%%ymm0 \n"
+ "vpsrlw $8,%%ymm1,%%ymm1 \n"
+ "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vmovdqu %%ymm0,(%1) \n"
+ "lea 0x40(%0),%0 \n"
+ "lea 0x20(%1),%1 \n"
+ "sub $0x8,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_ar64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ :
+ : "memory", "cc", "xmm0", "xmm1");
+}
+#endif
+
+#ifdef HAS_AB64TOARGBROW_AVX2
+void AB64ToARGBRow_AVX2(const uint16_t* src_ab64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+
+ "vbroadcastf128 %3,%%ymm2 \n" LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n"
+ "vmovdqu 0x20(%0),%%ymm1 \n"
+ "vpsrlw $8,%%ymm0,%%ymm0 \n"
+ "vpsrlw $8,%%ymm1,%%ymm1 \n"
+ "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vpshufb %%ymm2,%%ymm0,%%ymm0 \n"
+ "vmovdqu %%ymm0,(%1) \n"
+ "lea 0x40(%0),%0 \n"
+ "lea 0x20(%1),%1 \n"
+ "sub $0x8,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_ab64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleARGBToABGR) // %3
+ : "memory", "cc", "xmm0", "xmm1", "xmm2");
+}
+#endif
+
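Editor's note: a scalar sketch of what the AR64/AB64 row functions above compute per pixel. Widening replicates each 8 bit channel into both bytes (x * 0x0101, the punpcklbw / pshufb step), narrowing keeps the high byte (the psrlw / packuswb step); the AB64 variants additionally swap B and R via the shuffle constants. Illustration only, not libyuv API:

static void ArgbPixelToAr64(const uint8_t* argb, uint16_t* ar64) {
  ar64[0] = (uint16_t)(argb[0] * 0x0101);  /* B */
  ar64[1] = (uint16_t)(argb[1] * 0x0101);  /* G */
  ar64[2] = (uint16_t)(argb[2] * 0x0101);  /* R */
  ar64[3] = (uint16_t)(argb[3] * 0x0101);  /* A */
}

static void Ar64PixelToArgb(const uint16_t* ar64, uint8_t* argb) {
  argb[0] = (uint8_t)(ar64[0] >> 8);
  argb[1] = (uint8_t)(ar64[1] >> 8);
  argb[2] = (uint8_t)(ar64[2] >> 8);
  argb[3] = (uint8_t)(ar64[3] >> 8);
}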
// clang-format off
// TODO(mraptis): Consider passing R, G, B multipliers as parameter.
@@ -1290,7 +1505,7 @@ void RGBAToYJRow_AVX2(const uint8_t* src_rgba, uint8_t* dst_y, int width) {
#endif // HAS_RGBATOYJROW_AVX2
#ifdef HAS_ARGBTOUVROW_SSSE3
-void ARGBToUVRow_SSSE3(const uint8_t* src_argb0,
+void ARGBToUVRow_SSSE3(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1342,7 +1557,7 @@ void ARGBToUVRow_SSSE3(const uint8_t* src_argb0,
"lea 0x8(%1),%1 \n"
"sub $0x10,%3 \n"
"jg 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1359,7 +1574,7 @@ void ARGBToUVRow_SSSE3(const uint8_t* src_argb0,
static const lvec8 kShufARGBToUV_AVX = {
0, 1, 8, 9, 2, 3, 10, 11, 4, 5, 12, 13, 6, 7, 14, 15,
0, 1, 8, 9, 2, 3, 10, 11, 4, 5, 12, 13, 6, 7, 14, 15};
-void ARGBToUVRow_AVX2(const uint8_t* src_argb0,
+void ARGBToUVRow_AVX2(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1407,7 +1622,7 @@ void ARGBToUVRow_AVX2(const uint8_t* src_argb0,
"sub $0x20,%3 \n"
"jg 1b \n"
"vzeroupper \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1422,7 +1637,7 @@ void ARGBToUVRow_AVX2(const uint8_t* src_argb0,
#endif // HAS_ARGBTOUVROW_AVX2
#ifdef HAS_ABGRTOUVROW_AVX2
-void ABGRToUVRow_AVX2(const uint8_t* src_abgr0,
+void ABGRToUVRow_AVX2(const uint8_t* src_abgr,
int src_stride_abgr,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1470,7 +1685,7 @@ void ABGRToUVRow_AVX2(const uint8_t* src_abgr0,
"sub $0x20,%3 \n"
"jg 1b \n"
"vzeroupper \n"
- : "+r"(src_abgr0), // %0
+ : "+r"(src_abgr), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1485,7 +1700,7 @@ void ABGRToUVRow_AVX2(const uint8_t* src_abgr0,
#endif // HAS_ABGRTOUVROW_AVX2
#ifdef HAS_ARGBTOUVJROW_AVX2
-void ARGBToUVJRow_AVX2(const uint8_t* src_argb0,
+void ARGBToUVJRow_AVX2(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1534,7 +1749,7 @@ void ARGBToUVJRow_AVX2(const uint8_t* src_argb0,
"sub $0x20,%3 \n"
"jg 1b \n"
"vzeroupper \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1549,7 +1764,7 @@ void ARGBToUVJRow_AVX2(const uint8_t* src_argb0,
#endif // HAS_ARGBTOUVJROW_AVX2
#ifdef HAS_ARGBTOUVJROW_SSSE3
-void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0,
+void ARGBToUVJRow_SSSE3(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1602,7 +1817,7 @@ void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0,
"lea 0x8(%1),%1 \n"
"sub $0x10,%3 \n"
"jg 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1689,7 +1904,7 @@ void BGRAToYRow_SSSE3(const uint8_t* src_bgra, uint8_t* dst_y, int width) {
"xmm7");
}
-void BGRAToUVRow_SSSE3(const uint8_t* src_bgra0,
+void BGRAToUVRow_SSSE3(const uint8_t* src_bgra,
int src_stride_bgra,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1741,7 +1956,7 @@ void BGRAToUVRow_SSSE3(const uint8_t* src_bgra0,
"lea 0x8(%1),%1 \n"
"sub $0x10,%3 \n"
"jg 1b \n"
- : "+r"(src_bgra0), // %0
+ : "+r"(src_bgra), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1786,7 +2001,7 @@ void RGBAToYRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width) {
"xmm7");
}
-void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0,
+void ABGRToUVRow_SSSE3(const uint8_t* src_abgr,
int src_stride_abgr,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1838,7 +2053,7 @@ void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0,
"lea 0x8(%1),%1 \n"
"sub $0x10,%3 \n"
"jg 1b \n"
- : "+r"(src_abgr0), // %0
+ : "+r"(src_abgr), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1849,7 +2064,7 @@ void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0,
: "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm6", "xmm7");
}
-void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0,
+void RGBAToUVRow_SSSE3(const uint8_t* src_rgba,
int src_stride_rgba,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1901,7 +2116,7 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0,
"lea 0x8(%1),%1 \n"
"sub $0x10,%3 \n"
"jg 1b \n"
- : "+r"(src_rgba0), // %0
+ : "+r"(src_rgba), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
"+rm"(width) // %3
@@ -1916,21 +2131,21 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0,
// Read 8 UV from 444
#define READYUV444 \
- "movq (%[u_buf]),%%xmm0 \n" \
+ "movq (%[u_buf]),%%xmm3 \n" \
"movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x8(%[u_buf]),%[u_buf] \n" \
- "punpcklbw %%xmm1,%%xmm0 \n" \
+ "punpcklbw %%xmm1,%%xmm3 \n" \
"movq (%[y_buf]),%%xmm4 \n" \
"punpcklbw %%xmm4,%%xmm4 \n" \
"lea 0x8(%[y_buf]),%[y_buf] \n"
// Read 4 UV from 422, upsample to 8 UV
#define READYUV422 \
- "movd (%[u_buf]),%%xmm0 \n" \
+ "movd (%[u_buf]),%%xmm3 \n" \
"movd 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x4(%[u_buf]),%[u_buf] \n" \
- "punpcklbw %%xmm1,%%xmm0 \n" \
- "punpcklwd %%xmm0,%%xmm0 \n" \
+ "punpcklbw %%xmm1,%%xmm3 \n" \
+ "punpcklwd %%xmm3,%%xmm3 \n" \
"movq (%[y_buf]),%%xmm4 \n" \
"punpcklbw %%xmm4,%%xmm4 \n" \
"lea 0x8(%[y_buf]),%[y_buf] \n"
@@ -1940,24 +2155,87 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0,
// TODO(fbarchard): Consider pmulhuw to replace psraw
// TODO(fbarchard): Consider pmullw to replace psllw and allow different bits.
#define READYUV210 \
- "movq (%[u_buf]),%%xmm0 \n" \
+ "movq (%[u_buf]),%%xmm3 \n" \
"movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x8(%[u_buf]),%[u_buf] \n" \
- "punpcklwd %%xmm1,%%xmm0 \n" \
- "psraw $0x2,%%xmm0 \n" \
- "packuswb %%xmm0,%%xmm0 \n" \
- "punpcklwd %%xmm0,%%xmm0 \n" \
+ "punpcklwd %%xmm1,%%xmm3 \n" \
+ "psraw $2,%%xmm3 \n" \
+ "packuswb %%xmm3,%%xmm3 \n" \
+ "punpcklwd %%xmm3,%%xmm3 \n" \
+ "movdqu (%[y_buf]),%%xmm4 \n" \
+ "psllw $6,%%xmm4 \n" \
+ "lea 0x10(%[y_buf]),%[y_buf] \n"
+
+#define READYUVA210 \
+ "movq (%[u_buf]),%%xmm3 \n" \
+ "movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
+ "lea 0x8(%[u_buf]),%[u_buf] \n" \
+ "punpcklwd %%xmm1,%%xmm3 \n" \
+ "psraw $2,%%xmm3 \n" \
+ "packuswb %%xmm3,%%xmm3 \n" \
+ "punpcklwd %%xmm3,%%xmm3 \n" \
+ "movdqu (%[y_buf]),%%xmm4 \n" \
+ "psllw $6,%%xmm4 \n" \
+ "lea 0x10(%[y_buf]),%[y_buf] \n" \
+ "movdqu (%[a_buf]),%%xmm5 \n" \
+ "psraw $2,%%xmm5 \n" \
+ "packuswb %%xmm5,%%xmm5 \n" \
+ "lea 0x10(%[a_buf]),%[a_buf] \n"
+
+// Read 8 UV from 444 10 bit
+#define READYUV410 \
+ "movdqu (%[u_buf]),%%xmm3 \n" \
+ "movdqu 0x00(%[u_buf],%[v_buf],1),%%xmm2 \n" \
+ "lea 0x10(%[u_buf]),%[u_buf] \n" \
+ "psraw $2,%%xmm3 \n" \
+ "psraw $2,%%xmm2 \n" \
+ "movdqa %%xmm3,%%xmm1 \n" \
+ "punpcklwd %%xmm2,%%xmm3 \n" \
+ "punpckhwd %%xmm2,%%xmm1 \n" \
+ "packuswb %%xmm1,%%xmm3 \n" \
+ "movdqu (%[y_buf]),%%xmm4 \n" \
+ "psllw $6,%%xmm4 \n" \
+ "lea 0x10(%[y_buf]),%[y_buf] \n"
+
+// Read 8 UV from 444 10 bit. With 8 Alpha.
+#define READYUVA410 \
+ "movdqu (%[u_buf]),%%xmm3 \n" \
+ "movdqu 0x00(%[u_buf],%[v_buf],1),%%xmm2 \n" \
+ "lea 0x10(%[u_buf]),%[u_buf] \n" \
+ "psraw $2,%%xmm3 \n" \
+ "psraw $2,%%xmm2 \n" \
+ "movdqa %%xmm3,%%xmm1 \n" \
+ "punpcklwd %%xmm2,%%xmm3 \n" \
+ "punpckhwd %%xmm2,%%xmm1 \n" \
+ "packuswb %%xmm1,%%xmm3 \n" \
"movdqu (%[y_buf]),%%xmm4 \n" \
"psllw $0x6,%%xmm4 \n" \
+ "lea 0x10(%[y_buf]),%[y_buf] \n" \
+ "movdqu (%[a_buf]),%%xmm5 \n" \
+ "psraw $2,%%xmm5 \n" \
+ "packuswb %%xmm5,%%xmm5 \n" \
+ "lea 0x10(%[a_buf]),%[a_buf] \n"
+
+// Read 4 UV from 422 12 bit, upsample to 8 UV
+#define READYUV212 \
+ "movq (%[u_buf]),%%xmm3 \n" \
+ "movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
+ "lea 0x8(%[u_buf]),%[u_buf] \n" \
+ "punpcklwd %%xmm1,%%xmm3 \n" \
+ "psraw $0x4,%%xmm3 \n" \
+ "packuswb %%xmm3,%%xmm3 \n" \
+ "punpcklwd %%xmm3,%%xmm3 \n" \
+ "movdqu (%[y_buf]),%%xmm4 \n" \
+ "psllw $0x4,%%xmm4 \n" \
"lea 0x10(%[y_buf]),%[y_buf] \n"
// Read 4 UV from 422, upsample to 8 UV. With 8 Alpha.
#define READYUVA422 \
- "movd (%[u_buf]),%%xmm0 \n" \
+ "movd (%[u_buf]),%%xmm3 \n" \
"movd 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x4(%[u_buf]),%[u_buf] \n" \
- "punpcklbw %%xmm1,%%xmm0 \n" \
- "punpcklwd %%xmm0,%%xmm0 \n" \
+ "punpcklbw %%xmm1,%%xmm3 \n" \
+ "punpcklwd %%xmm3,%%xmm3 \n" \
"movq (%[y_buf]),%%xmm4 \n" \
"punpcklbw %%xmm4,%%xmm4 \n" \
"lea 0x8(%[y_buf]),%[y_buf] \n" \
@@ -1966,10 +2244,10 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0,
// Read 8 UV from 444. With 8 Alpha.
#define READYUVA444 \
- "movq (%[u_buf]),%%xmm0 \n" \
+ "movq (%[u_buf]),%%xmm3 \n" \
"movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x8(%[u_buf]),%[u_buf] \n" \
- "punpcklbw %%xmm1,%%xmm0 \n" \
+ "punpcklbw %%xmm1,%%xmm3 \n" \
"movq (%[y_buf]),%%xmm4 \n" \
"punpcklbw %%xmm4,%%xmm4 \n" \
"lea 0x8(%[y_buf]),%[y_buf] \n" \
@@ -1978,18 +2256,18 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0,
// Read 4 UV from NV12, upsample to 8 UV
#define READNV12 \
- "movq (%[uv_buf]),%%xmm0 \n" \
+ "movq (%[uv_buf]),%%xmm3 \n" \
"lea 0x8(%[uv_buf]),%[uv_buf] \n" \
- "punpcklwd %%xmm0,%%xmm0 \n" \
+ "punpcklwd %%xmm3,%%xmm3 \n" \
"movq (%[y_buf]),%%xmm4 \n" \
"punpcklbw %%xmm4,%%xmm4 \n" \
"lea 0x8(%[y_buf]),%[y_buf] \n"
// Read 4 VU from NV21, upsample to 8 UV
#define READNV21 \
- "movq (%[vu_buf]),%%xmm0 \n" \
+ "movq (%[vu_buf]),%%xmm3 \n" \
"lea 0x8(%[vu_buf]),%[vu_buf] \n" \
- "pshufb %[kShuffleNV21], %%xmm0 \n" \
+ "pshufb %[kShuffleNV21], %%xmm3 \n" \
"movq (%[y_buf]),%%xmm4 \n" \
"punpcklbw %%xmm4,%%xmm4 \n" \
"lea 0x8(%[y_buf]),%[y_buf] \n"
@@ -1998,68 +2276,92 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0,
#define READYUY2 \
"movdqu (%[yuy2_buf]),%%xmm4 \n" \
"pshufb %[kShuffleYUY2Y], %%xmm4 \n" \
- "movdqu (%[yuy2_buf]),%%xmm0 \n" \
- "pshufb %[kShuffleYUY2UV], %%xmm0 \n" \
+ "movdqu (%[yuy2_buf]),%%xmm3 \n" \
+ "pshufb %[kShuffleYUY2UV], %%xmm3 \n" \
"lea 0x10(%[yuy2_buf]),%[yuy2_buf] \n"
// Read 4 UYVY with 8 Y and update 4 UV to 8 UV.
#define READUYVY \
"movdqu (%[uyvy_buf]),%%xmm4 \n" \
"pshufb %[kShuffleUYVYY], %%xmm4 \n" \
- "movdqu (%[uyvy_buf]),%%xmm0 \n" \
- "pshufb %[kShuffleUYVYUV], %%xmm0 \n" \
+ "movdqu (%[uyvy_buf]),%%xmm3 \n" \
+ "pshufb %[kShuffleUYVYUV], %%xmm3 \n" \
"lea 0x10(%[uyvy_buf]),%[uyvy_buf] \n"
+// Read 4 UV from P210, upsample to 8 UV
+#define READP210 \
+ "movdqu (%[uv_buf]),%%xmm3 \n" \
+ "lea 0x10(%[uv_buf]),%[uv_buf] \n" \
+ "psrlw $0x8,%%xmm3 \n" \
+ "packuswb %%xmm3,%%xmm3 \n" \
+ "punpcklwd %%xmm3,%%xmm3 \n" \
+ "movdqu (%[y_buf]),%%xmm4 \n" \
+ "lea 0x10(%[y_buf]),%[y_buf] \n"
+
+// Read 8 UV from P410
+#define READP410 \
+ "movdqu (%[uv_buf]),%%xmm3 \n" \
+ "movdqu 0x10(%[uv_buf]),%%xmm1 \n" \
+ "lea 0x20(%[uv_buf]),%[uv_buf] \n" \
+ "psrlw $0x8,%%xmm3 \n" \
+ "psrlw $0x8,%%xmm1 \n" \
+ "packuswb %%xmm1,%%xmm3 \n" \
+ "movdqu (%[y_buf]),%%xmm4 \n" \
+ "lea 0x10(%[y_buf]),%[y_buf] \n"
+
#if defined(__x86_64__)
#define YUVTORGB_SETUP(yuvconstants) \
+ "pcmpeqb %%xmm13,%%xmm13 \n" \
"movdqa (%[yuvconstants]),%%xmm8 \n" \
+ "pxor %%xmm12,%%xmm12 \n" \
"movdqa 32(%[yuvconstants]),%%xmm9 \n" \
+ "psllw $7,%%xmm13 \n" \
"movdqa 64(%[yuvconstants]),%%xmm10 \n" \
+ "pshufb %%xmm12,%%xmm13 \n" \
"movdqa 96(%[yuvconstants]),%%xmm11 \n" \
- "movdqa 128(%[yuvconstants]),%%xmm12 \n" \
- "movdqa 160(%[yuvconstants]),%%xmm13 \n" \
- "movdqa 192(%[yuvconstants]),%%xmm14 \n"
+ "movdqa 128(%[yuvconstants]),%%xmm12 \n"
+
// Convert 8 pixels: 8 UV and 8 Y
#define YUVTORGB16(yuvconstants) \
- "movdqa %%xmm0,%%xmm1 \n" \
- "movdqa %%xmm0,%%xmm2 \n" \
- "movdqa %%xmm0,%%xmm3 \n" \
- "movdqa %%xmm11,%%xmm0 \n" \
- "pmaddubsw %%xmm8,%%xmm1 \n" \
- "psubw %%xmm1,%%xmm0 \n" \
- "movdqa %%xmm12,%%xmm1 \n" \
- "pmaddubsw %%xmm9,%%xmm2 \n" \
- "psubw %%xmm2,%%xmm1 \n" \
- "movdqa %%xmm13,%%xmm2 \n" \
- "pmaddubsw %%xmm10,%%xmm3 \n" \
- "psubw %%xmm3,%%xmm2 \n" \
- "pmulhuw %%xmm14,%%xmm4 \n" \
+ "psubb %%xmm13,%%xmm3 \n" \
+ "pmulhuw %%xmm11,%%xmm4 \n" \
+ "movdqa %%xmm8,%%xmm0 \n" \
+ "movdqa %%xmm9,%%xmm1 \n" \
+ "movdqa %%xmm10,%%xmm2 \n" \
+ "paddw %%xmm12,%%xmm4 \n" \
+ "pmaddubsw %%xmm3,%%xmm0 \n" \
+ "pmaddubsw %%xmm3,%%xmm1 \n" \
+ "pmaddubsw %%xmm3,%%xmm2 \n" \
"paddsw %%xmm4,%%xmm0 \n" \
- "paddsw %%xmm4,%%xmm1 \n" \
- "paddsw %%xmm4,%%xmm2 \n"
-#define YUVTORGB_REGS \
- "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14",
+ "paddsw %%xmm4,%%xmm2 \n" \
+ "psubsw %%xmm1,%%xmm4 \n" \
+ "movdqa %%xmm4,%%xmm1 \n"
+
+#define YUVTORGB_REGS "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13",
#else
#define YUVTORGB_SETUP(yuvconstants)
// Convert 8 pixels: 8 UV and 8 Y
#define YUVTORGB16(yuvconstants) \
- "movdqa %%xmm0,%%xmm1 \n" \
- "movdqa %%xmm0,%%xmm2 \n" \
- "movdqa %%xmm0,%%xmm3 \n" \
- "movdqa 96(%[yuvconstants]),%%xmm0 \n" \
- "pmaddubsw (%[yuvconstants]),%%xmm1 \n" \
- "psubw %%xmm1,%%xmm0 \n" \
- "movdqa 128(%[yuvconstants]),%%xmm1 \n" \
- "pmaddubsw 32(%[yuvconstants]),%%xmm2 \n" \
- "psubw %%xmm2,%%xmm1 \n" \
- "movdqa 160(%[yuvconstants]),%%xmm2 \n" \
- "pmaddubsw 64(%[yuvconstants]),%%xmm3 \n" \
- "psubw %%xmm3,%%xmm2 \n" \
- "pmulhuw 192(%[yuvconstants]),%%xmm4 \n" \
+ "pcmpeqb %%xmm0,%%xmm0 \n" \
+ "pxor %%xmm1,%%xmm1 \n" \
+ "psllw $7,%%xmm0 \n" \
+ "pshufb %%xmm1,%%xmm0 \n" \
+ "psubb %%xmm0,%%xmm3 \n" \
+ "pmulhuw 96(%[yuvconstants]),%%xmm4 \n" \
+ "movdqa (%[yuvconstants]),%%xmm0 \n" \
+ "movdqa 32(%[yuvconstants]),%%xmm1 \n" \
+ "movdqa 64(%[yuvconstants]),%%xmm2 \n" \
+ "pmaddubsw %%xmm3,%%xmm0 \n" \
+ "pmaddubsw %%xmm3,%%xmm1 \n" \
+ "pmaddubsw %%xmm3,%%xmm2 \n" \
+ "movdqa 128(%[yuvconstants]),%%xmm3 \n" \
+ "paddw %%xmm3,%%xmm4 \n" \
"paddsw %%xmm4,%%xmm0 \n" \
- "paddsw %%xmm4,%%xmm1 \n" \
- "paddsw %%xmm4,%%xmm2 \n"
+ "paddsw %%xmm4,%%xmm2 \n" \
+ "psubsw %%xmm1,%%xmm4 \n" \
+ "movdqa %%xmm4,%%xmm1 \n"
+
#define YUVTORGB_REGS
#endif
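The reshaped YUVTORGB16 implies a smaller YuvConstants layout: judging from the offsets used above, three 32-byte UV coefficient blocks at 0/32/64, the Y gain at 96 and the Y bias at 128, with the 0x80 chroma bias now subtracted in a register rather than folded into per-channel bias tables. Below is a scalar approximation of one pixel, purely as a sketch of that math; the names and two-entry coefficient arrays are illustrative, not the libyuv API.

#include <stdint.h>

// Illustrative per-pixel model of the revised YUVTORGB16 path. Results stay in
// the row's fixed-point scale; STOREARGB shifts them down afterwards. y16 is
// the msb-aligned luma word the READ* macros build (the byte replicated for
// 8-bit input, or the sample shifted left for 10/12-bit input).
static int16_t SatW(int v) {  // paddsw/psubsw saturation
  return (int16_t)(v < -32768 ? -32768 : (v > 32767 ? 32767 : v));
}

static void YuvToRgb16Sketch(uint16_t y16, uint8_t u, uint8_t v,
                             const uint8_t ub[2], const uint8_t ug[2],
                             const uint8_t vr[2], uint16_t yg, int16_t ybias,
                             int16_t* b, int16_t* g, int16_t* r) {
  int su = (int)u - 0x80;                          // psubb with the 0x80 splat
  int sv = (int)v - 0x80;
  int y = (int)(((uint32_t)y16 * yg) >> 16) + ybias;  // pmulhuw (96) + paddw (128)
  *b = SatW(y + su * ub[0] + sv * ub[1]);          // pmaddubsw block 0, paddsw
  *g = SatW(y - (su * ug[0] + sv * ug[1]));        // block 32, psubsw
  *r = SatW(y + su * vr[0] + sv * vr[1]);          // block 64, paddsw
}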
@@ -2275,8 +2577,8 @@ void OMITFP I422ToAR30Row_SSSE3(const uint8_t* y_buf,
"pcmpeqb %%xmm5,%%xmm5 \n" // AR30 constants
"psrlw $14,%%xmm5 \n"
"psllw $4,%%xmm5 \n" // 2 alpha bits
- "pxor %%xmm6,%%xmm6 \n"
- "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min
+ "pxor %%xmm6,%%xmm6 \n" // 0 for min
+ "pcmpeqb %%xmm7,%%xmm7 \n"
"psrlw $6,%%xmm7 \n" // 1023 for max
LABELALIGN
@@ -2327,6 +2629,36 @@ void OMITFP I210ToARGBRow_SSSE3(const uint16_t* y_buf,
);
}
+// 12 bit YUV to ARGB
+void OMITFP I212ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "pcmpeqb %%xmm5,%%xmm5 \n"
+
+ LABELALIGN
+ "1: \n"
+ READYUV212
+ YUVTORGB(yuvconstants)
+ STOREARGB
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_argb]"+r"(dst_argb), // %[dst_argb]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"
+ );
+}
+
// 10 bit YUV to AR30
void OMITFP I210ToAR30Row_SSSE3(const uint16_t* y_buf,
const uint16_t* u_buf,
@@ -2340,8 +2672,8 @@ void OMITFP I210ToAR30Row_SSSE3(const uint16_t* y_buf,
"pcmpeqb %%xmm5,%%xmm5 \n"
"psrlw $14,%%xmm5 \n"
"psllw $4,%%xmm5 \n" // 2 alpha bits
- "pxor %%xmm6,%%xmm6 \n"
- "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min
+ "pxor %%xmm6,%%xmm6 \n" // 0 for min
+ "pcmpeqb %%xmm7,%%xmm7 \n"
"psrlw $6,%%xmm7 \n" // 1023 for max
LABELALIGN
@@ -2362,6 +2694,176 @@ void OMITFP I210ToAR30Row_SSSE3(const uint16_t* y_buf,
);
}
+// 12 bit YUV to AR30
+void OMITFP I212ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "pcmpeqb %%xmm5,%%xmm5 \n"
+ "psrlw $14,%%xmm5 \n"
+ "psllw $4,%%xmm5 \n" // 2 alpha bits
+ "pxor %%xmm6,%%xmm6 \n" // 0 for min
+ "pcmpeqb %%xmm7,%%xmm7 \n"
+ "psrlw $6,%%xmm7 \n" // 1023 for max
+
+ LABELALIGN
+ "1: \n"
+ READYUV212
+ YUVTORGB16(yuvconstants)
+ STOREAR30
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+
+// 10 bit YUV to ARGB
+void OMITFP I410ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "pcmpeqb %%xmm5,%%xmm5 \n"
+
+ LABELALIGN
+ "1: \n"
+ READYUV410
+ YUVTORGB(yuvconstants)
+ STOREARGB
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_argb]"+r"(dst_argb), // %[dst_argb]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"
+ );
+}
+
+#ifdef HAS_I210ALPHATOARGBROW_SSSE3
+// 10 bit YUVA to ARGB
+void OMITFP I210AlphaToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile(
+ YUVTORGB_SETUP(
+ yuvconstants) "sub %[u_buf],%[v_buf] \n"
+
+ LABELALIGN "1: \n" READYUVA210
+ YUVTORGB(yuvconstants) STOREARGB
+ "subl $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf] "+r"(y_buf), // %[y_buf]
+ [u_buf] "+r"(u_buf), // %[u_buf]
+ [v_buf] "+r"(v_buf), // %[v_buf]
+ [a_buf] "+r"(a_buf),
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+#if defined(__i386__)
+ [width] "+m"(width) // %[width]
+#else
+ [width] "+rm"(width) // %[width]
+#endif
+ : [yuvconstants] "r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4",
+ "xmm5");
+}
+#endif
+
+#ifdef HAS_I410ALPHATOARGBROW_SSSE3
+// 10 bit YUVA to ARGB
+void OMITFP I410AlphaToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ // clang-format off
+ asm volatile(
+ YUVTORGB_SETUP(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+
+ LABELALIGN
+ "1: \n"
+ READYUVA410
+ YUVTORGB(yuvconstants)
+ STOREARGB
+ "subl $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf] "+r"(y_buf), // %[y_buf]
+ [u_buf] "+r"(u_buf), // %[u_buf]
+ [v_buf] "+r"(v_buf), // %[v_buf]
+ [a_buf] "+r"(a_buf),
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+#if defined(__i386__)
+ [width] "+m"(width) // %[width]
+#else
+ [width] "+rm"(width) // %[width]
+#endif
+ : [yuvconstants] "r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4",
+ "xmm5");
+ // clang-format on
+}
+#endif
+
+// 10 bit YUV to AR30
+void OMITFP I410ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "pcmpeqb %%xmm5,%%xmm5 \n"
+ "psrlw $14,%%xmm5 \n"
+ "psllw $4,%%xmm5 \n" // 2 alpha bits
+ "pxor %%xmm6,%%xmm6 \n" // 0 for min
+ "pcmpeqb %%xmm7,%%xmm7 \n"
+ "psrlw $6,%%xmm7 \n" // 1023 for max
+
+ LABELALIGN
+ "1: \n"
+ READYUV410
+ YUVTORGB16(yuvconstants)
+ STOREAR30
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+
#ifdef HAS_I422ALPHATOARGBROW_SSSE3
void OMITFP I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf,
const uint8_t* u_buf,
@@ -2513,6 +3015,112 @@ void OMITFP UYVYToARGBRow_SSSE3(const uint8_t* uyvy_buf,
// clang-format on
}
+void OMITFP P210ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile(
+ YUVTORGB_SETUP(
+ yuvconstants) "pcmpeqb %%xmm5,%%xmm5 \n"
+
+ LABELALIGN "1: \n" READP210
+ YUVTORGB(yuvconstants) STOREARGB
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf] "+r"(y_buf), // %[y_buf]
+ [uv_buf] "+r"(uv_buf), // %[u_buf]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+rm"(width) // %[width]
+ : [yuvconstants] "r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4",
+ "xmm5");
+}
+
+void OMITFP P410ToARGBRow_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile(
+ YUVTORGB_SETUP(
+ yuvconstants) "pcmpeqb %%xmm5,%%xmm5 \n"
+
+ LABELALIGN "1: \n" READP410
+ YUVTORGB(yuvconstants) STOREARGB
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf] "+r"(y_buf), // %[y_buf]
+ [uv_buf] "+r"(uv_buf), // %[u_buf]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+rm"(width) // %[width]
+ : [yuvconstants] "r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4",
+ "xmm5");
+}
+
+void OMITFP P210ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP(yuvconstants)
+ "pcmpeqb %%xmm5,%%xmm5 \n"
+ "psrlw $14,%%xmm5 \n"
+ "psllw $4,%%xmm5 \n" // 2 alpha bits
+ "pxor %%xmm6,%%xmm6 \n" // 0 for min
+ "pcmpeqb %%xmm7,%%xmm7 \n"
+ "psrlw $6,%%xmm7 \n" // 1023 for max
+
+ LABELALIGN
+ "1: \n"
+ READP210
+ YUVTORGB16(yuvconstants)
+ STOREAR30
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [uv_buf]"+r"(uv_buf), // %[uv_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+
+void OMITFP P410ToAR30Row_SSSE3(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP(yuvconstants)
+ "pcmpeqb %%xmm5,%%xmm5 \n"
+ "psrlw $14,%%xmm5 \n"
+ "psllw $4,%%xmm5 \n" // 2 alpha bits
+ "pxor %%xmm6,%%xmm6 \n" // 0 for min
+ "pcmpeqb %%xmm7,%%xmm7 \n"
+ "psrlw $6,%%xmm7 \n" // 1023 for max
+
+ LABELALIGN
+ "1: \n"
+ READP410
+ YUVTORGB16(yuvconstants)
+ STOREAR30
+ "sub $0x8,%[width] \n"
+ "jg 1b \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [uv_buf]"+r"(uv_buf), // %[uv_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+
void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
@@ -2546,12 +3154,12 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf,
// Read 16 UV from 444
#define READYUV444_AVX2 \
- "vmovdqu (%[u_buf]),%%xmm0 \n" \
+ "vmovdqu (%[u_buf]),%%xmm3 \n" \
"vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x10(%[u_buf]),%[u_buf] \n" \
- "vpermq $0xd8,%%ymm0,%%ymm0 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
"vpermq $0xd8,%%ymm1,%%ymm1 \n" \
- "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \
+ "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \
"vmovdqu (%[y_buf]),%%xmm4 \n" \
"vpermq $0xd8,%%ymm4,%%ymm4 \n" \
"vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \
@@ -2559,42 +3167,108 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf,
// Read 8 UV from 422, upsample to 16 UV.
#define READYUV422_AVX2 \
- "vmovq (%[u_buf]),%%xmm0 \n" \
+ "vmovq (%[u_buf]),%%xmm3 \n" \
"vmovq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x8(%[u_buf]),%[u_buf] \n" \
- "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \
- "vpermq $0xd8,%%ymm0,%%ymm0 \n" \
- "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \
+ "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
+ "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \
"vmovdqu (%[y_buf]),%%xmm4 \n" \
"vpermq $0xd8,%%ymm4,%%ymm4 \n" \
"vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \
"lea 0x10(%[y_buf]),%[y_buf] \n"
-// Read 8 UV from 210 10 bit, upsample to 16 UV
+// Read 8 UV from 210, upsample to 16 UV
// TODO(fbarchard): Consider vshufb to replace pack/unpack
// TODO(fbarchard): Consider vunpcklpd to combine the 2 registers into 1.
#define READYUV210_AVX2 \
- "vmovdqu (%[u_buf]),%%xmm0 \n" \
+ "vmovdqu (%[u_buf]),%%xmm3 \n" \
+ "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
+ "lea 0x10(%[u_buf]),%[u_buf] \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
+ "vpermq $0xd8,%%ymm1,%%ymm1 \n" \
+ "vpunpcklwd %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vpsraw $2,%%ymm3,%%ymm3 \n" \
+ "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \
+ "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \
+ "vmovdqu (%[y_buf]),%%ymm4 \n" \
+ "vpsllw $6,%%ymm4,%%ymm4 \n" \
+ "lea 0x20(%[y_buf]),%[y_buf] \n"
+
+// Read 8 UV from 210, upsample to 16 UV. With 16 Alpha.
+#define READYUVA210_AVX2 \
+ "vmovdqu (%[u_buf]),%%xmm3 \n" \
+ "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
+ "lea 0x10(%[u_buf]),%[u_buf] \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
+ "vpermq $0xd8,%%ymm1,%%ymm1 \n" \
+ "vpunpcklwd %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vpsraw $2,%%ymm3,%%ymm3 \n" \
+ "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \
+ "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \
+ "vmovdqu (%[y_buf]),%%ymm4 \n" \
+ "vpsllw $6,%%ymm4,%%ymm4 \n" \
+ "lea 0x20(%[y_buf]),%[y_buf] \n" \
+ "vmovdqu (%[a_buf]),%%ymm5 \n" \
+ "vpsraw $2,%%ymm5,%%ymm5 \n" \
+ "vpackuswb %%ymm5,%%ymm5,%%ymm5 \n" \
+ "lea 0x20(%[a_buf]),%[a_buf] \n"
+
+// Read 16 UV from 410
+#define READYUV410_AVX2 \
+ "vmovdqu (%[u_buf]),%%ymm3 \n" \
+ "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%ymm2 \n" \
+ "lea 0x20(%[u_buf]),%[u_buf] \n" \
+ "vpsraw $2,%%ymm3,%%ymm3 \n" \
+ "vpsraw $2,%%ymm2,%%ymm2 \n" \
+ "vpunpckhwd %%ymm2,%%ymm3,%%ymm1 \n" \
+ "vpunpcklwd %%ymm2,%%ymm3,%%ymm3 \n" \
+ "vpackuswb %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vmovdqu (%[y_buf]),%%ymm4 \n" \
+ "vpsllw $6,%%ymm4,%%ymm4 \n" \
+ "lea 0x20(%[y_buf]),%[y_buf] \n"
+
+// Read 8 UV from 212 12 bit, upsample to 16 UV
+#define READYUV212_AVX2 \
+ "vmovdqu (%[u_buf]),%%xmm3 \n" \
"vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x10(%[u_buf]),%[u_buf] \n" \
- "vpermq $0xd8,%%ymm0,%%ymm0 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
"vpermq $0xd8,%%ymm1,%%ymm1 \n" \
- "vpunpcklwd %%ymm1,%%ymm0,%%ymm0 \n" \
- "vpsraw $0x2,%%ymm0,%%ymm0 \n" \
- "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" \
- "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \
+ "vpunpcklwd %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vpsraw $0x4,%%ymm3,%%ymm3 \n" \
+ "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \
+ "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \
"vmovdqu (%[y_buf]),%%ymm4 \n" \
- "vpsllw $0x6,%%ymm4,%%ymm4 \n" \
+ "vpsllw $0x4,%%ymm4,%%ymm4 \n" \
"lea 0x20(%[y_buf]),%[y_buf] \n"
+// Read 16 UV from 410. With 16 Alpha.
+#define READYUVA410_AVX2 \
+ "vmovdqu (%[u_buf]),%%ymm3 \n" \
+ "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%ymm2 \n" \
+ "lea 0x20(%[u_buf]),%[u_buf] \n" \
+ "vpsraw $2,%%ymm3,%%ymm3 \n" \
+ "vpsraw $2,%%ymm2,%%ymm2 \n" \
+ "vpunpckhwd %%ymm2,%%ymm3,%%ymm1 \n" \
+ "vpunpcklwd %%ymm2,%%ymm3,%%ymm3 \n" \
+ "vpackuswb %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vmovdqu (%[y_buf]),%%ymm4 \n" \
+ "vpsllw $6,%%ymm4,%%ymm4 \n" \
+ "lea 0x20(%[y_buf]),%[y_buf] \n" \
+ "vmovdqu (%[a_buf]),%%ymm5 \n" \
+ "vpsraw $2,%%ymm5,%%ymm5 \n" \
+ "vpackuswb %%ymm5,%%ymm5,%%ymm5 \n" \
+ "lea 0x20(%[a_buf]),%[a_buf] \n"
+
// Read 16 UV from 444. With 16 Alpha.
#define READYUVA444_AVX2 \
- "vmovdqu (%[u_buf]),%%xmm0 \n" \
+ "vmovdqu (%[u_buf]),%%xmm3 \n" \
"vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x10(%[u_buf]),%[u_buf] \n" \
- "vpermq $0xd8,%%ymm0,%%ymm0 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
"vpermq $0xd8,%%ymm1,%%ymm1 \n" \
- "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \
+ "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \
"vmovdqu (%[y_buf]),%%xmm4 \n" \
"vpermq $0xd8,%%ymm4,%%ymm4 \n" \
"vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \
@@ -2605,12 +3279,12 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf,
// Read 8 UV from 422, upsample to 16 UV. With 16 Alpha.
#define READYUVA422_AVX2 \
- "vmovq (%[u_buf]),%%xmm0 \n" \
+ "vmovq (%[u_buf]),%%xmm3 \n" \
"vmovq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \
"lea 0x8(%[u_buf]),%[u_buf] \n" \
- "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \
- "vpermq $0xd8,%%ymm0,%%ymm0 \n" \
- "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \
+ "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
+ "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \
"vmovdqu (%[y_buf]),%%xmm4 \n" \
"vpermq $0xd8,%%ymm4,%%ymm4 \n" \
"vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \
@@ -2621,10 +3295,10 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf,
// Read 8 UV from NV12, upsample to 16 UV.
#define READNV12_AVX2 \
- "vmovdqu (%[uv_buf]),%%xmm0 \n" \
+ "vmovdqu (%[uv_buf]),%%xmm3 \n" \
"lea 0x10(%[uv_buf]),%[uv_buf] \n" \
- "vpermq $0xd8,%%ymm0,%%ymm0 \n" \
- "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
+ "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \
"vmovdqu (%[y_buf]),%%xmm4 \n" \
"vpermq $0xd8,%%ymm4,%%ymm4 \n" \
"vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \
@@ -2632,73 +3306,98 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf,
// Read 8 VU from NV21, upsample to 16 UV.
#define READNV21_AVX2 \
- "vmovdqu (%[vu_buf]),%%xmm0 \n" \
+ "vmovdqu (%[vu_buf]),%%xmm3 \n" \
"lea 0x10(%[vu_buf]),%[vu_buf] \n" \
- "vpermq $0xd8,%%ymm0,%%ymm0 \n" \
- "vpshufb %[kShuffleNV21], %%ymm0, %%ymm0 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
+ "vpshufb %[kShuffleNV21], %%ymm3, %%ymm3 \n" \
"vmovdqu (%[y_buf]),%%xmm4 \n" \
"vpermq $0xd8,%%ymm4,%%ymm4 \n" \
"vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \
"lea 0x10(%[y_buf]),%[y_buf] \n"
+// Read 8 UV from P210, upsample to 16 UV
+#define READP210_AVX2 \
+ "vmovdqu (%[uv_buf]),%%ymm3 \n" \
+ "lea 0x20(%[uv_buf]),%[uv_buf] \n" \
+ "vpsrlw $0x8,%%ymm3,%%ymm3 \n" \
+ "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \
+ "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \
+ "vmovdqu (%[y_buf]),%%ymm4 \n" \
+ "lea 0x20(%[y_buf]),%[y_buf] \n"
+
+// Read 16 UV from P410
+#define READP410_AVX2 \
+ "vmovdqu (%[uv_buf]),%%ymm3 \n" \
+ "vmovdqu 0x20(%[uv_buf]),%%ymm1 \n" \
+ "lea 0x40(%[uv_buf]),%[uv_buf] \n" \
+ "vpsrlw $0x8,%%ymm3,%%ymm3 \n" \
+ "vpsrlw $0x8,%%ymm1,%%ymm1 \n" \
+ "vpackuswb %%ymm1,%%ymm3,%%ymm3 \n" \
+ "vpermq $0xd8,%%ymm3,%%ymm3 \n" \
+ "vmovdqu (%[y_buf]),%%ymm4 \n" \
+ "lea 0x20(%[y_buf]),%[y_buf] \n"
+
// Read 8 YUY2 with 16 Y and upsample 8 UV to 16 UV.
#define READYUY2_AVX2 \
"vmovdqu (%[yuy2_buf]),%%ymm4 \n" \
"vpshufb %[kShuffleYUY2Y], %%ymm4, %%ymm4 \n" \
- "vmovdqu (%[yuy2_buf]),%%ymm0 \n" \
- "vpshufb %[kShuffleYUY2UV], %%ymm0, %%ymm0 \n" \
+ "vmovdqu (%[yuy2_buf]),%%ymm3 \n" \
+ "vpshufb %[kShuffleYUY2UV], %%ymm3, %%ymm3 \n" \
"lea 0x20(%[yuy2_buf]),%[yuy2_buf] \n"
// Read 8 UYVY with 16 Y and upsample 8 UV to 16 UV.
#define READUYVY_AVX2 \
"vmovdqu (%[uyvy_buf]),%%ymm4 \n" \
"vpshufb %[kShuffleUYVYY], %%ymm4, %%ymm4 \n" \
- "vmovdqu (%[uyvy_buf]),%%ymm0 \n" \
- "vpshufb %[kShuffleUYVYUV], %%ymm0, %%ymm0 \n" \
+ "vmovdqu (%[uyvy_buf]),%%ymm3 \n" \
+ "vpshufb %[kShuffleUYVYUV], %%ymm3, %%ymm3 \n" \
"lea 0x20(%[uyvy_buf]),%[uyvy_buf] \n"
#if defined(__x86_64__)
-#define YUVTORGB_SETUP_AVX2(yuvconstants) \
- "vmovdqa (%[yuvconstants]),%%ymm8 \n" \
- "vmovdqa 32(%[yuvconstants]),%%ymm9 \n" \
- "vmovdqa 64(%[yuvconstants]),%%ymm10 \n" \
- "vmovdqa 96(%[yuvconstants]),%%ymm11 \n" \
- "vmovdqa 128(%[yuvconstants]),%%ymm12 \n" \
- "vmovdqa 160(%[yuvconstants]),%%ymm13 \n" \
- "vmovdqa 192(%[yuvconstants]),%%ymm14 \n"
+#define YUVTORGB_SETUP_AVX2(yuvconstants) \
+ "vpcmpeqb %%xmm13,%%xmm13,%%xmm13 \n" \
+ "vmovdqa (%[yuvconstants]),%%ymm8 \n" \
+ "vpsllw $7,%%xmm13,%%xmm13 \n" \
+ "vmovdqa 32(%[yuvconstants]),%%ymm9 \n" \
+ "vpbroadcastb %%xmm13,%%ymm13 \n" \
+ "vmovdqa 64(%[yuvconstants]),%%ymm10 \n" \
+ "vmovdqa 96(%[yuvconstants]),%%ymm11 \n" \
+ "vmovdqa 128(%[yuvconstants]),%%ymm12 \n"
#define YUVTORGB16_AVX2(yuvconstants) \
- "vpmaddubsw %%ymm10,%%ymm0,%%ymm2 \n" \
- "vpmaddubsw %%ymm9,%%ymm0,%%ymm1 \n" \
- "vpmaddubsw %%ymm8,%%ymm0,%%ymm0 \n" \
- "vpsubw %%ymm2,%%ymm13,%%ymm2 \n" \
- "vpsubw %%ymm1,%%ymm12,%%ymm1 \n" \
- "vpsubw %%ymm0,%%ymm11,%%ymm0 \n" \
- "vpmulhuw %%ymm14,%%ymm4,%%ymm4 \n" \
+ "vpsubb %%ymm13,%%ymm3,%%ymm3 \n" \
+ "vpmulhuw %%ymm11,%%ymm4,%%ymm4 \n" \
+ "vpmaddubsw %%ymm3,%%ymm8,%%ymm0 \n" \
+ "vpmaddubsw %%ymm3,%%ymm9,%%ymm1 \n" \
+ "vpmaddubsw %%ymm3,%%ymm10,%%ymm2 \n" \
+ "vpaddw %%ymm4,%%ymm12,%%ymm4 \n" \
"vpaddsw %%ymm4,%%ymm0,%%ymm0 \n" \
- "vpaddsw %%ymm4,%%ymm1,%%ymm1 \n" \
+ "vpsubsw %%ymm1,%%ymm4,%%ymm1 \n" \
"vpaddsw %%ymm4,%%ymm2,%%ymm2 \n"
-#define YUVTORGB_REGS_AVX2 \
- "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14",
+#define YUVTORGB_REGS_AVX2 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13",
#else // Convert 16 pixels: 16 UV and 16 Y.
#define YUVTORGB_SETUP_AVX2(yuvconstants)
#define YUVTORGB16_AVX2(yuvconstants) \
- "vpmaddubsw 64(%[yuvconstants]),%%ymm0,%%ymm2 \n" \
- "vpmaddubsw 32(%[yuvconstants]),%%ymm0,%%ymm1 \n" \
- "vpmaddubsw (%[yuvconstants]),%%ymm0,%%ymm0 \n" \
- "vmovdqu 160(%[yuvconstants]),%%ymm3 \n" \
- "vpsubw %%ymm2,%%ymm3,%%ymm2 \n" \
- "vmovdqu 128(%[yuvconstants]),%%ymm3 \n" \
- "vpsubw %%ymm1,%%ymm3,%%ymm1 \n" \
- "vmovdqu 96(%[yuvconstants]),%%ymm3 \n" \
- "vpsubw %%ymm0,%%ymm3,%%ymm0 \n" \
- "vpmulhuw 192(%[yuvconstants]),%%ymm4,%%ymm4 \n" \
+ "vpcmpeqb %%xmm0,%%xmm0,%%xmm0 \n" \
+ "vpsllw $7,%%xmm0,%%xmm0 \n" \
+ "vpbroadcastb %%xmm0,%%ymm0 \n" \
+ "vpsubb %%ymm0,%%ymm3,%%ymm3 \n" \
+ "vpmulhuw 96(%[yuvconstants]),%%ymm4,%%ymm4 \n" \
+ "vmovdqa (%[yuvconstants]),%%ymm0 \n" \
+ "vmovdqa 32(%[yuvconstants]),%%ymm1 \n" \
+ "vmovdqa 64(%[yuvconstants]),%%ymm2 \n" \
+ "vpmaddubsw %%ymm3,%%ymm0,%%ymm0 \n" \
+ "vpmaddubsw %%ymm3,%%ymm1,%%ymm1 \n" \
+ "vpmaddubsw %%ymm3,%%ymm2,%%ymm2 \n" \
+ "vmovdqa 128(%[yuvconstants]),%%ymm3 \n" \
+ "vpaddw %%ymm4,%%ymm3,%%ymm4 \n" \
"vpaddsw %%ymm4,%%ymm0,%%ymm0 \n" \
- "vpaddsw %%ymm4,%%ymm1,%%ymm1 \n" \
+ "vpsubsw %%ymm1,%%ymm4,%%ymm1 \n" \
"vpaddsw %%ymm4,%%ymm2,%%ymm2 \n"
+
#define YUVTORGB_REGS_AVX2
#endif
@@ -2721,7 +3420,7 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf,
"vpunpckhwd %%ymm2,%%ymm0,%%ymm0 \n" \
"vmovdqu %%ymm1,(%[dst_argb]) \n" \
"vmovdqu %%ymm0,0x20(%[dst_argb]) \n" \
- "lea 0x40(%[dst_argb]), %[dst_argb] \n"
+ "lea 0x40(%[dst_argb]), %[dst_argb] \n"
// Store 16 AR30 values.
#define STOREAR30_AVX2 \
@@ -2894,6 +3593,41 @@ void OMITFP I210ToARGBRow_AVX2(const uint16_t* y_buf,
}
#endif // HAS_I210TOARGBROW_AVX2
+#if defined(HAS_I212TOARGBROW_AVX2)
+// 16 pixels
+// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 ARGB (64 bytes).
+void OMITFP I212ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n"
+
+ LABELALIGN
+ "1: \n"
+ READYUV212_AVX2
+ YUVTORGB_AVX2(yuvconstants)
+ STOREARGB_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+
+ "vzeroupper \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_argb]"+r"(dst_argb), // %[dst_argb]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"
+ );
+}
+#endif // HAS_I212TOARGBROW_AVX2
+
#if defined(HAS_I210TOAR30ROW_AVX2)
// 16 pixels
// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 AR30 (64 bytes).
@@ -2929,11 +3663,198 @@ void OMITFP I210ToAR30Row_AVX2(const uint16_t* y_buf,
[width]"+rm"(width) // %[width]
: [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
: "memory", "cc", YUVTORGB_REGS_AVX2
- "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
);
}
#endif // HAS_I210TOAR30ROW_AVX2
+#if defined(HAS_I212TOAR30ROW_AVX2)
+// 16 pixels
+// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 AR30 (64 bytes).
+void OMITFP I212ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants
+ "vpsrlw $14,%%ymm5,%%ymm5 \n"
+ "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits
+ "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min
+ "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max
+ "vpsrlw $6,%%ymm7,%%ymm7 \n"
+
+ LABELALIGN
+ "1: \n"
+ READYUV212_AVX2
+ YUVTORGB16_AVX2(yuvconstants)
+ STOREAR30_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+
+ "vzeroupper \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+#endif // HAS_I212TOAR30ROW_AVX2
+
+#if defined(HAS_I410TOARGBROW_AVX2)
+// 16 pixels
+// 16 UV values with 16 Y producing 16 ARGB (64 bytes).
+void OMITFP I410ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n"
+
+ LABELALIGN
+ "1: \n"
+ READYUV410_AVX2
+ YUVTORGB_AVX2(yuvconstants)
+ STOREARGB_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_argb]"+r"(dst_argb), // %[dst_argb]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"
+ );
+}
+#endif // HAS_I410TOARGBROW_AVX2
+
+#if defined(HAS_I210ALPHATOARGBROW_AVX2)
+// 16 pixels
+// 8 UV, 16 Y and 16 A producing 16 ARGB (64 bytes).
+void OMITFP I210AlphaToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile(
+ YUVTORGB_SETUP_AVX2(
+ yuvconstants) "sub %[u_buf],%[v_buf] \n"
+
+ LABELALIGN "1: \n" READYUVA210_AVX2
+ YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2
+ "subl $0x10,%[width] \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+
+ : [y_buf] "+r"(y_buf), // %[y_buf]
+ [u_buf] "+r"(u_buf), // %[u_buf]
+ [v_buf] "+r"(v_buf), // %[v_buf]
+ [a_buf] "+r"(a_buf), // %[a_buf]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+#if defined(__i386__)
+ [width] "+m"(width) // %[width]
+#else
+ [width] "+rm"(width) // %[width]
+#endif
+ : [yuvconstants] "r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2 "xmm0", "xmm1", "xmm2", "xmm3",
+ "xmm4", "xmm5");
+}
+#endif // HAS_I210ALPHATOARGBROW_AVX2
+
+#if defined(HAS_I410ALPHATOARGBROW_AVX2)
+// 16 pixels
+// 16 UV, 16 Y and 16 A producing 16 ARGB (64 bytes).
+void OMITFP I410AlphaToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ const uint16_t* a_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile(
+ YUVTORGB_SETUP_AVX2(
+ yuvconstants) "sub %[u_buf],%[v_buf] \n"
+
+ LABELALIGN "1: \n" READYUVA410_AVX2
+ YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2
+ "subl $0x10,%[width] \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+
+ : [y_buf] "+r"(y_buf), // %[y_buf]
+ [u_buf] "+r"(u_buf), // %[u_buf]
+ [v_buf] "+r"(v_buf), // %[v_buf]
+ [a_buf] "+r"(a_buf), // %[a_buf]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+#if defined(__i386__)
+ [width] "+m"(width) // %[width]
+#else
+ [width] "+rm"(width) // %[width]
+#endif
+ : [yuvconstants] "r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2 "xmm0", "xmm1", "xmm2", "xmm3",
+ "xmm4", "xmm5");
+}
+#endif // HAS_I410ALPHATOARGBROW_AVX2
+
+#if defined(HAS_I410TOAR30ROW_AVX2)
+// 16 pixels
+// 16 UV values with 16 Y producing 16 AR30 (64 bytes).
+void OMITFP I410ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* u_buf,
+ const uint16_t* v_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "sub %[u_buf],%[v_buf] \n"
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants
+ "vpsrlw $14,%%ymm5,%%ymm5 \n"
+ "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits
+ "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min
+ "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max
+ "vpsrlw $6,%%ymm7,%%ymm7 \n"
+
+ LABELALIGN
+ "1: \n"
+ READYUV410_AVX2
+ YUVTORGB16_AVX2(yuvconstants)
+ STOREAR30_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+
+ "vzeroupper \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [u_buf]"+r"(u_buf), // %[u_buf]
+ [v_buf]"+r"(v_buf), // %[v_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+#endif // HAS_I410TOAR30ROW_AVX2
+
#if defined(HAS_I444ALPHATOARGBROW_AVX2)
// 16 pixels
// 16 UV values with 16 Y and 16 A producing 16 ARGB.
@@ -3193,14 +4114,154 @@ void OMITFP UYVYToARGBRow_AVX2(const uint8_t* uyvy_buf,
}
#endif // HAS_UYVYTOARGBROW_AVX2
+#if defined(HAS_P210TOARGBROW_AVX2)
+// 16 pixels.
+// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 ARGB (64 bytes).
+void OMITFP P210ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ // clang-format off
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n"
+
+ LABELALIGN
+ "1: \n"
+ READP210_AVX2
+ YUVTORGB_AVX2(yuvconstants)
+ STOREARGB_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [uv_buf]"+r"(uv_buf), // %[uv_buf]
+ [dst_argb]"+r"(dst_argb), // %[dst_argb]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"
+ );
+ // clang-format on
+}
+#endif // HAS_P210TOARGBROW_AVX2
+
+#if defined(HAS_P410TOARGBROW_AVX2)
+// 16 pixels.
+// 16 UV values with 16 Y producing 16 ARGB (64 bytes).
+void OMITFP P410ToARGBRow_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_argb,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ // clang-format off
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n"
+
+ LABELALIGN
+ "1: \n"
+ READP410_AVX2
+ YUVTORGB_AVX2(yuvconstants)
+ STOREARGB_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [uv_buf]"+r"(uv_buf), // %[uv_buf]
+ [dst_argb]"+r"(dst_argb), // %[dst_argb]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"
+ );
+ // clang-format on
+}
+#endif // HAS_P410TOARGBROW_AVX2
+
+#if defined(HAS_P210TOAR30ROW_AVX2)
+// 16 pixels
+// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 AR30 (64 bytes).
+void OMITFP P210ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants
+ "vpsrlw $14,%%ymm5,%%ymm5 \n"
+ "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits
+ "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min
+ "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max
+ "vpsrlw $6,%%ymm7,%%ymm7 \n"
+
+ LABELALIGN
+ "1: \n"
+ READP210_AVX2
+ YUVTORGB16_AVX2(yuvconstants)
+ STOREAR30_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+
+ "vzeroupper \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [uv_buf]"+r"(uv_buf), // %[uv_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+#endif // HAS_P210TOAR30ROW_AVX2
+
+#if defined(HAS_P410TOAR30ROW_AVX2)
+// 16 pixels
+// 16 UV values with 16 Y producing 16 AR30 (64 bytes).
+void OMITFP P410ToAR30Row_AVX2(const uint16_t* y_buf,
+ const uint16_t* uv_buf,
+ uint8_t* dst_ar30,
+ const struct YuvConstants* yuvconstants,
+ int width) {
+ asm volatile (
+ YUVTORGB_SETUP_AVX2(yuvconstants)
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants
+ "vpsrlw $14,%%ymm5,%%ymm5 \n"
+ "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits
+ "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min
+ "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max
+ "vpsrlw $6,%%ymm7,%%ymm7 \n"
+
+ LABELALIGN
+ "1: \n"
+ READP410_AVX2
+ YUVTORGB16_AVX2(yuvconstants)
+ STOREAR30_AVX2
+ "sub $0x10,%[width] \n"
+ "jg 1b \n"
+
+ "vzeroupper \n"
+ : [y_buf]"+r"(y_buf), // %[y_buf]
+ [uv_buf]"+r"(uv_buf), // %[uv_buf]
+ [dst_ar30]"+r"(dst_ar30), // %[dst_ar30]
+ [width]"+rm"(width) // %[width]
+ : [yuvconstants]"r"(yuvconstants) // %[yuvconstants]
+ : "memory", "cc", YUVTORGB_REGS_AVX2
+ "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
+ );
+}
+#endif // HAS_P410TOAR30ROW_AVX2
+
#ifdef HAS_I400TOARGBROW_SSE2
void I400ToARGBRow_SSE2(const uint8_t* y_buf,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
asm volatile(
- "movdqa 192(%3),%%xmm2 \n" // yg = 18997 = 1.164
- "movdqa 224(%3),%%xmm3 \n" // ygb = 1160 = 1.164 * 16
+ "movdqa 96(%3),%%xmm2 \n" // yg = 18997 = 1.164
+ "movdqa 128(%3),%%xmm3 \n" // ygb = 1160 = 1.164 * 16
"pcmpeqb %%xmm4,%%xmm4 \n" // 0xff000000
"pslld $0x18,%%xmm4 \n"
@@ -3244,8 +4305,8 @@ void I400ToARGBRow_AVX2(const uint8_t* y_buf,
const struct YuvConstants* yuvconstants,
int width) {
asm volatile(
- "vmovdqa 192(%3),%%ymm2 \n" // yg = 18997 = 1.164
- "vmovdqa 224(%3),%%ymm3 \n" // ygb = -1160 = 1.164*16
+ "vmovdqa 96(%3),%%ymm2 \n" // yg = 18997 = 1.164
+ "vmovdqa 128(%3),%%ymm3 \n" // ygb = -1160 = 1.164*16
"vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" // 0xff000000
"vpslld $0x18,%%ymm4,%%ymm4 \n"
@@ -3663,8 +4724,6 @@ void MergeUVRow_16_AVX2(const uint16_t* src_u,
// clang-format off
asm volatile (
"vmovd %4,%%xmm3 \n"
- "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n"
- "vbroadcastss %%xmm3,%%xmm3 \n"
"sub %0,%1 \n"
// 16 pixels per loop.
@@ -3696,7 +4755,7 @@ void MergeUVRow_16_AVX2(const uint16_t* src_u,
}
#endif // HAS_MERGEUVROW_AVX2
-#ifdef HAS_MERGEUVROW_16_AVX2
+#ifdef HAS_SPLITUVROW_16_AVX2
const uvec8 kSplitUVShuffle16 = {0, 1, 4, 5, 8, 9, 12, 13,
2, 3, 6, 7, 10, 11, 14, 15};
void SplitUVRow_16_AVX2(const uint16_t* src_uv,
@@ -3707,44 +4766,41 @@ void SplitUVRow_16_AVX2(const uint16_t* src_uv,
depth = 16 - depth;
// clang-format off
asm volatile (
- "vmovd %4,%%xmm3 \n"
- "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n"
- "vbroadcastss %%xmm3,%%xmm3 \n"
- "vbroadcastf128 %5,%%ymm4 \n"
- "sub %1,%2 \n"
+ "vmovd %4,%%xmm3 \n"
+ "vbroadcastf128 %5,%%ymm4 \n"
+ "sub %1,%2 \n"
// 16 pixels per loop.
LABELALIGN
- "1: \n"
- "vmovdqu (%0),%%ymm0 \n"
- "vmovdqu 0x20(%0),%%ymm1 \n"
- "add $0x40,%0 \n"
-
- "vpsrlw %%xmm3,%%ymm0,%%ymm0 \n"
- "vpsrlw %%xmm3,%%ymm1,%%ymm1 \n"
- "vpshufb %%ymm4,%%ymm0,%%ymm0 \n"
- "vpshufb %%ymm4,%%ymm1,%%ymm1 \n"
- "vpermq $0xd8,%%ymm0,%%ymm0 \n"
- "vpermq $0xd8,%%ymm1,%%ymm1 \n"
- "vextractf128 $0x0,%%ymm0,(%1) \n"
- "vextractf128 $0x0,%%ymm1,0x10(%1) \n"
- "vextractf128 $0x1,%%ymm0,(%1,%2) \n"
- "vextractf128 $0x1,%%ymm1,0x10(%1,%2) \n"
- "add $0x20,%1 \n"
- "sub $0x10,%3 \n"
- "jg 1b \n"
- "vzeroupper \n"
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n"
+ "vmovdqu 0x20(%0),%%ymm1 \n"
+ "add $0x40,%0 \n"
+
+ "vpsrlw %%xmm3,%%ymm0,%%ymm0 \n"
+ "vpsrlw %%xmm3,%%ymm1,%%ymm1 \n"
+ "vpshufb %%ymm4,%%ymm0,%%ymm0 \n"
+ "vpshufb %%ymm4,%%ymm1,%%ymm1 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vpermq $0xd8,%%ymm1,%%ymm1 \n"
+ "vextractf128 $0x0,%%ymm0,(%1) \n"
+ "vextractf128 $0x0,%%ymm1,0x10(%1) \n"
+ "vextractf128 $0x1,%%ymm0,(%1,%2) \n"
+ "vextractf128 $0x1,%%ymm1,0x10(%1,%2) \n"
+ "add $0x20,%1 \n"
+ "sub $0x10,%3 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
: "+r"(src_uv), // %0
- "+r"(dst_u), // %1
- "+r"(dst_v), // %2
- "+r"(width), // %3
- "+r"(depth) // %4
- :
+ "+r"(dst_u), // %1
+ "+r"(dst_v), // %2
+ "+r"(width) // %3
+ : "r"(depth), // %4
"m"(kSplitUVShuffle16) // %5
: "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4");
// clang-format on
}
-#endif // HAS_MERGEUVROW_AVX2
+#endif // HAS_SPLITUVROW_16_AVX2
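SplitUVRow_16_AVX2 now treats depth as a plain input operand; the row deinterleaves 16-bit UV while shifting each sample right by 16 - depth, which appears to convert msb-aligned samples (P010-style) back to lsb alignment. A scalar sketch under that assumption (not the libyuv entry point itself):

#include <stdint.h>

static void SplitUV16Sketch(const uint16_t* src_uv, uint16_t* dst_u,
                            uint16_t* dst_v, int depth, int width) {
  const int shift = 16 - depth;  // the vpsrlw amount computed before the loop
  for (int x = 0; x < width; ++x) {
    dst_u[x] = (uint16_t)(src_uv[2 * x + 0] >> shift);
    dst_v[x] = (uint16_t)(src_uv[2 * x + 1] >> shift);
  }
}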
// Use scale to convert lsb formats to msb, depending how many bits there are:
// 128 = 9 bits
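From the single entry shown (128 for 9 bits), the scale appears to be 1 << (16 - depth), so multiplying an lsb-aligned sample by it left-aligns the value in 16 bits; treat that relationship as an inference rather than a documented contract. A quick worked check:

#include <stdint.h>

// Assumed mapping: depth 9 -> scale 128, depth 16 -> scale 1.
static inline uint16_t LsbToMsbSketch(uint16_t v, int depth) {
  uint16_t scale = (uint16_t)(1u << (16 - depth));
  return (uint16_t)(v * scale);  // e.g. 9-bit 0x1FF * 128 = 0xFF80 (msb aligned)
}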
@@ -3797,24 +4853,24 @@ void DivideRow_16_AVX2(const uint16_t* src_y,
int width) {
// clang-format off
asm volatile (
- "vmovd %3,%%xmm3 \n"
- "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n"
- "vbroadcastss %%xmm3,%%ymm3 \n"
- "sub %0,%1 \n"
+ "vmovd %3,%%xmm3 \n"
+ "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n"
+ "vbroadcastss %%xmm3,%%ymm3 \n"
+ "sub %0,%1 \n"
// 32 pixels per loop.
LABELALIGN
- "1: \n"
- "vmovdqu (%0),%%ymm0 \n"
- "vmovdqu 0x20(%0),%%ymm1 \n"
- "vpmulhuw %%ymm3,%%ymm0,%%ymm0 \n"
- "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n"
- "vmovdqu %%ymm0,(%0,%1) \n"
- "vmovdqu %%ymm1,0x20(%0,%1) \n"
- "add $0x40,%0 \n"
- "sub $0x20,%2 \n"
- "jg 1b \n"
- "vzeroupper \n"
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n"
+ "vmovdqu 0x20(%0),%%ymm1 \n"
+ "vpmulhuw %%ymm3,%%ymm0,%%ymm0 \n"
+ "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n"
+ "vmovdqu %%ymm0,(%0,%1) \n"
+ "vmovdqu %%ymm1,0x20(%0,%1) \n"
+ "add $0x40,%0 \n"
+ "sub $0x20,%2 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
: "+r"(src_y), // %0
"+r"(dst_y), // %1
"+r"(width), // %2
@@ -4202,7 +5258,9 @@ void MergeARGBRow_SSE2(const uint8_t* src_r,
:
: "memory", "cc", "xmm0", "xmm1", "xmm2");
}
+#endif
+#ifdef HAS_MERGEXRGBROW_SSE2
void MergeXRGBRow_SSE2(const uint8_t* src_r,
const uint8_t* src_g,
const uint8_t* src_b,
@@ -4286,7 +5344,9 @@ void MergeARGBRow_AVX2(const uint8_t* src_r,
:
: "memory", "cc", "xmm0", "xmm1", "xmm2");
}
+#endif
+#ifdef HAS_MERGEXRGBROW_AVX2
void MergeXRGBRow_AVX2(const uint8_t* src_r,
const uint8_t* src_g,
const uint8_t* src_b,
@@ -4380,7 +5440,9 @@ void SplitARGBRow_SSE2(const uint8_t* src_argb,
:
: "memory", "cc", "xmm0", "xmm1", "xmm2");
}
+#endif
+#ifdef HAS_SPLITXRGBROW_SSE2
void SplitXRGBRow_SSE2(const uint8_t* src_argb,
uint8_t* dst_r,
uint8_t* dst_g,
@@ -4471,12 +5533,14 @@ void SplitARGBRow_SSSE3(const uint8_t* src_argb,
#if defined(__i386__)
"+m"(width) // %5
#else
- "+rm"(width) // %5
+ "+rm"(width) // %5
#endif
: "m"(kShuffleMaskARGBSplit) // %6
: "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3");
}
+#endif
+#ifdef HAS_SPLITXRGBROW_SSSE3
void SplitXRGBRow_SSSE3(const uint8_t* src_argb,
uint8_t* dst_r,
uint8_t* dst_g,
@@ -4562,13 +5626,15 @@ void SplitARGBRow_AVX2(const uint8_t* src_argb,
#if defined(__i386__)
"+m"(width) // %5
#else
- "+rm"(width) // %5
+ "+rm"(width) // %5
#endif
: "m"(kShuffleMaskARGBSplit), // %6
"m"(kShuffleMaskARGBPermute) // %7
: "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4");
}
+#endif
+#ifdef HAS_SPLITXRGBROW_AVX2
void SplitXRGBRow_AVX2(const uint8_t* src_argb,
uint8_t* dst_r,
uint8_t* dst_g,
@@ -4610,7 +5676,318 @@ void SplitXRGBRow_AVX2(const uint8_t* src_argb,
"+r"(width) // %4
: "m"(kShuffleMaskARGBSplit), // %5
"m"(kShuffleMaskARGBPermute) // %6
- : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3");
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4");
+}
+#endif
+
+#ifdef HAS_MERGEXR30ROW_AVX2
+void MergeXR30Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int depth,
+ int width) {
+ int shift = depth - 10;
+ asm volatile(
+
+ "sub %0,%1 \n"
+ "sub %0,%2 \n"
+ "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants
+ "vpsrlw $14,%%ymm5,%%ymm5 \n"
+ "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits
+ "vpcmpeqb %%ymm6,%%ymm6,%%ymm6 \n"
+ "vpsrlw $6,%%ymm6,%%ymm6 \n"
+ "vmovd %5,%%xmm4 \n"
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n"
+ "vmovdqu (%0,%1),%%ymm1 \n"
+ "vmovdqu (%0,%2),%%ymm2 \n"
+ "vpsrlw %%xmm4,%%ymm0,%%ymm0 \n"
+ "vpsrlw %%xmm4,%%ymm1,%%ymm1 \n"
+ "vpsrlw %%xmm4,%%ymm2,%%ymm2 \n"
+ "vpminuw %%ymm0,%%ymm6,%%ymm0 \n"
+ "vpminuw %%ymm1,%%ymm6,%%ymm1 \n"
+ "vpminuw %%ymm2,%%ymm6,%%ymm2 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vpermq $0xd8,%%ymm1,%%ymm1 \n"
+ "vpermq $0xd8,%%ymm2,%%ymm2 \n"
+ "vpsllw $0x4,%%ymm0,%%ymm0 \n" // Shift R to target bit
+ "vpunpckhwd %%ymm0,%%ymm2,%%ymm3 \n" // RB
+ "vpunpcklwd %%ymm0,%%ymm2,%%ymm0 \n"
+ "vpunpckhwd %%ymm5,%%ymm1,%%ymm2 \n" // AG
+ "vpunpcklwd %%ymm5,%%ymm1,%%ymm1 \n"
+ "vpslld $0xa,%%ymm1,%%ymm1 \n" // Shift AG to target bit
+ "vpslld $0xa,%%ymm2,%%ymm2 \n"
+ "vpor %%ymm1,%%ymm0,%%ymm0 \n" // Combine
+ "vpor %%ymm2,%%ymm3,%%ymm3 \n"
+ "vmovdqu %%ymm0,(%3) \n"
+ "vmovdqu %%ymm3,0x20(%3) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x40(%3),%3 \n"
+ "sub $0x10,%4 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar30), // %3
+ "+r"(width) // %4
+#if defined(__i386__)
+ : "m"(shift) // %5
+#else
+ : "rm"(shift) // %5
+#endif
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5");
+}
+#endif
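MergeXR30Row_AVX2 shifts each plane down to 10 bits, clamps to 1023, and packs into AR30 with a fully opaque 2-bit alpha; the field positions below are inferred from the shift amounts in the loop. A per-pixel scalar sketch (illustrative, not the libyuv API):

#include <stdint.h>

// Assumed AR30 layout: alpha in bits 30..31, R in 20..29, G in 10..19, B in 0..9.
static inline uint32_t PackXR30Sketch(uint16_t r, uint16_t g, uint16_t b,
                                      int depth) {
  uint32_t r10 = r >> (depth - 10);  // vpsrlw by depth - 10
  uint32_t g10 = g >> (depth - 10);
  uint32_t b10 = b >> (depth - 10);
  if (r10 > 1023) r10 = 1023;        // vpminuw against 0x03ff
  if (g10 > 1023) g10 = 1023;
  if (b10 > 1023) b10 = 1023;
  return 0xC0000000u | (r10 << 20) | (g10 << 10) | b10;  // 2 alpha bits set
}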
+
+#ifdef HAS_MERGEAR64ROW_AVX2
+static const lvec32 MergeAR64Permute = {0, 4, 2, 6, 1, 5, 3, 7};
+void MergeAR64Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ int shift = 16 - depth;
+ int mask = (1 << depth) - 1;
+ mask = (mask << 16) + mask;
+ asm volatile(
+
+ "sub %0,%1 \n"
+ "sub %0,%2 \n"
+ "sub %0,%3 \n"
+ "vmovdqa %8,%%ymm5 \n"
+ "vmovd %6,%%xmm6 \n"
+ "vbroadcastss %7,%%ymm7 \n"
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n" // R
+ "vmovdqu (%0,%1),%%ymm1 \n" // G
+ "vmovdqu (%0,%2),%%ymm2 \n" // B
+ "vmovdqu (%0,%3),%%ymm3 \n" // A
+ "vpminuw %%ymm0,%%ymm7,%%ymm0 \n"
+ "vpminuw %%ymm1,%%ymm7,%%ymm1 \n"
+ "vpminuw %%ymm2,%%ymm7,%%ymm2 \n"
+ "vpminuw %%ymm3,%%ymm7,%%ymm3 \n"
+ "vpsllw %%xmm6,%%ymm0,%%ymm0 \n"
+ "vpsllw %%xmm6,%%ymm1,%%ymm1 \n"
+ "vpsllw %%xmm6,%%ymm2,%%ymm2 \n"
+ "vpsllw %%xmm6,%%ymm3,%%ymm3 \n"
+ "vpermd %%ymm0,%%ymm5,%%ymm0 \n"
+ "vpermd %%ymm1,%%ymm5,%%ymm1 \n"
+ "vpermd %%ymm2,%%ymm5,%%ymm2 \n"
+ "vpermd %%ymm3,%%ymm5,%%ymm3 \n"
+ "vpunpcklwd %%ymm1,%%ymm2,%%ymm4 \n" // BG(low)
+ "vpunpckhwd %%ymm1,%%ymm2,%%ymm1 \n" // BG(hi)
+ "vpunpcklwd %%ymm3,%%ymm0,%%ymm2 \n" // RA(low)
+ "vpunpckhwd %%ymm3,%%ymm0,%%ymm0 \n" // RA(hi)
+ "vpunpckldq %%ymm2,%%ymm4,%%ymm3 \n" // BGRA(1)
+ "vpunpckhdq %%ymm2,%%ymm4,%%ymm4 \n" // BGRA(3)
+ "vpunpckldq %%ymm0,%%ymm1,%%ymm2 \n" // BGRA(2)
+ "vpunpckhdq %%ymm0,%%ymm1,%%ymm1 \n" // BGRA(4)
+ "vmovdqu %%ymm3,(%4) \n"
+ "vmovdqu %%ymm2,0x20(%4) \n"
+ "vmovdqu %%ymm4,0x40(%4) \n"
+ "vmovdqu %%ymm1,0x60(%4) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x80(%4),%4 \n"
+ "subl $0x10,%5 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(src_a), // %3
+ "+r"(dst_ar64), // %4
+#if defined(__i386__)
+ "+m"(width) // %5
+#else
+ "+rm"(width) // %5
+#endif
+ : "m"(shift), // %6
+ "m"(mask), // %7
+ "m"(MergeAR64Permute) // %8
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6",
+ "xmm7");
+}
+#endif
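MergeAR64Row_AVX2 clamps each plane to the stated depth and left-aligns it in 16 bits, interleaving the channels as 16-bit B, G, R, A per pixel (the BGRA comments in the loop). A scalar sketch under those assumptions, with an illustrative helper name:

#include <stdint.h>

static void MergeAR64PixelSketch(uint16_t r, uint16_t g, uint16_t b, uint16_t a,
                                 int depth, uint16_t out_bgra[4]) {
  const uint16_t max = (uint16_t)((1u << depth) - 1);  // vpminuw clamp
  const int shift = 16 - depth;                        // vpsllw alignment
  out_bgra[0] = (uint16_t)((b > max ? max : b) << shift);
  out_bgra[1] = (uint16_t)((g > max ? max : g) << shift);
  out_bgra[2] = (uint16_t)((r > max ? max : r) << shift);
  out_bgra[3] = (uint16_t)((a > max ? max : a) << shift);
}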
+
+#ifdef HAS_MERGEXR64ROW_AVX2
+void MergeXR64Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ int shift = 16 - depth;
+ int mask = (1 << depth) - 1;
+ mask = (mask << 16) + mask;
+ asm volatile(
+
+ "sub %0,%1 \n"
+ "sub %0,%2 \n"
+ "vmovdqa %7,%%ymm5 \n"
+ "vmovd %5,%%xmm6 \n"
+ "vbroadcastss %6,%%ymm7 \n"
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n" // R
+ "vmovdqu (%0,%1),%%ymm1 \n" // G
+ "vmovdqu (%0,%2),%%ymm2 \n" // B
+ "vpminuw %%ymm0,%%ymm7,%%ymm0 \n"
+ "vpminuw %%ymm1,%%ymm7,%%ymm1 \n"
+ "vpminuw %%ymm2,%%ymm7,%%ymm2 \n"
+ "vpsllw %%xmm6,%%ymm0,%%ymm0 \n"
+ "vpsllw %%xmm6,%%ymm1,%%ymm1 \n"
+ "vpsllw %%xmm6,%%ymm2,%%ymm2 \n"
+ "vpermd %%ymm0,%%ymm5,%%ymm0 \n"
+ "vpermd %%ymm1,%%ymm5,%%ymm1 \n"
+ "vpermd %%ymm2,%%ymm5,%%ymm2 \n"
+ "vpcmpeqb %%ymm3,%%ymm3,%%ymm3 \n" // A (0xffff)
+ "vpunpcklwd %%ymm1,%%ymm2,%%ymm4 \n" // BG(low)
+ "vpunpckhwd %%ymm1,%%ymm2,%%ymm1 \n" // BG(hi)
+ "vpunpcklwd %%ymm3,%%ymm0,%%ymm2 \n" // RA(low)
+ "vpunpckhwd %%ymm3,%%ymm0,%%ymm0 \n" // RA(hi)
+ "vpunpckldq %%ymm2,%%ymm4,%%ymm3 \n" // BGRA(1)
+ "vpunpckhdq %%ymm2,%%ymm4,%%ymm4 \n" // BGRA(3)
+ "vpunpckldq %%ymm0,%%ymm1,%%ymm2 \n" // BGRA(2)
+ "vpunpckhdq %%ymm0,%%ymm1,%%ymm1 \n" // BGRA(4)
+ "vmovdqu %%ymm3,(%3) \n"
+ "vmovdqu %%ymm2,0x20(%3) \n"
+ "vmovdqu %%ymm4,0x40(%3) \n"
+ "vmovdqu %%ymm1,0x60(%3) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x80(%3),%3 \n"
+ "subl $0x10,%4 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar64), // %3
+ "+r"(width) // %4
+ : "m"(shift), // %5
+ "m"(mask), // %6
+ "m"(MergeAR64Permute) // %7
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6",
+ "xmm7");
+}
+#endif
+
+#ifdef HAS_MERGEARGB16TO8ROW_AVX2
+static const uvec8 MergeARGB16To8Shuffle = {0, 8, 1, 9, 2, 10, 3, 11,
+ 4, 12, 5, 13, 6, 14, 7, 15};
+void MergeARGB16To8Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ int shift = depth - 8;
+ asm volatile(
+
+ "sub %0,%1 \n"
+ "sub %0,%2 \n"
+ "sub %0,%3 \n"
+ "vbroadcastf128 %7,%%ymm5 \n"
+ "vmovd %6,%%xmm6 \n"
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n" // R
+ "vmovdqu (%0,%1),%%ymm1 \n" // G
+ "vmovdqu (%0,%2),%%ymm2 \n" // B
+ "vmovdqu (%0,%3),%%ymm3 \n" // A
+ "vpsrlw %%xmm6,%%ymm0,%%ymm0 \n"
+ "vpsrlw %%xmm6,%%ymm1,%%ymm1 \n"
+ "vpsrlw %%xmm6,%%ymm2,%%ymm2 \n"
+ "vpsrlw %%xmm6,%%ymm3,%%ymm3 \n"
+ "vpackuswb %%ymm1,%%ymm2,%%ymm1 \n" // BG (planar)
+ "vpackuswb %%ymm3,%%ymm0,%%ymm0 \n" // RA (planar)
+ "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" // BG (interleave)
+ "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" // RA (interleave)
+ "vpermq $0xd8,%%ymm1,%%ymm1 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vpunpcklwd %%ymm0,%%ymm1,%%ymm2 \n" // BGRA (low)
+ "vpunpckhwd %%ymm0,%%ymm1,%%ymm0 \n" // BGRA (hi)
+ "vmovdqu %%ymm2,(%4) \n"
+ "vmovdqu %%ymm0,0x20(%4) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x40(%4),%4 \n"
+ "subl $0x10,%5 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(src_a), // %3
+ "+r"(dst_argb), // %4
+#if defined(__i386__)
+ "+m"(width) // %5
+#else
+ "+rm"(width) // %5
+#endif
+ : "m"(shift), // %6
+ "m"(MergeARGB16To8Shuffle) // %7
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6");
+}
+#endif
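MergeARGB16To8Row_AVX2 drops each plane to 8 bits by shifting off the extra precision, with the saturating pack catching anything above the nominal depth, and writes interleaved B, G, R, A bytes. Roughly, per pixel (illustrative helpers only):

#include <stdint.h>

static inline uint8_t To8Sketch(uint16_t v, int depth) {
  unsigned s = v >> (depth - 8);        // vpsrlw by depth - 8
  return (uint8_t)(s > 255 ? 255 : s);  // vpackuswb saturation
}
static void MergeARGB16To8PixelSketch(uint16_t r, uint16_t g, uint16_t b,
                                      uint16_t a, int depth,
                                      uint8_t out_bgra[4]) {
  out_bgra[0] = To8Sketch(b, depth);
  out_bgra[1] = To8Sketch(g, depth);
  out_bgra[2] = To8Sketch(r, depth);
  out_bgra[3] = To8Sketch(a, depth);
}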
+
+#ifdef HAS_MERGEXRGB16TO8ROW_AVX2
+void MergeXRGB16To8Row_AVX2(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ int shift = depth - 8;
+ asm volatile(
+
+ "sub %0,%1 \n"
+ "sub %0,%2 \n"
+ "vbroadcastf128 %6,%%ymm5 \n"
+ "vmovd %5,%%xmm6 \n"
+ "vpcmpeqb %%ymm3,%%ymm3,%%ymm3 \n"
+ "vpsrlw $8,%%ymm3,%%ymm3 \n" // A (0xff)
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%ymm0 \n" // R
+ "vmovdqu (%0,%1),%%ymm1 \n" // G
+ "vmovdqu (%0,%2),%%ymm2 \n" // B
+ "vpsrlw %%xmm6,%%ymm0,%%ymm0 \n"
+ "vpsrlw %%xmm6,%%ymm1,%%ymm1 \n"
+ "vpsrlw %%xmm6,%%ymm2,%%ymm2 \n"
+ "vpackuswb %%ymm1,%%ymm2,%%ymm1 \n" // BG (planar)
+ "vpackuswb %%ymm3,%%ymm0,%%ymm0 \n" // RA (planar)
+ "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" // BG (interleave)
+ "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" // RA (interleave)
+ "vpermq $0xd8,%%ymm1,%%ymm1 \n"
+ "vpermq $0xd8,%%ymm0,%%ymm0 \n"
+ "vpunpcklwd %%ymm0,%%ymm1,%%ymm2 \n" // BGRA (low)
+ "vpunpckhwd %%ymm0,%%ymm1,%%ymm0 \n" // BGRA (hi)
+ "vmovdqu %%ymm2,(%3) \n"
+ "vmovdqu %%ymm0,0x20(%3) \n"
+ "lea 0x20(%0),%0 \n"
+ "lea 0x40(%3),%3 \n"
+ "subl $0x10,%4 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_argb), // %3
+ "+r"(width) // %4
+ : "m"(shift), // %5
+ "m"(MergeARGB16To8Shuffle) // %6
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6");
}
#endif
@@ -5339,7 +6716,7 @@ static const uvec8 kShuffleAlpha = {3u, 0x80, 3u, 0x80, 7u, 0x80, 7u, 0x80,
11u, 0x80, 11u, 0x80, 15u, 0x80, 15u, 0x80};
// Blend 8 pixels at a time
-void ARGBBlendRow_SSSE3(const uint8_t* src_argb0,
+void ARGBBlendRow_SSSE3(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -5410,7 +6787,7 @@ void ARGBBlendRow_SSSE3(const uint8_t* src_argb0,
"sub $0x1,%3 \n"
"jge 91b \n"
"99: \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -6012,7 +7389,7 @@ void ARGBShadeRow_SSE2(const uint8_t* src_argb,
#ifdef HAS_ARGBMULTIPLYROW_SSE2
// Multiply 2 rows of ARGB pixels together, 4 pixels at a time.
-void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0,
+void ARGBMultiplyRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -6040,7 +7417,7 @@ void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0,
"lea 0x10(%2),%2 \n"
"sub $0x4,%3 \n"
"jg 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -6051,7 +7428,7 @@ void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0,
#ifdef HAS_ARGBMULTIPLYROW_AVX2
// Multiply 2 rows of ARGB pixels together, 8 pixels at a time.
-void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0,
+void ARGBMultiplyRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -6078,7 +7455,7 @@ void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0,
"sub $0x8,%3 \n"
"jg 1b \n"
"vzeroupper \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -6089,7 +7466,7 @@ void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0,
#ifdef HAS_ARGBADDROW_SSE2
// Add 2 rows of ARGB pixels together, 4 pixels at a time.
-void ARGBAddRow_SSE2(const uint8_t* src_argb0,
+void ARGBAddRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -6106,7 +7483,7 @@ void ARGBAddRow_SSE2(const uint8_t* src_argb0,
"lea 0x10(%2),%2 \n"
"sub $0x4,%3 \n"
"jg 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -6117,7 +7494,7 @@ void ARGBAddRow_SSE2(const uint8_t* src_argb0,
#ifdef HAS_ARGBADDROW_AVX2
// Add 2 rows of ARGB pixels together, 4 pixels at a time.
-void ARGBAddRow_AVX2(const uint8_t* src_argb0,
+void ARGBAddRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -6134,7 +7511,7 @@ void ARGBAddRow_AVX2(const uint8_t* src_argb0,
"sub $0x8,%3 \n"
"jg 1b \n"
"vzeroupper \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -6145,7 +7522,7 @@ void ARGBAddRow_AVX2(const uint8_t* src_argb0,
#ifdef HAS_ARGBSUBTRACTROW_SSE2
// Subtract 2 rows of ARGB pixels, 4 pixels at a time.
-void ARGBSubtractRow_SSE2(const uint8_t* src_argb0,
+void ARGBSubtractRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -6162,7 +7539,7 @@ void ARGBSubtractRow_SSE2(const uint8_t* src_argb0,
"lea 0x10(%2),%2 \n"
"sub $0x4,%3 \n"
"jg 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -6173,7 +7550,7 @@ void ARGBSubtractRow_SSE2(const uint8_t* src_argb0,
#ifdef HAS_ARGBSUBTRACTROW_AVX2
// Subtract 2 rows of ARGB pixels, 8 pixels at a time.
-void ARGBSubtractRow_AVX2(const uint8_t* src_argb0,
+void ARGBSubtractRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -6190,7 +7567,7 @@ void ARGBSubtractRow_AVX2(const uint8_t* src_argb0,
"sub $0x8,%3 \n"
"jg 1b \n"
"vzeroupper \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -7279,7 +8656,7 @@ void HalfFloatRow_AVX2(const uint16_t* src,
#if defined(__x86_64__)
: "x"(scale) // %3
#else
- : "m"(scale) // %3
+ : "m"(scale) // %3
#endif
: "memory", "cc", "xmm2", "xmm3", "xmm4", "xmm5");
}
@@ -7317,7 +8694,7 @@ void HalfFloatRow_F16C(const uint16_t* src,
#if defined(__x86_64__)
: "x"(scale) // %3
#else
- : "m"(scale) // %3
+ : "m"(scale) // %3
#endif
: "memory", "cc", "xmm2", "xmm3", "xmm4");
}
diff --git a/third_party/libyuv/source/row_mmi.cc b/third_party/libyuv/source/row_mmi.cc
index 9a8e2cb2d1..362fd1cfcc 100644
--- a/third_party/libyuv/source/row_mmi.cc
+++ b/third_party/libyuv/source/row_mmi.cc
@@ -605,7 +605,7 @@ void ARGBToARGB4444Row_MMI(const uint8_t* src_argb,
: "memory");
}
-void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void ARGBToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) {
uint64_t src, src_hi, src_lo;
uint64_t dest0, dest1, dest2, dest3;
const uint64_t value = 0x1080;
@@ -613,8 +613,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
__asm__ volatile(
"1: \n\t"
- "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x07(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x00(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -626,8 +626,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest0], %[dest0], %[src] \n\t"
"psrlw %[dest0], %[dest0], %[eight] \n\t"
- "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x08(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -639,8 +639,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest1], %[dest1], %[src] \n\t"
"psrlw %[dest1], %[dest1], %[eight] \n\t"
- "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x17(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x10(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -652,8 +652,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest2], %[dest2], %[src] \n\t"
"psrlw %[dest2], %[dest2], %[eight] \n\t"
- "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x18(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -671,20 +671,20 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t"
"gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t"
- "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t"
+ "daddiu %[src_argb], %[src_argb], 0x20 \n\t"
"daddiu %[dst_y], %[dst_y], 0x08 \n\t"
"daddi %[width], %[width], -0x08 \n\t"
"bnez %[width], 1b \n\t"
: [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo),
[dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2),
[dest3] "=&f"(dest3)
- : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width),
+ : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width),
[mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08),
[zero] "f"(0x00)
: "memory");
}
-void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
+void ARGBToUVRow_MMI(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -700,9 +700,9 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"dli %[tmp0], 0x0001000100010001 \n\t"
"dmtc1 %[tmp0], %[ftmp12] \n\t"
"1: \n\t"
- "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t"
- "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t"
+ "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t"
+ "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -720,8 +720,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t"
"pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -748,8 +748,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest0_v], %[src1], %[src0] \n\t"
"psraw %[dest0_v], %[dest0_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -767,8 +767,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t"
"pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -795,8 +795,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest1_v], %[src1], %[src0] \n\t"
"psraw %[dest1_v], %[dest1_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -814,8 +814,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t"
"pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -842,8 +842,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest2_v], %[src1], %[src0] \n\t"
"psraw %[dest2_v], %[dest2_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -861,8 +861,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t"
"pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -901,7 +901,7 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
"gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t"
"gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t"
- "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t"
+ "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t"
"daddiu %[dst_u], %[dst_u], 0x08 \n\t"
"daddiu %[dst_v], %[dst_v], 0x08 \n\t"
"daddi %[width], %[width], -0x10 \n\t"
@@ -913,7 +913,7 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
[dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]),
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]),
[ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0])
- : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb),
+ : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb),
[dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width),
[mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value),
[zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01),
@@ -921,7 +921,7 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0,
: "memory");
}
-void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void BGRAToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) {
uint64_t src, src_hi, src_lo;
uint64_t dest0, dest1, dest2, dest3;
const uint64_t value = 0x1080;
@@ -929,8 +929,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
__asm__ volatile(
"1: \n\t"
- "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x07(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x00(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -942,8 +942,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest0], %[dest0], %[src] \n\t"
"psrlw %[dest0], %[dest0], %[eight] \n\t"
- "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x08(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -955,8 +955,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest1], %[dest1], %[src] \n\t"
"psrlw %[dest1], %[dest1], %[eight] \n\t"
- "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x17(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x10(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -968,8 +968,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest2], %[dest2], %[src] \n\t"
"psrlw %[dest2], %[dest2], %[eight] \n\t"
- "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x18(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -987,20 +987,20 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t"
"gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t"
- "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t"
+ "daddiu %[src_argb], %[src_argb], 0x20 \n\t"
"daddiu %[dst_y], %[dst_y], 0x08 \n\t"
"daddi %[width], %[width], -0x08 \n\t"
"bnez %[width], 1b \n\t"
: [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo),
[dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2),
[dest3] "=&f"(dest3)
- : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width),
+ : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width),
[mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08),
[zero] "f"(0x00)
: "memory");
}
-void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
+void BGRAToUVRow_MMI(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1016,9 +1016,9 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"dli %[tmp0], 0x0001000100010001 \n\t"
"dmtc1 %[tmp0], %[ftmp12] \n\t"
"1: \n\t"
- "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t"
- "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t"
+ "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t"
+ "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1036,8 +1036,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t"
"pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1064,8 +1064,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest0_v], %[src0], %[src1] \n\t"
"psraw %[dest0_v], %[dest0_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1083,8 +1083,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t"
"pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1111,8 +1111,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest1_v], %[src0], %[src1] \n\t"
"psraw %[dest1_v], %[dest1_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1130,8 +1130,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t"
"pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1158,8 +1158,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest2_v], %[src0], %[src1] \n\t"
"psraw %[dest2_v], %[dest2_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1177,8 +1177,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t"
"pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1217,7 +1217,7 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
"gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t"
"gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t"
- "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t"
+ "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t"
"daddiu %[dst_u], %[dst_u], 0x08 \n\t"
"daddiu %[dst_v], %[dst_v], 0x08 \n\t"
"daddi %[width], %[width], -0x10 \n\t"
@@ -1229,7 +1229,7 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
[dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]),
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]),
[ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0])
- : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb),
+ : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb),
[dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width),
[mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value),
[zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01),
@@ -1237,7 +1237,7 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0,
: "memory");
}
-void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void ABGRToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) {
uint64_t src, src_hi, src_lo;
uint64_t dest0, dest1, dest2, dest3;
const uint64_t value = 0x1080;
@@ -1245,8 +1245,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
__asm__ volatile(
"1: \n\t"
- "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x07(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x00(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1258,8 +1258,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest0], %[dest0], %[src] \n\t"
"psrlw %[dest0], %[dest0], %[eight] \n\t"
- "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x08(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1271,8 +1271,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest1], %[dest1], %[src] \n\t"
"psrlw %[dest1], %[dest1], %[eight] \n\t"
- "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x17(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x10(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1284,8 +1284,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest2], %[dest2], %[src] \n\t"
"psrlw %[dest2], %[dest2], %[eight] \n\t"
- "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x18(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1303,20 +1303,20 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t"
"gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t"
- "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t"
+ "daddiu %[src_argb], %[src_argb], 0x20 \n\t"
"daddiu %[dst_y], %[dst_y], 0x08 \n\t"
"daddi %[width], %[width], -0x08 \n\t"
"bnez %[width], 1b \n\t"
: [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo),
[dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2),
[dest3] "=&f"(dest3)
- : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width),
+ : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width),
[mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08),
[zero] "f"(0x00)
: "memory");
}
-void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
+void ABGRToUVRow_MMI(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1332,9 +1332,9 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"dli %[tmp0], 0x0001000100010001 \n\t"
"dmtc1 %[tmp0], %[ftmp12] \n\t"
"1: \n\t"
- "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t"
- "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t"
+ "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t"
+ "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1352,8 +1352,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t"
"pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1380,8 +1380,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest0_v], %[src0], %[src1] \n\t"
"psraw %[dest0_v], %[dest0_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1399,8 +1399,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t"
"pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1427,8 +1427,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest1_v], %[src0], %[src1] \n\t"
"psraw %[dest1_v], %[dest1_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1446,8 +1446,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t"
"pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1474,8 +1474,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest2_v], %[src0], %[src1] \n\t"
"psraw %[dest2_v], %[dest2_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1493,8 +1493,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t"
"pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1533,7 +1533,7 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
"gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t"
"gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t"
- "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t"
+ "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t"
"daddiu %[dst_u], %[dst_u], 0x08 \n\t"
"daddiu %[dst_v], %[dst_v], 0x08 \n\t"
"daddi %[width], %[width], -0x10 \n\t"
@@ -1545,7 +1545,7 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
[dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]),
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]),
[ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0])
- : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb),
+ : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb),
[dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width),
[mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value),
[zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01),
@@ -1553,7 +1553,7 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0,
: "memory");
}
-void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void RGBAToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) {
uint64_t src, src_hi, src_lo;
uint64_t dest0, dest1, dest2, dest3;
const uint64_t value = 0x1080;
@@ -1561,8 +1561,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
__asm__ volatile(
"1: \n\t"
- "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x07(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x00(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1574,8 +1574,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest0], %[dest0], %[src] \n\t"
"psrlw %[dest0], %[dest0], %[eight] \n\t"
- "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x08(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1587,8 +1587,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest1], %[dest1], %[src] \n\t"
"psrlw %[dest1], %[dest1], %[eight] \n\t"
- "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x17(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x10(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1600,8 +1600,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest2], %[dest2], %[src] \n\t"
"psrlw %[dest2], %[dest2], %[eight] \n\t"
- "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x18(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1619,20 +1619,20 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t"
"gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t"
- "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t"
+ "daddiu %[src_argb], %[src_argb], 0x20 \n\t"
"daddiu %[dst_y], %[dst_y], 0x08 \n\t"
"daddi %[width], %[width], -0x08 \n\t"
"bnez %[width], 1b \n\t"
: [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo),
[dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2),
[dest3] "=&f"(dest3)
- : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width),
+ : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width),
[mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08),
[zero] "f"(0x00)
: "memory");
}
-void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
+void RGBAToUVRow_MMI(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1648,9 +1648,9 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"dli %[tmp0], 0x0001000100010001 \n\t"
"dmtc1 %[tmp0], %[ftmp12] \n\t"
"1: \n\t"
- "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t"
- "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t"
+ "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t"
+ "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1668,8 +1668,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t"
"pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1696,8 +1696,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest0_v], %[src1], %[src0] \n\t"
"psraw %[dest0_v], %[dest0_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1715,8 +1715,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t"
"pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1743,8 +1743,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest1_v], %[src1], %[src0] \n\t"
"psraw %[dest1_v], %[dest1_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1762,8 +1762,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t"
"pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1790,8 +1790,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest2_v], %[src1], %[src0] \n\t"
"psraw %[dest2_v], %[dest2_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1809,8 +1809,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t"
"pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1849,7 +1849,7 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
"gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t"
"gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t"
- "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t"
+ "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t"
"daddiu %[dst_u], %[dst_u], 0x08 \n\t"
"daddiu %[dst_v], %[dst_v], 0x08 \n\t"
"daddi %[width], %[width], -0x10 \n\t"
@@ -1861,7 +1861,7 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
[dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]),
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]),
[ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0])
- : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb),
+ : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb),
[dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width),
[mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value),
[zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01),
@@ -1869,7 +1869,7 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0,
: "memory");
}
-void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void RGB24ToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) {
uint64_t src, src_hi, src_lo;
uint64_t dest0, dest1, dest2, dest3;
const uint64_t value = 0x1080;
@@ -1877,8 +1877,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
__asm__ volatile(
"1: \n\t"
- "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x07(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x00(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1891,8 +1891,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest0], %[dest0], %[src] \n\t"
"psrlw %[dest0], %[dest0], %[eight] \n\t"
- "gsldlc1 %[src], 0x0d(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x06(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x0d(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x06(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1905,8 +1905,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest1], %[dest1], %[src] \n\t"
"psrlw %[dest1], %[dest1], %[eight] \n\t"
- "gsldlc1 %[src], 0x13(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x0c(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x13(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x0c(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1919,8 +1919,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest2], %[dest2], %[src] \n\t"
"psrlw %[dest2], %[dest2], %[eight] \n\t"
- "gsldlc1 %[src], 0x19(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x12(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x19(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x12(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -1939,20 +1939,20 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t"
"gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t"
- "daddiu %[src_argb0], %[src_argb0], 0x18 \n\t"
+ "daddiu %[src_argb], %[src_argb], 0x18 \n\t"
"daddiu %[dst_y], %[dst_y], 0x08 \n\t"
"daddi %[width], %[width], -0x08 \n\t"
"bnez %[width], 1b \n\t"
: [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo),
[dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2),
[dest3] "=&f"(dest3)
- : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width),
+ : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width),
[mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08),
[zero] "f"(0x00)
: "memory");
}
-void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
+void RGB24ToUVRow_MMI(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1968,9 +1968,9 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"dli %[tmp0], 0x0001000100010001 \n\t"
"dmtc1 %[tmp0], %[ftmp12] \n\t"
"1: \n\t"
- "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t"
- "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t"
+ "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t"
+ "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -1990,8 +1990,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t"
"pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x06(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x0d(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x06(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x0d(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x06(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x0d(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2020,8 +2020,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest0_v], %[src1], %[src0] \n\t"
"psraw %[dest0_v], %[dest0_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x0c(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x13(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x0c(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x13(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x0c(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x13(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2041,8 +2041,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t"
"pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x12(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x19(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x12(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x19(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x12(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x19(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2071,8 +2071,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest1_v], %[src1], %[src0] \n\t"
"psraw %[dest1_v], %[dest1_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2092,8 +2092,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t"
"pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x1e(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x25(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x1e(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x25(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x1e(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x25(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2122,8 +2122,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest2_v], %[src1], %[src0] \n\t"
"psraw %[dest2_v], %[dest2_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x24(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x2b(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x24(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x2b(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x24(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x2b(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2143,8 +2143,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t"
"pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x2a(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x31(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x2a(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x31(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x2a(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x31(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2185,7 +2185,7 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
"gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t"
"gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t"
- "daddiu %[src_rgb0], %[src_rgb0], 0x30 \n\t"
+ "daddiu %[src_rgb], %[src_rgb], 0x30 \n\t"
"daddiu %[dst_u], %[dst_u], 0x08 \n\t"
"daddiu %[dst_v], %[dst_v], 0x08 \n\t"
"daddi %[width], %[width], -0x10 \n\t"
@@ -2197,7 +2197,7 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
[dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]),
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]),
[ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0])
- : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb),
+ : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb),
[dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width),
[mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value),
[zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01),
@@ -2205,7 +2205,7 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0,
: "memory");
}
-void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void RAWToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) {
uint64_t src, src_hi, src_lo;
uint64_t dest0, dest1, dest2, dest3;
const uint64_t value = 0x1080;
@@ -2213,8 +2213,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
__asm__ volatile(
"1: \n\t"
- "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x07(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x00(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -2227,8 +2227,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest0], %[dest0], %[src] \n\t"
"psrlw %[dest0], %[dest0], %[eight] \n\t"
- "gsldlc1 %[src], 0x0d(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x06(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x0d(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x06(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -2241,8 +2241,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest1], %[dest1], %[src] \n\t"
"psrlw %[dest1], %[dest1], %[eight] \n\t"
- "gsldlc1 %[src], 0x13(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x0c(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x13(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x0c(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -2255,8 +2255,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"paddw %[dest2], %[dest2], %[src] \n\t"
"psrlw %[dest2], %[dest2], %[eight] \n\t"
- "gsldlc1 %[src], 0x19(%[src_argb0]) \n\t"
- "gsldrc1 %[src], 0x12(%[src_argb0]) \n\t"
+ "gsldlc1 %[src], 0x19(%[src_argb]) \n\t"
+ "gsldrc1 %[src], 0x12(%[src_argb]) \n\t"
"punpcklbh %[src_lo], %[src], %[zero] \n\t"
"pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t"
"pmaddhw %[src_lo], %[src_lo], %[mask] \n\t"
@@ -2275,20 +2275,20 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
"gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t"
"gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t"
- "daddiu %[src_argb0], %[src_argb0], 0x18 \n\t"
+ "daddiu %[src_argb], %[src_argb], 0x18 \n\t"
"daddiu %[dst_y], %[dst_y], 0x08 \n\t"
"daddi %[width], %[width], -0x08 \n\t"
"bnez %[width], 1b \n\t"
: [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo),
[dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2),
[dest3] "=&f"(dest3)
- : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width),
+ : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width),
[mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08),
[zero] "f"(0x00)
: "memory");
}
-void RAWToUVRow_MMI(const uint8_t* src_rgb0,
+void RAWToUVRow_MMI(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -2304,9 +2304,9 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"dli %[tmp0], 0x0001000100010001 \n\t"
"dmtc1 %[tmp0], %[ftmp12] \n\t"
"1: \n\t"
- "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t"
- "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t"
+ "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t"
+ "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2326,8 +2326,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t"
"pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x06(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x0d(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x06(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x0d(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x06(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x0d(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2356,8 +2356,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest0_v], %[src0], %[src1] \n\t"
"psraw %[dest0_v], %[dest0_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x0c(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x13(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x0c(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x13(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x0c(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x13(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2377,8 +2377,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t"
"pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x12(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x19(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x12(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x19(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x12(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x19(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2407,8 +2407,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest1_v], %[src0], %[src1] \n\t"
"psraw %[dest1_v], %[dest1_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2428,8 +2428,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t"
"pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x1e(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x25(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x1e(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x25(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x1e(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x25(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2458,8 +2458,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest2_v], %[src0], %[src1] \n\t"
"psraw %[dest2_v], %[dest2_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x24(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x2b(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x24(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x2b(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x24(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x2b(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2479,8 +2479,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t"
"pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x2a(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x31(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x2a(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x31(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x2a(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x31(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2521,7 +2521,7 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
"gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t"
"gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t"
- "daddiu %[src_rgb0], %[src_rgb0], 0x30 \n\t"
+ "daddiu %[src_rgb], %[src_rgb], 0x30 \n\t"
"daddiu %[dst_u], %[dst_u], 0x08 \n\t"
"daddiu %[dst_v], %[dst_v], 0x08 \n\t"
"daddi %[width], %[width], -0x10 \n\t"
@@ -2533,7 +2533,7 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
[dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]),
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]),
[ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0])
- : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb),
+ : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb),
[dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width),
[mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value),
[zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01),
@@ -2541,7 +2541,7 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0,
: "memory");
}
-void ARGBToYJRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void ARGBToYJRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) {
uint64_t src, src_hi, src_lo;
uint64_t dest, dest0, dest1, dest2, dest3;
uint64_t tmp0, tmp1;
@@ -2618,13 +2618,13 @@ void ARGBToYJRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
[src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1),
[dest2] "=&f"(dest2), [dest3] "=&f"(dest3), [tmp0] "=&f"(tmp0),
[tmp1] "=&f"(tmp1)
- : [src_ptr] "r"(src_argb0), [dst_ptr] "r"(dst_y), [mask0] "f"(mask0),
+ : [src_ptr] "r"(src_argb), [dst_ptr] "r"(dst_y), [mask0] "f"(mask0),
[mask1] "f"(mask1), [shift] "f"(shift), [value] "f"(value),
[width] "r"(width)
: "memory");
}
-void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
+void ARGBToUVJRow_MMI(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -2637,9 +2637,9 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
__asm__ volatile(
"1: \n\t"
- "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t"
- "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t"
+ "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t"
+ "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2655,8 +2655,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t"
"pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2681,8 +2681,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest0_v], %[src1], %[src0] \n\t"
"psraw %[dest0_v], %[dest0_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2698,8 +2698,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t"
"pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2724,8 +2724,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest1_v], %[src1], %[src0] \n\t"
"psraw %[dest1_v], %[dest1_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2741,8 +2741,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t"
"pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2767,8 +2767,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"psubw %[dest2_v], %[src1], %[src0] \n\t"
"psraw %[dest2_v], %[dest2_v], %[eight] \n\t"
- "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2784,8 +2784,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t"
"pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t"
- "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t"
- "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t"
+ "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t"
+ "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t"
"gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t"
"gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t"
"punpcklbh %[src_lo], %[src0], %[zero] \n\t"
@@ -2822,7 +2822,7 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
"gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t"
"gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t"
- "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t"
+ "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t"
"daddiu %[dst_u], %[dst_u], 0x08 \n\t"
"daddiu %[dst_v], %[dst_v], 0x08 \n\t"
"daddi %[width], %[width], -0x10 \n\t"
@@ -2833,7 +2833,7 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0,
[dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]),
[dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]),
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11])
- : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb),
+ : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb),
[dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width),
[mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value),
[zero] "f"(0x00), [eight] "f"(0x08),
@@ -4386,7 +4386,7 @@ void ARGBShadeRow_MMI(const uint8_t* src_argb,
: "memory");
}
-void ARGBMultiplyRow_MMI(const uint8_t* src_argb0,
+void ARGBMultiplyRow_MMI(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -4422,12 +4422,12 @@ void ARGBMultiplyRow_MMI(const uint8_t* src_argb0,
[src1_hi] "=&f"(src1_hi), [src1_lo] "=&f"(src1_lo),
[dest_hi] "=&f"(dest_hi), [dest_lo] "=&f"(dest_lo), [src0] "=&f"(src0),
[src1] "=&f"(src1), [dest] "=&f"(dest)
- : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1),
+ : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1),
[dst_ptr] "r"(dst_argb), [width] "r"(width), [mask] "f"(mask)
: "memory");
}
-void ARGBAddRow_MMI(const uint8_t* src_argb0,
+void ARGBAddRow_MMI(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -4449,12 +4449,12 @@ void ARGBAddRow_MMI(const uint8_t* src_argb0,
"daddi %[width], %[width], -0x02 \n\t"
"bnez %[width], 1b \n\t"
: [src0] "=&f"(src0), [src1] "=&f"(src1), [dest] "=&f"(dest)
- : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1),
+ : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1),
[dst_ptr] "r"(dst_argb), [width] "r"(width)
: "memory");
}
-void ARGBSubtractRow_MMI(const uint8_t* src_argb0,
+void ARGBSubtractRow_MMI(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -4476,7 +4476,7 @@ void ARGBSubtractRow_MMI(const uint8_t* src_argb0,
"daddi %[width], %[width], -0x02 \n\t"
"bnez %[width], 1b \n\t"
: [src0] "=&f"(src0), [src1] "=&f"(src1), [dest] "=&f"(dest)
- : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1),
+ : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1),
[dst_ptr] "r"(dst_argb), [width] "r"(width)
: "memory");
}
@@ -5552,10 +5552,10 @@ void UYVYToYRow_MMI(const uint8_t* src_uyvy, uint8_t* dst_y, int width) {
: "memory");
}
-// Blend src_argb0 over src_argb1 and store to dst_argb.
-// dst_argb may be src_argb0 or src_argb1.
+// Blend src_argb over src_argb1 and store to dst_argb.
+// dst_argb may be src_argb or src_argb1.
// This code mimics the SSSE3 version for better testability.
-void ARGBBlendRow_MMI(const uint8_t* src_argb0,
+void ARGBBlendRow_MMI(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -5608,7 +5608,7 @@ void ARGBBlendRow_MMI(const uint8_t* src_argb0,
[dest] "=&f"(dest), [src0_hi] "=&f"(src0_hi), [src0_lo] "=&f"(src0_lo),
[src1_hi] "=&f"(src1_hi), [src1_lo] "=&f"(src1_lo),
[dest_hi] "=&f"(dest_hi), [dest_lo] "=&f"(dest_lo)
- : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1),
+ : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1),
[dst_ptr] "r"(dst_argb), [mask0] "f"(mask0), [mask1] "f"(mask1),
[mask2] "f"(mask2), [mask3] "f"(mask3), [mask4] "f"(mask4),
[shift] "f"(shift), [width] "r"(width)
diff --git a/third_party/libyuv/source/row_msa.cc b/third_party/libyuv/source/row_msa.cc
index fe6df93a60..c0b13b0fd0 100644
--- a/third_party/libyuv/source/row_msa.cc
+++ b/third_party/libyuv/source/row_msa.cc
@@ -781,7 +781,7 @@ void UYVYToUV422Row_MSA(const uint8_t* src_uyvy,
}
}
-void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void ARGBToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) {
int x;
v16u8 src0, src1, src2, src3, vec0, vec1, vec2, vec3, dst0;
v8u16 reg0, reg1, reg2, reg3, reg4, reg5;
@@ -792,10 +792,10 @@ void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080);
for (x = 0; x < width; x += 16) {
- src0 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 32);
- src3 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 48);
+ src0 = (v16u8)__msa_ld_b((v16u8*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((v16u8*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((v16u8*)src_argb, 32);
+ src3 = (v16u8)__msa_ld_b((v16u8*)src_argb, 48);
vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0);
vec1 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2);
vec2 = (v16u8)__msa_pckod_b((v16i8)src1, (v16i8)src0);
@@ -822,18 +822,18 @@ void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
reg1 = (v8u16)__msa_srai_h((v8i16)reg1, 8);
dst0 = (v16u8)__msa_pckev_b((v16i8)reg1, (v16i8)reg0);
ST_UB(dst0, dst_y);
- src_argb0 += 64;
+ src_argb += 64;
dst_y += 16;
}
}
-void ARGBToUVRow_MSA(const uint8_t* src_argb0,
+void ARGBToUVRow_MSA(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
int x;
- const uint8_t* src_argb0_next = src_argb0 + src_stride_argb;
+ const uint8_t* src_argb_next = src_argb + src_stride_argb;
v16u8 src0, src1, src2, src3, src4, src5, src6, src7;
v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, vec8, vec9;
v8u16 reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7, reg8, reg9;
@@ -847,14 +847,14 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0,
v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001);
for (x = 0; x < width; x += 32) {
- src0 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 32);
- src3 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 48);
- src4 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 64);
- src5 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 80);
- src6 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 96);
- src7 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 112);
+ src0 = (v16u8)__msa_ld_b((v16u8*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((v16u8*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((v16u8*)src_argb, 32);
+ src3 = (v16u8)__msa_ld_b((v16u8*)src_argb, 48);
+ src4 = (v16u8)__msa_ld_b((v16u8*)src_argb, 64);
+ src5 = (v16u8)__msa_ld_b((v16u8*)src_argb, 80);
+ src6 = (v16u8)__msa_ld_b((v16u8*)src_argb, 96);
+ src7 = (v16u8)__msa_ld_b((v16u8*)src_argb, 112);
vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0);
vec1 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2);
vec2 = (v16u8)__msa_pckev_b((v16i8)src5, (v16i8)src4);
@@ -875,14 +875,14 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0,
reg3 = __msa_hadd_u_h(vec5, vec5);
reg4 = __msa_hadd_u_h(vec0, vec0);
reg5 = __msa_hadd_u_h(vec1, vec1);
- src0 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 0);
- src1 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 16);
- src2 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 32);
- src3 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 48);
- src4 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 64);
- src5 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 80);
- src6 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 96);
- src7 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 112);
+ src0 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 0);
+ src1 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 16);
+ src2 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 32);
+ src3 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 48);
+ src4 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 64);
+ src5 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 80);
+ src6 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 96);
+ src7 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 112);
vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0);
vec1 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2);
vec2 = (v16u8)__msa_pckev_b((v16i8)src5, (v16i8)src4);
@@ -945,8 +945,8 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0,
dst1 = (v16u8)__msa_pckev_b((v16i8)reg5, (v16i8)reg4);
ST_UB(dst0, dst_u);
ST_UB(dst1, dst_v);
- src_argb0 += 128;
- src_argb0_next += 128;
+ src_argb += 128;
+ src_argb_next += 128;
dst_u += 16;
dst_v += 16;
}
@@ -1173,7 +1173,7 @@ void ARGBToUV444Row_MSA(const uint8_t* src_argb,
}
}
-void ARGBMultiplyRow_MSA(const uint8_t* src_argb0,
+void ARGBMultiplyRow_MSA(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -1184,7 +1184,7 @@ void ARGBMultiplyRow_MSA(const uint8_t* src_argb0,
v8i16 zero = {0};
for (x = 0; x < width; x += 4) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
src1 = (v16u8)__msa_ld_b((void*)src_argb1, 0);
vec0 = (v8u16)__msa_ilvr_b((v16i8)src0, (v16i8)src0);
vec1 = (v8u16)__msa_ilvl_b((v16i8)src0, (v16i8)src0);
@@ -1206,13 +1206,13 @@ void ARGBMultiplyRow_MSA(const uint8_t* src_argb0,
vec1 = (v8u16)__msa_pckev_h((v8i16)reg3, (v8i16)reg2);
dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0);
ST_UB(dst0, dst_argb);
- src_argb0 += 16;
+ src_argb += 16;
src_argb1 += 16;
dst_argb += 16;
}
}
-void ARGBAddRow_MSA(const uint8_t* src_argb0,
+void ARGBAddRow_MSA(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -1220,20 +1220,20 @@ void ARGBAddRow_MSA(const uint8_t* src_argb0,
v16u8 src0, src1, src2, src3, dst0, dst1;
for (x = 0; x < width; x += 8) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
src2 = (v16u8)__msa_ld_b((void*)src_argb1, 0);
src3 = (v16u8)__msa_ld_b((void*)src_argb1, 16);
dst0 = __msa_adds_u_b(src0, src2);
dst1 = __msa_adds_u_b(src1, src3);
ST_UB2(dst0, dst1, dst_argb, 16);
- src_argb0 += 32;
+ src_argb += 32;
src_argb1 += 32;
dst_argb += 32;
}
}
-void ARGBSubtractRow_MSA(const uint8_t* src_argb0,
+void ARGBSubtractRow_MSA(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -1241,14 +1241,14 @@ void ARGBSubtractRow_MSA(const uint8_t* src_argb0,
v16u8 src0, src1, src2, src3, dst0, dst1;
for (x = 0; x < width; x += 8) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
src2 = (v16u8)__msa_ld_b((void*)src_argb1, 0);
src3 = (v16u8)__msa_ld_b((void*)src_argb1, 16);
dst0 = __msa_subs_u_b(src0, src2);
dst1 = __msa_subs_u_b(src1, src3);
ST_UB2(dst0, dst1, dst_argb, 16);
- src_argb0 += 32;
+ src_argb += 32;
src_argb1 += 32;
dst_argb += 32;
}
@@ -1794,7 +1794,7 @@ void RGB565ToYRow_MSA(const uint8_t* src_rgb565, uint8_t* dst_y, int width) {
}
}
-void RGB24ToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void RGB24ToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) {
int x;
v16u8 src0, src1, src2, reg0, reg1, reg2, reg3, dst0;
v8u16 vec0, vec1, vec2, vec3;
@@ -1809,9 +1809,9 @@ void RGB24ToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
v16i8 zero = {0};
for (x = 0; x < width; x += 16) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((void*)src_argb, 32);
reg0 = (v16u8)__msa_vshf_b(mask0, zero, (v16i8)src0);
reg1 = (v16u8)__msa_vshf_b(mask1, (v16i8)src1, (v16i8)src0);
reg2 = (v16u8)__msa_vshf_b(mask2, (v16i8)src2, (v16i8)src1);
@@ -1830,12 +1830,12 @@ void RGB24ToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
vec1 = (v8u16)__msa_srai_h((v8i16)vec1, 8);
dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0);
ST_UB(dst0, dst_y);
- src_argb0 += 48;
+ src_argb += 48;
dst_y += 16;
}
}
-void RAWToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void RAWToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) {
int x;
v16u8 src0, src1, src2, reg0, reg1, reg2, reg3, dst0;
v8u16 vec0, vec1, vec2, vec3;
@@ -1850,9 +1850,9 @@ void RAWToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
v16i8 zero = {0};
for (x = 0; x < width; x += 16) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((void*)src_argb, 32);
reg0 = (v16u8)__msa_vshf_b(mask0, zero, (v16i8)src0);
reg1 = (v16u8)__msa_vshf_b(mask1, (v16i8)src1, (v16i8)src0);
reg2 = (v16u8)__msa_vshf_b(mask2, (v16i8)src2, (v16i8)src1);
@@ -1871,7 +1871,7 @@ void RAWToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
vec1 = (v8u16)__msa_srai_h((v8i16)vec1, 8);
dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0);
ST_UB(dst0, dst_y);
- src_argb0 += 48;
+ src_argb += 48;
dst_y += 16;
}
}
@@ -2037,14 +2037,14 @@ void RGB565ToUVRow_MSA(const uint8_t* src_rgb565,
}
}
-void RGB24ToUVRow_MSA(const uint8_t* src_rgb0,
+void RGB24ToUVRow_MSA(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
int x;
- const uint8_t* s = src_rgb0;
- const uint8_t* t = src_rgb0 + src_stride_rgb;
+ const uint8_t* s = src_rgb;
+ const uint8_t* t = src_rgb + src_stride_rgb;
int64_t res0, res1;
v16u8 src0, src1, src2, src3, src4, src5, src6, src7;
v16u8 inp0, inp1, inp2, inp3, inp4, inp5;
@@ -2147,14 +2147,14 @@ void RGB24ToUVRow_MSA(const uint8_t* src_rgb0,
}
}
-void RAWToUVRow_MSA(const uint8_t* src_rgb0,
+void RAWToUVRow_MSA(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
int x;
- const uint8_t* s = src_rgb0;
- const uint8_t* t = src_rgb0 + src_stride_rgb;
+ const uint8_t* s = src_rgb;
+ const uint8_t* t = src_rgb + src_stride_rgb;
int64_t res0, res1;
v16u8 inp0, inp1, inp2, inp3, inp4, inp5;
v16u8 src0, src1, src2, src3, src4, src5, src6, src7;
@@ -2446,7 +2446,7 @@ void SobelXYRow_MSA(const uint8_t* src_sobelx,
}
}
-void ARGBToYJRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void ARGBToYJRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) {
int x;
v16u8 src0, src1, src2, src3, dst0;
v16u8 const_0x961D = (v16u8)__msa_fill_h(0x961D);
@@ -2454,19 +2454,19 @@ void ARGBToYJRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
v8u16 const_0x80 = (v8u16)__msa_fill_h(0x80);
for (x = 0; x < width; x += 16) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32);
- src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((void*)src_argb, 32);
+ src3 = (v16u8)__msa_ld_b((void*)src_argb, 48);
ARGBTOY(src0, src1, src2, src3, const_0x961D, const_0x4D, const_0x80, 8,
dst0);
ST_UB(dst0, dst_y);
- src_argb0 += 64;
+ src_argb += 64;
dst_y += 16;
}
}
-void BGRAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void BGRAToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) {
int x;
v16u8 src0, src1, src2, src3, dst0;
v16u8 const_0x4200 = (v16u8)__msa_fill_h(0x4200);
@@ -2474,19 +2474,19 @@ void BGRAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080);
for (x = 0; x < width; x += 16) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32);
- src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((void*)src_argb, 32);
+ src3 = (v16u8)__msa_ld_b((void*)src_argb, 48);
ARGBTOY(src0, src1, src2, src3, const_0x4200, const_0x1981, const_0x1080, 8,
dst0);
ST_UB(dst0, dst_y);
- src_argb0 += 64;
+ src_argb += 64;
dst_y += 16;
}
}
-void ABGRToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void ABGRToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) {
int x;
v16u8 src0, src1, src2, src3, dst0;
v16u8 const_0x8142 = (v16u8)__msa_fill_h(0x8142);
@@ -2494,19 +2494,19 @@ void ABGRToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080);
for (x = 0; x < width; x += 16) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32);
- src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((void*)src_argb, 32);
+ src3 = (v16u8)__msa_ld_b((void*)src_argb, 48);
ARGBTOY(src0, src1, src2, src3, const_0x8142, const_0x19, const_0x1080, 8,
dst0);
ST_UB(dst0, dst_y);
- src_argb0 += 64;
+ src_argb += 64;
dst_y += 16;
}
}
-void RGBAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
+void RGBAToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) {
int x;
v16u8 src0, src1, src2, src3, dst0;
v16u8 const_0x1900 = (v16u8)__msa_fill_h(0x1900);
@@ -2514,26 +2514,26 @@ void RGBAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) {
v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080);
for (x = 0; x < width; x += 16) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
- src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32);
- src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
+ src2 = (v16u8)__msa_ld_b((void*)src_argb, 32);
+ src3 = (v16u8)__msa_ld_b((void*)src_argb, 48);
ARGBTOY(src0, src1, src2, src3, const_0x1900, const_0x4281, const_0x1080, 8,
dst0);
ST_UB(dst0, dst_y);
- src_argb0 += 64;
+ src_argb += 64;
dst_y += 16;
}
}
-void ARGBToUVJRow_MSA(const uint8_t* src_rgb0,
+void ARGBToUVJRow_MSA(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
int x;
- const uint8_t* s = src_rgb0;
- const uint8_t* t = src_rgb0 + src_stride_rgb;
+ const uint8_t* s = src_rgb;
+ const uint8_t* t = src_rgb + src_stride_rgb;
v8u16 src0, src1, src2, src3, src4, src5, src6, src7;
v8u16 vec0, vec1, vec2, vec3;
v8u16 dst0, dst1, dst2, dst3;
@@ -2658,14 +2658,14 @@ void ARGBToUVJRow_MSA(const uint8_t* src_rgb0,
}
}
-void BGRAToUVRow_MSA(const uint8_t* src_rgb0,
+void BGRAToUVRow_MSA(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
int x;
- const uint8_t* s = src_rgb0;
- const uint8_t* t = src_rgb0 + src_stride_rgb;
+ const uint8_t* s = src_rgb;
+ const uint8_t* t = src_rgb + src_stride_rgb;
const uint8_t unused = 0xf;
v8u16 src0, src1, src2, src3;
v16u8 dst0, dst1;
@@ -2693,14 +2693,14 @@ void BGRAToUVRow_MSA(const uint8_t* src_rgb0,
}
}
-void ABGRToUVRow_MSA(const uint8_t* src_rgb0,
+void ABGRToUVRow_MSA(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
int x;
- const uint8_t* s = src_rgb0;
- const uint8_t* t = src_rgb0 + src_stride_rgb;
+ const uint8_t* s = src_rgb;
+ const uint8_t* t = src_rgb + src_stride_rgb;
const uint8_t unused = 0xf;
v8u16 src0, src1, src2, src3;
v16u8 dst0, dst1;
@@ -2728,14 +2728,14 @@ void ABGRToUVRow_MSA(const uint8_t* src_rgb0,
}
}
-void RGBAToUVRow_MSA(const uint8_t* src_rgb0,
+void RGBAToUVRow_MSA(const uint8_t* src_rgb,
int src_stride_rgb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
int x;
- const uint8_t* s = src_rgb0;
- const uint8_t* t = src_rgb0 + src_stride_rgb;
+ const uint8_t* s = src_rgb;
+ const uint8_t* t = src_rgb + src_stride_rgb;
const uint8_t unused = 0xf;
v8u16 src0, src1, src2, src3;
v16u8 dst0, dst1;
@@ -3109,7 +3109,7 @@ void ARGBExtractAlphaRow_MSA(const uint8_t* src_argb,
}
}
-void ARGBBlendRow_MSA(const uint8_t* src_argb0,
+void ARGBBlendRow_MSA(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -3123,8 +3123,8 @@ void ARGBBlendRow_MSA(const uint8_t* src_argb0,
v16i8 zero = {0};
for (x = 0; x < width; x += 8) {
- src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0);
- src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16);
+ src0 = (v16u8)__msa_ld_b((void*)src_argb, 0);
+ src1 = (v16u8)__msa_ld_b((void*)src_argb, 16);
src2 = (v16u8)__msa_ld_b((void*)src_argb1, 0);
src3 = (v16u8)__msa_ld_b((void*)src_argb1, 16);
vec0 = (v8u16)__msa_ilvr_b(zero, (v16i8)src0);
@@ -3168,7 +3168,7 @@ void ARGBBlendRow_MSA(const uint8_t* src_argb0,
dst0 = __msa_bmnz_v(dst0, const_255, mask);
dst1 = __msa_bmnz_v(dst1, const_255, mask);
ST_UB2(dst0, dst1, dst_argb, 16);
- src_argb0 += 32;
+ src_argb += 32;
src_argb1 += 32;
dst_argb += 32;
}
diff --git a/third_party/libyuv/source/row_neon.cc b/third_party/libyuv/source/row_neon.cc
index 43a2cac752..6ef6f1c463 100644
--- a/third_party/libyuv/source/row_neon.cc
+++ b/third_party/libyuv/source/row_neon.cc
@@ -21,90 +21,115 @@ extern "C" {
#if !defined(LIBYUV_DISABLE_NEON) && defined(__ARM_NEON__) && \
!defined(__aarch64__)
+// q0: Y uint16x8_t
+// d2: U uint8x8_t
+// d3: V uint8x8_t
+
// Read 8 Y, 4 U and 4 V from 422
#define READYUV422 \
- "vld1.8 {d0}, [%0]! \n" \
- "vld1.32 {d2[0]}, [%1]! \n" \
- "vld1.32 {d2[1]}, [%2]! \n"
+ "vld1.8 {d0}, [%[src_y]]! \n" \
+ "vld1.32 {d2[0]}, [%[src_u]]! \n" \
+ "vld1.32 {d2[1]}, [%[src_v]]! \n" \
+ "vmov.u8 d1, d0 \n" \
+ "vmovl.u8 q1, d2 \n" \
+ "vzip.u8 d0, d1 \n" \
+ "vsli.u16 q1, q1, #8 \n"
// Read 8 Y, 8 U and 8 V from 444
#define READYUV444 \
- "vld1.8 {d0}, [%0]! \n" \
- "vld1.8 {d2}, [%1]! \n" \
- "vld1.8 {d3}, [%2]! \n" \
- "vpaddl.u8 q1, q1 \n" \
- "vrshrn.u16 d2, q1, #1 \n"
+ "vld1.8 {d0}, [%[src_y]]! \n" \
+ "vld1.8 {d2}, [%[src_u]]! \n" \
+ "vmovl.u8 q0, d0 \n" \
+ "vld1.8 {d3}, [%[src_v]]! \n" \
+ "vsli.u16 q0, q0, #8 \n"
// Read 8 Y, and set 4 U and 4 V to 128
#define READYUV400 \
- "vld1.8 {d0}, [%0]! \n" \
- "vmov.u8 d2, #128 \n"
+ "vld1.8 {d0}, [%[src_y]]! \n" \
+ "vmov.u8 q1, #128 \n" \
+ "vmovl.u8 q0, d0 \n" \
+ "vsli.u16 q0, q0, #8 \n"
// Read 8 Y and 4 UV from NV12
-#define READNV12 \
- "vld1.8 {d0}, [%0]! \n" \
- "vld1.8 {d2}, [%1]! \n" \
- "vmov.u8 d3, d2 \n" /* split odd/even uv apart */ \
- "vuzp.u8 d2, d3 \n" \
- "vtrn.u32 d2, d3 \n"
+#define READNV12 \
+ "vld1.8 {d0}, [%[src_y]]! \n" \
+ "vld1.8 {d2}, [%[src_uv]]! \n" \
+ "vmov.u8 d1, d0 \n" \
+ "vmov.u8 d3, d2 \n" \
+ "vzip.u8 d0, d1 \n" \
+ "vsli.u16 d2, d2, #8 \n" /* Duplicate low byte (U) */ \
+ "vsri.u16 d3, d3, #8 \n" /* Duplicate high byte (V) */
// Read 8 Y and 4 VU from NV21
#define READNV21 \
- "vld1.8 {d0}, [%0]! \n" \
- "vld1.8 {d2}, [%1]! \n" \
- "vmov.u8 d3, d2 \n" /* split odd/even uv apart */ \
- "vuzp.u8 d3, d2 \n" \
- "vtrn.u32 d2, d3 \n"
+ "vld1.8 {d0}, [%[src_y]]! \n" \
+ "vld1.8 {d2}, [%[src_vu]]! \n" \
+ "vmov.u8 d1, d0 \n" \
+ "vmov.u8 d3, d2 \n" \
+ "vzip.u8 d0, d1 \n" \
+ "vsri.u16 d2, d2, #8 \n" /* Duplicate high byte (U) */ \
+ "vsli.u16 d3, d3, #8 \n" /* Duplicate low byte (V) */
// Read 8 YUY2
#define READYUY2 \
- "vld2.8 {d0, d2}, [%0]! \n" \
+ "vld2.8 {d0, d2}, [%[src_yuy2]]! \n" \
+ "vmovl.u8 q0, d0 \n" \
"vmov.u8 d3, d2 \n" \
- "vuzp.u8 d2, d3 \n" \
- "vtrn.u32 d2, d3 \n"
+ "vsli.u16 q0, q0, #8 \n" \
+ "vsli.u16 d2, d2, #8 \n" \
+ "vsri.u16 d3, d3, #8 \n"
// Read 8 UYVY
#define READUYVY \
- "vld2.8 {d2, d3}, [%0]! \n" \
- "vmov.u8 d0, d3 \n" \
+ "vld2.8 {d2, d3}, [%[src_uyvy]]! \n" \
+ "vmovl.u8 q0, d3 \n" \
"vmov.u8 d3, d2 \n" \
- "vuzp.u8 d2, d3 \n" \
- "vtrn.u32 d2, d3 \n"
-
-#define YUVTORGB_SETUP \
- "vld1.8 {d24}, [%[kUVToRB]] \n" \
- "vld1.8 {d25}, [%[kUVToG]] \n" \
- "vld1.16 {d26[], d27[]}, [%[kUVBiasBGR]]! \n" \
- "vld1.16 {d8[], d9[]}, [%[kUVBiasBGR]]! \n" \
- "vld1.16 {d28[], d29[]}, [%[kUVBiasBGR]] \n" \
- "vld1.32 {d30[], d31[]}, [%[kYToRgb]] \n"
-
-#define YUVTORGB \
- "vmull.u8 q8, d2, d24 \n" /* u/v B/R component */ \
- "vmull.u8 q9, d2, d25 \n" /* u/v G component */ \
- "vmovl.u8 q0, d0 \n" /* Y */ \
- "vmovl.s16 q10, d1 \n" \
- "vmovl.s16 q0, d0 \n" \
- "vmul.s32 q10, q10, q15 \n" \
- "vmul.s32 q0, q0, q15 \n" \
- "vqshrun.s32 d0, q0, #16 \n" \
- "vqshrun.s32 d1, q10, #16 \n" /* Y */ \
- "vadd.s16 d18, d19 \n" \
- "vshll.u16 q1, d16, #16 \n" /* Replicate u * UB */ \
- "vshll.u16 q10, d17, #16 \n" /* Replicate v * VR */ \
- "vshll.u16 q3, d18, #16 \n" /* Replicate (v*VG + u*UG)*/ \
- "vaddw.u16 q1, q1, d16 \n" \
- "vaddw.u16 q10, q10, d17 \n" \
- "vaddw.u16 q3, q3, d18 \n" \
- "vqadd.s16 q8, q0, q13 \n" /* B */ \
- "vqadd.s16 q9, q0, q14 \n" /* R */ \
- "vqadd.s16 q0, q0, q4 \n" /* G */ \
- "vqadd.s16 q8, q8, q1 \n" /* B */ \
- "vqadd.s16 q9, q9, q10 \n" /* R */ \
- "vqsub.s16 q0, q0, q3 \n" /* G */ \
- "vqshrun.s16 d20, q8, #6 \n" /* B */ \
- "vqshrun.s16 d22, q9, #6 \n" /* R */ \
- "vqshrun.s16 d21, q0, #6 \n" /* G */
+ "vsli.u16 q0, q0, #8 \n" \
+ "vsli.u16 d2, d2, #8 \n" \
+ "vsri.u16 d3, d3, #8 \n"
+
+#define YUVTORGB_SETUP \
+ "vld4.8 {d26[], d27[], d28[], d29[]}, [%[kUVCoeff]] \n" \
+ "vld1.16 {d31[]}, [%[kRGBCoeffBias]]! \n" \
+ "vld1.16 {d20[], d21[]}, [%[kRGBCoeffBias]]! \n" \
+ "vld1.16 {d22[], d23[]}, [%[kRGBCoeffBias]]! \n" \
+ "vld1.16 {d24[], d25[]}, [%[kRGBCoeffBias]] \n"
+
+// q0: B uint16x8_t
+// q1: G uint16x8_t
+// q2: R uint16x8_t
+
+// Convert from YUV to 2.14 fixed point RGB
+#define YUVTORGB \
+ "vmull.u16 q2, d1, d31 \n" \
+ "vmull.u8 q8, d3, d29 \n" /* DGV */ \
+ "vmull.u16 q0, d0, d31 \n" \
+ "vmlal.u8 q8, d2, d28 \n" /* DG */ \
+ "vqshrn.u32 d0, q0, #16 \n" \
+ "vqshrn.u32 d1, q2, #16 \n" /* Y */ \
+ "vmull.u8 q9, d2, d26 \n" /* DB */ \
+ "vmull.u8 q2, d3, d27 \n" /* DR */ \
+ "vadd.u16 q4, q0, q11 \n" /* G */ \
+ "vadd.u16 q2, q0, q2 \n" /* R */ \
+ "vadd.u16 q0, q0, q9 \n" /* B */ \
+ "vqsub.u16 q1, q4, q8 \n" /* G */ \
+ "vqsub.u16 q0, q0, q10 \n" /* B */ \
+ "vqsub.u16 q2, q2, q12 \n" /* R */
+
+// Convert from 2.14 fixed point RGB To 8 bit RGB
+#define RGBTORGB8 \
+ "vqshrn.u16 d4, q2, #6 \n" /* R */ \
+ "vqshrn.u16 d2, q1, #6 \n" /* G */ \
+ "vqshrn.u16 d0, q0, #6 \n" /* B */
+
+#define YUVTORGB_REGS \
+ "q0", "q1", "q2", "q4", "q8", "q9", "q10", "q11", "q12", "q13", "q14", "d31"
+
+#define STORERGBA \
+ "vmov.u8 d1, d0 \n" \
+ "vmov.u8 d3, d4 \n" \
+ "vmov.u8 d0, d6 \n" \
+ "vst4.8 {d0, d1, d2, d3}, [%[dst_rgba]]! \n"
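For readers following the new code path: READ* expands the input into duplicated 16-bit Y lanes plus 8-bit U/V, YUVTORGB produces 2.14 fixed-point B/G/R, and RGBTORGB8 narrows that to 8 bits. A per-pixel scalar C sketch of the same arithmetic follows; it is illustrative only and not part of the patch, and the uvcoeff/bias parameters stand in for the kUVCoeff and kRGBCoeffBias tables loaded by YUVTORGB_SETUP.

/* Illustrative scalar model of YUVTORGB + RGBTORGB8 (not part of the
 * patch). Coefficient order follows the "UB VR UG VG / YG BB BG BR"
 * comments; the NEON code does the same math in 16-bit lanes with
 * saturating adds and subtracts. */
#include <stdint.h>

static uint8_t clamp8(int32_t v) {
  return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

static void yuv_pixel_to_rgb8(uint8_t y, uint8_t u, uint8_t v,
                              const uint8_t uvcoeff[4],  /* UB, VR, UG, VG */
                              const uint16_t bias[4],    /* YG, BB, BG, BR */
                              uint8_t* b, uint8_t* g, uint8_t* r) {
  /* READYUV*: each Y byte is duplicated into a 16-bit lane (y * 257). */
  uint32_t y2_14 = ((uint32_t)y * 0x0101 * bias[0]) >> 16;  /* 2.14 luma */
  int32_t bq = (int32_t)(y2_14 + u * uvcoeff[0]) - bias[1];
  int32_t gq = (int32_t)(y2_14 + bias[2]) - (u * uvcoeff[2] + v * uvcoeff[3]);
  int32_t rq = (int32_t)(y2_14 + v * uvcoeff[1]) - bias[3];
  /* RGBTORGB8: shift the 2.14 values down to 8 bits with saturation. */
  *b = clamp8(bq >> 6);
  *g = clamp8(gq >> 6);
  *r = clamp8(rq >> 6);
}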
void I444ToARGBRow_NEON(const uint8_t* src_y,
const uint8_t* src_u,
@@ -114,22 +139,20 @@ void I444ToARGBRow_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
- "vmov.u8 d23, #255 \n"
+ "vmov.u8 d6, #255 \n"
"1: \n" READYUV444 YUVTORGB
- "subs %4, %4, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%3]! \n"
+ RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void I422ToARGBRow_NEON(const uint8_t* src_y,
@@ -140,22 +163,20 @@ void I422ToARGBRow_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
- "vmov.u8 d23, #255 \n"
+ "vmov.u8 d6, #255 \n"
"1: \n" READYUV422 YUVTORGB
- "subs %4, %4, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%3]! \n"
+ RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void I444AlphaToARGBRow_NEON(const uint8_t* src_y,
@@ -168,22 +189,20 @@ void I444AlphaToARGBRow_NEON(const uint8_t* src_y,
asm volatile(
YUVTORGB_SETUP
"1: \n" READYUV444 YUVTORGB
- "vld1.8 {d23}, [%3]! \n"
- "subs %5, %5, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%4]! \n"
- "bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(src_a), // %3
- "+r"(dst_argb), // %4
- "+r"(width) // %5
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ RGBTORGB8
+ "vld1.8 {d6}, [%[src_a]]! \n"
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
+ "bgt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [src_a] "+r"(src_a), // %[src_a]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void I422AlphaToARGBRow_NEON(const uint8_t* src_y,
@@ -196,22 +215,20 @@ void I422AlphaToARGBRow_NEON(const uint8_t* src_y,
asm volatile(
YUVTORGB_SETUP
"1: \n" READYUV422 YUVTORGB
- "subs %5, %5, #8 \n"
- "vld1.8 {d23}, [%3]! \n"
- "vst4.8 {d20, d21, d22, d23}, [%4]! \n"
- "bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(src_a), // %3
- "+r"(dst_argb), // %4
- "+r"(width) // %5
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ RGBTORGB8
+ "vld1.8 {d6}, [%[src_a]]! \n"
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
+ "bgt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [src_a] "+r"(src_a), // %[src_a]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void I422ToRGBARow_NEON(const uint8_t* src_y,
@@ -222,22 +239,18 @@ void I422ToRGBARow_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
+ "vmov.u8 d6, #255 \n"
"1: \n" READYUV422 YUVTORGB
- "subs %4, %4, #8 \n"
- "vmov.u8 d19, #255 \n" // YUVTORGB modified d19
- "vst4.8 {d19, d20, d21, d22}, [%3]! \n"
+ RGBTORGB8 "subs %[width], %[width], #8 \n" STORERGBA
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_rgba), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_rgba] "+r"(dst_rgba), // %[dst_rgba]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void I422ToRGB24Row_NEON(const uint8_t* src_y,
@@ -248,29 +261,28 @@ void I422ToRGB24Row_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
+ "vmov.u8 d6, #255 \n"
"1: \n" READYUV422 YUVTORGB
- "subs %4, %4, #8 \n"
- "vst3.8 {d20, d21, d22}, [%3]! \n"
+ RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst3.8 {d0, d2, d4}, [%[dst_rgb24]]! \n"
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_rgb24), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS);
}
#define ARGBTORGB565 \
- "vshll.u8 q0, d22, #8 \n" /* R */ \
- "vshll.u8 q8, d21, #8 \n" /* G */ \
- "vshll.u8 q9, d20, #8 \n" /* B */ \
- "vsri.16 q0, q8, #5 \n" /* RG */ \
- "vsri.16 q0, q9, #11 \n" /* RGB */
+ "vshll.u8 q2, d4, #8 \n" /* R */ \
+ "vshll.u8 q1, d2, #8 \n" /* G */ \
+ "vshll.u8 q0, d0, #8 \n" /* B */ \
+ "vsri.16 q2, q1, #5 \n" /* RG */ \
+ "vsri.16 q2, q0, #11 \n" /* RGB */
void I422ToRGB565Row_NEON(const uint8_t* src_y,
const uint8_t* src_u,
@@ -280,31 +292,29 @@ void I422ToRGB565Row_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
+ "vmov.u8 d6, #255 \n"
"1: \n" READYUV422 YUVTORGB
- "subs %4, %4, #8 \n" ARGBTORGB565
- "vst1.8 {q0}, [%3]! \n" // store 8 pixels RGB565.
- "bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_rgb565), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ RGBTORGB8 "subs %[width], %[width], #8 \n" ARGBTORGB565
+ "vst1.8 {q2}, [%[dst_rgb565]]! \n" // store 8 pixels RGB565.
+ "bgt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS);
}
#define ARGBTOARGB1555 \
- "vshll.u8 q0, d23, #8 \n" /* A */ \
- "vshll.u8 q8, d22, #8 \n" /* R */ \
- "vshll.u8 q9, d21, #8 \n" /* G */ \
- "vshll.u8 q10, d20, #8 \n" /* B */ \
- "vsri.16 q0, q8, #1 \n" /* AR */ \
- "vsri.16 q0, q9, #6 \n" /* ARG */ \
- "vsri.16 q0, q10, #11 \n" /* ARGB */
+ "vshll.u8 q3, d6, #8 \n" /* A */ \
+ "vshll.u8 q2, d4, #8 \n" /* R */ \
+ "vshll.u8 q1, d2, #8 \n" /* G */ \
+ "vshll.u8 q0, d0, #8 \n" /* B */ \
+ "vsri.16 q3, q2, #1 \n" /* AR */ \
+ "vsri.16 q3, q1, #6 \n" /* ARG */ \
+ "vsri.16 q3, q0, #11 \n" /* ARGB */
void I422ToARGB1555Row_NEON(const uint8_t* src_y,
const uint8_t* src_u,
@@ -315,30 +325,28 @@ void I422ToARGB1555Row_NEON(const uint8_t* src_y,
asm volatile(
YUVTORGB_SETUP
"1: \n" READYUV422 YUVTORGB
- "subs %4, %4, #8 \n"
- "vmov.u8 d23, #255 \n" ARGBTOARGB1555
- "vst1.8 {q0}, [%3]! \n" // store 8 pixels
- "bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb1555), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vmov.u8 d6, #0xff \n" ARGBTOARGB1555
+ "vst1.8 {q3}, [%[dst_argb1555]]! \n" // store 8 pixels RGB1555.
+ "bgt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb1555] "+r"(dst_argb1555), // %[dst_argb1555]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "q3");
}
#define ARGBTOARGB4444 \
- "vshr.u8 d20, d20, #4 \n" /* B */ \
- "vbic.32 d21, d21, d4 \n" /* G */ \
- "vshr.u8 d22, d22, #4 \n" /* R */ \
- "vbic.32 d23, d23, d4 \n" /* A */ \
- "vorr d0, d20, d21 \n" /* BG */ \
- "vorr d1, d22, d23 \n" /* RA */ \
+ "vshr.u8 d0, d0, #4 \n" /* B */ \
+ "vbic.32 d2, d2, d7 \n" /* G */ \
+ "vshr.u8 d4, d4, #4 \n" /* R */ \
+ "vbic.32 d6, d6, d7 \n" /* A */ \
+ "vorr d0, d0, d2 \n" /* BG */ \
+ "vorr d1, d4, d6 \n" /* RA */ \
"vzip.u8 d0, d1 \n" /* BGRA */
void I422ToARGB4444Row_NEON(const uint8_t* src_y,
@@ -349,25 +357,21 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
- "vmov.u8 d4, #0x0f \n" // vbic bits to clear
- "1: \n"
-
- READYUV422 YUVTORGB
- "subs %4, %4, #8 \n"
- "vmov.u8 d23, #255 \n" ARGBTOARGB4444
- "vst1.8 {q0}, [%3]! \n" // store 8 pixels
- "bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb4444), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ "vmov.u8 d6, #255 \n"
+ "vmov.u8 d7, #0x0f \n" // vbic bits to clear
+ "1: \n" READYUV422 YUVTORGB
+ RGBTORGB8
+ "subs %[width], %[width], #8 \n" ARGBTOARGB4444
+ "vst1.8 {q0}, [%[dst_argb4444]]! \n" // store 8 pixels
+ "bgt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb4444] "+r"(dst_argb4444), // %[dst_argb4444]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "q3");
}
void I400ToARGBRow_NEON(const uint8_t* src_y,
@@ -376,20 +380,18 @@ void I400ToARGBRow_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
- "vmov.u8 d23, #255 \n"
+ "vmov.u8 d6, #255 \n"
"1: \n" READYUV400 YUVTORGB
- "subs %2, %2, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%1]! \n"
+ RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(dst_argb), // %1
- "+r"(width) // %2
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) {
@@ -414,22 +416,20 @@ void NV12ToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile(YUVTORGB_SETUP
- "vmov.u8 d23, #255 \n"
- "1: \n" READNV12 YUVTORGB
- "subs %3, %3, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%2]! \n"
- "bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_uv), // %1
- "+r"(dst_argb), // %2
- "+r"(width) // %3
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9",
- "q10", "q11", "q12", "q13", "q14", "q15");
+ asm volatile(
+ YUVTORGB_SETUP
+ "vmov.u8 d6, #255 \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
+ "bgt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_uv), // %[src_uv]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void NV21ToARGBRow_NEON(const uint8_t* src_y,
@@ -437,22 +437,20 @@ void NV21ToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile(YUVTORGB_SETUP
- "vmov.u8 d23, #255 \n"
- "1: \n" READNV21 YUVTORGB
- "subs %3, %3, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%2]! \n"
- "bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_vu), // %1
- "+r"(dst_argb), // %2
- "+r"(width) // %3
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9",
- "q10", "q11", "q12", "q13", "q14", "q15");
+ asm volatile(
+ YUVTORGB_SETUP
+ "vmov.u8 d6, #255 \n"
+ "1: \n" READNV21 YUVTORGB RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
+ "bgt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_vu] "+r"(src_vu), // %[src_vu]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void NV12ToRGB24Row_NEON(const uint8_t* src_y,
@@ -461,25 +459,19 @@ void NV12ToRGB24Row_NEON(const uint8_t* src_y,
const struct YuvConstants* yuvconstants,
int width) {
asm volatile(
-
YUVTORGB_SETUP
-
- "1: \n"
-
- READNV12 YUVTORGB
- "subs %3, %3, #8 \n"
- "vst3.8 {d20, d21, d22}, [%2]! \n"
+ "vmov.u8 d6, #255 \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst3.8 {d0, d2, d4}, [%[dst_rgb24]]! \n"
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_uv), // %1
- "+r"(dst_rgb24), // %2
- "+r"(width) // %3
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_uv), // %[src_uv]
+ [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS);
}
void NV21ToRGB24Row_NEON(const uint8_t* src_y,
@@ -488,25 +480,19 @@ void NV21ToRGB24Row_NEON(const uint8_t* src_y,
const struct YuvConstants* yuvconstants,
int width) {
asm volatile(
-
YUVTORGB_SETUP
-
- "1: \n"
-
- READNV21 YUVTORGB
- "subs %3, %3, #8 \n"
- "vst3.8 {d20, d21, d22}, [%2]! \n"
+ "vmov.u8 d6, #255 \n"
+ "1: \n" READNV21 YUVTORGB RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst3.8 {d0, d2, d4}, [%[dst_rgb24]]! \n"
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_vu), // %1
- "+r"(dst_rgb24), // %2
- "+r"(width) // %3
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_vu] "+r"(src_vu), // %[src_vu]
+ [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS);
}
void NV12ToRGB565Row_NEON(const uint8_t* src_y,
@@ -516,62 +502,56 @@ void NV12ToRGB565Row_NEON(const uint8_t* src_y,
int width) {
asm volatile(
YUVTORGB_SETUP
- "1: \n" READNV12 YUVTORGB
- "subs %3, %3, #8 \n" ARGBTORGB565
- "vst1.8 {q0}, [%2]! \n" // store 8 pixels RGB565.
+ "vmov.u8 d6, #255 \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %[width], %[width], #8 \n" ARGBTORGB565
+ "vst1.8 {q2}, [%[dst_rgb565]]! \n" // store 8 pixels RGB565.
"bgt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_uv), // %1
- "+r"(dst_rgb565), // %2
- "+r"(width) // %3
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11",
- "q12", "q13", "q14", "q15");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_uv), // %[src_uv]
+ [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS);
}
void YUY2ToARGBRow_NEON(const uint8_t* src_yuy2,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile(YUVTORGB_SETUP
- "vmov.u8 d23, #255 \n"
- "1: \n" READYUY2 YUVTORGB
- "subs %2, %2, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%1]! \n"
+ asm volatile(
+ YUVTORGB_SETUP
+ "vmov.u8 d6, #255 \n"
+ "1: \n" READYUY2 YUVTORGB RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
"bgt 1b \n"
- : "+r"(src_yuy2), // %0
- "+r"(dst_argb), // %1
- "+r"(width) // %2
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9",
- "q10", "q11", "q12", "q13", "q14", "q15");
+ : [src_yuy2] "+r"(src_yuy2), // %[src_yuy2]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
void UYVYToARGBRow_NEON(const uint8_t* src_uyvy,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile(YUVTORGB_SETUP
- "vmov.u8 d23, #255 \n"
- "1: \n" READUYVY YUVTORGB
- "subs %2, %2, #8 \n"
- "vst4.8 {d20, d21, d22, d23}, [%1]! \n"
+ asm volatile(
+ YUVTORGB_SETUP
+ "vmov.u8 d6, #255 \n"
+ "1: \n" READUYVY YUVTORGB RGBTORGB8
+ "subs %[width], %[width], #8 \n"
+ "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n"
"bgt 1b \n"
- : "+r"(src_uyvy), // %0
- "+r"(dst_argb), // %1
- "+r"(width) // %2
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9",
- "q10", "q11", "q12", "q13", "q14", "q15");
+ : [src_uyvy] "+r"(src_uyvy), // %[src_uyvy]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "d6");
}
// Reads 16 pairs of UV and write even values to dst_u and odd to dst_v.
@@ -760,8 +740,8 @@ void MergeXRGBRow_NEON(const uint8_t* src_r,
"vld1.8 {q1}, [%1]! \n" // load G
"vld1.8 {q0}, [%2]! \n" // load B
"subs %4, %4, #16 \n" // 16 processed per loop
- "vst4.8 {d0, d2, d4, d6}, [%4]! \n" // store 8 ARGB
- "vst4.8 {d1, d3, d5, d7}, [%4]! \n" // next 8 ARGB
+ "vst4.8 {d0, d2, d4, d6}, [%3]! \n" // store 8 ARGB
+ "vst4.8 {d1, d3, d5, d7}, [%3]! \n" // next 8 ARGB
"bgt 1b \n"
: "+r"(src_r), // %0
"+r"(src_g), // %1
@@ -773,6 +753,226 @@ void MergeXRGBRow_NEON(const uint8_t* src_r,
);
}
+void MergeXR30Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int depth,
+ int width) {
+ int shift = 10 - depth;
+ asm volatile(
+ "vmov.u32 q14, #1023 \n"
+ "vdup.32 q15, %5 \n"
+ "1: \n"
+ "vld1.16 {d4}, [%2]! \n" // B
+ "vld1.16 {d2}, [%1]! \n" // G
+ "vld1.16 {d0}, [%0]! \n" // R
+ "vmovl.u16 q2, d4 \n" // B
+ "vmovl.u16 q1, d2 \n" // G
+ "vmovl.u16 q0, d0 \n" // R
+ "vshl.u32 q2, q2, q15 \n" // 000B
+ "vshl.u32 q1, q1, q15 \n"
+ "vshl.u32 q0, q0, q15 \n"
+ "vmin.u32 q2, q2, q14 \n"
+ "vmin.u32 q1, q1, q14 \n"
+ "vmin.u32 q0, q0, q14 \n"
+ "vsli.u32 q2, q1, #10 \n" // 00GB
+ "vsli.u32 q2, q0, #20 \n" // 0RGB
+ "vorr.u32 q2, #0xc0000000 \n" // ARGB (AR30)
+ "subs %4, %4, #4 \n"
+ "vst1.8 {q2}, [%3]! \n"
+ "bgt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar30), // %3
+ "+r"(width) // %4
+ : "r"(shift) // %5
+ : "memory", "cc", "q0", "q1", "q2", "q14", "q15");
+}
+
+void MergeXR30Row_10_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int /* depth */,
+ int width) {
+ asm volatile(
+ "vmov.u32 q14, #1023 \n"
+ "1: \n"
+ "vld1.16 {d4}, [%2]! \n" // B
+ "vld1.16 {d2}, [%1]! \n" // G
+ "vld1.16 {d0}, [%0]! \n" // R
+ "vmovl.u16 q2, d4 \n" // 000B
+ "vmovl.u16 q1, d2 \n" // G
+ "vmovl.u16 q0, d0 \n" // R
+ "vmin.u32 q2, q2, q14 \n"
+ "vmin.u32 q1, q1, q14 \n"
+ "vmin.u32 q0, q0, q14 \n"
+ "vsli.u32 q2, q1, #10 \n" // 00GB
+ "vsli.u32 q2, q0, #20 \n" // 0RGB
+ "vorr.u32 q2, #0xc0000000 \n" // ARGB (AR30)
+ "subs %4, %4, #4 \n"
+ "vst1.8 {q2}, [%3]! \n"
+ "bgt 1b \n"
+ "3: \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar30), // %3
+ "+r"(width) // %4
+ :
+ : "memory", "cc", "q0", "q1", "q2", "q14");
+}
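Both MergeXR30Row variants produce AR30 words laid out as B in bits 0-9, G in bits 10-19, R in bits 20-29 and a constant alpha of 3 in the top two bits; the generic version first scales each channel to 10 bits and clamps to 1023, while the _10_ variant omits the shift. A scalar C sketch of the packing (illustrative only, not part of the patch; assumes depth <= 10 so the scaling is a plain left shift):

#include <stdint.h>
#include <string.h>

/* Scalar model of MergeXR30Row: scale to 10 bits, clamp, pack as
 * 2-bit alpha + 10-bit R, G, B (B in the low bits). */
static void merge_xr30_c(const uint16_t* src_r, const uint16_t* src_g,
                         const uint16_t* src_b, uint8_t* dst_ar30,
                         int depth, int width) {
  int shift = 10 - depth;  /* assumed depth <= 10; NEON handles > 10 via vshl */
  for (int x = 0; x < width; ++x) {
    uint32_t r = (uint32_t)src_r[x] << shift;
    uint32_t g = (uint32_t)src_g[x] << shift;
    uint32_t b = (uint32_t)src_b[x] << shift;
    if (r > 1023) r = 1023;
    if (g > 1023) g = 1023;
    if (b > 1023) b = 1023;
    uint32_t ar30 = b | (g << 10) | (r << 20) | 0xc0000000u;
    memcpy(dst_ar30 + 4 * x, &ar30, 4);  /* little-endian store, as vst1.8 */
  }
}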
+
+void MergeAR64Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ int shift = 16 - depth;
+ int mask = (1 << depth) - 1;
+ asm volatile(
+
+ "vdup.u16 q15, %6 \n"
+ "vdup.u16 q14, %7 \n"
+ "1: \n"
+ "vld1.16 {q2}, [%0]! \n" // R
+ "vld1.16 {q1}, [%1]! \n" // G
+ "vld1.16 {q0}, [%2]! \n" // B
+ "vld1.16 {q3}, [%3]! \n" // A
+ "vmin.u16 q2, q2, q14 \n"
+ "vmin.u16 q1, q1, q14 \n"
+ "vmin.u16 q0, q0, q14 \n"
+ "vmin.u16 q3, q3, q14 \n"
+ "vshl.u16 q2, q2, q15 \n"
+ "vshl.u16 q1, q1, q15 \n"
+ "vshl.u16 q0, q0, q15 \n"
+ "vshl.u16 q3, q3, q15 \n"
+ "subs %5, %5, #8 \n"
+ "vst4.16 {d0, d2, d4, d6}, [%4]! \n"
+ "vst4.16 {d1, d3, d5, d7}, [%4]! \n"
+ "bgt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(src_a), // %3
+ "+r"(dst_ar64), // %4
+ "+r"(width) // %5
+ : "r"(shift), // %6
+ "r"(mask) // %7
+ : "memory", "cc", "q0", "q1", "q2", "q3", "q15");
+}
+
+void MergeXR64Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ int shift = 16 - depth;
+ int mask = (1 << depth) - 1;
+ asm volatile(
+
+ "vmov.u8 q3, #0xff \n" // A (0xffff)
+ "vdup.u16 q15, %5 \n"
+ "vdup.u16 q14, %6 \n"
+ "1: \n"
+ "vld1.16 {q2}, [%0]! \n" // R
+ "vld1.16 {q1}, [%1]! \n" // G
+ "vld1.16 {q0}, [%2]! \n" // B
+ "vmin.u16 q2, q2, q14 \n"
+ "vmin.u16 q1, q1, q14 \n"
+ "vmin.u16 q0, q0, q14 \n"
+ "vshl.u16 q2, q2, q15 \n"
+ "vshl.u16 q1, q1, q15 \n"
+ "vshl.u16 q0, q0, q15 \n"
+ "subs %4, %4, #8 \n"
+ "vst4.16 {d0, d2, d4, d6}, [%3]! \n"
+ "vst4.16 {d1, d3, d5, d7}, [%3]! \n"
+ "bgt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar64), // %3
+ "+r"(width) // %4
+ : "r"(shift), // %5
+ "r"(mask) // %6
+ : "memory", "cc", "q0", "q1", "q2", "q3", "q15");
+}
+
+void MergeARGB16To8Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ int shift = 8 - depth;
+ asm volatile(
+
+ "vdup.16 q15, %6 \n"
+ "1: \n"
+ "vld1.16 {q2}, [%0]! \n" // R
+ "vld1.16 {q1}, [%1]! \n" // G
+ "vld1.16 {q0}, [%2]! \n" // B
+ "vld1.16 {q3}, [%3]! \n" // A
+ "vshl.u16 q2, q2, q15 \n"
+ "vshl.u16 q1, q1, q15 \n"
+ "vshl.u16 q0, q0, q15 \n"
+ "vshl.u16 q3, q3, q15 \n"
+ "vqmovn.u16 d0, q0 \n"
+ "vqmovn.u16 d1, q1 \n"
+ "vqmovn.u16 d2, q2 \n"
+ "vqmovn.u16 d3, q3 \n"
+ "subs %5, %5, #8 \n"
+ "vst4.8 {d0, d1, d2, d3}, [%4]! \n"
+ "bgt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(src_a), // %3
+ "+r"(dst_argb), // %4
+ "+r"(width) // %5
+ : "r"(shift) // %6
+ : "memory", "cc", "q0", "q1", "q2", "q3", "q15");
+}
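MergeARGB16To8Row_NEON and MergeXRGB16To8Row_NEON reduce each 16-bit plane value to 8 bits with a single vshl.u16 by (8 - depth), negative counts acting as right shifts, followed by a saturating narrow (vqmovn.u16). A scalar sketch of the per-pixel effect (illustrative only, not part of the patch):

#include <stdint.h>

static uint8_t sat8(uint16_t v) { return v > 255 ? 255 : (uint8_t)v; }

/* Scalar model of MergeARGB16To8Row: align to 8 bits, saturate,
 * interleave as B, G, R, A (the vst4.8 byte order). */
static void merge_argb_16to8_c(const uint16_t* src_r, const uint16_t* src_g,
                               const uint16_t* src_b, const uint16_t* src_a,
                               uint8_t* dst_argb, int depth, int width) {
  int shift = 8 - depth;  /* negative means shift right */
  for (int x = 0; x < width; ++x) {
    uint16_t r = shift >= 0 ? src_r[x] << shift : src_r[x] >> -shift;
    uint16_t g = shift >= 0 ? src_g[x] << shift : src_g[x] >> -shift;
    uint16_t b = shift >= 0 ? src_b[x] << shift : src_b[x] >> -shift;
    uint16_t a = shift >= 0 ? src_a[x] << shift : src_a[x] >> -shift;
    dst_argb[4 * x + 0] = sat8(b);
    dst_argb[4 * x + 1] = sat8(g);
    dst_argb[4 * x + 2] = sat8(r);
    dst_argb[4 * x + 3] = sat8(a);
  }
}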
+
+void MergeXRGB16To8Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ int shift = 8 - depth;
+ asm volatile(
+
+ "vdup.16 q15, %5 \n"
+ "vmov.u8 d6, #0xff \n" // A (0xff)
+ "1: \n"
+ "vld1.16 {q2}, [%0]! \n" // R
+ "vld1.16 {q1}, [%1]! \n" // G
+ "vld1.16 {q0}, [%2]! \n" // B
+ "vshl.u16 q2, q2, q15 \n"
+ "vshl.u16 q1, q1, q15 \n"
+ "vshl.u16 q0, q0, q15 \n"
+ "vqmovn.u16 d5, q2 \n"
+ "vqmovn.u16 d4, q1 \n"
+ "vqmovn.u16 d3, q0 \n"
+ "subs %4, %4, #8 \n"
+ "vst4.u8 {d3, d4, d5, d6}, [%3]! \n"
+ "bgt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_argb), // %3
+ "+r"(width) // %4
+ : "r"(shift) // %5
+ : "memory", "cc", "q0", "q1", "q2", "d6", "q15");
+}
+
// Copy multiple of 32. vld4.8 allow unaligned and is fastest on a15.
void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width) {
asm volatile(
@@ -1328,16 +1528,16 @@ void ARGBToRGB565Row_NEON(const uint8_t* src_argb,
int width) {
asm volatile(
"1: \n"
- "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB.
+ "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 pixels of ARGB.
"subs %2, %2, #8 \n" // 8 processed per loop.
ARGBTORGB565
- "vst1.8 {q0}, [%1]! \n" // store 8 pixels RGB565.
+ "vst1.8 {q2}, [%1]! \n" // store 8 pixels RGB565.
"bgt 1b \n"
: "+r"(src_argb), // %0
"+r"(dst_rgb565), // %1
"+r"(width) // %2
:
- : "cc", "memory", "q0", "q8", "q9", "q10", "q11");
+ : "cc", "memory", "q0", "q1", "q2", "d6");
}
void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb,
@@ -1345,21 +1545,21 @@ void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb,
const uint32_t dither4,
int width) {
asm volatile(
- "vdup.32 d2, %2 \n" // dither4
+ "vdup.32 d7, %2 \n" // dither4
"1: \n"
- "vld4.8 {d20, d21, d22, d23}, [%1]! \n" // load 8 pixels of ARGB.
+ "vld4.8 {d0, d2, d4, d6}, [%1]! \n" // load 8 pixels of ARGB.
"subs %3, %3, #8 \n" // 8 processed per loop.
- "vqadd.u8 d20, d20, d2 \n"
- "vqadd.u8 d21, d21, d2 \n"
- "vqadd.u8 d22, d22, d2 \n" // add for dither
+ "vqadd.u8 d0, d0, d7 \n"
+ "vqadd.u8 d2, d2, d7 \n"
+ "vqadd.u8 d4, d4, d7 \n" // add for dither
ARGBTORGB565
- "vst1.8 {q0}, [%0]! \n" // store 8 RGB565.
+ "vst1.8 {q2}, [%0]! \n" // store 8 RGB565.
"bgt 1b \n"
: "+r"(dst_rgb) // %0
: "r"(src_argb), // %1
"r"(dither4), // %2
"r"(width) // %3
- : "cc", "memory", "q0", "q1", "q8", "q9", "q10", "q11");
+ : "cc", "memory", "q0", "q1", "q2", "q3");
}
void ARGBToARGB1555Row_NEON(const uint8_t* src_argb,
@@ -1367,26 +1567,26 @@ void ARGBToARGB1555Row_NEON(const uint8_t* src_argb,
int width) {
asm volatile(
"1: \n"
- "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB.
+ "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 pixels of ARGB.
"subs %2, %2, #8 \n" // 8 processed per loop.
ARGBTOARGB1555
- "vst1.8 {q0}, [%1]! \n" // store 8 ARGB1555.
+ "vst1.8 {q3}, [%1]! \n" // store 8 ARGB1555.
"bgt 1b \n"
: "+r"(src_argb), // %0
"+r"(dst_argb1555), // %1
"+r"(width) // %2
:
- : "cc", "memory", "q0", "q8", "q9", "q10", "q11");
+ : "cc", "memory", "q0", "q1", "q2", "q3");
}
void ARGBToARGB4444Row_NEON(const uint8_t* src_argb,
uint8_t* dst_argb4444,
int width) {
asm volatile(
- "vmov.u8 d4, #0x0f \n" // bits to clear with
+ "vmov.u8 d7, #0x0f \n" // bits to clear with
// vbic.
"1: \n"
- "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB.
+ "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 pixels of ARGB.
"subs %2, %2, #8 \n" // 8 processed per loop.
ARGBTOARGB4444
"vst1.8 {q0}, [%1]! \n" // store 8 ARGB4444.
@@ -1395,7 +1595,7 @@ void ARGBToARGB4444Row_NEON(const uint8_t* src_argb,
"+r"(dst_argb4444), // %1
"+r"(width) // %2
:
- : "cc", "memory", "q0", "q8", "q9", "q10", "q11");
+ : "cc", "memory", "q0", "q1", "q2", "q3");
}
void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
@@ -1460,7 +1660,7 @@ void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
: "cc", "memory", "q0", "q1", "q2", "q12", "q13");
}
-void RGBAToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
+void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) {
asm volatile(
"vmov.u8 d24, #29 \n" // B * 0.1140 coefficient
"vmov.u8 d25, #150 \n" // G * 0.5870 coefficient
@@ -1474,7 +1674,7 @@ void RGBAToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
"vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y
"vst1.8 {d0}, [%1]! \n" // store 8 pixels Y.
"bgt 1b \n"
- : "+r"(src_argb), // %0
+ : "+r"(src_rgba), // %0
"+r"(dst_y), // %1
"+r"(width) // %2
:
@@ -2119,6 +2319,105 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444,
: "cc", "memory", "q0", "q1", "q2", "q3", "q12", "q13");
}
+static const uvec8 kShuffleARGBToABGR = {2, 1, 0, 3, 6, 5, 4, 7,
+ 10, 9, 8, 11, 14, 13, 12, 15};
+
+void ARGBToAR64Row_NEON(const uint8_t* src_argb,
+ uint16_t* dst_ar64,
+ int width) {
+ asm volatile(
+ "1: \n"
+ "vld1.8 {q0}, [%0]! \n"
+ "vld1.8 {q2}, [%0]! \n"
+ "vmov.u8 q1, q0 \n"
+ "vmov.u8 q3, q2 \n"
+ "subs %2, %2, #8 \n" // 8 processed per loop.
+ "vst2.8 {q0, q1}, [%1]! \n" // store 4 pixels
+ "vst2.8 {q2, q3}, [%1]! \n" // store 4 pixels
+ "bgt 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ar64), // %1
+ "+r"(width) // %2
+ :
+ : "cc", "memory", "q0", "q1", "q2", "q3");
+}
+
+void ARGBToAB64Row_NEON(const uint8_t* src_argb,
+ uint16_t* dst_ab64,
+ int width) {
+ asm volatile(
+ "vld1.8 q4, %3 \n" // shuffler
+ "1: \n"
+ "vld1.8 {q0}, [%0]! \n"
+ "vld1.8 {q2}, [%0]! \n"
+ "vtbl.8 d2, {d0, d1}, d8 \n"
+ "vtbl.8 d3, {d0, d1}, d9 \n"
+ "vtbl.8 d6, {d4, d5}, d8 \n"
+ "vtbl.8 d7, {d4, d5}, d9 \n"
+ "vmov.u8 q0, q1 \n"
+ "vmov.u8 q2, q3 \n"
+ "subs %2, %2, #8 \n" // 8 processed per loop.
+ "vst2.8 {q0, q1}, [%1]! \n" // store 4 pixels
+ "vst2.8 {q2, q3}, [%1]! \n" // store 4 pixels
+ "bgt 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ab64), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleARGBToABGR) // %3
+ : "cc", "memory", "q0", "q1", "q2", "q3", "q4");
+}
+
+void AR64ToARGBRow_NEON(const uint16_t* src_ar64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+ "1: \n"
+ "vld1.16 {q0}, [%0]! \n"
+ "vld1.16 {q1}, [%0]! \n"
+ "vld1.16 {q2}, [%0]! \n"
+ "vld1.16 {q3}, [%0]! \n"
+ "vshrn.u16 d0, q0, #8 \n"
+ "vshrn.u16 d1, q1, #8 \n"
+ "vshrn.u16 d4, q2, #8 \n"
+ "vshrn.u16 d5, q3, #8 \n"
+ "subs %2, %2, #8 \n" // 8 processed per loop.
+ "vst1.8 {q0}, [%1]! \n" // store 4 pixels
+ "vst1.8 {q2}, [%1]! \n" // store 4 pixels
+ "bgt 1b \n"
+ : "+r"(src_ar64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ :
+ : "cc", "memory", "q0", "q1", "q2", "q3");
+}
+
+static const uvec8 kShuffleAB64ToARGB = {5, 3, 1, 7, 13, 11, 9, 15};
+
+void AB64ToARGBRow_NEON(const uint16_t* src_ab64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+ "vld1.8 d8, %3 \n" // shuffler
+ "1: \n"
+ "vld1.16 {q0}, [%0]! \n"
+ "vld1.16 {q1}, [%0]! \n"
+ "vld1.16 {q2}, [%0]! \n"
+ "vld1.16 {q3}, [%0]! \n"
+ "vtbl.8 d0, {d0, d1}, d8 \n"
+ "vtbl.8 d1, {d2, d3}, d8 \n"
+ "vtbl.8 d4, {d4, d5}, d8 \n"
+ "vtbl.8 d5, {d6, d7}, d8 \n"
+ "subs %2, %2, #8 \n" // 8 processed per loop.
+ "vst1.8 {q0}, [%1]! \n" // store 4 pixels
+ "vst1.8 {q2}, [%1]! \n" // store 4 pixels
+ "bgt 1b \n"
+ : "+r"(src_ab64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleAB64ToARGB) // %3
+ : "cc", "memory", "q0", "q1", "q2", "q3", "q4");
+}
+
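Behind the vector shuffles, the new 8-to-16-bit and 16-to-8-bit ARGB conversions are simple per-byte operations: widening replicates each byte into both halves of a 16-bit lane (the vmov + vst2.8 interleave, i.e. v * 0x0101), and narrowing keeps the high byte (vshrn.u16 #8); the AB64 variants additionally swap R and B with a table lookup. A scalar sketch (illustrative only, not part of the patch):

#include <stdint.h>

/* Scalar model of ARGBToAR64Row: widen each channel byte to 16 bits
 * by byte replication. */
static void argb_to_ar64_c(const uint8_t* src_argb, uint16_t* dst_ar64,
                           int width) {
  for (int i = 0; i < 4 * width; ++i)
    dst_ar64[i] = (uint16_t)(src_argb[i] * 0x0101);
}

/* Scalar model of AR64ToARGBRow: narrow by keeping the high byte. */
static void ar64_to_argb_c(const uint16_t* src_ar64, uint8_t* dst_argb,
                           int width) {
  for (int i = 0; i < 4 * width; ++i)
    dst_argb[i] = (uint8_t)(src_ar64[i] >> 8);
}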
void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) {
asm volatile(
"vmov.u8 d6, #25 \n" // B * 0.1016 coefficient
@@ -2263,9 +2562,9 @@ void RAWToYJRow_NEON(const uint8_t* src_raw, uint8_t* dst_yj, int width) {
"1: \n"
"vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RAW.
"subs %2, %2, #8 \n" // 8 processed per loop.
- "vmull.u8 q4, d0, d4 \n" // B
+ "vmull.u8 q4, d0, d4 \n" // R
"vmlal.u8 q4, d1, d5 \n" // G
- "vmlal.u8 q4, d2, d6 \n" // R
+ "vmlal.u8 q4, d2, d6 \n" // B
"vqrshrn.u16 d0, q4, #8 \n" // 16 bit to 8 bit Y
"vst1.8 {d0}, [%1]! \n" // store 8 pixels Y.
"bgt 1b \n"
@@ -2336,7 +2635,7 @@ void InterpolateRow_NEON(uint8_t* dst_ptr,
}
// dr * (256 - sa) / 256 + sr = dr - dr * sa / 256 + sr
-void ARGBBlendRow_NEON(const uint8_t* src_argb0,
+void ARGBBlendRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2387,7 +2686,7 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0,
"99: \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -2625,7 +2924,7 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb,
}
// Multiply 2 rows of ARGB pixels together, 8 pixels at a time.
-void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
+void ARGBMultiplyRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2645,7 +2944,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
"vrshrn.u16 d3, q3, #8 \n" // 16 bit to 8 bit A
"vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels.
"bgt 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -2654,7 +2953,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
}
// Add 2 rows of ARGB pixels together, 8 pixels at a time.
-void ARGBAddRow_NEON(const uint8_t* src_argb0,
+void ARGBAddRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2668,7 +2967,7 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0,
"vqadd.u8 q1, q1, q3 \n" // add R, A
"vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels.
"bgt 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -2677,7 +2976,7 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0,
}
// Subtract 2 rows of ARGB pixels, 8 pixels at a time.
-void ARGBSubtractRow_NEON(const uint8_t* src_argb0,
+void ARGBSubtractRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2691,7 +2990,7 @@ void ARGBSubtractRow_NEON(const uint8_t* src_argb0,
"vqsub.u8 q1, q1, q3 \n" // subtract R, A
"vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels.
"bgt 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -3171,32 +3470,22 @@ void SplitUVRow_16_NEON(const uint16_t* src_uv,
uint16_t* dst_v,
int depth,
int width) {
+ int shift = depth - 16; // Negative for right shift.
asm volatile(
- "vdup.32 q0, %3 \n"
+ "vdup.16 q2, %4 \n"
"1: \n"
- "vld2.16 {q1, q2}, [%0]! \n" // load 8 UV
- "vmovl.u16 q3, d2 \n"
- "vmovl.u16 q4, d3 \n"
- "vshl.u32 q3, q3, q0 \n"
- "vshl.u32 q4, q4, q0 \n"
- "vmovn.u32 d2, q3 \n"
- "vmovn.u32 d3, q4 \n"
- "vmovl.u16 q3, d4 \n"
- "vmovl.u16 q4, d5 \n"
- "vshl.u32 q3, q3, q0 \n"
- "vshl.u32 q4, q4, q0 \n"
- "vmovn.u32 d4, q3 \n"
- "vmovn.u32 d5, q4 \n"
- "subs %4, %4, #8 \n" // 8 src pixels per loop
- "vst1.16 {q1}, [%1]! \n" // store 8 U pixels
- "vst1.16 {q2}, [%2]! \n" // store 8 V pixels
+ "vld2.16 {q0, q1}, [%0]! \n" // load 8 UV
+ "vshl.u16 q0, q0, q2 \n"
+ "vshl.u16 q1, q1, q2 \n"
+ "subs %3, %3, #8 \n" // 8 src pixels per loop
+ "vst1.16 {q0}, [%1]! \n" // store 8 U pixels
+ "vst1.16 {q1}, [%2]! \n" // store 8 V pixels
"bgt 1b \n"
: "+r"(src_uv), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
- "+r"(depth), // %3
- "+r"(width) // %4
- :
+ "+r"(width) // %3
+ : "r"(shift) // %4
: "cc", "memory", "q0", "q1", "q2", "q3", "q4");
}
@@ -3207,21 +3496,20 @@ void MergeUVRow_16_NEON(const uint16_t* src_u,
int width) {
int shift = 16 - depth;
asm volatile(
- "vdup.16 q2, %3 \n"
+ "vdup.16 q2, %4 \n"
"1: \n"
"vld1.16 {q0}, [%0]! \n" // load 8 U
"vld1.16 {q1}, [%1]! \n" // load 8 V
"vshl.u16 q0, q0, q2 \n"
"vshl.u16 q1, q1, q2 \n"
- "subs %4, %4, #8 \n" // 8 src pixels per loop
+ "subs %3, %3, #8 \n" // 8 src pixels per loop
"vst2.16 {q0, q1}, [%2]! \n" // store 8 UV pixels
"bgt 1b \n"
: "+r"(src_u), // %0
"+r"(src_v), // %1
"+r"(dst_uv), // %2
- "+r"(shift), // %3
- "+r"(width) // %4
- :
+ "+r"(width) // %3
+ : "r"(shift) // %4
: "cc", "memory", "q0", "q1", "q2");
}
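Both 16-bit UV helpers now receive the shift amount as a read-only input operand instead of consuming the depth argument as a read-write register, and do the scaling with one vshl.u16 per vector (SplitUVRow_16 passes a negative count, i.e. a right shift by 16 - depth). A scalar sketch of the intended per-sample behaviour (illustrative only, not part of the patch):

#include <stdint.h>

/* Scalar models: MSB-aligned 16-bit input (P010-style) versus
 * LSB-aligned planar output, hence the opposite shifts. */
static void split_uv_16_c(const uint16_t* src_uv, uint16_t* dst_u,
                          uint16_t* dst_v, int depth, int width) {
  int shift = 16 - depth;  /* NEON: vshl.u16 by (depth - 16) */
  for (int x = 0; x < width; ++x) {
    dst_u[x] = (uint16_t)(src_uv[2 * x + 0] >> shift);
    dst_v[x] = (uint16_t)(src_uv[2 * x + 1] >> shift);
  }
}

static void merge_uv_16_c(const uint16_t* src_u, const uint16_t* src_v,
                          uint16_t* dst_uv, int depth, int width) {
  int shift = 16 - depth;  /* NEON: vshl.u16 by (16 - depth) */
  for (int x = 0; x < width; ++x) {
    dst_uv[2 * x + 0] = (uint16_t)(src_u[x] << shift);
    dst_uv[2 * x + 1] = (uint16_t)(src_v[x] << shift);
  }
}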
diff --git a/third_party/libyuv/source/row_neon64.cc b/third_party/libyuv/source/row_neon64.cc
index 941c9b9805..da7e3c7cd4 100644
--- a/third_party/libyuv/source/row_neon64.cc
+++ b/third_party/libyuv/source/row_neon64.cc
@@ -18,93 +18,101 @@ extern "C" {
// This module is for GCC Neon armv8 64 bit.
#if !defined(LIBYUV_DISABLE_NEON) && defined(__aarch64__)
+// v0.8h: Y
+// v1.16b: 8U, 8V
+
// Read 8 Y, 4 U and 4 V from 422
#define READYUV422 \
- "ld1 {v0.8b}, [%0], #8 \n" \
- "ld1 {v1.s}[0], [%1], #4 \n" \
- "ld1 {v1.s}[1], [%2], #4 \n"
+ "ldr d0, [%[src_y]], #8 \n" \
+ "ld1 {v1.s}[0], [%[src_u]], #4 \n" \
+ "ld1 {v1.s}[1], [%[src_v]], #4 \n" \
+ "zip1 v0.16b, v0.16b, v0.16b \n" \
+ "prfm pldl1keep, [%[src_y], 448] \n" \
+ "zip1 v1.16b, v1.16b, v1.16b \n" \
+ "prfm pldl1keep, [%[src_u], 128] \n" \
+ "prfm pldl1keep, [%[src_v], 128] \n"
// Read 8 Y, 8 U and 8 V from 444
#define READYUV444 \
- "ld1 {v0.8b}, [%0], #8 \n" \
- "ld1 {v1.d}[0], [%1], #8 \n" \
- "ld1 {v1.d}[1], [%2], #8 \n" \
- "uaddlp v1.8h, v1.16b \n" \
- "rshrn v1.8b, v1.8h, #1 \n"
+ "ldr d0, [%[src_y]], #8 \n" \
+ "ld1 {v1.d}[0], [%[src_u]], #8 \n" \
+ "prfm pldl1keep, [%[src_y], 448] \n" \
+ "ld1 {v1.d}[1], [%[src_v]], #8 \n" \
+ "prfm pldl1keep, [%[src_u], 448] \n" \
+ "zip1 v0.16b, v0.16b, v0.16b \n" \
+ "prfm pldl1keep, [%[src_v], 448] \n"
// Read 8 Y, and set 4 U and 4 V to 128
#define READYUV400 \
- "ld1 {v0.8b}, [%0], #8 \n" \
- "movi v1.8b , #128 \n"
+ "ldr d0, [%[src_y]], #8 \n" \
+ "movi v1.16b, #128 \n" \
+ "prfm pldl1keep, [%[src_y], 448] \n" \
+ "zip1 v0.16b, v0.16b, v0.16b \n"
+
+static const uvec8 kNV12Table = {0, 0, 2, 2, 4, 4, 6, 6,
+ 1, 1, 3, 3, 5, 5, 7, 7};
+static const uvec8 kNV21Table = {1, 1, 3, 3, 5, 5, 7, 7,
+ 0, 0, 2, 2, 4, 4, 6, 6};
-// Read 8 Y and 4 UV from NV12
+// Read 8 Y and 4 UV from NV12 or NV21
#define READNV12 \
- "ld1 {v0.8b}, [%0], #8 \n" \
- "ld1 {v2.8b}, [%1], #8 \n" \
- "uzp1 v1.8b, v2.8b, v2.8b \n" \
- "uzp2 v3.8b, v2.8b, v2.8b \n" \
- "ins v1.s[1], v3.s[0] \n"
-
-// Read 8 Y and 4 VU from NV21
-#define READNV21 \
- "ld1 {v0.8b}, [%0], #8 \n" \
- "ld1 {v2.8b}, [%1], #8 \n" \
- "uzp1 v3.8b, v2.8b, v2.8b \n" \
- "uzp2 v1.8b, v2.8b, v2.8b \n" \
- "ins v1.s[1], v3.s[0] \n"
+ "ldr d0, [%[src_y]], #8 \n" \
+ "ldr d1, [%[src_uv]], #8 \n" \
+ "zip1 v0.16b, v0.16b, v0.16b \n" \
+ "prfm pldl1keep, [%[src_y], 448] \n" \
+ "tbl v1.16b, {v1.16b}, v2.16b \n" \
+ "prfm pldl1keep, [%[src_uv], 448] \n"
// Read 8 YUY2
-#define READYUY2 \
- "ld2 {v0.8b, v1.8b}, [%0], #16 \n" \
- "uzp2 v3.8b, v1.8b, v1.8b \n" \
- "uzp1 v1.8b, v1.8b, v1.8b \n" \
- "ins v1.s[1], v3.s[0] \n"
+#define READYUY2 \
+ "ld2 {v0.8b, v1.8b}, [%[src_yuy2]], #16 \n" \
+ "zip1 v0.16b, v0.16b, v0.16b \n" \
+ "prfm pldl1keep, [%[src_yuy2], 448] \n" \
+ "tbl v1.16b, {v1.16b}, v2.16b \n"
// Read 8 UYVY
-#define READUYVY \
- "ld2 {v2.8b, v3.8b}, [%0], #16 \n" \
- "orr v0.8b, v3.8b, v3.8b \n" \
- "uzp1 v1.8b, v2.8b, v2.8b \n" \
- "uzp2 v3.8b, v2.8b, v2.8b \n" \
- "ins v1.s[1], v3.s[0] \n"
-
-#define YUVTORGB_SETUP \
- "ld3r {v24.8h, v25.8h, v26.8h}, [%[kUVBiasBGR]] \n" \
- "ld1r {v31.4s}, [%[kYToRgb]] \n" \
- "ld2 {v27.8h, v28.8h}, [%[kUVToRB]] \n" \
- "ld2 {v29.8h, v30.8h}, [%[kUVToG]] \n"
-
-// clang-format off
-
-#define YUVTORGB(vR, vG, vB) \
- "uxtl v0.8h, v0.8b \n" /* Extract Y */ \
- "shll v2.8h, v1.8b, #8 \n" /* Replicate UV */ \
- "ushll2 v3.4s, v0.8h, #0 \n" /* Y */ \
- "ushll v0.4s, v0.4h, #0 \n" \
- "mul v3.4s, v3.4s, v31.4s \n" \
- "mul v0.4s, v0.4s, v31.4s \n" \
- "sqshrun v0.4h, v0.4s, #16 \n" \
- "sqshrun2 v0.8h, v3.4s, #16 \n" /* Y */ \
- "uaddw v1.8h, v2.8h, v1.8b \n" /* Replicate UV */ \
- "mov v2.d[0], v1.d[1] \n" /* Extract V */ \
- "uxtl v2.8h, v2.8b \n" \
- "uxtl v1.8h, v1.8b \n" /* Extract U */ \
- "mul v3.8h, v27.8h, v1.8h \n" \
- "mul v5.8h, v29.8h, v1.8h \n" \
- "mul v6.8h, v30.8h, v2.8h \n" \
- "mul v7.8h, v28.8h, v2.8h \n" \
- "sqadd v6.8h, v6.8h, v5.8h \n" \
- "sqadd " #vB ".8h, v24.8h, v0.8h \n" /* B */ \
- "sqadd " #vG ".8h, v25.8h, v0.8h \n" /* G */ \
- "sqadd " #vR ".8h, v26.8h, v0.8h \n" /* R */ \
- "sqadd " #vB ".8h, " #vB ".8h, v3.8h \n" /* B */ \
- "sqsub " #vG ".8h, " #vG ".8h, v6.8h \n" /* G */ \
- "sqadd " #vR ".8h, " #vR ".8h, v7.8h \n" /* R */ \
- "sqshrun " #vB ".8b, " #vB ".8h, #6 \n" /* B */ \
- "sqshrun " #vG ".8b, " #vG ".8h, #6 \n" /* G */ \
- "sqshrun " #vR ".8b, " #vR ".8h, #6 \n" /* R */
-
-// clang-format on
+#define READUYVY \
+ "ld2 {v3.8b, v4.8b}, [%[src_uyvy]], #16 \n" \
+ "zip1 v0.16b, v4.16b, v4.16b \n" \
+ "prfm pldl1keep, [%[src_uyvy], 448] \n" \
+ "tbl v1.16b, {v3.16b}, v2.16b \n"
+
+// UB VR UG VG
+// YG BB BG BR
+#define YUVTORGB_SETUP \
+ "ld4r {v28.16b, v29.16b, v30.16b, v31.16b}, [%[kUVCoeff]] \n" \
+ "ld4r {v24.8h, v25.8h, v26.8h, v27.8h}, [%[kRGBCoeffBias]] \n"
+
+// v16.8h: B
+// v17.8h: G
+// v18.8h: R
+
+// Convert from YUV to 2.14 fixed point RGB
+#define YUVTORGB \
+ "umull2 v3.4s, v0.8h, v24.8h \n" \
+ "umull v6.8h, v1.8b, v30.8b \n" \
+ "umull v0.4s, v0.4h, v24.4h \n" \
+ "umlal2 v6.8h, v1.16b, v31.16b \n" /* DG */ \
+ "uqshrn v0.4h, v0.4s, #16 \n" \
+ "uqshrn2 v0.8h, v3.4s, #16 \n" /* Y */ \
+ "umull v4.8h, v1.8b, v28.8b \n" /* DB */ \
+ "umull2 v5.8h, v1.16b, v29.16b \n" /* DR */ \
+ "add v17.8h, v0.8h, v26.8h \n" /* G */ \
+ "add v16.8h, v0.8h, v4.8h \n" /* B */ \
+ "add v18.8h, v0.8h, v5.8h \n" /* R */ \
+ "uqsub v17.8h, v17.8h, v6.8h \n" /* G */ \
+ "uqsub v16.8h, v16.8h, v25.8h \n" /* B */ \
+ "uqsub v18.8h, v18.8h, v27.8h \n" /* R */
+
+// Convert from 2.14 fixed point RGB To 8 bit RGB
+#define RGBTORGB8 \
+ "uqshrn v17.8b, v17.8h, #6 \n" \
+ "uqshrn v16.8b, v16.8h, #6 \n" \
+ "uqshrn v18.8b, v18.8h, #6 \n"
+
+#define YUVTORGB_REGS \
+ "v0", "v1", "v3", "v4", "v5", "v6", "v7", "v16", "v17", "v18", "v24", "v25", \
+ "v26", "v27", "v28", "v29", "v30", "v31"
void I444ToARGBRow_NEON(const uint8_t* src_y,
const uint8_t* src_u,
@@ -112,30 +120,22 @@ void I444ToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n" /* A */
- "1: \n"
- READYUV444
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%1, 448] \n"
- "prfm pldl1keep, [%2, 448] \n"
- "subs %w4, %w4, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb), // %3
- "+r"(width) // %4
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n" /* A */
+ "1: \n" READYUV444 YUVTORGB
+ RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v19");
}
void I422ToARGBRow_NEON(const uint8_t* src_y,
@@ -144,31 +144,22 @@ void I422ToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n" /* A */
-
- "1: \n"
- READYUV422
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "subs %w4, %w4, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb), // %3
- "+r"(width) // %4
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n" /* A */
+ "1: \n" READYUV422 YUVTORGB
+ RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v19");
}
void I444AlphaToARGBRow_NEON(const uint8_t* src_y,
@@ -178,32 +169,23 @@ void I444AlphaToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
+ asm volatile(
+ YUVTORGB_SETUP
"1: \n"
- READYUV444
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "ld1 {v23.8b}, [%3], #8 \n"
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "prfm pldl1keep, [%3, 448] \n"
- "subs %w5, %w5, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%4], #32 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(src_a), // %3
- "+r"(dst_argb), // %4
- "+r"(width) // %5
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ "ld1 {v19.8b}, [%[src_a]], #8 \n" READYUV444
+ "prfm pldl1keep, [%[src_a], 448] \n" YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [src_a] "+r"(src_a), // %[src_a]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v19");
}
void I422AlphaToARGBRow_NEON(const uint8_t* src_y,
@@ -213,32 +195,23 @@ void I422AlphaToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
+ asm volatile(
+ YUVTORGB_SETUP
"1: \n"
- READYUV422
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "ld1 {v23.8b}, [%3], #8 \n"
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "prfm pldl1keep, [%3, 448] \n"
- "subs %w5, %w5, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%4], #32 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(src_a), // %3
- "+r"(dst_argb), // %4
- "+r"(width) // %5
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ "ld1 {v19.8b}, [%[src_a]], #8 \n" READYUV422
+ "prfm pldl1keep, [%[src_a], 448] \n" YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [src_a] "+r"(src_a), // %[src_a]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v19");
}
void I422ToRGBARow_NEON(const uint8_t* src_y,
@@ -247,30 +220,22 @@ void I422ToRGBARow_NEON(const uint8_t* src_y,
uint8_t* dst_rgba,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v20.8b, #255 \n" /* A */
- "1: \n"
- READYUV422
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v23, v22, v21)
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "subs %w4, %w4, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_rgba), // %3
- "+r"(width) // %4
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v15.8b, #255 \n" /* A */
+ "1: \n" READYUV422 YUVTORGB
+ RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v15.8b,v16.8b,v17.8b,v18.8b}, [%[dst_rgba]], #32 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_rgba] "+r"(dst_rgba), // %[dst_rgba]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v15");
}
void I422ToRGB24Row_NEON(const uint8_t* src_y,
@@ -279,39 +244,29 @@ void I422ToRGB24Row_NEON(const uint8_t* src_y,
uint8_t* dst_rgb24,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "1: \n"
- READYUV422
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "subs %w4, %w4, #8 \n"
- "st3 {v20.8b,v21.8b,v22.8b}, [%3], #24 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_rgb24), // %3
- "+r"(width) // %4
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "1: \n" READYUV422 YUVTORGB
+ RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st3 {v16.8b,v17.8b,v18.8b}, [%[dst_rgb24]], #24 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS);
}
#define ARGBTORGB565 \
- "shll v0.8h, v22.8b, #8 \n" /* R */ \
- "shll v21.8h, v21.8b, #8 \n" /* G */ \
- "shll v20.8h, v20.8b, #8 \n" /* B */ \
- "sri v0.8h, v21.8h, #5 \n" /* RG */ \
- "sri v0.8h, v20.8h, #11 \n" /* RGB */
-
-// clang-format off
+ "shll v18.8h, v18.8b, #8 \n" /* R */ \
+ "shll v17.8h, v17.8b, #8 \n" /* G */ \
+ "shll v16.8h, v16.8b, #8 \n" /* B */ \
+ "sri v18.8h, v17.8h, #5 \n" /* RG */ \
+ "sri v18.8h, v16.8h, #11 \n" /* RGB */
void I422ToRGB565Row_NEON(const uint8_t* src_y,
const uint8_t* src_u,
@@ -320,38 +275,29 @@ void I422ToRGB565Row_NEON(const uint8_t* src_y,
const struct YuvConstants* yuvconstants,
int width) {
asm volatile(
- YUVTORGB_SETUP
- "1: \n"
- READYUV422
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%0, 448] \n"
- "subs %w4, %w4, #8 \n"
- ARGBTORGB565
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels RGB565.
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_rgb565), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30");
+ YUVTORGB_SETUP
+ "1: \n" READYUV422 YUVTORGB
+ RGBTORGB8 "subs %w[width], %w[width], #8 \n" ARGBTORGB565
+ "st1 {v18.8h}, [%[dst_rgb565]], #16 \n" // store 8 pixels RGB565.
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS);
}
#define ARGBTOARGB1555 \
- "shll v0.8h, v23.8b, #8 \n" /* A */ \
- "shll v22.8h, v22.8b, #8 \n" /* R */ \
- "shll v21.8h, v21.8b, #8 \n" /* G */ \
- "shll v20.8h, v20.8b, #8 \n" /* B */ \
- "sri v0.8h, v22.8h, #1 \n" /* AR */ \
- "sri v0.8h, v21.8h, #6 \n" /* ARG */ \
- "sri v0.8h, v20.8h, #11 \n" /* ARGB */
+ "shll v0.8h, v19.8b, #8 \n" /* A */ \
+ "shll v18.8h, v18.8b, #8 \n" /* R */ \
+ "shll v17.8h, v17.8b, #8 \n" /* G */ \
+ "shll v16.8h, v16.8b, #8 \n" /* B */ \
+ "sri v0.8h, v18.8h, #1 \n" /* AR */ \
+ "sri v0.8h, v17.8h, #6 \n" /* ARG */ \
+ "sri v0.8h, v16.8h, #11 \n" /* ARGB */
void I422ToARGB1555Row_NEON(const uint8_t* src_y,
const uint8_t* src_u,
@@ -360,40 +306,32 @@ void I422ToARGB1555Row_NEON(const uint8_t* src_y,
const struct YuvConstants* yuvconstants,
int width) {
asm volatile(
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n"
- "1: \n"
- READYUV422
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%0, 448] \n"
- "subs %w4, %w4, #8 \n"
- ARGBTOARGB1555
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels RGB565.
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb1555), // %3
- "+r"(width) // %4
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30");
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n"
+ "1: \n" READYUV422 YUVTORGB
+ RGBTORGB8
+ "subs %w[width], %w[width], #8 \n" ARGBTOARGB1555
+ "st1 {v0.8h}, [%[dst_argb1555]], #16 \n" // store 8 pixels
+ // RGB565.
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb1555] "+r"(dst_argb1555), // %[dst_argb1555]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v19");
}
-// clang-format on
#define ARGBTOARGB4444 \
- /* Input v20.8b<=B, v21.8b<=G, v22.8b<=R, v23.8b<=A, v4.8b<=0x0f */ \
- "ushr v20.8b, v20.8b, #4 \n" /* B */ \
- "bic v21.8b, v21.8b, v4.8b \n" /* G */ \
- "ushr v22.8b, v22.8b, #4 \n" /* R */ \
- "bic v23.8b, v23.8b, v4.8b \n" /* A */ \
- "orr v0.8b, v20.8b, v21.8b \n" /* BG */ \
- "orr v1.8b, v22.8b, v23.8b \n" /* RA */ \
+ /* Input v16.8b<=B, v17.8b<=G, v18.8b<=R, v19.8b<=A, v23.8b<=0x0f */ \
+ "ushr v16.8b, v16.8b, #4 \n" /* B */ \
+ "bic v17.8b, v17.8b, v23.8b \n" /* G */ \
+ "ushr v18.8b, v18.8b, #4 \n" /* R */ \
+ "bic v19.8b, v19.8b, v23.8b \n" /* A */ \
+ "orr v0.8b, v16.8b, v17.8b \n" /* BG */ \
+ "orr v1.8b, v18.8b, v19.8b \n" /* RA */ \
"zip1 v0.16b, v0.16b, v1.16b \n" /* BGRA */
void I422ToARGB4444Row_NEON(const uint8_t* src_y,
@@ -402,58 +340,46 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y,
uint8_t* dst_argb4444,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v4.16b, #0x0f \n" // bits to clear with vbic.
- "1: \n"
- READYUV422
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%0, 448] \n"
- "subs %w4, %w4, #8 \n"
- "movi v23.8b, #255 \n"
- ARGBTOARGB4444
- "prfm pldl1keep, [%1, 128] \n"
- "prfm pldl1keep, [%2, 128] \n"
- "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels ARGB4444.
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_u), // %1
- "+r"(src_v), // %2
- "+r"(dst_argb4444), // %3
- "+r"(width) // %4
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v23.16b, #0x0f \n" // bits to clear with
+ // vbic.
+ "1: \n" READYUV422 YUVTORGB
+ RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "movi v19.8b, #255 \n" ARGBTOARGB4444
+ "st1 {v0.8h}, [%[dst_argb4444]], #16 \n" // store 8
+ // pixels
+ // ARGB4444.
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_u] "+r"(src_u), // %[src_u]
+ [src_v] "+r"(src_v), // %[src_v]
+ [dst_argb4444] "+r"(dst_argb4444), // %[dst_argb4444]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v19", "v23");
}
void I400ToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n"
- "1: \n"
- READYUV400
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n"
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n"
+ "1: \n" READYUV400 YUVTORGB
+ RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
"b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(dst_argb), // %1
- "+r"(width) // %2
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ : [src_y] "+r"(src_y), // %[src_y]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias]
+ : "cc", "memory", YUVTORGB_REGS, "v19");
}
void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) {
@@ -479,28 +405,22 @@ void NV12ToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n"
- "1: \n"
- READNV12
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%1, 256] \n"
- "subs %w3, %w3, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_uv), // %1
- "+r"(dst_argb), // %2
- "+r"(width) // %3
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n"
+ "ldr q2, [%[kNV12Table]] \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_uv), // %[src_uv]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias]
+ [kNV12Table] "r"(&kNV12Table)
+ : "cc", "memory", YUVTORGB_REGS, "v2", "v19");
}
void NV21ToARGBRow_NEON(const uint8_t* src_y,
@@ -508,28 +428,22 @@ void NV21ToARGBRow_NEON(const uint8_t* src_y,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n"
- "1: \n"
- READNV21
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%1, 256] \n"
- "subs %w3, %w3, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_vu), // %1
- "+r"(dst_argb), // %2
- "+r"(width) // %3
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n"
+ "ldr q2, [%[kNV12Table]] \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_vu), // %[src_uv]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias]
+ [kNV12Table] "r"(&kNV21Table)
+ : "cc", "memory", YUVTORGB_REGS, "v2", "v19");
}
void NV12ToRGB24Row_NEON(const uint8_t* src_y,
@@ -537,27 +451,21 @@ void NV12ToRGB24Row_NEON(const uint8_t* src_y,
uint8_t* dst_rgb24,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "1: \n"
- READNV12
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%1, 256] \n"
- "subs %w3, %w3, #8 \n"
- "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_uv), // %1
- "+r"(dst_rgb24), // %2
- "+r"(width) // %3
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "ldr q2, [%[kNV12Table]] \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st3 {v16.8b,v17.8b,v18.8b}, [%[dst_rgb24]], #24 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_uv), // %[src_uv]
+ [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias]
+ [kNV12Table] "r"(&kNV12Table)
+ : "cc", "memory", YUVTORGB_REGS, "v2");
}
void NV21ToRGB24Row_NEON(const uint8_t* src_y,
@@ -565,27 +473,21 @@ void NV21ToRGB24Row_NEON(const uint8_t* src_y,
uint8_t* dst_rgb24,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "1: \n"
- READNV21
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%1, 256] \n"
- "subs %w3, %w3, #8 \n"
- "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n"
- "b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_vu), // %1
- "+r"(dst_rgb24), // %2
- "+r"(width) // %3
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "ldr q2, [%[kNV12Table]] \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st3 {v16.8b,v17.8b,v18.8b}, [%[dst_rgb24]], #24 \n"
+ "b.gt 1b \n"
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_vu), // %[src_uv]
+ [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias]
+ [kNV12Table] "r"(&kNV21Table)
+ : "cc", "memory", YUVTORGB_REGS, "v2");
}
void NV12ToRGB565Row_NEON(const uint8_t* src_y,
@@ -594,75 +496,64 @@ void NV12ToRGB565Row_NEON(const uint8_t* src_y,
const struct YuvConstants* yuvconstants,
int width) {
asm volatile(
- YUVTORGB_SETUP "1: \n" READNV12
- "prfm pldl1keep, [%0, 448] \n" YUVTORGB(
- v22, v21, v20) ARGBTORGB565
- "prfm pldl1keep, [%1, 256] \n"
- "subs %w3, %w3, #8 \n"
- "st1 {v0.8h}, [%2], 16 \n" // store 8 pixels
+ YUVTORGB_SETUP
+ "ldr q2, [%[kNV12Table]] \n"
+ "1: \n" READNV12 YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n" ARGBTORGB565
+ "st1 {v18.8h}, [%[dst_rgb565]], #16 \n" // store 8
+ // pixels
+ // RGB565.
"b.gt 1b \n"
- : "+r"(src_y), // %0
- "+r"(src_uv), // %1
- "+r"(dst_rgb565), // %2
- "+r"(width) // %3
- : [kUVToRB] "r"(&yuvconstants->kUVToRB),
- [kUVToG] "r"(&yuvconstants->kUVToG),
- [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb] "r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30");
+ : [src_y] "+r"(src_y), // %[src_y]
+ [src_uv] "+r"(src_uv), // %[src_uv]
+ [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias]
+ [kNV12Table] "r"(&kNV12Table)
+ : "cc", "memory", YUVTORGB_REGS, "v2");
}
void YUY2ToARGBRow_NEON(const uint8_t* src_yuy2,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n"
- "1: \n"
- READYUY2
- "prfm pldl1keep, [%0, 448] \n"
- YUVTORGB(v22, v21, v20)
- "subs %w2, %w2, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n"
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n"
+ "ldr q2, [%[kNV12Table]] \n"
+ "1: \n" READYUY2 YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
"b.gt 1b \n"
- : "+r"(src_yuy2), // %0
- "+r"(dst_argb), // %1
- "+r"(width) // %2
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ : [src_yuy2] "+r"(src_yuy2), // %[src_yuy2]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias]
+ [kNV12Table] "r"(&kNV12Table)
+ : "cc", "memory", YUVTORGB_REGS, "v2", "v19");
}
void UYVYToARGBRow_NEON(const uint8_t* src_uyvy,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- asm volatile (
- YUVTORGB_SETUP
- "movi v23.8b, #255 \n"
- "1: \n"
- READUYVY
- YUVTORGB(v22, v21, v20)
- "prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n"
- "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], 32 \n"
- "b.gt 1b \n"
- : "+r"(src_uyvy), // %0
- "+r"(dst_argb), // %1
- "+r"(width) // %2
- : [kUVToRB]"r"(&yuvconstants->kUVToRB),
- [kUVToG]"r"(&yuvconstants->kUVToG),
- [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR),
- [kYToRgb]"r"(&yuvconstants->kYToRgb)
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20",
- "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"
- );
+ asm volatile(
+ YUVTORGB_SETUP
+ "movi v19.8b, #255 \n"
+ "ldr q2, [%[kNV12Table]] \n"
+ "1: \n" READUYVY YUVTORGB RGBTORGB8
+ "subs %w[width], %w[width], #8 \n"
+ "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n"
+ "b.gt 1b \n"
+ : [src_uyvy] "+r"(src_uyvy), // %[src_yuy2]
+ [dst_argb] "+r"(dst_argb), // %[dst_argb]
+ [width] "+r"(width) // %[width]
+ : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff]
+ [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias]
+ [kNV12Table] "r"(&kNV12Table)
+ : "cc", "memory", YUVTORGB_REGS, "v2", "v19");
}
// Reads 16 pairs of UV and write even values to dst_u and odd to dst_v.
@@ -673,8 +564,8 @@ void SplitUVRow_NEON(const uint8_t* src_uv,
asm volatile(
"1: \n"
"ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pairs of UV
- "prfm pldl1keep, [%0, 448] \n"
"subs %w3, %w3, #16 \n" // 16 processed per loop
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v0.16b}, [%1], #16 \n" // store U
"st1 {v1.16b}, [%2], #16 \n" // store V
"b.gt 1b \n"
@@ -696,9 +587,9 @@ void MergeUVRow_NEON(const uint8_t* src_u,
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load U
"ld1 {v1.16b}, [%1], #16 \n" // load V
+ "subs %w3, %w3, #16 \n" // 16 processed per loop
"prfm pldl1keep, [%0, 448] \n"
"prfm pldl1keep, [%1, 448] \n"
- "subs %w3, %w3, #16 \n" // 16 processed per loop
"st2 {v0.16b,v1.16b}, [%2], #32 \n" // store 16 pairs of UV
"b.gt 1b \n"
: "+r"(src_u), // %0
@@ -719,8 +610,8 @@ void SplitRGBRow_NEON(const uint8_t* src_rgb,
asm volatile(
"1: \n"
"ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 RGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w4, %w4, #16 \n" // 16 processed per loop
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v0.16b}, [%1], #16 \n" // store R
"st1 {v1.16b}, [%2], #16 \n" // store G
"st1 {v2.16b}, [%3], #16 \n" // store B
@@ -746,12 +637,11 @@ void MergeRGBRow_NEON(const uint8_t* src_r,
"ld1 {v0.16b}, [%0], #16 \n" // load R
"ld1 {v1.16b}, [%1], #16 \n" // load G
"ld1 {v2.16b}, [%2], #16 \n" // load B
+ "subs %w4, %w4, #16 \n" // 16 processed per loop
"prfm pldl1keep, [%0, 448] \n"
"prfm pldl1keep, [%1, 448] \n"
"prfm pldl1keep, [%2, 448] \n"
- "subs %w4, %w4, #16 \n" // 16 processed per loop
"st3 {v0.16b,v1.16b,v2.16b}, [%3], #48 \n" // store 16 RGB
- "prfm pldl1keep, [%0, 448] \n"
"b.gt 1b \n"
: "+r"(src_r), // %0
"+r"(src_g), // %1
@@ -773,8 +663,8 @@ void SplitARGBRow_NEON(const uint8_t* src_rgba,
asm volatile(
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w5, %w5, #16 \n" // 16 processed per loop
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v0.16b}, [%3], #16 \n" // store B
"st1 {v1.16b}, [%2], #16 \n" // store G
"st1 {v2.16b}, [%1], #16 \n" // store R
@@ -804,11 +694,11 @@ void MergeARGBRow_NEON(const uint8_t* src_r,
"ld1 {v1.16b}, [%1], #16 \n" // load G
"ld1 {v0.16b}, [%2], #16 \n" // load B
"ld1 {v3.16b}, [%3], #16 \n" // load A
+ "subs %w5, %w5, #16 \n" // 16 processed per loop
"prfm pldl1keep, [%0, 448] \n"
"prfm pldl1keep, [%1, 448] \n"
"prfm pldl1keep, [%2, 448] \n"
"prfm pldl1keep, [%3, 448] \n"
- "subs %w5, %w5, #16 \n" // 16 processed per loop
"st4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%4], #64 \n" // store 16ARGB
"b.gt 1b \n"
: "+r"(src_r), // %0
@@ -831,8 +721,8 @@ void SplitXRGBRow_NEON(const uint8_t* src_rgba,
asm volatile(
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w4, %w4, #16 \n" // 16 processed per loop
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v0.16b}, [%3], #16 \n" // store B
"st1 {v1.16b}, [%2], #16 \n" // store G
"st1 {v2.16b}, [%1], #16 \n" // store R
@@ -859,10 +749,10 @@ void MergeXRGBRow_NEON(const uint8_t* src_r,
"ld1 {v2.16b}, [%0], #16 \n" // load R
"ld1 {v1.16b}, [%1], #16 \n" // load G
"ld1 {v0.16b}, [%2], #16 \n" // load B
+ "subs %w4, %w4, #16 \n" // 16 processed per loop
"prfm pldl1keep, [%0, 448] \n"
"prfm pldl1keep, [%1, 448] \n"
"prfm pldl1keep, [%2, 448] \n"
- "subs %w4, %w4, #16 \n" // 16 processed per loop
"st4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%3], #64 \n" // store 16ARGB
"b.gt 1b \n"
: "+r"(src_r), // %0
@@ -875,6 +765,240 @@ void MergeXRGBRow_NEON(const uint8_t* src_r,
);
}
+void MergeXR30Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int depth,
+ int width) {
+ int shift = 10 - depth;
+ asm volatile(
+ "movi v30.16b, #255 \n"
+ "ushr v30.4s, v30.4s, #22 \n" // 1023
+ "dup v31.4s, %w5 \n"
+ "1: \n"
+ "ldr d2, [%2], #8 \n" // B
+ "ldr d1, [%1], #8 \n" // G
+ "ldr d0, [%0], #8 \n" // R
+ "ushll v2.4s, v2.4h, #0 \n" // B
+ "ushll v1.4s, v1.4h, #0 \n" // G
+ "ushll v0.4s, v0.4h, #0 \n" // R
+ "ushl v2.4s, v2.4s, v31.4s \n" // 000B
+ "ushl v1.4s, v1.4s, v31.4s \n" // G
+ "ushl v0.4s, v0.4s, v31.4s \n" // R
+ "umin v2.4s, v2.4s, v30.4s \n"
+ "umin v1.4s, v1.4s, v30.4s \n"
+ "umin v0.4s, v0.4s, v30.4s \n"
+ "sli v2.4s, v1.4s, #10 \n" // 00GB
+ "sli v2.4s, v0.4s, #20 \n" // 0RGB
+ "orr v2.4s, #0xc0, lsl #24 \n" // ARGB (AR30)
+ "subs %w4, %w4, #4 \n"
+ "str q2, [%3], #16 \n"
+ "b.gt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar30), // %3
+ "+r"(width) // %4
+ : "r"(shift) // %5
+ : "memory", "cc", "v0", "v1", "v2", "v30", "v31");
+}
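MergeXR30Row_NEON scales each plane to 10 bits, clamps to 1023 and packs 2-bit alpha plus 10-bit R/G/B into one 32-bit word. A scalar sketch of the per-pixel packing, assuming depth <= 10 so the ushl count 10 - depth stays non-negative (ushl with a negative register count would shift right instead):

// Hypothetical scalar model of the AR30 packing done per pixel.
static unsigned int PackAR30(unsigned short r, unsigned short g,
                             unsigned short b, int depth) {
  int shift = 10 - depth;
  unsigned int rr = (unsigned int)r << shift;
  unsigned int gg = (unsigned int)g << shift;
  unsigned int bb = (unsigned int)b << shift;
  if (rr > 1023) rr = 1023;  // umin against v30 (1023)
  if (gg > 1023) gg = 1023;
  if (bb > 1023) bb = 1023;
  return 0xC0000000u | (rr << 20) | (gg << 10) | bb;  // alpha = 3
}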
+
+void MergeXR30Row_10_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_ar30,
+ int /* depth */,
+ int width) {
+ asm volatile(
+ "movi v30.16b, #255 \n"
+ "ushr v30.4s, v30.4s, #22 \n" // 1023
+ "1: \n"
+ "ldr d2, [%2], #8 \n" // B
+ "ldr d1, [%1], #8 \n" // G
+ "ldr d0, [%0], #8 \n" // R
+ "ushll v2.4s, v2.4h, #0 \n" // 000B
+ "ushll v1.4s, v1.4h, #0 \n" // G
+ "ushll v0.4s, v0.4h, #0 \n" // R
+ "umin v2.4s, v2.4s, v30.4s \n"
+ "umin v1.4s, v1.4s, v30.4s \n"
+ "umin v0.4s, v0.4s, v30.4s \n"
+ "sli v2.4s, v1.4s, #10 \n" // 00GB
+ "sli v2.4s, v0.4s, #20 \n" // 0RGB
+ "orr v2.4s, #0xc0, lsl #24 \n" // ARGB (AR30)
+ "subs %w4, %w4, #4 \n"
+ "str q2, [%3], #16 \n"
+ "b.gt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar30), // %3
+ "+r"(width) // %4
+ :
+ : "memory", "cc", "v0", "v1", "v2", "v30");
+}
+
+void MergeAR64Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ int shift = 16 - depth;
+ int mask = (1 << depth) - 1;
+ asm volatile(
+
+ "dup v30.8h, %w7 \n"
+ "dup v31.8h, %w6 \n"
+ "1: \n"
+ "ldr q2, [%0], #16 \n" // R
+ "ldr q1, [%1], #16 \n" // G
+ "ldr q0, [%2], #16 \n" // B
+ "ldr q3, [%3], #16 \n" // A
+ "umin v2.8h, v2.8h, v30.8h \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "umin v1.8h, v1.8h, v30.8h \n"
+ "prfm pldl1keep, [%1, 448] \n"
+ "umin v0.8h, v0.8h, v30.8h \n"
+ "prfm pldl1keep, [%2, 448] \n"
+ "umin v3.8h, v3.8h, v30.8h \n"
+ "prfm pldl1keep, [%3, 448] \n"
+ "ushl v2.8h, v2.8h, v31.8h \n"
+ "ushl v1.8h, v1.8h, v31.8h \n"
+ "ushl v0.8h, v0.8h, v31.8h \n"
+ "ushl v3.8h, v3.8h, v31.8h \n"
+ "subs %w5, %w5, #8 \n"
+ "st4 {v0.8h, v1.8h, v2.8h, v3.8h}, [%4], #64 \n"
+ "b.gt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(src_a), // %3
+ "+r"(dst_ar64), // %4
+ "+r"(width) // %5
+ : "r"(shift), // %6
+ "r"(mask) // %7
+ : "memory", "cc", "v0", "v1", "v2", "v3", "v31");
+}
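Per sample, MergeAR64Row_NEON clamps to the depth's maximum value (the umin against v30) and then shifts the result up to a full 16-bit range (the ushl by 16 - depth); roughly:

// Hypothetical scalar model of MergeAR64Row's per-sample transform.
static unsigned short To16Bit(unsigned short v, int depth) {
  unsigned short max_value = (unsigned short)((1 << depth) - 1);
  if (v > max_value) v = max_value;            // umin v30
  return (unsigned short)(v << (16 - depth));  // ushl v31
}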
+
+void MergeXR64Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint16_t* dst_ar64,
+ int depth,
+ int width) {
+ int shift = 16 - depth;
+ int mask = (1 << depth) - 1;
+ asm volatile(
+
+ "movi v3.16b, #0xff \n" // A (0xffff)
+ "dup v30.8h, %w6 \n"
+ "dup v31.8h, %w5 \n"
+
+ "1: \n"
+ "ldr q2, [%0], #16 \n" // R
+ "ldr q1, [%1], #16 \n" // G
+ "ldr q0, [%2], #16 \n" // B
+ "umin v2.8h, v2.8h, v30.8h \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "umin v1.8h, v1.8h, v30.8h \n"
+ "prfm pldl1keep, [%1, 448] \n"
+ "umin v0.8h, v0.8h, v30.8h \n"
+ "prfm pldl1keep, [%2, 448] \n"
+ "ushl v2.8h, v2.8h, v31.8h \n"
+ "ushl v1.8h, v1.8h, v31.8h \n"
+ "ushl v0.8h, v0.8h, v31.8h \n"
+ "subs %w4, %w4, #8 \n"
+ "st4 {v0.8h, v1.8h, v2.8h, v3.8h}, [%3], #64 \n"
+ "b.gt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_ar64), // %3
+ "+r"(width) // %4
+ : "r"(shift), // %5
+ "r"(mask) // %6
+ : "memory", "cc", "v0", "v1", "v2", "v3", "v31");
+}
+
+void MergeARGB16To8Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ const uint16_t* src_a,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ int shift = 8 - depth;
+ asm volatile(
+
+ "dup v31.8h, %w6 \n"
+ "1: \n"
+ "ldr q2, [%0], #16 \n" // R
+ "ldr q1, [%1], #16 \n" // G
+ "ldr q0, [%2], #16 \n" // B
+ "ldr q3, [%3], #16 \n" // A
+ "ushl v2.8h, v2.8h, v31.8h \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "ushl v1.8h, v1.8h, v31.8h \n"
+ "prfm pldl1keep, [%1, 448] \n"
+ "ushl v0.8h, v0.8h, v31.8h \n"
+ "prfm pldl1keep, [%2, 448] \n"
+ "ushl v3.8h, v3.8h, v31.8h \n"
+ "prfm pldl1keep, [%3, 448] \n"
+ "uqxtn v2.8b, v2.8h \n"
+ "uqxtn v1.8b, v1.8h \n"
+ "uqxtn v0.8b, v0.8h \n"
+ "uqxtn v3.8b, v3.8h \n"
+ "subs %w5, %w5, #8 \n"
+ "st4 {v0.8b, v1.8b, v2.8b, v3.8b}, [%4], #32 \n"
+ "b.gt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(src_a), // %3
+ "+r"(dst_argb), // %4
+ "+r"(width) // %5
+ : "r"(shift) // %6
+ : "memory", "cc", "v0", "v1", "v2", "v3", "v31");
+}
+
+void MergeXRGB16To8Row_NEON(const uint16_t* src_r,
+ const uint16_t* src_g,
+ const uint16_t* src_b,
+ uint8_t* dst_argb,
+ int depth,
+ int width) {
+ int shift = 8 - depth;
+ asm volatile(
+
+ "dup v31.8h, %w5 \n"
+ "movi v3.8b, #0xff \n" // A (0xff)
+ "1: \n"
+ "ldr q2, [%0], #16 \n" // R
+ "ldr q1, [%1], #16 \n" // G
+ "ldr q0, [%2], #16 \n" // B
+ "ushl v2.8h, v2.8h, v31.8h \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "ushl v1.8h, v1.8h, v31.8h \n"
+ "prfm pldl1keep, [%1, 448] \n"
+ "ushl v0.8h, v0.8h, v31.8h \n"
+ "prfm pldl1keep, [%2, 448] \n"
+ "uqxtn v2.8b, v2.8h \n"
+ "uqxtn v1.8b, v1.8h \n"
+ "uqxtn v0.8b, v0.8h \n"
+ "subs %w4, %w4, #8 \n"
+ "st4 {v0.8b, v1.8b, v2.8b, v3.8b}, [%3], #32 \n"
+ "b.gt 1b \n"
+ : "+r"(src_r), // %0
+ "+r"(src_g), // %1
+ "+r"(src_b), // %2
+ "+r"(dst_argb), // %3
+ "+r"(width) // %4
+ : "r"(shift) // %5
+ : "memory", "cc", "v0", "v1", "v2", "v3", "v31");
+}
+
// Copy multiple of 32.
void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width) {
asm volatile(
@@ -1072,10 +1196,10 @@ void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width) {
"movi v5.8b, #255 \n" // Alpha
"1: \n"
"ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "orr v3.8b, v1.8b, v1.8b \n" // move g
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
- "orr v3.8b, v1.8b, v1.8b \n" // move g
- "orr v4.8b, v0.8b, v0.8b \n" // move r
+ "orr v4.8b, v0.8b, v0.8b \n" // move r
"st4 {v2.8b,v3.8b,v4.8b,v5.8b}, [%1], #32 \n" // store b g r a
"b.gt 1b \n"
: "+r"(src_raw), // %0
@@ -1091,10 +1215,10 @@ void RAWToRGBARow_NEON(const uint8_t* src_raw, uint8_t* dst_rgba, int width) {
"movi v0.8b, #255 \n" // Alpha
"1: \n"
"ld3 {v3.8b,v4.8b,v5.8b}, [%0], #24 \n" // read r g b
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "orr v2.8b, v4.8b, v4.8b \n" // move g
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
- "orr v2.8b, v4.8b, v4.8b \n" // move g
- "orr v1.8b, v5.8b, v5.8b \n" // move r
+ "orr v1.8b, v5.8b, v5.8b \n" // move r
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store a b g r
"b.gt 1b \n"
: "+r"(src_raw), // %0
@@ -1109,9 +1233,9 @@ void RAWToRGB24Row_NEON(const uint8_t* src_raw, uint8_t* dst_rgb24, int width) {
asm volatile(
"1: \n"
"ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"orr v3.8b, v1.8b, v1.8b \n" // move g
+ "prfm pldl1keep, [%0, 448] \n"
"orr v4.8b, v0.8b, v0.8b \n" // move r
"st3 {v2.8b,v3.8b,v4.8b}, [%1], #24 \n" // store b g r
"b.gt 1b \n"
@@ -1143,9 +1267,8 @@ void RGB565ToARGBRow_NEON(const uint8_t* src_rgb565,
"movi v3.8b, #255 \n" // Alpha
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
- RGB565TOARGB
+ "prfm pldl1keep, [%0, 448] \n" RGB565TOARGB
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB
"b.gt 1b \n"
: "+r"(src_rgb565), // %0
@@ -1233,9 +1356,8 @@ void ARGB4444ToARGBRow_NEON(const uint8_t* src_argb4444,
asm volatile(
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
- ARGB4444TOARGB
+ "prfm pldl1keep, [%0, 448] \n" ARGB4444TOARGB
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB
"b.gt 1b \n"
: "+r"(src_argb4444), // %0
@@ -1252,8 +1374,8 @@ void ARGBToRGB24Row_NEON(const uint8_t* src_argb,
asm volatile(
"1: \n"
"ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load 8 ARGB
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
"st3 {v1.8b,v2.8b,v3.8b}, [%1], #24 \n" // store 8 pixels of
// RGB24
"b.gt 1b \n"
@@ -1269,9 +1391,9 @@ void ARGBToRAWRow_NEON(const uint8_t* src_argb, uint8_t* dst_raw, int width) {
asm volatile(
"1: \n"
"ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load b g r a
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "orr v4.8b, v2.8b, v2.8b \n" // mov g
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
- "orr v4.8b, v2.8b, v2.8b \n" // mov g
"orr v5.8b, v1.8b, v1.8b \n" // mov b
"st3 {v3.8b,v4.8b,v5.8b}, [%1], #24 \n" // store r g b
"b.gt 1b \n"
@@ -1287,8 +1409,8 @@ void YUY2ToYRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_y, int width) {
asm volatile(
"1: \n"
"ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of YUY2.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #16 \n" // 16 processed per loop.
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v0.16b}, [%1], #16 \n" // store 16 pixels of Y.
"b.gt 1b \n"
: "+r"(src_yuy2), // %0
@@ -1303,8 +1425,8 @@ void UYVYToYRow_NEON(const uint8_t* src_uyvy, uint8_t* dst_y, int width) {
asm volatile(
"1: \n"
"ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of UYVY.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #16 \n" // 16 processed per loop.
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v1.16b}, [%1], #16 \n" // store 16 pixels of Y.
"b.gt 1b \n"
: "+r"(src_uyvy), // %0
@@ -1322,8 +1444,8 @@ void YUY2ToUV422Row_NEON(const uint8_t* src_yuy2,
asm volatile(
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 YUY2
- "prfm pldl1keep, [%0, 448] \n"
"subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs.
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v1.8b}, [%1], #8 \n" // store 8 U.
"st1 {v3.8b}, [%2], #8 \n" // store 8 V.
"b.gt 1b \n"
@@ -1343,8 +1465,8 @@ void UYVYToUV422Row_NEON(const uint8_t* src_uyvy,
asm volatile(
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 UYVY
- "prfm pldl1keep, [%0, 448] \n"
"subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs.
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v0.8b}, [%1], #8 \n" // store 8 U.
"st1 {v2.8b}, [%2], #8 \n" // store 8 V.
"b.gt 1b \n"
@@ -1366,10 +1488,10 @@ void YUY2ToUVRow_NEON(const uint8_t* src_yuy2,
asm volatile(
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels
- "prfm pldl1keep, [%0, 448] \n"
"subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs.
"ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row
"urhadd v1.8b, v1.8b, v5.8b \n" // average rows of U
+ "prfm pldl1keep, [%0, 448] \n"
"urhadd v3.8b, v3.8b, v7.8b \n" // average rows of V
"st1 {v1.8b}, [%2], #8 \n" // store 8 U.
"st1 {v3.8b}, [%3], #8 \n" // store 8 V.
@@ -1394,10 +1516,10 @@ void UYVYToUVRow_NEON(const uint8_t* src_uyvy,
asm volatile(
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels
- "prfm pldl1keep, [%0, 448] \n"
"subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs.
"ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row
"urhadd v0.8b, v0.8b, v4.8b \n" // average rows of U
+ "prfm pldl1keep, [%0, 448] \n"
"urhadd v2.8b, v2.8b, v6.8b \n" // average rows of V
"st1 {v0.8b}, [%2], #8 \n" // store 8 U.
"st1 {v2.8b}, [%3], #8 \n" // store 8 V.
@@ -1422,8 +1544,8 @@ void ARGBShuffleRow_NEON(const uint8_t* src_argb,
"ld1 {v2.16b}, [%3] \n" // shuffler
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 4 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #4 \n" // 4 processed per loop
+ "prfm pldl1keep, [%0, 448] \n"
"tbl v1.16b, {v0.16b}, v2.16b \n" // look up 4 pixels
"st1 {v1.16b}, [%1], #16 \n" // store 4.
"b.gt 1b \n"
@@ -1443,11 +1565,11 @@ void I422ToYUY2Row_NEON(const uint8_t* src_y,
asm volatile(
"1: \n"
"ld2 {v0.8b, v1.8b}, [%0], #16 \n" // load 16 Ys
- "prfm pldl1keep, [%0, 448] \n"
+ "subs %w4, %w4, #16 \n" // 16 pixels
"orr v2.8b, v1.8b, v1.8b \n"
+ "prfm pldl1keep, [%0, 448] \n"
"ld1 {v1.8b}, [%1], #8 \n" // load 8 Us
"ld1 {v3.8b}, [%2], #8 \n" // load 8 Vs
- "subs %w4, %w4, #16 \n" // 16 pixels
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%3], #32 \n" // Store 16 pixels.
"b.gt 1b \n"
: "+r"(src_y), // %0
@@ -1467,8 +1589,8 @@ void I422ToUYVYRow_NEON(const uint8_t* src_y,
asm volatile(
"1: \n"
"ld2 {v1.8b,v2.8b}, [%0], #16 \n" // load 16 Ys
- "prfm pldl1keep, [%0, 448] \n"
"orr v3.8b, v2.8b, v2.8b \n"
+ "prfm pldl1keep, [%0, 448] \n"
"ld1 {v0.8b}, [%1], #8 \n" // load 8 Us
"ld1 {v2.8b}, [%2], #8 \n" // load 8 Vs
"subs %w4, %w4, #16 \n" // 16 pixels
@@ -1488,18 +1610,17 @@ void ARGBToRGB565Row_NEON(const uint8_t* src_argb,
int width) {
asm volatile(
"1: \n"
- "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8
+ "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8
// pixels
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
- ARGBTORGB565
- "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels RGB565.
+ "prfm pldl1keep, [%0, 448] \n" ARGBTORGB565
+ "st1 {v18.16b}, [%1], #16 \n" // store 8 pixels RGB565.
"b.gt 1b \n"
: "+r"(src_argb), // %0
"+r"(dst_rgb565), // %1
"+r"(width) // %2
:
- : "cc", "memory", "v0", "v20", "v21", "v22", "v23");
+ : "cc", "memory", "v16", "v17", "v18", "v19");
}
void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb,
@@ -1509,20 +1630,20 @@ void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb,
asm volatile(
"dup v1.4s, %w2 \n" // dither4
"1: \n"
- "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" // load 8
+ "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%1], #32 \n" // load 8
// pixels
- "prfm pldl1keep, [%0, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
- "uqadd v20.8b, v20.8b, v1.8b \n"
- "uqadd v21.8b, v21.8b, v1.8b \n"
- "uqadd v22.8b, v22.8b, v1.8b \n" ARGBTORGB565
- "st1 {v0.16b}, [%0], #16 \n" // store 8 pixels RGB565.
+ "uqadd v16.8b, v16.8b, v1.8b \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "uqadd v17.8b, v17.8b, v1.8b \n"
+ "uqadd v18.8b, v18.8b, v1.8b \n" ARGBTORGB565
+ "st1 {v18.16b}, [%0], #16 \n" // store 8 pixels RGB565.
"b.gt 1b \n"
: "+r"(dst_rgb) // %0
: "r"(src_argb), // %1
"r"(dither4), // %2
"r"(width) // %3
- : "cc", "memory", "v0", "v1", "v20", "v21", "v22", "v23");
+ : "cc", "memory", "v1", "v16", "v17", "v18", "v19");
}
void ARGBToARGB1555Row_NEON(const uint8_t* src_argb,
@@ -1530,39 +1651,131 @@ void ARGBToARGB1555Row_NEON(const uint8_t* src_argb,
int width) {
asm volatile(
"1: \n"
- "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8
+ "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8
// pixels
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
- ARGBTOARGB1555
+ "prfm pldl1keep, [%0, 448] \n" ARGBTOARGB1555
"st1 {v0.16b}, [%1], #16 \n" // store 8 pixels
"b.gt 1b \n"
: "+r"(src_argb), // %0
"+r"(dst_argb1555), // %1
"+r"(width) // %2
:
- : "cc", "memory", "v0", "v20", "v21", "v22", "v23");
+ : "cc", "memory", "v0", "v16", "v17", "v18", "v19");
}
void ARGBToARGB4444Row_NEON(const uint8_t* src_argb,
uint8_t* dst_argb4444,
int width) {
asm volatile(
- "movi v4.16b, #0x0f \n" // bits to clear with
+ "movi v23.16b, #0x0f \n" // bits to clear with
// vbic.
"1: \n"
- "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8
+ "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8
// pixels
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
- ARGBTOARGB4444
+ "prfm pldl1keep, [%0, 448] \n" ARGBTOARGB4444
"st1 {v0.16b}, [%1], #16 \n" // store 8 pixels
"b.gt 1b \n"
: "+r"(src_argb), // %0
"+r"(dst_argb4444), // %1
"+r"(width) // %2
:
- : "cc", "memory", "v0", "v1", "v4", "v20", "v21", "v22", "v23");
+ : "cc", "memory", "v0", "v1", "v16", "v17", "v18", "v19", "v23");
+}
+
+static const uvec8 kShuffleARGBToABGR = {2, 1, 0, 3, 6, 5, 4, 7,
+ 10, 9, 8, 11, 14, 13, 12, 15};
+
+void ARGBToAR64Row_NEON(const uint8_t* src_argb,
+ uint16_t* dst_ar64,
+ int width) {
+ asm volatile(
+ "1: \n"
+ "ldp q0, q2, [%0], #32 \n" // load 8 pixels
+ "mov v1.16b, v0.16b \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "mov v3.16b, v2.16b \n"
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "st2 {v0.16b, v1.16b}, [%1], #32 \n" // store 4 pixels
+ "st2 {v2.16b, v3.16b}, [%1], #32 \n" // store 4 pixels
+ "b.gt 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ar64), // %1
+ "+r"(width) // %2
+ :
+ : "cc", "memory", "v0", "v1", "v2", "v3");
+}
+
+void ARGBToAB64Row_NEON(const uint8_t* src_argb,
+ uint16_t* dst_ab64,
+ int width) {
+ asm volatile(
+ "ld1 {v4.16b}, %3 \n" // shuffler
+ "1: \n"
+ "ldp q0, q2, [%0], #32 \n" // load 8 pixels
+ "tbl v0.16b, {v0.16b}, v4.16b \n"
+ "tbl v2.16b, {v2.16b}, v4.16b \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "mov v1.16b, v0.16b \n"
+ "mov v3.16b, v2.16b \n"
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "st2 {v0.16b, v1.16b}, [%1], #32 \n" // store 4 pixels
+ "st2 {v2.16b, v3.16b}, [%1], #32 \n" // store 4 pixels
+ "b.gt 1b \n"
+ : "+r"(src_argb), // %0
+ "+r"(dst_ab64), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleARGBToABGR) // %3
+ : "cc", "memory", "v0", "v1", "v2", "v3", "v4");
+}
+
+static const uvec8 kShuffleAR64ToARGB = {1, 3, 5, 7, 9, 11, 13, 15,
+ 17, 19, 21, 23, 25, 27, 29, 31};
+
+void AR64ToARGBRow_NEON(const uint16_t* src_ar64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+ "ld1 {v4.16b}, %3 \n" // shuffler
+ "1: \n"
+ "ldp q0, q1, [%0], #32 \n" // load 4 pixels
+ "ldp q2, q3, [%0], #32 \n" // load 4 pixels
+ "tbl v0.16b, {v0.16b, v1.16b}, v4.16b \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "tbl v2.16b, {v2.16b, v3.16b}, v4.16b \n"
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "stp q0, q2, [%1], #32 \n" // store 8 pixels
+ "b.gt 1b \n"
+ : "+r"(src_ar64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleAR64ToARGB) // %3
+ : "cc", "memory", "v0", "v1", "v2", "v3", "v4");
+}
+
+static const uvec8 kShuffleAB64ToARGB = {5, 3, 1, 7, 13, 11, 9, 15,
+ 21, 19, 17, 23, 29, 27, 25, 31};
+
+void AB64ToARGBRow_NEON(const uint16_t* src_ab64,
+ uint8_t* dst_argb,
+ int width) {
+ asm volatile(
+ "ld1 {v4.16b}, %3 \n" // shuffler
+ "1: \n"
+ "ldp q0, q1, [%0], #32 \n" // load 4 pixels
+ "ldp q2, q3, [%0], #32 \n" // load 4 pixels
+ "tbl v0.16b, {v0.16b, v1.16b}, v4.16b \n"
+ "prfm pldl1keep, [%0, 448] \n"
+ "tbl v2.16b, {v2.16b, v3.16b}, v4.16b \n"
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "stp q0, q2, [%1], #32 \n" // store 8 pixels
+ "b.gt 1b \n"
+ : "+r"(src_ab64), // %0
+ "+r"(dst_argb), // %1
+ "+r"(width) // %2
+ : "m"(kShuffleAB64ToARGB) // %3
+ : "cc", "memory", "v0", "v1", "v2", "v3", "v4");
}
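The AR64/AB64 to ARGB conversions do the whole 16-to-8-bit narrowing with one tbl: kShuffleAR64ToARGB picks the high byte of each little-endian 16-bit channel (indices 1, 3, 5, ...), and kShuffleAB64ToARGB additionally swaps R and B. A scalar sketch of the AR64 case, with hypothetical names:

// Hypothetical scalar model of AR64ToARGBRow: each 16-bit channel is reduced
// to its high byte, i.e. a truncating 16-to-8-bit conversion.
static void Ar64ToArgbScalar(const unsigned short* src_ar64,
                             unsigned char* dst_argb, int width) {
  for (int i = 0; i < width * 4; ++i)
    dst_argb[i] = (unsigned char)(src_ar64[i] >> 8);
}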
void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
@@ -1573,9 +1786,9 @@ void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
"movi v7.8b, #16 \n" // Add 16 constant
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v3.8h, v0.8b, v4.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v3.8h, v1.8b, v5.8b \n" // G
"umlal v3.8h, v2.8b, v6.8b \n" // R
"uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y
@@ -1614,9 +1827,9 @@ void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
"movi v6.8b, #77 \n" // R * 0.2990 coefficient
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v3.8h, v0.8b, v4.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v3.8h, v1.8b, v5.8b \n" // G
"umlal v3.8h, v2.8b, v6.8b \n" // R
"uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y
@@ -1629,22 +1842,22 @@ void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
: "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6");
}
-void RGBAToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) {
+void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) {
asm volatile(
"movi v4.8b, #29 \n" // B * 0.1140 coefficient
"movi v5.8b, #150 \n" // G * 0.5870 coefficient
"movi v6.8b, #77 \n" // R * 0.2990 coefficient
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 RGBA
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v0.8h, v1.8b, v4.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v0.8h, v2.8b, v5.8b \n" // G
"umlal v0.8h, v3.8b, v6.8b \n" // R
"uqrshrn v3.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y
"st1 {v3.8b}, [%1], #8 \n" // store 8 pixels Y.
"b.gt 1b \n"
- : "+r"(src_argb), // %0
+ : "+r"(src_rgba), // %0
"+r"(dst_y), // %1
"+r"(width) // %2
:
@@ -1666,9 +1879,9 @@ void ARGBToUV444Row_NEON(const uint8_t* src_argb,
"movi v29.16b,#0x80 \n" // 128.5
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
"umull v4.8h, v0.8b, v24.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlsl v4.8h, v1.8b, v25.8b \n" // G
"umlsl v4.8h, v2.8b, v26.8b \n" // R
"add v4.8h, v4.8h, v29.8h \n" // +128 -> unsigned
@@ -1729,14 +1942,14 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb,
RGBTOUV_SETUP_REG
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts.
"uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts.
"ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts.
"uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts.
@@ -1775,13 +1988,13 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb,
"movi v25.16b, #0x80 \n" // 128.5 (0x8080 in 16-bit)
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts.
"uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts.
"ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts.
"uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts.
@@ -1815,13 +2028,13 @@ void BGRAToUVRow_NEON(const uint8_t* src_bgra,
RGBTOUV_SETUP_REG
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v0.8h, v3.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v3.8h, v2.16b \n" // G 16 bytes -> 8 shorts.
"uaddlp v2.8h, v1.16b \n" // R 16 bytes -> 8 shorts.
"ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v0.8h, v7.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v3.8h, v6.16b \n" // G 16 bytes -> 8 shorts.
"uadalp v2.8h, v5.16b \n" // R 16 bytes -> 8 shorts.
@@ -1855,13 +2068,13 @@ void ABGRToUVRow_NEON(const uint8_t* src_abgr,
RGBTOUV_SETUP_REG
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v3.8h, v2.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v2.8h, v1.16b \n" // G 16 bytes -> 8 shorts.
"uaddlp v1.8h, v0.16b \n" // R 16 bytes -> 8 shorts.
"ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more.
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v3.8h, v6.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v2.8h, v5.16b \n" // G 16 bytes -> 8 shorts.
"uadalp v1.8h, v4.16b \n" // R 16 bytes -> 8 shorts.
@@ -1895,13 +2108,13 @@ void RGBAToUVRow_NEON(const uint8_t* src_rgba,
RGBTOUV_SETUP_REG
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v0.8h, v1.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v1.8h, v2.16b \n" // G 16 bytes -> 8 shorts.
"uaddlp v2.8h, v3.16b \n" // R 16 bytes -> 8 shorts.
"ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more.
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v0.8h, v5.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v1.8h, v6.16b \n" // G 16 bytes -> 8 shorts.
"uadalp v2.8h, v7.16b \n" // R 16 bytes -> 8 shorts.
@@ -1935,13 +2148,13 @@ void RGB24ToUVRow_NEON(const uint8_t* src_rgb24,
RGBTOUV_SETUP_REG
"1: \n"
"ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts.
"uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts.
"ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 16 more.
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts.
"uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts.
@@ -1975,13 +2188,13 @@ void RAWToUVRow_NEON(const uint8_t* src_raw,
RGBTOUV_SETUP_REG
"1: \n"
"ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 8 RAW pixels.
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v2.8h, v2.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts.
"uaddlp v0.8h, v0.16b \n" // R 16 bytes -> 8 shorts.
"ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 8 more RAW pixels
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v2.8h, v6.16b \n" // B 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts.
"uadalp v0.8h, v4.16b \n" // R 16 bytes -> 8 shorts.
@@ -2016,9 +2229,9 @@ void RGB565ToUVRow_NEON(const uint8_t* src_rgb565,
RGBTOUV_SETUP_REG
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels.
- "prfm pldl1keep, [%0, 448] \n"
RGB565TOARGB
"uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts.
"uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%0], #16 \n" // next 8 RGB565 pixels.
@@ -2028,9 +2241,9 @@ void RGB565ToUVRow_NEON(const uint8_t* src_rgb565,
"uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%1], #16 \n" // load 8 RGB565 pixels.
- "prfm pldl1keep, [%1, 448] \n"
RGB565TOARGB
"uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts.
"uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%1], #16 \n" // next 8 RGB565 pixels.
@@ -2074,9 +2287,9 @@ void ARGB1555ToUVRow_NEON(const uint8_t* src_argb1555,
RGBTOUV_SETUP_REG
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels.
- "prfm pldl1keep, [%0, 448] \n"
RGB555TOARGB
"uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts.
"uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB1555 pixels.
@@ -2086,9 +2299,9 @@ void ARGB1555ToUVRow_NEON(const uint8_t* src_argb1555,
"uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB1555 pixels.
- "prfm pldl1keep, [%1, 448] \n"
RGB555TOARGB
"uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts.
"uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB1555 pixels.
@@ -2132,9 +2345,9 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444,
RGBTOUV_SETUP_REG // sets v20-v25
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels.
- "prfm pldl1keep, [%0, 448] \n"
ARGB4444TOARGB
"uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts.
"uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB4444 pixels.
@@ -2144,9 +2357,9 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444,
"uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB4444 pixels.
- "prfm pldl1keep, [%1, 448] \n"
ARGB4444TOARGB
"uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts.
"uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts.
"ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB4444 pixels.
@@ -2189,10 +2402,10 @@ void RGB565ToYRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_y, int width) {
"movi v27.8b, #16 \n" // Add 16 constant
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
RGB565TOARGB
"umull v3.8h, v0.8b, v24.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v3.8h, v1.8b, v25.8b \n" // G
"umlal v3.8h, v2.8b, v26.8b \n" // R
"uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2217,10 +2430,10 @@ void ARGB1555ToYRow_NEON(const uint8_t* src_argb1555,
"movi v7.8b, #16 \n" // Add 16 constant
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
ARGB1555TOARGB
"umull v3.8h, v0.8b, v4.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v3.8h, v1.8b, v5.8b \n" // G
"umlal v3.8h, v2.8b, v6.8b \n" // R
"uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2244,10 +2457,10 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444,
"movi v27.8b, #16 \n" // Add 16 constant
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
ARGB4444TOARGB
"umull v3.8h, v0.8b, v24.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v3.8h, v1.8b, v25.8b \n" // G
"umlal v3.8h, v2.8b, v26.8b \n" // R
"uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2269,9 +2482,9 @@ void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) {
"movi v7.8b, #16 \n" // Add 16 constant
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v16.8h, v1.8b, v4.8b \n" // R
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v16.8h, v2.8b, v5.8b \n" // G
"umlal v16.8h, v3.8b, v6.8b \n" // B
"uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2293,9 +2506,9 @@ void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) {
"movi v7.8b, #16 \n" // Add 16 constant
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v16.8h, v0.8b, v4.8b \n" // R
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v16.8h, v1.8b, v5.8b \n" // G
"umlal v16.8h, v2.8b, v6.8b \n" // B
"uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2317,9 +2530,9 @@ void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) {
"movi v7.8b, #16 \n" // Add 16 constant
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels.
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v16.8h, v1.8b, v4.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v16.8h, v2.8b, v5.8b \n" // G
"umlal v16.8h, v3.8b, v6.8b \n" // R
"uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2341,9 +2554,9 @@ void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) {
"movi v7.8b, #16 \n" // Add 16 constant
"1: \n"
"ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels.
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "umull v16.8h, v0.8b, v4.8b \n" // B
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
- "umull v16.8h, v0.8b, v4.8b \n" // B
"umlal v16.8h, v1.8b, v5.8b \n" // G
"umlal v16.8h, v2.8b, v6.8b \n" // R
"uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2365,9 +2578,9 @@ void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) {
"movi v7.8b, #16 \n" // Add 16 constant
"1: \n"
"ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels.
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "umull v16.8h, v0.8b, v4.8b \n" // B
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
- "umull v16.8h, v0.8b, v4.8b \n" // B
"umlal v16.8h, v1.8b, v5.8b \n" // G
"umlal v16.8h, v2.8b, v6.8b \n" // R
"uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2388,9 +2601,9 @@ void RGB24ToYJRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) {
"movi v6.8b, #77 \n" // R * 0.2990 coefficient
"1: \n"
"ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels.
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "umull v0.8h, v0.8b, v4.8b \n" // B
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
- "umull v0.8h, v0.8b, v4.8b \n" // B
"umlal v0.8h, v1.8b, v5.8b \n" // G
"umlal v0.8h, v2.8b, v6.8b \n" // R
"uqrshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2410,9 +2623,9 @@ void RAWToYJRow_NEON(const uint8_t* src_raw, uint8_t* dst_yj, int width) {
"movi v4.8b, #77 \n" // R * 0.2990 coefficient
"1: \n"
"ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels.
+ "subs %w2, %w2, #8 \n" // 8 processed per loop.
+ "umull v0.8h, v0.8b, v4.8b \n" // B
"prfm pldl1keep, [%0, 448] \n"
- "subs %w2, %w2, #8 \n" // 8 processed per loop.
- "umull v0.8h, v0.8b, v4.8b \n" // B
"umlal v0.8h, v1.8b, v5.8b \n" // G
"umlal v0.8h, v2.8b, v6.8b \n" // R
"uqrshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y
@@ -2446,11 +2659,11 @@ void InterpolateRow_NEON(uint8_t* dst_ptr,
"1: \n"
"ld1 {v0.16b}, [%1], #16 \n"
"ld1 {v1.16b}, [%2], #16 \n"
- "prfm pldl1keep, [%1, 448] \n"
- "prfm pldl1keep, [%2, 448] \n"
"subs %w3, %w3, #16 \n"
"umull v2.8h, v0.8b, v4.8b \n"
+ "prfm pldl1keep, [%1, 448] \n"
"umull2 v3.8h, v0.16b, v4.16b \n"
+ "prfm pldl1keep, [%2, 448] \n"
"umlal v2.8h, v1.8b, v5.8b \n"
"umlal2 v3.8h, v1.16b, v5.16b \n"
"rshrn v0.8b, v2.8h, #8 \n"
@@ -2463,10 +2676,10 @@ void InterpolateRow_NEON(uint8_t* dst_ptr,
"50: \n"
"ld1 {v0.16b}, [%1], #16 \n"
"ld1 {v1.16b}, [%2], #16 \n"
- "prfm pldl1keep, [%1, 448] \n"
- "prfm pldl1keep, [%2, 448] \n"
"subs %w3, %w3, #16 \n"
+ "prfm pldl1keep, [%1, 448] \n"
"urhadd v0.16b, v0.16b, v1.16b \n"
+ "prfm pldl1keep, [%2, 448] \n"
"st1 {v0.16b}, [%0], #16 \n"
"b.gt 50b \n"
"b 99f \n"
@@ -2474,8 +2687,8 @@ void InterpolateRow_NEON(uint8_t* dst_ptr,
// Blend 100 / 0 - Copy row unchanged.
"100: \n"
"ld1 {v0.16b}, [%1], #16 \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #16 \n"
+ "prfm pldl1keep, [%1, 448] \n"
"st1 {v0.16b}, [%0], #16 \n"
"b.gt 100b \n"
@@ -2491,7 +2704,7 @@ void InterpolateRow_NEON(uint8_t* dst_ptr,
}
// dr * (256 - sa) / 256 + sr = dr - dr * sa / 256 + sr
-void ARGBBlendRow_NEON(const uint8_t* src_argb0,
+void ARGBBlendRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2502,11 +2715,11 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0,
"8: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB0
"ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 ARGB1
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
"umull v16.8h, v4.8b, v3.8b \n" // db * a
+ "prfm pldl1keep, [%0, 448] \n"
"umull v17.8h, v5.8b, v3.8b \n" // dg * a
+ "prfm pldl1keep, [%1, 448] \n"
"umull v18.8h, v6.8b, v3.8b \n" // dr * a
"uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8
"uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8
@@ -2532,11 +2745,11 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0,
// ARGB0.
"ld4 {v4.b,v5.b,v6.b,v7.b}[0], [%1], #4 \n" // load 1 pixel
// ARGB1.
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #1 \n" // 1 processed per loop.
"umull v16.8h, v4.8b, v3.8b \n" // db * a
+ "prfm pldl1keep, [%0, 448] \n"
"umull v17.8h, v5.8b, v3.8b \n" // dg * a
+ "prfm pldl1keep, [%1, 448] \n"
"umull v18.8h, v6.8b, v3.8b \n" // dr * a
"uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8
"uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8
@@ -2553,7 +2766,7 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0,
"99: \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -2570,14 +2783,14 @@ void ARGBAttenuateRow_NEON(const uint8_t* src_argb,
// Attenuate 8 pixels.
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v4.8h, v0.8b, v3.8b \n" // b * a
- "umull v5.8h, v1.8b, v3.8b \n" // g * a
- "umull v6.8h, v2.8b, v3.8b \n" // r * a
- "uqrshrn v0.8b, v4.8h, #8 \n" // b >>= 8
- "uqrshrn v1.8b, v5.8h, #8 \n" // g >>= 8
- "uqrshrn v2.8b, v6.8h, #8 \n" // r >>= 8
+ "prfm pldl1keep, [%0, 448] \n"
+ "umull v5.8h, v1.8b, v3.8b \n" // g * a
+ "umull v6.8h, v2.8b, v3.8b \n" // r * a
+ "uqrshrn v0.8b, v4.8h, #8 \n" // b >>= 8
+ "uqrshrn v1.8b, v5.8h, #8 \n" // g >>= 8
+ "uqrshrn v2.8b, v6.8h, #8 \n" // r >>= 8
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB
"b.gt 1b \n"
: "+r"(src_argb), // %0
@@ -2603,9 +2816,9 @@ void ARGBQuantizeRow_NEON(uint8_t* dst_argb,
// 8 pixel loop.
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB.
+ "subs %w1, %w1, #8 \n" // 8 processed per loop.
+ "uxtl v0.8h, v0.8b \n" // b (0 .. 255)
"prfm pldl1keep, [%0, 448] \n"
- "subs %w1, %w1, #8 \n" // 8 processed per loop.
- "uxtl v0.8h, v0.8b \n" // b (0 .. 255)
"uxtl v1.8h, v1.8b \n"
"uxtl v2.8h, v2.8b \n"
"sqdmulh v0.8h, v0.8h, v4.8h \n" // b * scale
@@ -2645,9 +2858,9 @@ void ARGBShadeRow_NEON(const uint8_t* src_argb,
// 8 pixel loop.
"1: \n"
"ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%0], #32 \n" // load 8 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"uxtl v4.8h, v4.8b \n" // b (0 .. 255)
+ "prfm pldl1keep, [%0, 448] \n"
"uxtl v5.8h, v5.8b \n"
"uxtl v6.8h, v6.8b \n"
"uxtl v7.8h, v7.8b \n"
@@ -2678,9 +2891,9 @@ void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) {
"movi v26.8b, #77 \n" // R * 0.2990 coefficient
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"umull v4.8h, v0.8b, v24.8b \n" // B
+ "prfm pldl1keep, [%0, 448] \n"
"umlal v4.8h, v1.8b, v25.8b \n" // G
"umlal v4.8h, v2.8b, v26.8b \n" // R
"uqrshrn v0.8b, v4.8h, #8 \n" // 16 bit to 8 bit B
@@ -2713,9 +2926,9 @@ void ARGBSepiaRow_NEON(uint8_t* dst_argb, int width) {
"movi v30.8b, #50 \n" // BR coefficient
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB pixels.
+ "subs %w1, %w1, #8 \n" // 8 processed per loop.
+ "umull v4.8h, v0.8b, v20.8b \n" // B to Sepia B
"prfm pldl1keep, [%0, 448] \n"
- "subs %w1, %w1, #8 \n" // 8 processed per loop.
- "umull v4.8h, v0.8b, v20.8b \n" // B to Sepia B
"umlal v4.8h, v1.8b, v21.8b \n" // G
"umlal v4.8h, v2.8b, v22.8b \n" // R
"umull v5.8h, v0.8b, v24.8b \n" // B to Sepia G
@@ -2750,9 +2963,9 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb,
"1: \n"
"ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8 ARGB
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop.
"uxtl v16.8h, v16.8b \n" // b (0 .. 255) 16 bit
+ "prfm pldl1keep, [%0, 448] \n"
"uxtl v17.8h, v17.8b \n" // g
"uxtl v18.8h, v18.8b \n" // r
"uxtl v19.8h, v19.8b \n" // a
@@ -2800,7 +3013,7 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb,
// TODO(fbarchard): fix vqshrun in ARGBMultiplyRow_NEON and reenable.
// Multiply 2 rows of ARGB pixels together, 8 pixels at a time.
-void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
+void ARGBMultiplyRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2809,11 +3022,11 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
"ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
"umull v0.8h, v0.8b, v4.8b \n" // multiply B
+ "prfm pldl1keep, [%0, 448] \n"
"umull v1.8h, v1.8b, v5.8b \n" // multiply G
+ "prfm pldl1keep, [%1, 448] \n"
"umull v2.8h, v2.8b, v6.8b \n" // multiply R
"umull v3.8h, v3.8b, v7.8b \n" // multiply A
"rshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit B
@@ -2822,7 +3035,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
"rshrn v3.8b, v3.8h, #8 \n" // 16 bit to 8 bit A
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB
"b.gt 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -2831,7 +3044,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0,
}
// Add 2 rows of ARGB pixels together, 8 pixels at a time.
-void ARGBAddRow_NEON(const uint8_t* src_argb0,
+void ARGBAddRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2840,16 +3053,16 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0,
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
"ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
"uqadd v0.8b, v0.8b, v4.8b \n"
+ "prfm pldl1keep, [%0, 448] \n"
"uqadd v1.8b, v1.8b, v5.8b \n"
+ "prfm pldl1keep, [%1, 448] \n"
"uqadd v2.8b, v2.8b, v6.8b \n"
"uqadd v3.8b, v3.8b, v7.8b \n"
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB
"b.gt 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -2858,7 +3071,7 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0,
}
// Subtract 2 rows of ARGB pixels, 8 pixels at a time.
-void ARGBSubtractRow_NEON(const uint8_t* src_argb0,
+void ARGBSubtractRow_NEON(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
@@ -2867,16 +3080,16 @@ void ARGBSubtractRow_NEON(const uint8_t* src_argb0,
"1: \n"
"ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB
"ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
"uqsub v0.8b, v0.8b, v4.8b \n"
+ "prfm pldl1keep, [%0, 448] \n"
"uqsub v1.8b, v1.8b, v5.8b \n"
+ "prfm pldl1keep, [%1, 448] \n"
"uqsub v2.8b, v2.8b, v6.8b \n"
"uqsub v3.8b, v3.8b, v7.8b \n"
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB
"b.gt 1b \n"
- : "+r"(src_argb0), // %0
+ : "+r"(src_argb), // %0
"+r"(src_argb1), // %1
"+r"(dst_argb), // %2
"+r"(width) // %3
@@ -2899,11 +3112,11 @@ void SobelRow_NEON(const uint8_t* src_sobelx,
"1: \n"
"ld1 {v0.8b}, [%0], #8 \n" // load 8 sobelx.
"ld1 {v1.8b}, [%1], #8 \n" // load 8 sobely.
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
"uqadd v0.8b, v0.8b, v1.8b \n" // add
+ "prfm pldl1keep, [%0, 448] \n"
"orr v1.8b, v0.8b, v0.8b \n"
+ "prfm pldl1keep, [%1, 448] \n"
"orr v2.8b, v0.8b, v0.8b \n"
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB
"b.gt 1b \n"
@@ -2925,10 +3138,10 @@ void SobelToPlaneRow_NEON(const uint8_t* src_sobelx,
"1: \n"
"ld1 {v0.16b}, [%0], #16 \n" // load 16 sobelx.
"ld1 {v1.16b}, [%1], #16 \n" // load 16 sobely.
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #16 \n" // 16 processed per loop.
+ "prfm pldl1keep, [%0, 448] \n"
"uqadd v0.16b, v0.16b, v1.16b \n" // add
+ "prfm pldl1keep, [%1, 448] \n"
"st1 {v0.16b}, [%2], #16 \n" // store 16 pixels.
"b.gt 1b \n"
: "+r"(src_sobelx), // %0
@@ -2954,10 +3167,10 @@ void SobelXYRow_NEON(const uint8_t* src_sobelx,
"1: \n"
"ld1 {v2.8b}, [%0], #8 \n" // load 8 sobelx.
"ld1 {v0.8b}, [%1], #8 \n" // load 8 sobely.
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #8 \n" // 8 processed per loop.
+ "prfm pldl1keep, [%0, 448] \n"
"uqadd v1.8b, v0.8b, v2.8b \n" // add
+ "prfm pldl1keep, [%1, 448] \n"
"st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB
"b.gt 1b \n"
: "+r"(src_sobelx), // %0
@@ -2981,18 +3194,18 @@ void SobelXRow_NEON(const uint8_t* src_y0,
"1: \n"
"ld1 {v0.8b}, [%0],%5 \n" // top
"ld1 {v1.8b}, [%0],%6 \n"
- "prfm pldl1keep, [%0, 448] \n"
"usubl v0.8h, v0.8b, v1.8b \n"
+ "prfm pldl1keep, [%0, 448] \n"
"ld1 {v2.8b}, [%1],%5 \n" // center * 2
"ld1 {v3.8b}, [%1],%6 \n"
- "prfm pldl1keep, [%1, 448] \n"
"usubl v1.8h, v2.8b, v3.8b \n"
+ "prfm pldl1keep, [%1, 448] \n"
"add v0.8h, v0.8h, v1.8h \n"
"add v0.8h, v0.8h, v1.8h \n"
"ld1 {v2.8b}, [%2],%5 \n" // bottom
"ld1 {v3.8b}, [%2],%6 \n"
- "prfm pldl1keep, [%2, 448] \n"
"subs %w4, %w4, #8 \n" // 8 pixels
+ "prfm pldl1keep, [%2, 448] \n"
"usubl v1.8h, v2.8b, v3.8b \n"
"add v0.8h, v0.8h, v1.8h \n"
"abs v0.8h, v0.8h \n"
@@ -3030,11 +3243,11 @@ void SobelYRow_NEON(const uint8_t* src_y0,
"add v0.8h, v0.8h, v1.8h \n"
"ld1 {v2.8b}, [%0],%5 \n" // right
"ld1 {v3.8b}, [%1],%5 \n"
- "prfm pldl1keep, [%0, 448] \n"
- "prfm pldl1keep, [%1, 448] \n"
"subs %w3, %w3, #8 \n" // 8 pixels
"usubl v1.8h, v2.8b, v3.8b \n"
+ "prfm pldl1keep, [%0, 448] \n"
"add v0.8h, v0.8h, v1.8h \n"
+ "prfm pldl1keep, [%1, 448] \n"
"abs v0.8h, v0.8h \n"
"uqxtn v0.8b, v0.8h \n"
"st1 {v0.8b}, [%2], #8 \n" // store 8 sobely
@@ -3057,9 +3270,9 @@ void HalfFloat1Row_NEON(const uint16_t* src,
asm volatile(
"1: \n"
"ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 pixels per loop
"uxtl v2.4s, v1.4h \n" // 8 int's
+ "prfm pldl1keep, [%0, 448] \n"
"uxtl2 v3.4s, v1.8h \n"
"scvtf v2.4s, v2.4s \n" // 8 floats
"scvtf v3.4s, v3.4s \n"
@@ -3081,9 +3294,9 @@ void HalfFloatRow_NEON(const uint16_t* src,
asm volatile(
"1: \n"
"ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 pixels per loop
"uxtl v2.4s, v1.4h \n" // 8 int's
+ "prfm pldl1keep, [%0, 448] \n"
"uxtl2 v3.4s, v1.8h \n"
"scvtf v2.4s, v2.4s \n" // 8 floats
"scvtf v3.4s, v3.4s \n"
@@ -3107,9 +3320,9 @@ void ByteToFloatRow_NEON(const uint8_t* src,
asm volatile(
"1: \n"
"ld1 {v1.8b}, [%0], #8 \n" // load 8 bytes
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 pixels per loop
"uxtl v1.8h, v1.8b \n" // 8 shorts
+ "prfm pldl1keep, [%0, 448] \n"
"uxtl v2.4s, v1.4h \n" // 8 ints
"uxtl2 v3.4s, v1.8h \n"
"scvtf v2.4s, v2.4s \n" // 8 floats
@@ -3136,9 +3349,9 @@ float ScaleMaxSamples_NEON(const float* src,
"1: \n"
"ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop
"fmul v3.4s, v1.4s, %4.s[0] \n" // scale
+ "prfm pldl1keep, [%0, 448] \n"
"fmul v4.4s, v2.4s, %4.s[0] \n" // scale
"fmax v5.4s, v5.4s, v1.4s \n" // max
"fmax v6.4s, v6.4s, v2.4s \n"
@@ -3166,9 +3379,9 @@ float ScaleSumSamples_NEON(const float* src,
"1: \n"
"ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #8 \n" // 8 processed per loop
"fmul v3.4s, v1.4s, %4.s[0] \n" // scale
+ "prfm pldl1keep, [%0, 448] \n"
"fmul v4.4s, v2.4s, %4.s[0] \n"
"fmla v5.4s, v1.4s, v1.4s \n" // sum of squares
"fmla v6.4s, v2.4s, v2.4s \n"
@@ -3376,10 +3589,10 @@ void NV21ToYUV24Row_NEON(const uint8_t* src_y,
"1: \n"
"ld1 {v2.16b}, [%0], #16 \n" // load 16 Y values
"ld2 {v0.8b, v1.8b}, [%1], #16 \n" // load 8 VU values
+ "zip1 v0.16b, v0.16b, v0.16b \n" // replicate V values
"prfm pldl1keep, [%0, 448] \n"
+ "zip1 v1.16b, v1.16b, v1.16b \n" // replicate U values
"prfm pldl1keep, [%1, 448] \n"
- "zip1 v0.16b, v0.16b, v0.16b \n" // replicate V values
- "zip1 v1.16b, v1.16b, v1.16b \n" // replicate U values
"subs %w3, %w3, #16 \n" // 16 pixels per loop
"st3 {v0.16b,v1.16b,v2.16b}, [%2], #48 \n" // store 16 YUV pixels
"b.gt 1b \n"
@@ -3391,6 +3604,7 @@ void NV21ToYUV24Row_NEON(const uint8_t* src_y,
: "cc", "memory", "v0", "v1", "v2");
}
+// AYUV is YVUA in memory. UV for NV12 is UV order in memory.
void AYUVToUVRow_NEON(const uint8_t* src_ayuv,
int src_stride_ayuv,
uint8_t* dst_uv,
@@ -3400,12 +3614,12 @@ void AYUVToUVRow_NEON(const uint8_t* src_ayuv,
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ayuv
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts.
"ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts.
"uqrshrn v3.8b, v0.8h, #2 \n" // 2x2 average
"uqrshrn v2.8b, v1.8h, #2 \n"
@@ -3429,12 +3643,12 @@ void AYUVToVURow_NEON(const uint8_t* src_ayuv,
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ayuv
- "prfm pldl1keep, [%0, 448] \n"
"uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%0, 448] \n"
"uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts.
"ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16
- "prfm pldl1keep, [%1, 448] \n"
"uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts.
+ "prfm pldl1keep, [%1, 448] \n"
"uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts.
"uqrshrn v0.8b, v0.8h, #2 \n" // 2x2 average
"uqrshrn v1.8b, v1.8h, #2 \n"
@@ -3454,8 +3668,8 @@ void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width) {
asm volatile(
"1: \n"
"ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #16 \n" // 16 pixels per loop
+ "prfm pldl1keep, [%0, 448] \n"
"st1 {v2.16b}, [%1], #16 \n" // store 16 Y pixels
"b.gt 1b \n"
: "+r"(src_ayuv), // %0
@@ -3476,9 +3690,9 @@ void SwapUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_vu, int width) {
"1: \n"
"ld1 {v0.16b}, [%0], 16 \n" // load 16 UV values
"ld1 {v1.16b}, [%0], 16 \n"
- "prfm pldl1keep, [%0, 448] \n"
"subs %w2, %w2, #16 \n" // 16 pixels per loop
"tbl v0.16b, {v0.16b}, v2.16b \n"
+ "prfm pldl1keep, [%0, 448] \n"
"tbl v1.16b, {v1.16b}, v2.16b \n"
"stp q0, q1, [%1], 32 \n" // store 16 VU pixels
"b.gt 1b \n"
@@ -3531,34 +3745,24 @@ void SplitUVRow_16_NEON(const uint16_t* src_uv,
uint16_t* dst_v,
int depth,
int width) {
+ int shift = depth - 16; // Negative for right shift.
asm volatile(
- "dup v0.4s, %w3 \n"
+ "dup v2.8h, %w4 \n"
"1: \n"
- "ld2 {v1.8h, v2.8h}, [%0], #32 \n" // load 8 UV
+ "ld2 {v0.8h, v1.8h}, [%0], #32 \n" // load 8 UV
+ "subs %w3, %w3, #8 \n" // 8 src pixels per loop
+ "ushl v0.8h, v0.8h, v2.8h \n"
"prfm pldl1keep, [%0, 448] \n"
- "ushll v3.4s, v1.4h, #0 \n"
- "ushll2 v4.4s, v1.8h, #0 \n"
- "ushl v3.4s, v3.4s, v0.4s \n"
- "ushl v4.4s, v4.4s, v0.4s \n"
- "xtn v1.4h, v3.4s \n"
- "xtn2 v1.8h, v4.4s \n"
- "ushll v3.4s, v2.4h, #0 \n"
- "ushll2 v4.4s, v2.8h, #0 \n"
- "ushl v3.4s, v3.4s, v0.4s \n"
- "ushl v4.4s, v4.4s, v0.4s \n"
- "xtn v2.4h, v3.4s \n"
- "xtn2 v2.8h, v4.4s \n"
- "subs %w4, %w4, #8 \n" // 8 src pixels per loop
- "st1 {v1.8h}, [%1], #16 \n" // store 8 U pixels
- "st1 {v2.8h}, [%2], #16 \n" // store 8 V pixels
+ "ushl v1.8h, v1.8h, v2.8h \n"
+ "st1 {v0.8h}, [%1], #16 \n" // store 8 U pixels
+ "st1 {v1.8h}, [%2], #16 \n" // store 8 V pixels
"b.gt 1b \n"
: "+r"(src_uv), // %0
"+r"(dst_u), // %1
"+r"(dst_v), // %2
- "+r"(depth), // %3
- "+r"(width) // %4
- :
- : "cc", "memory", "v0", "v1", "v2", "v3", "v4");
+ "+r"(width) // %3
+ : "r"(shift) // %4
+ : "cc", "memory", "v0", "v1", "v2");
}
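
The rewritten SplitUVRow_16_NEON now takes the shift as an input operand (shift = depth - 16, negative) and relies on AArch64 USHL semantics: a negative per-element count shifts right, so each 16-bit sample is moved down to depth bits while U and V are deinterleaved, without the 32-bit widening the old code used. A scalar sketch of the equivalent operation, offered as an illustration rather than the shipped C path:

#include <stdint.h>

void SplitUVRow_16_scalar(const uint16_t* src_uv, uint16_t* dst_u,
                          uint16_t* dst_v, int depth, int width) {
  int shift = 16 - depth; /* the NEON code passes depth - 16 and lets ushl negate */
  for (int i = 0; i < width; ++i) {
    dst_u[i] = (uint16_t)(src_uv[2 * i + 0] >> shift);
    dst_v[i] = (uint16_t)(src_uv[2 * i + 1] >> shift);
  }
}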
void MergeUVRow_16_NEON(const uint16_t* src_u,
@@ -3568,23 +3772,22 @@ void MergeUVRow_16_NEON(const uint16_t* src_u,
int width) {
int shift = 16 - depth;
asm volatile(
- "dup v2.8h, %w3 \n"
+ "dup v2.8h, %w4 \n"
"1: \n"
"ld1 {v0.8h}, [%0], #16 \n" // load 8 U
- "prfm pldl1keep, [%0, 448] \n"
+ "subs %w3, %w3, #8 \n" // 8 src pixels per loop
"ld1 {v1.8h}, [%1], #16 \n" // load 8 V
- "prfm pldl1keep, [%1, 448] \n"
"ushl v0.8h, v0.8h, v2.8h \n"
+ "prfm pldl1keep, [%0, 448] \n"
"ushl v1.8h, v1.8h, v2.8h \n"
- "subs %w4, %w4, #8 \n" // 8 src pixels per loop
+ "prfm pldl1keep, [%1, 448] \n"
"st2 {v0.8h, v1.8h}, [%2], #32 \n" // store 8 UV pixels
"b.gt 1b \n"
: "+r"(src_u), // %0
"+r"(src_v), // %1
"+r"(dst_uv), // %2
- "+r"(shift), // %3
- "+r"(width) // %4
- :
+ "+r"(width) // %3
+ : "r"(shift) // %4
: "cc", "memory", "v0", "v1", "v2");
}
@@ -3595,10 +3798,9 @@ void MultiplyRow_16_NEON(const uint16_t* src_y,
asm volatile(
"dup v2.8h, %w2 \n"
"1: \n"
- "ldp q0, q1, [%0] \n"
- "add %0, %0, #32 \n"
- "prfm pldl1keep, [%0, 448] \n"
+ "ldp q0, q1, [%0], #32 \n"
"mul v0.8h, v0.8h, v2.8h \n"
+ "prfm pldl1keep, [%0, 448] \n"
"mul v1.8h, v1.8h, v2.8h \n"
"stp q0, q1, [%1] \n" // store 16 pixels
"add %1, %1, #32 \n"
@@ -3619,11 +3821,10 @@ void DivideRow_16_NEON(const uint16_t* src_y,
asm volatile(
"dup v0.8h, %w2 \n"
"1: \n"
- "ldp q1, q2, [%0] \n"
- "add %0, %0, #32 \n"
- "prfm pldl1keep, [%0, 448] \n"
+ "ldp q1, q2, [%0], #32 \n"
"ushll v3.4s, v1.4h, #0 \n"
"ushll v4.4s, v2.4h, #0 \n"
+ "prfm pldl1keep, [%0, 448] \n"
"ushll2 v1.4s, v1.8h, #0 \n"
"ushll2 v2.4s, v2.8h, #0 \n"
"mul v3.4s, v0.4s, v3.4s \n"
diff --git a/third_party/libyuv/source/row_win.cc b/third_party/libyuv/source/row_win.cc
index 951518926f..5203b57c69 100644
--- a/third_party/libyuv/source/row_win.cc
+++ b/third_party/libyuv/source/row_win.cc
@@ -10,9 +10,9 @@
#include "libyuv/row.h"
-// This module is for Visual C 32/64 bit and clangcl 32 bit
+// This module is for Visual C 32/64 bit
#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \
- (defined(_M_IX86) || (defined(_M_X64) && !defined(__clang__)))
+ !defined(__clang__) && (defined(_M_IX86) || defined(_M_X64))
#if defined(_M_X64)
#include <emmintrin.h>
@@ -29,9 +29,9 @@ extern "C" {
// Read 8 UV from 444
#define READYUV444 \
- xmm0 = _mm_loadl_epi64((__m128i*)u_buf); \
+ xmm3 = _mm_loadl_epi64((__m128i*)u_buf); \
xmm1 = _mm_loadl_epi64((__m128i*)(u_buf + offset)); \
- xmm0 = _mm_unpacklo_epi8(xmm0, xmm1); \
+ xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \
u_buf += 8; \
xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \
xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \
@@ -39,9 +39,9 @@ extern "C" {
// Read 8 UV from 444, With 8 Alpha.
#define READYUVA444 \
- xmm0 = _mm_loadl_epi64((__m128i*)u_buf); \
+ xmm3 = _mm_loadl_epi64((__m128i*)u_buf); \
xmm1 = _mm_loadl_epi64((__m128i*)(u_buf + offset)); \
- xmm0 = _mm_unpacklo_epi8(xmm0, xmm1); \
+ xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \
u_buf += 8; \
xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \
xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \
@@ -51,10 +51,10 @@ extern "C" {
// Read 4 UV from 422, upsample to 8 UV.
#define READYUV422 \
- xmm0 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \
+ xmm3 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \
xmm1 = _mm_cvtsi32_si128(*(uint32_t*)(u_buf + offset)); \
- xmm0 = _mm_unpacklo_epi8(xmm0, xmm1); \
- xmm0 = _mm_unpacklo_epi16(xmm0, xmm0); \
+ xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \
+ xmm3 = _mm_unpacklo_epi16(xmm3, xmm3); \
u_buf += 4; \
xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \
xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \
@@ -62,10 +62,10 @@ extern "C" {
// Read 4 UV from 422, upsample to 8 UV. With 8 Alpha.
#define READYUVA422 \
- xmm0 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \
+ xmm3 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \
xmm1 = _mm_cvtsi32_si128(*(uint32_t*)(u_buf + offset)); \
- xmm0 = _mm_unpacklo_epi8(xmm0, xmm1); \
- xmm0 = _mm_unpacklo_epi16(xmm0, xmm0); \
+ xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \
+ xmm3 = _mm_unpacklo_epi16(xmm3, xmm3); \
u_buf += 4; \
xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \
xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \
@@ -74,24 +74,21 @@ extern "C" {
a_buf += 8;
// Convert 8 pixels: 8 UV and 8 Y.
-#define YUVTORGB(yuvconstants) \
- xmm1 = _mm_loadu_si128(&xmm0); \
- xmm2 = _mm_loadu_si128(&xmm0); \
- xmm0 = _mm_maddubs_epi16(xmm0, *(__m128i*)yuvconstants->kUVToB); \
- xmm1 = _mm_maddubs_epi16(xmm1, *(__m128i*)yuvconstants->kUVToG); \
- xmm2 = _mm_maddubs_epi16(xmm2, *(__m128i*)yuvconstants->kUVToR); \
- xmm0 = _mm_sub_epi16(*(__m128i*)yuvconstants->kUVBiasB, xmm0); \
- xmm1 = _mm_sub_epi16(*(__m128i*)yuvconstants->kUVBiasG, xmm1); \
- xmm2 = _mm_sub_epi16(*(__m128i*)yuvconstants->kUVBiasR, xmm2); \
- xmm4 = _mm_mulhi_epu16(xmm4, *(__m128i*)yuvconstants->kYToRgb); \
- xmm0 = _mm_adds_epi16(xmm0, xmm4); \
- xmm1 = _mm_adds_epi16(xmm1, xmm4); \
- xmm2 = _mm_adds_epi16(xmm2, xmm4); \
- xmm0 = _mm_srai_epi16(xmm0, 6); \
- xmm1 = _mm_srai_epi16(xmm1, 6); \
- xmm2 = _mm_srai_epi16(xmm2, 6); \
- xmm0 = _mm_packus_epi16(xmm0, xmm0); \
- xmm1 = _mm_packus_epi16(xmm1, xmm1); \
+#define YUVTORGB(yuvconstants) \
+ xmm3 = _mm_sub_epi8(xmm3, _mm_set1_epi8(0x80)); \
+ xmm4 = _mm_mulhi_epu16(xmm4, *(__m128i*)yuvconstants->kYToRgb); \
+ xmm4 = _mm_add_epi16(xmm4, *(__m128i*)yuvconstants->kYBiasToRgb); \
+ xmm0 = _mm_maddubs_epi16(*(__m128i*)yuvconstants->kUVToB, xmm3); \
+ xmm1 = _mm_maddubs_epi16(*(__m128i*)yuvconstants->kUVToG, xmm3); \
+ xmm2 = _mm_maddubs_epi16(*(__m128i*)yuvconstants->kUVToR, xmm3); \
+ xmm0 = _mm_adds_epi16(xmm4, xmm0); \
+ xmm1 = _mm_subs_epi16(xmm4, xmm1); \
+ xmm2 = _mm_adds_epi16(xmm4, xmm2); \
+ xmm0 = _mm_srai_epi16(xmm0, 6); \
+ xmm1 = _mm_srai_epi16(xmm1, 6); \
+ xmm2 = _mm_srai_epi16(xmm2, 6); \
+ xmm0 = _mm_packus_epi16(xmm0, xmm0); \
+ xmm1 = _mm_packus_epi16(xmm1, xmm1); \
xmm2 = _mm_packus_epi16(xmm2, xmm2);
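
The reworked YUVTORGB above (and the __asm versions further down) replaces the per-channel "bias minus UV product" scheme with a single signed form: UV is re-centered by subtracting 0x80 (kBiasUV128), Y is scaled by kYToRgb and gets one bias (kYBiasToRgb), and the UV contributions are then added to B and R and subtracted from G before the >>6 and clamp. A scalar sketch of that arithmetic; the coefficient parameters stand in for the kUVToB/kUVToG/kUVToR tables, and their concrete values depend on the selected colorspace:

#include <stdint.h>

static uint8_t Clamp6(int32_t v) {
  v >>= 6;
  return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

/* ub/vb, ug/vg, ur/vr: per-channel U and V weights (magnitudes as applied by
   pmaddubsw); y_coeff and y_bias: the kYToRgb and kYBiasToRgb scalars. */
void YuvToRgbPixelSketch(uint8_t y, uint8_t u, uint8_t v,
                         int ub, int vb, int ug, int vg, int ur, int vr,
                         int y_coeff, int y_bias,
                         uint8_t* b, uint8_t* g, uint8_t* r) {
  int su = (int)u - 128;  /* psubb with kBiasUV128 */
  int sv = (int)v - 128;
  int y1 = (int)(((uint32_t)y * 0x0101u * (uint32_t)y_coeff) >> 16) + y_bias;
  *b = Clamp6(y1 + ub * su + vb * sv);    /* paddsw */
  *g = Clamp6(y1 - (ug * su + vg * sv));  /* psubsw */
  *r = Clamp6(y1 + ur * su + vr * sv);
}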
// Store 8 ARGB values.
@@ -112,7 +109,7 @@ void I422ToARGBRow_SSSE3(const uint8_t* y_buf,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- __m128i xmm0, xmm1, xmm2, xmm4;
+ __m128i xmm0, xmm1, xmm2, xmm3, xmm4;
const __m128i xmm5 = _mm_set1_epi8(-1);
const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf;
while (width > 0) {
@@ -132,7 +129,7 @@ void I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- __m128i xmm0, xmm1, xmm2, xmm4, xmm5;
+ __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5;
const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf;
while (width > 0) {
READYUVA422
@@ -150,7 +147,7 @@ void I444ToARGBRow_SSSE3(const uint8_t* y_buf,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- __m128i xmm0, xmm1, xmm2, xmm4;
+ __m128i xmm0, xmm1, xmm2, xmm3, xmm4;
const __m128i xmm5 = _mm_set1_epi8(-1);
const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf;
while (width > 0) {
@@ -170,7 +167,7 @@ void I444AlphaToARGBRow_SSSE3(const uint8_t* y_buf,
uint8_t* dst_argb,
const struct YuvConstants* yuvconstants,
int width) {
- __m128i xmm0, xmm1, xmm2, xmm4, xmm5;
+ __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5;
const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf;
while (width > 0) {
READYUVA444
@@ -247,11 +244,11 @@ static const uvec8 kAddY16 = {16u, 16u, 16u, 16u, 16u, 16u, 16u, 16u,
// 7 bit fixed point 0.5.
static const vec16 kAddYJ64 = {64, 64, 64, 64, 64, 64, 64, 64};
-static const uvec8 kAddUV128 = {128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u,
- 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u};
-
-static const uvec16 kAddUVJ128 = {0x8080u, 0x8080u, 0x8080u, 0x8080u,
- 0x8080u, 0x8080u, 0x8080u, 0x8080u};
+// 8 bit fixed point 0.5, for bias of UV.
+static const ulvec8 kBiasUV128 = {
+ 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+ 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+ 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80};
// Shuffle table for converting RGB24 to ARGB.
static const uvec8 kShuffleMaskRGB24ToARGB = {
@@ -1427,7 +1424,7 @@ __declspec(naked) void RGBAToYRow_SSSE3(const uint8_t* src_argb,
}
}
-__declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb0,
+__declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1440,7 +1437,7 @@ __declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb0,
mov edx, [esp + 8 + 12] // dst_u
mov edi, [esp + 8 + 16] // dst_v
mov ecx, [esp + 8 + 20] // width
- movdqa xmm5, xmmword ptr kAddUV128
+ movdqa xmm5, xmmword ptr kBiasUV128
movdqa xmm6, xmmword ptr kARGBToV
movdqa xmm7, xmmword ptr kARGBToU
sub edi, edx // stride from u to v
@@ -1499,7 +1496,7 @@ __declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb0,
}
}
-__declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0,
+__declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1512,7 +1509,7 @@ __declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0,
mov edx, [esp + 8 + 12] // dst_u
mov edi, [esp + 8 + 16] // dst_v
mov ecx, [esp + 8 + 20] // width
- movdqa xmm5, xmmword ptr kAddUVJ128
+ movdqa xmm5, xmmword ptr kBiasUV128
movdqa xmm6, xmmword ptr kARGBToVJ
movdqa xmm7, xmmword ptr kARGBToUJ
sub edi, edx // stride from u to v
@@ -1573,7 +1570,7 @@ __declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0,
}
#ifdef HAS_ARGBTOUVROW_AVX2
-__declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1586,7 +1583,7 @@ __declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb0,
mov edx, [esp + 8 + 12] // dst_u
mov edi, [esp + 8 + 16] // dst_v
mov ecx, [esp + 8 + 20] // width
- vbroadcastf128 ymm5, xmmword ptr kAddUV128
+ vbroadcastf128 ymm5, xmmword ptr kBiasUV128
vbroadcastf128 ymm6, xmmword ptr kARGBToV
vbroadcastf128 ymm7, xmmword ptr kARGBToU
sub edi, edx // stride from u to v
@@ -1641,7 +1638,7 @@ __declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb0,
#endif // HAS_ARGBTOUVROW_AVX2
#ifdef HAS_ARGBTOUVJROW_AVX2
-__declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1654,7 +1651,7 @@ __declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb0,
mov edx, [esp + 8 + 12] // dst_u
mov edi, [esp + 8 + 16] // dst_v
mov ecx, [esp + 8 + 20] // width
- vbroadcastf128 ymm5, xmmword ptr kAddUVJ128
+ vbroadcastf128 ymm5, xmmword ptr kBiasUV128
vbroadcastf128 ymm6, xmmword ptr kARGBToVJ
vbroadcastf128 ymm7, xmmword ptr kARGBToUJ
sub edi, edx // stride from u to v
@@ -1709,7 +1706,7 @@ __declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb0,
}
#endif // HAS_ARGBTOUVJROW_AVX2
-__declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb0,
+__declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb,
uint8_t* dst_u,
uint8_t* dst_v,
int width) {
@@ -1719,7 +1716,7 @@ __declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb0,
mov edx, [esp + 4 + 8] // dst_u
mov edi, [esp + 4 + 12] // dst_v
mov ecx, [esp + 4 + 16] // width
- movdqa xmm5, xmmword ptr kAddUV128
+ movdqa xmm5, xmmword ptr kBiasUV128
movdqa xmm6, xmmword ptr kARGBToV
movdqa xmm7, xmmword ptr kARGBToU
sub edi, edx // stride from u to v
@@ -1767,7 +1764,7 @@ __declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb0,
}
}
-__declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb0,
+__declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1780,7 +1777,7 @@ __declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb0,
mov edx, [esp + 8 + 12] // dst_u
mov edi, [esp + 8 + 16] // dst_v
mov ecx, [esp + 8 + 20] // width
- movdqa xmm5, xmmword ptr kAddUV128
+ movdqa xmm5, xmmword ptr kBiasUV128
movdqa xmm6, xmmword ptr kBGRAToV
movdqa xmm7, xmmword ptr kBGRAToU
sub edi, edx // stride from u to v
@@ -1839,7 +1836,7 @@ __declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb0,
}
}
-__declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb0,
+__declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1852,7 +1849,7 @@ __declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb0,
mov edx, [esp + 8 + 12] // dst_u
mov edi, [esp + 8 + 16] // dst_v
mov ecx, [esp + 8 + 20] // width
- movdqa xmm5, xmmword ptr kAddUV128
+ movdqa xmm5, xmmword ptr kBiasUV128
movdqa xmm6, xmmword ptr kABGRToV
movdqa xmm7, xmmword ptr kABGRToU
sub edi, edx // stride from u to v
@@ -1911,7 +1908,7 @@ __declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb0,
}
}
-__declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0,
+__declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb,
int src_stride_argb,
uint8_t* dst_u,
uint8_t* dst_v,
@@ -1924,7 +1921,7 @@ __declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0,
mov edx, [esp + 8 + 12] // dst_u
mov edi, [esp + 8 + 16] // dst_v
mov ecx, [esp + 8 + 20] // width
- movdqa xmm5, xmmword ptr kAddUV128
+ movdqa xmm5, xmmword ptr kBiasUV128
movdqa xmm6, xmmword ptr kRGBAToV
movdqa xmm7, xmmword ptr kRGBAToU
sub edi, edx // stride from u to v
@@ -1986,14 +1983,14 @@ __declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0,
// Read 16 UV from 444
#define READYUV444_AVX2 \
- __asm { \
- __asm vmovdqu xmm0, [esi] /* U */ \
- __asm vmovdqu xmm1, [esi + edi] /* V */ \
+ __asm { \
+ __asm vmovdqu xmm3, [esi] /* U */ \
+ __asm vmovdqu xmm1, [esi + edi] /* V */ \
__asm lea esi, [esi + 16] \
- __asm vpermq ymm0, ymm0, 0xd8 \
+ __asm vpermq ymm3, ymm3, 0xd8 \
__asm vpermq ymm1, ymm1, 0xd8 \
- __asm vpunpcklbw ymm0, ymm0, ymm1 /* UV */ \
- __asm vmovdqu xmm4, [eax] /* Y */ \
+ __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \
+ __asm vmovdqu xmm4, [eax] /* Y */ \
__asm vpermq ymm4, ymm4, 0xd8 \
__asm vpunpcklbw ymm4, ymm4, ymm4 \
__asm lea eax, [eax + 16]}
@@ -2001,12 +1998,12 @@ __declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0,
// Read 16 UV from 444. With 16 Alpha.
#define READYUVA444_AVX2 \
__asm { \
- __asm vmovdqu xmm0, [esi] /* U */ \
+ __asm vmovdqu xmm3, [esi] /* U */ \
__asm vmovdqu xmm1, [esi + edi] /* V */ \
__asm lea esi, [esi + 16] \
- __asm vpermq ymm0, ymm0, 0xd8 \
+ __asm vpermq ymm3, ymm3, 0xd8 \
__asm vpermq ymm1, ymm1, 0xd8 \
- __asm vpunpcklbw ymm0, ymm0, ymm1 /* UV */ \
+ __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \
__asm vmovdqu xmm4, [eax] /* Y */ \
__asm vpermq ymm4, ymm4, 0xd8 \
__asm vpunpcklbw ymm4, ymm4, ymm4 \
@@ -2017,123 +2014,122 @@ __declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0,
// Read 8 UV from 422, upsample to 16 UV.
#define READYUV422_AVX2 \
- __asm { \
- __asm vmovq xmm0, qword ptr [esi] /* U */ \
- __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \
+ __asm { \
+ __asm vmovq xmm3, qword ptr [esi] /* U */ \
+ __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \
__asm lea esi, [esi + 8] \
- __asm vpunpcklbw ymm0, ymm0, ymm1 /* UV */ \
- __asm vpermq ymm0, ymm0, 0xd8 \
- __asm vpunpcklwd ymm0, ymm0, ymm0 /* UVUV (upsample) */ \
- __asm vmovdqu xmm4, [eax] /* Y */ \
+ __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \
+ __asm vpermq ymm3, ymm3, 0xd8 \
+ __asm vpunpcklwd ymm3, ymm3, ymm3 /* UVUV (upsample) */ \
+ __asm vmovdqu xmm4, [eax] /* Y */ \
__asm vpermq ymm4, ymm4, 0xd8 \
__asm vpunpcklbw ymm4, ymm4, ymm4 \
__asm lea eax, [eax + 16]}
// Read 8 UV from 422, upsample to 16 UV. With 16 Alpha.
#define READYUVA422_AVX2 \
- __asm { \
- __asm vmovq xmm0, qword ptr [esi] /* U */ \
- __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \
+ __asm { \
+ __asm vmovq xmm3, qword ptr [esi] /* U */ \
+ __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \
__asm lea esi, [esi + 8] \
- __asm vpunpcklbw ymm0, ymm0, ymm1 /* UV */ \
- __asm vpermq ymm0, ymm0, 0xd8 \
- __asm vpunpcklwd ymm0, ymm0, ymm0 /* UVUV (upsample) */ \
- __asm vmovdqu xmm4, [eax] /* Y */ \
+ __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \
+ __asm vpermq ymm3, ymm3, 0xd8 \
+ __asm vpunpcklwd ymm3, ymm3, ymm3 /* UVUV (upsample) */ \
+ __asm vmovdqu xmm4, [eax] /* Y */ \
__asm vpermq ymm4, ymm4, 0xd8 \
__asm vpunpcklbw ymm4, ymm4, ymm4 \
__asm lea eax, [eax + 16] \
- __asm vmovdqu xmm5, [ebp] /* A */ \
+ __asm vmovdqu xmm5, [ebp] /* A */ \
__asm vpermq ymm5, ymm5, 0xd8 \
__asm lea ebp, [ebp + 16]}
// Read 8 UV from NV12, upsample to 16 UV.
#define READNV12_AVX2 \
- __asm { \
- __asm vmovdqu xmm0, [esi] /* UV */ \
+ __asm { \
+ __asm vmovdqu xmm3, [esi] /* UV */ \
__asm lea esi, [esi + 16] \
- __asm vpermq ymm0, ymm0, 0xd8 \
- __asm vpunpcklwd ymm0, ymm0, ymm0 /* UVUV (upsample) */ \
- __asm vmovdqu xmm4, [eax] /* Y */ \
+ __asm vpermq ymm3, ymm3, 0xd8 \
+ __asm vpunpcklwd ymm3, ymm3, ymm3 /* UVUV (upsample) */ \
+ __asm vmovdqu xmm4, [eax] /* Y */ \
__asm vpermq ymm4, ymm4, 0xd8 \
__asm vpunpcklbw ymm4, ymm4, ymm4 \
__asm lea eax, [eax + 16]}
// Read 8 UV from NV21, upsample to 16 UV.
#define READNV21_AVX2 \
- __asm { \
- __asm vmovdqu xmm0, [esi] /* UV */ \
+ __asm { \
+ __asm vmovdqu xmm3, [esi] /* UV */ \
__asm lea esi, [esi + 16] \
- __asm vpermq ymm0, ymm0, 0xd8 \
- __asm vpshufb ymm0, ymm0, ymmword ptr kShuffleNV21 \
- __asm vmovdqu xmm4, [eax] /* Y */ \
+ __asm vpermq ymm3, ymm3, 0xd8 \
+ __asm vpshufb ymm3, ymm3, ymmword ptr kShuffleNV21 \
+ __asm vmovdqu xmm4, [eax] /* Y */ \
__asm vpermq ymm4, ymm4, 0xd8 \
__asm vpunpcklbw ymm4, ymm4, ymm4 \
__asm lea eax, [eax + 16]}
// Read 8 YUY2 with 16 Y and upsample 8 UV to 16 UV.
#define READYUY2_AVX2 \
- __asm { \
- __asm vmovdqu ymm4, [eax] /* YUY2 */ \
+ __asm { \
+ __asm vmovdqu ymm4, [eax] /* YUY2 */ \
__asm vpshufb ymm4, ymm4, ymmword ptr kShuffleYUY2Y \
- __asm vmovdqu ymm0, [eax] /* UV */ \
- __asm vpshufb ymm0, ymm0, ymmword ptr kShuffleYUY2UV \
+ __asm vmovdqu ymm3, [eax] /* UV */ \
+ __asm vpshufb ymm3, ymm3, ymmword ptr kShuffleYUY2UV \
__asm lea eax, [eax + 32]}
// Read 8 UYVY with 16 Y and upsample 8 UV to 16 UV.
#define READUYVY_AVX2 \
- __asm { \
- __asm vmovdqu ymm4, [eax] /* UYVY */ \
+ __asm { \
+ __asm vmovdqu ymm4, [eax] /* UYVY */ \
__asm vpshufb ymm4, ymm4, ymmword ptr kShuffleUYVYY \
- __asm vmovdqu ymm0, [eax] /* UV */ \
- __asm vpshufb ymm0, ymm0, ymmword ptr kShuffleUYVYUV \
+ __asm vmovdqu ymm3, [eax] /* UV */ \
+ __asm vpshufb ymm3, ymm3, ymmword ptr kShuffleUYVYUV \
__asm lea eax, [eax + 32]}
// Convert 16 pixels: 16 UV and 16 Y.
#define YUVTORGB_AVX2(YuvConstants) \
- __asm { \
- __asm vpmaddubsw ymm2, ymm0, ymmword ptr [YuvConstants + KUVTOR] /* R UV */\
- __asm vpmaddubsw ymm1, ymm0, ymmword ptr [YuvConstants + KUVTOG] /* G UV */\
- __asm vpmaddubsw ymm0, ymm0, ymmword ptr [YuvConstants + KUVTOB] /* B UV */\
- __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KUVBIASR] \
- __asm vpsubw ymm2, ymm3, ymm2 \
- __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KUVBIASG] \
- __asm vpsubw ymm1, ymm3, ymm1 \
- __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KUVBIASB] \
- __asm vpsubw ymm0, ymm3, ymm0 /* Step 2: Find Y contribution to 16 R,G,B values */ \
+ __asm { \
+ __asm vpsubb ymm3, ymm3, ymmword ptr kBiasUV128 \
__asm vpmulhuw ymm4, ymm4, ymmword ptr [YuvConstants + KYTORGB] \
- __asm vpaddsw ymm0, ymm0, ymm4 /* B += Y */ \
- __asm vpaddsw ymm1, ymm1, ymm4 /* G += Y */ \
- __asm vpaddsw ymm2, ymm2, ymm4 /* R += Y */ \
+ __asm vmovdqa ymm0, ymmword ptr [YuvConstants + KUVTOB] \
+ __asm vmovdqa ymm1, ymmword ptr [YuvConstants + KUVTOG] \
+ __asm vmovdqa ymm2, ymmword ptr [YuvConstants + KUVTOR] \
+ __asm vpmaddubsw ymm0, ymm0, ymm3 /* B UV */ \
+ __asm vpmaddubsw ymm1, ymm1, ymm3 /* G UV */ \
+ __asm vpmaddubsw ymm2, ymm2, ymm3 /* R UV */ \
+ __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KYBIASTORGB] \
+ __asm vpaddw ymm4, ymm3, ymm4 \
+ __asm vpaddsw ymm0, ymm0, ymm4 \
+ __asm vpsubsw ymm1, ymm4, ymm1 \
+ __asm vpaddsw ymm2, ymm2, ymm4 \
__asm vpsraw ymm0, ymm0, 6 \
__asm vpsraw ymm1, ymm1, 6 \
__asm vpsraw ymm2, ymm2, 6 \
- __asm vpackuswb ymm0, ymm0, ymm0 /* B */ \
- __asm vpackuswb ymm1, ymm1, ymm1 /* G */ \
- __asm vpackuswb ymm2, ymm2, ymm2 /* R */ \
- }
+ __asm vpackuswb ymm0, ymm0, ymm0 \
+ __asm vpackuswb ymm1, ymm1, ymm1 \
+ __asm vpackuswb ymm2, ymm2, ymm2}
// Store 16 ARGB values.
#define STOREARGB_AVX2 \
- __asm { \
- __asm vpunpcklbw ymm0, ymm0, ymm1 /* BG */ \
+ __asm { \
+ __asm vpunpcklbw ymm0, ymm0, ymm1 /* BG */ \
__asm vpermq ymm0, ymm0, 0xd8 \
- __asm vpunpcklbw ymm2, ymm2, ymm5 /* RA */ \
+ __asm vpunpcklbw ymm2, ymm2, ymm5 /* RA */ \
__asm vpermq ymm2, ymm2, 0xd8 \
- __asm vpunpcklwd ymm1, ymm0, ymm2 /* BGRA first 8 pixels */ \
- __asm vpunpckhwd ymm0, ymm0, ymm2 /* BGRA next 8 pixels */ \
+ __asm vpunpcklwd ymm1, ymm0, ymm2 /* BGRA first 8 pixels */ \
+ __asm vpunpckhwd ymm0, ymm0, ymm2 /* BGRA next 8 pixels */ \
__asm vmovdqu 0[edx], ymm1 \
__asm vmovdqu 32[edx], ymm0 \
__asm lea edx, [edx + 64]}
// Store 16 RGBA values.
#define STORERGBA_AVX2 \
- __asm { \
- __asm vpunpcklbw ymm1, ymm1, ymm2 /* GR */ \
+ __asm { \
+ __asm vpunpcklbw ymm1, ymm1, ymm2 /* GR */ \
__asm vpermq ymm1, ymm1, 0xd8 \
- __asm vpunpcklbw ymm2, ymm5, ymm0 /* AB */ \
+ __asm vpunpcklbw ymm2, ymm5, ymm0 /* AB */ \
__asm vpermq ymm2, ymm2, 0xd8 \
- __asm vpunpcklwd ymm0, ymm2, ymm1 /* ABGR first 8 pixels */ \
- __asm vpunpckhwd ymm1, ymm2, ymm1 /* ABGR next 8 pixels */ \
+ __asm vpunpcklwd ymm0, ymm2, ymm1 /* ABGR first 8 pixels */ \
+ __asm vpunpckhwd ymm1, ymm2, ymm1 /* ABGR next 8 pixels */ \
__asm vmovdqu [edx], ymm0 \
__asm vmovdqu [edx + 32], ymm1 \
__asm lea edx, [edx + 64]}
@@ -2480,11 +2476,11 @@ __declspec(naked) void I422ToRGBARow_AVX2(
// Read 8 UV from 444.
#define READYUV444 \
- __asm { \
- __asm movq xmm0, qword ptr [esi] /* U */ \
+ __asm { \
+ __asm movq xmm3, qword ptr [esi] /* U */ \
__asm movq xmm1, qword ptr [esi + edi] /* V */ \
__asm lea esi, [esi + 8] \
- __asm punpcklbw xmm0, xmm1 /* UV */ \
+ __asm punpcklbw xmm3, xmm1 /* UV */ \
__asm movq xmm4, qword ptr [eax] \
__asm punpcklbw xmm4, xmm4 \
__asm lea eax, [eax + 8]}
@@ -2492,10 +2488,10 @@ __declspec(naked) void I422ToRGBARow_AVX2(
// Read 4 UV from 444. With 8 Alpha.
#define READYUVA444 \
__asm { \
- __asm movq xmm0, qword ptr [esi] /* U */ \
+ __asm movq xmm3, qword ptr [esi] /* U */ \
__asm movq xmm1, qword ptr [esi + edi] /* V */ \
__asm lea esi, [esi + 8] \
- __asm punpcklbw xmm0, xmm1 /* UV */ \
+ __asm punpcklbw xmm3, xmm1 /* UV */ \
__asm movq xmm4, qword ptr [eax] \
__asm punpcklbw xmm4, xmm4 \
__asm lea eax, [eax + 8] \
@@ -2504,180 +2500,178 @@ __declspec(naked) void I422ToRGBARow_AVX2(
// Read 4 UV from 422, upsample to 8 UV.
#define READYUV422 \
- __asm { \
- __asm movd xmm0, [esi] /* U */ \
- __asm movd xmm1, [esi + edi] /* V */ \
+ __asm { \
+ __asm movd xmm3, [esi] /* U */ \
+ __asm movd xmm1, [esi + edi] /* V */ \
__asm lea esi, [esi + 4] \
- __asm punpcklbw xmm0, xmm1 /* UV */ \
- __asm punpcklwd xmm0, xmm0 /* UVUV (upsample) */ \
+ __asm punpcklbw xmm3, xmm1 /* UV */ \
+ __asm punpcklwd xmm3, xmm3 /* UVUV (upsample) */ \
__asm movq xmm4, qword ptr [eax] \
__asm punpcklbw xmm4, xmm4 \
__asm lea eax, [eax + 8]}
// Read 4 UV from 422, upsample to 8 UV. With 8 Alpha.
#define READYUVA422 \
- __asm { \
- __asm movd xmm0, [esi] /* U */ \
- __asm movd xmm1, [esi + edi] /* V */ \
+ __asm { \
+ __asm movd xmm3, [esi] /* U */ \
+ __asm movd xmm1, [esi + edi] /* V */ \
__asm lea esi, [esi + 4] \
- __asm punpcklbw xmm0, xmm1 /* UV */ \
- __asm punpcklwd xmm0, xmm0 /* UVUV (upsample) */ \
- __asm movq xmm4, qword ptr [eax] /* Y */ \
+ __asm punpcklbw xmm3, xmm1 /* UV */ \
+ __asm punpcklwd xmm3, xmm3 /* UVUV (upsample) */ \
+ __asm movq xmm4, qword ptr [eax] /* Y */ \
__asm punpcklbw xmm4, xmm4 \
__asm lea eax, [eax + 8] \
- __asm movq xmm5, qword ptr [ebp] /* A */ \
+ __asm movq xmm5, qword ptr [ebp] /* A */ \
__asm lea ebp, [ebp + 8]}
// Read 4 UV from NV12, upsample to 8 UV.
#define READNV12 \
- __asm { \
- __asm movq xmm0, qword ptr [esi] /* UV */ \
+ __asm { \
+ __asm movq xmm3, qword ptr [esi] /* UV */ \
__asm lea esi, [esi + 8] \
- __asm punpcklwd xmm0, xmm0 /* UVUV (upsample) */ \
+ __asm punpcklwd xmm3, xmm3 /* UVUV (upsample) */ \
__asm movq xmm4, qword ptr [eax] \
__asm punpcklbw xmm4, xmm4 \
__asm lea eax, [eax + 8]}
// Read 4 VU from NV21, upsample to 8 UV.
#define READNV21 \
- __asm { \
- __asm movq xmm0, qword ptr [esi] /* UV */ \
+ __asm { \
+ __asm movq xmm3, qword ptr [esi] /* UV */ \
__asm lea esi, [esi + 8] \
- __asm pshufb xmm0, xmmword ptr kShuffleNV21 \
+ __asm pshufb xmm3, xmmword ptr kShuffleNV21 \
__asm movq xmm4, qword ptr [eax] \
__asm punpcklbw xmm4, xmm4 \
__asm lea eax, [eax + 8]}
// Read 4 YUY2 with 8 Y and upsample 4 UV to 8 UV.
#define READYUY2 \
- __asm { \
- __asm movdqu xmm4, [eax] /* YUY2 */ \
+ __asm { \
+ __asm movdqu xmm4, [eax] /* YUY2 */ \
__asm pshufb xmm4, xmmword ptr kShuffleYUY2Y \
- __asm movdqu xmm0, [eax] /* UV */ \
- __asm pshufb xmm0, xmmword ptr kShuffleYUY2UV \
+ __asm movdqu xmm3, [eax] /* UV */ \
+ __asm pshufb xmm3, xmmword ptr kShuffleYUY2UV \
__asm lea eax, [eax + 16]}
// Read 4 UYVY with 8 Y and upsample 4 UV to 8 UV.
#define READUYVY \
- __asm { \
- __asm movdqu xmm4, [eax] /* UYVY */ \
+ __asm { \
+ __asm movdqu xmm4, [eax] /* UYVY */ \
__asm pshufb xmm4, xmmword ptr kShuffleUYVYY \
- __asm movdqu xmm0, [eax] /* UV */ \
- __asm pshufb xmm0, xmmword ptr kShuffleUYVYUV \
+ __asm movdqu xmm3, [eax] /* UV */ \
+ __asm pshufb xmm3, xmmword ptr kShuffleUYVYUV \
__asm lea eax, [eax + 16]}
// Convert 8 pixels: 8 UV and 8 Y.
#define YUVTORGB(YuvConstants) \
- __asm { \
- __asm movdqa xmm1, xmm0 \
- __asm movdqa xmm2, xmm0 \
- __asm movdqa xmm3, xmm0 \
- __asm movdqa xmm0, xmmword ptr [YuvConstants + KUVBIASB] \
- __asm pmaddubsw xmm1, xmmword ptr [YuvConstants + KUVTOB] \
- __asm psubw xmm0, xmm1 \
- __asm movdqa xmm1, xmmword ptr [YuvConstants + KUVBIASG] \
- __asm pmaddubsw xmm2, xmmword ptr [YuvConstants + KUVTOG] \
- __asm psubw xmm1, xmm2 \
- __asm movdqa xmm2, xmmword ptr [YuvConstants + KUVBIASR] \
- __asm pmaddubsw xmm3, xmmword ptr [YuvConstants + KUVTOR] \
- __asm psubw xmm2, xmm3 \
+ __asm { \
+ __asm psubb xmm3, xmmword ptr kBiasUV128 \
__asm pmulhuw xmm4, xmmword ptr [YuvConstants + KYTORGB] \
- __asm paddsw xmm0, xmm4 /* B += Y */ \
- __asm paddsw xmm1, xmm4 /* G += Y */ \
- __asm paddsw xmm2, xmm4 /* R += Y */ \
+ __asm movdqa xmm0, xmmword ptr [YuvConstants + KUVTOB] \
+ __asm movdqa xmm1, xmmword ptr [YuvConstants + KUVTOG] \
+ __asm movdqa xmm2, xmmword ptr [YuvConstants + KUVTOR] \
+ __asm pmaddubsw xmm0, xmm3 \
+ __asm pmaddubsw xmm1, xmm3 \
+ __asm pmaddubsw xmm2, xmm3 \
+ __asm movdqa xmm3, xmmword ptr [YuvConstants + KYBIASTORGB] \
+ __asm paddw xmm4, xmm3 \
+ __asm paddsw xmm0, xmm4 \
+ __asm paddsw xmm2, xmm4 \
+ __asm psubsw xmm4, xmm1 \
+ __asm movdqa xmm1, xmm4 \
__asm psraw xmm0, 6 \
__asm psraw xmm1, 6 \
__asm psraw xmm2, 6 \
- __asm packuswb xmm0, xmm0 /* B */ \
- __asm packuswb xmm1, xmm1 /* G */ \
- __asm packuswb xmm2, xmm2 /* R */ \
+ __asm packuswb xmm0, xmm0 /* B */ \
+ __asm packuswb xmm1, xmm1 /* G */ \
+ __asm packuswb xmm2, xmm2 /* R */ \
}
// Store 8 ARGB values.
#define STOREARGB \
- __asm { \
- __asm punpcklbw xmm0, xmm1 /* BG */ \
- __asm punpcklbw xmm2, xmm5 /* RA */ \
+ __asm { \
+ __asm punpcklbw xmm0, xmm1 /* BG */ \
+ __asm punpcklbw xmm2, xmm5 /* RA */ \
__asm movdqa xmm1, xmm0 \
- __asm punpcklwd xmm0, xmm2 /* BGRA first 4 pixels */ \
- __asm punpckhwd xmm1, xmm2 /* BGRA next 4 pixels */ \
+ __asm punpcklwd xmm0, xmm2 /* BGRA first 4 pixels */ \
+ __asm punpckhwd xmm1, xmm2 /* BGRA next 4 pixels */ \
__asm movdqu 0[edx], xmm0 \
__asm movdqu 16[edx], xmm1 \
__asm lea edx, [edx + 32]}
// Store 8 BGRA values.
#define STOREBGRA \
- __asm { \
- __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \
- __asm punpcklbw xmm1, xmm0 /* GB */ \
- __asm punpcklbw xmm5, xmm2 /* AR */ \
+ __asm { \
+ __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \
+ __asm punpcklbw xmm1, xmm0 /* GB */ \
+ __asm punpcklbw xmm5, xmm2 /* AR */ \
__asm movdqa xmm0, xmm5 \
- __asm punpcklwd xmm5, xmm1 /* BGRA first 4 pixels */ \
- __asm punpckhwd xmm0, xmm1 /* BGRA next 4 pixels */ \
+ __asm punpcklwd xmm5, xmm1 /* BGRA first 4 pixels */ \
+ __asm punpckhwd xmm0, xmm1 /* BGRA next 4 pixels */ \
__asm movdqu 0[edx], xmm5 \
__asm movdqu 16[edx], xmm0 \
__asm lea edx, [edx + 32]}
// Store 8 RGBA values.
#define STORERGBA \
- __asm { \
- __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \
- __asm punpcklbw xmm1, xmm2 /* GR */ \
- __asm punpcklbw xmm5, xmm0 /* AB */ \
+ __asm { \
+ __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \
+ __asm punpcklbw xmm1, xmm2 /* GR */ \
+ __asm punpcklbw xmm5, xmm0 /* AB */ \
__asm movdqa xmm0, xmm5 \
- __asm punpcklwd xmm5, xmm1 /* RGBA first 4 pixels */ \
- __asm punpckhwd xmm0, xmm1 /* RGBA next 4 pixels */ \
+ __asm punpcklwd xmm5, xmm1 /* RGBA first 4 pixels */ \
+ __asm punpckhwd xmm0, xmm1 /* RGBA next 4 pixels */ \
__asm movdqu 0[edx], xmm5 \
__asm movdqu 16[edx], xmm0 \
__asm lea edx, [edx + 32]}
// Store 8 RGB24 values.
#define STORERGB24 \
- __asm {/* Weave into RRGB */ \
- __asm punpcklbw xmm0, xmm1 /* BG */ \
- __asm punpcklbw xmm2, xmm2 /* RR */ \
+ __asm {/* Weave into RRGB */ \
+ __asm punpcklbw xmm0, xmm1 /* BG */ \
+ __asm punpcklbw xmm2, xmm2 /* RR */ \
__asm movdqa xmm1, xmm0 \
- __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \
- __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB24 */ \
- __asm pshufb xmm0, xmm5 /* Pack first 8 and last 4 bytes. */ \
- __asm pshufb xmm1, xmm6 /* Pack first 12 bytes. */ \
- __asm palignr xmm1, xmm0, 12 /* last 4 bytes of xmm0 + 12 xmm1 */ \
- __asm movq qword ptr 0[edx], xmm0 /* First 8 bytes */ \
- __asm movdqu 8[edx], xmm1 /* Last 16 bytes */ \
+ __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \
+ __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB24 */ \
+ __asm pshufb xmm0, xmm5 /* Pack first 8 and last 4 bytes. */ \
+ __asm pshufb xmm1, xmm6 /* Pack first 12 bytes. */ \
+ __asm palignr xmm1, xmm0, 12 /* last 4 bytes of xmm0 + 12 xmm1 */ \
+ __asm movq qword ptr 0[edx], xmm0 /* First 8 bytes */ \
+ __asm movdqu 8[edx], xmm1 /* Last 16 bytes */ \
__asm lea edx, [edx + 24]}
// Store 8 RGB565 values.
#define STORERGB565 \
- __asm {/* Weave into RRGB */ \
- __asm punpcklbw xmm0, xmm1 /* BG */ \
- __asm punpcklbw xmm2, xmm2 /* RR */ \
+ __asm {/* Weave into RRGB */ \
+ __asm punpcklbw xmm0, xmm1 /* BG */ \
+ __asm punpcklbw xmm2, xmm2 /* RR */ \
__asm movdqa xmm1, xmm0 \
- __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \
- __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB565 */ \
- __asm movdqa xmm3, xmm0 /* B first 4 pixels of argb */ \
- __asm movdqa xmm2, xmm0 /* G */ \
- __asm pslld xmm0, 8 /* R */ \
- __asm psrld xmm3, 3 /* B */ \
- __asm psrld xmm2, 5 /* G */ \
- __asm psrad xmm0, 16 /* R */ \
- __asm pand xmm3, xmm5 /* B */ \
- __asm pand xmm2, xmm6 /* G */ \
- __asm pand xmm0, xmm7 /* R */ \
- __asm por xmm3, xmm2 /* BG */ \
- __asm por xmm0, xmm3 /* BGR */ \
- __asm movdqa xmm3, xmm1 /* B next 4 pixels of argb */ \
- __asm movdqa xmm2, xmm1 /* G */ \
- __asm pslld xmm1, 8 /* R */ \
- __asm psrld xmm3, 3 /* B */ \
- __asm psrld xmm2, 5 /* G */ \
- __asm psrad xmm1, 16 /* R */ \
- __asm pand xmm3, xmm5 /* B */ \
- __asm pand xmm2, xmm6 /* G */ \
- __asm pand xmm1, xmm7 /* R */ \
- __asm por xmm3, xmm2 /* BG */ \
- __asm por xmm1, xmm3 /* BGR */ \
+ __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \
+ __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB565 */ \
+ __asm movdqa xmm3, xmm0 /* B first 4 pixels of argb */ \
+ __asm movdqa xmm2, xmm0 /* G */ \
+ __asm pslld xmm0, 8 /* R */ \
+ __asm psrld xmm3, 3 /* B */ \
+ __asm psrld xmm2, 5 /* G */ \
+ __asm psrad xmm0, 16 /* R */ \
+ __asm pand xmm3, xmm5 /* B */ \
+ __asm pand xmm2, xmm6 /* G */ \
+ __asm pand xmm0, xmm7 /* R */ \
+ __asm por xmm3, xmm2 /* BG */ \
+ __asm por xmm0, xmm3 /* BGR */ \
+ __asm movdqa xmm3, xmm1 /* B next 4 pixels of argb */ \
+ __asm movdqa xmm2, xmm1 /* G */ \
+ __asm pslld xmm1, 8 /* R */ \
+ __asm psrld xmm3, 3 /* B */ \
+ __asm psrld xmm2, 5 /* G */ \
+ __asm psrad xmm1, 16 /* R */ \
+ __asm pand xmm3, xmm5 /* B */ \
+ __asm pand xmm2, xmm6 /* G */ \
+ __asm pand xmm1, xmm7 /* R */ \
+ __asm por xmm3, xmm2 /* BG */ \
+ __asm por xmm1, xmm3 /* BGR */ \
__asm packssdw xmm0, xmm1 \
- __asm movdqu 0[edx], xmm0 /* store 8 pixels of RGB565 */ \
+ __asm movdqu 0[edx], xmm0 /* store 8 pixels of RGB565 */ \
__asm lea edx, [edx + 16]}
// 8 pixels.
@@ -4347,13 +4341,13 @@ static const uvec8 kShuffleAlpha = {3u, 0x80, 3u, 0x80, 7u, 0x80, 7u, 0x80,
11u, 0x80, 11u, 0x80, 15u, 0x80, 15u, 0x80};
// Blend 8 pixels at a time.
-__declspec(naked) void ARGBBlendRow_SSSE3(const uint8_t* src_argb0,
+__declspec(naked) void ARGBBlendRow_SSSE3(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_argb0
+ mov eax, [esp + 4 + 4] // src_argb
mov esi, [esp + 4 + 8] // src_argb1
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
@@ -4442,7 +4436,7 @@ __declspec(naked) void ARGBAttenuateRow_SSSE3(const uint8_t* src_argb,
uint8_t* dst_argb,
int width) {
__asm {
- mov eax, [esp + 4] // src_argb0
+ mov eax, [esp + 4] // src_argb
mov edx, [esp + 8] // dst_argb
mov ecx, [esp + 12] // width
pcmpeqb xmm3, xmm3 // generate mask 0xff000000
@@ -4487,7 +4481,7 @@ __declspec(naked) void ARGBAttenuateRow_AVX2(const uint8_t* src_argb,
uint8_t* dst_argb,
int width) {
__asm {
- mov eax, [esp + 4] // src_argb0
+ mov eax, [esp + 4] // src_argb
mov edx, [esp + 8] // dst_argb
mov ecx, [esp + 12] // width
sub edx, eax
@@ -4581,7 +4575,7 @@ __declspec(naked) void ARGBUnattenuateRow_AVX2(const uint8_t* src_argb,
uint8_t* dst_argb,
int width) {
__asm {
- mov eax, [esp + 4] // src_argb0
+ mov eax, [esp + 4] // src_argb
mov edx, [esp + 8] // dst_argb
mov ecx, [esp + 12] // width
sub edx, eax
@@ -4752,22 +4746,22 @@ __declspec(naked) void ARGBSepiaRow_SSSE3(uint8_t* dst_argb, int width) {
pmaddubsw xmm6, xmm2
phaddw xmm0, xmm6
psrlw xmm0, 7
- packuswb xmm0, xmm0 // 8 B values
+ packuswb xmm0, xmm0 // 8 B values
movdqu xmm5, [eax] // G
movdqu xmm1, [eax + 16]
pmaddubsw xmm5, xmm3
pmaddubsw xmm1, xmm3
phaddw xmm5, xmm1
psrlw xmm5, 7
- packuswb xmm5, xmm5 // 8 G values
- punpcklbw xmm0, xmm5 // 8 BG values
+ packuswb xmm5, xmm5 // 8 G values
+ punpcklbw xmm0, xmm5 // 8 BG values
movdqu xmm5, [eax] // R
movdqu xmm1, [eax + 16]
pmaddubsw xmm5, xmm4
pmaddubsw xmm1, xmm4
phaddw xmm5, xmm1
psrlw xmm5, 7
- packuswb xmm5, xmm5 // 8 R values
+ packuswb xmm5, xmm5 // 8 R values
movdqu xmm6, [eax] // A
movdqu xmm1, [eax + 16]
psrld xmm6, 24
@@ -4817,25 +4811,25 @@ __declspec(naked) void ARGBColorMatrixRow_SSSE3(const uint8_t* src_argb,
movdqu xmm1, [eax + 16]
pmaddubsw xmm6, xmm3
pmaddubsw xmm1, xmm3
- phaddsw xmm0, xmm7 // B
- phaddsw xmm6, xmm1 // G
- psraw xmm0, 6 // B
- psraw xmm6, 6 // G
- packuswb xmm0, xmm0 // 8 B values
- packuswb xmm6, xmm6 // 8 G values
- punpcklbw xmm0, xmm6 // 8 BG values
+ phaddsw xmm0, xmm7 // B
+ phaddsw xmm6, xmm1 // G
+ psraw xmm0, 6 // B
+ psraw xmm6, 6 // G
+ packuswb xmm0, xmm0 // 8 B values
+ packuswb xmm6, xmm6 // 8 G values
+ punpcklbw xmm0, xmm6 // 8 BG values
movdqu xmm1, [eax] // R
movdqu xmm7, [eax + 16]
pmaddubsw xmm1, xmm4
pmaddubsw xmm7, xmm4
- phaddsw xmm1, xmm7 // R
+ phaddsw xmm1, xmm7 // R
movdqu xmm6, [eax] // A
movdqu xmm7, [eax + 16]
pmaddubsw xmm6, xmm5
pmaddubsw xmm7, xmm5
phaddsw xmm6, xmm7 // A
- psraw xmm1, 6 // R
- psraw xmm6, 6 // A
+ psraw xmm1, 6 // R
+ psraw xmm6, 6 // A
packuswb xmm1, xmm1 // 8 R values
packuswb xmm6, xmm6 // 8 A values
punpcklbw xmm1, xmm6 // 8 RA values
@@ -4878,16 +4872,16 @@ __declspec(naked) void ARGBQuantizeRow_SSE2(uint8_t* dst_argb,
convertloop:
movdqu xmm0, [eax] // read 4 pixels
- punpcklbw xmm0, xmm5 // first 2 pixels
- pmulhuw xmm0, xmm2 // pixel * scale >> 16
+ punpcklbw xmm0, xmm5 // first 2 pixels
+ pmulhuw xmm0, xmm2 // pixel * scale >> 16
movdqu xmm1, [eax] // read 4 pixels
- punpckhbw xmm1, xmm5 // next 2 pixels
+ punpckhbw xmm1, xmm5 // next 2 pixels
pmulhuw xmm1, xmm2
- pmullw xmm0, xmm3 // * interval_size
+ pmullw xmm0, xmm3 // * interval_size
movdqu xmm7, [eax] // read 4 pixels
pmullw xmm1, xmm3
- pand xmm7, xmm6 // mask alpha
- paddw xmm0, xmm4 // + interval_size / 2
+ pand xmm7, xmm6 // mask alpha
+ paddw xmm0, xmm4 // + interval_size / 2
paddw xmm1, xmm4
packuswb xmm0, xmm1
por xmm0, xmm7
@@ -4907,9 +4901,9 @@ __declspec(naked) void ARGBShadeRow_SSE2(const uint8_t* src_argb,
int width,
uint32_t value) {
__asm {
- mov eax, [esp + 4] // src_argb
- mov edx, [esp + 8] // dst_argb
- mov ecx, [esp + 12] // width
+ mov eax, [esp + 4] // src_argb
+ mov edx, [esp + 8] // dst_argb
+ mov ecx, [esp + 12] // width
movd xmm2, [esp + 16] // value
punpcklbw xmm2, xmm2
punpcklqdq xmm2, xmm2
@@ -4918,10 +4912,10 @@ __declspec(naked) void ARGBShadeRow_SSE2(const uint8_t* src_argb,
movdqu xmm0, [eax] // read 4 pixels
lea eax, [eax + 16]
movdqa xmm1, xmm0
- punpcklbw xmm0, xmm0 // first 2
- punpckhbw xmm1, xmm1 // next 2
- pmulhuw xmm0, xmm2 // argb * value
- pmulhuw xmm1, xmm2 // argb * value
+ punpcklbw xmm0, xmm0 // first 2
+ punpckhbw xmm1, xmm1 // next 2
+ pmulhuw xmm0, xmm2 // argb * value
+ pmulhuw xmm1, xmm2 // argb * value
psrlw xmm0, 8
psrlw xmm1, 8
packuswb xmm0, xmm1
@@ -4937,29 +4931,29 @@ __declspec(naked) void ARGBShadeRow_SSE2(const uint8_t* src_argb,
#ifdef HAS_ARGBMULTIPLYROW_SSE2
// Multiply 2 rows of ARGB pixels together, 4 pixels at a time.
-__declspec(naked) void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBMultiplyRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_argb0
- mov esi, [esp + 4 + 8] // src_argb1
+ mov eax, [esp + 4 + 4] // src_argb
+ mov esi, [esp + 4 + 8] // src_argb1
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
- pxor xmm5, xmm5 // constant 0
+ pxor xmm5, xmm5 // constant 0
convertloop:
- movdqu xmm0, [eax] // read 4 pixels from src_argb0
+ movdqu xmm0, [eax] // read 4 pixels from src_argb
movdqu xmm2, [esi] // read 4 pixels from src_argb1
movdqu xmm1, xmm0
movdqu xmm3, xmm2
- punpcklbw xmm0, xmm0 // first 2
- punpckhbw xmm1, xmm1 // next 2
- punpcklbw xmm2, xmm5 // first 2
- punpckhbw xmm3, xmm5 // next 2
- pmulhuw xmm0, xmm2 // src_argb0 * src_argb1 first 2
- pmulhuw xmm1, xmm3 // src_argb0 * src_argb1 next 2
+ punpcklbw xmm0, xmm0 // first 2
+ punpckhbw xmm1, xmm1 // next 2
+ punpcklbw xmm2, xmm5 // first 2
+ punpckhbw xmm3, xmm5 // next 2
+ pmulhuw xmm0, xmm2 // src_argb * src_argb1 first 2
+ pmulhuw xmm1, xmm3 // src_argb * src_argb1 next 2
lea eax, [eax + 16]
lea esi, [esi + 16]
packuswb xmm0, xmm1
@@ -4977,14 +4971,14 @@ __declspec(naked) void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0,
#ifdef HAS_ARGBADDROW_SSE2
// Add 2 rows of ARGB pixels together, 4 pixels at a time.
// TODO(fbarchard): Port this to posix, neon and other math functions.
-__declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_argb0
- mov esi, [esp + 4 + 8] // src_argb1
+ mov eax, [esp + 4 + 4] // src_argb
+ mov esi, [esp + 4 + 8] // src_argb1
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
@@ -4992,11 +4986,11 @@ __declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0,
jl convertloop49
convertloop4:
- movdqu xmm0, [eax] // read 4 pixels from src_argb0
+ movdqu xmm0, [eax] // read 4 pixels from src_argb
lea eax, [eax + 16]
movdqu xmm1, [esi] // read 4 pixels from src_argb1
lea esi, [esi + 16]
- paddusb xmm0, xmm1 // src_argb0 + src_argb1
+ paddusb xmm0, xmm1 // src_argb + src_argb1
movdqu [edx], xmm0
lea edx, [edx + 16]
sub ecx, 4
@@ -5007,11 +5001,11 @@ __declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0,
jl convertloop19
convertloop1:
- movd xmm0, [eax] // read 1 pixels from src_argb0
+ movd xmm0, [eax] // read 1 pixels from src_argb
lea eax, [eax + 4]
movd xmm1, [esi] // read 1 pixels from src_argb1
lea esi, [esi + 4]
- paddusb xmm0, xmm1 // src_argb0 + src_argb1
+ paddusb xmm0, xmm1 // src_argb + src_argb1
movd [edx], xmm0
lea edx, [edx + 4]
sub ecx, 1
@@ -5026,23 +5020,23 @@ __declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0,
#ifdef HAS_ARGBSUBTRACTROW_SSE2
// Subtract 2 rows of ARGB pixels together, 4 pixels at a time.
-__declspec(naked) void ARGBSubtractRow_SSE2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBSubtractRow_SSE2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_argb0
- mov esi, [esp + 4 + 8] // src_argb1
+ mov eax, [esp + 4 + 4] // src_argb
+ mov esi, [esp + 4 + 8] // src_argb1
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
convertloop:
- movdqu xmm0, [eax] // read 4 pixels from src_argb0
+ movdqu xmm0, [eax] // read 4 pixels from src_argb
lea eax, [eax + 16]
movdqu xmm1, [esi] // read 4 pixels from src_argb1
lea esi, [esi + 16]
- psubusb xmm0, xmm1 // src_argb0 - src_argb1
+ psubusb xmm0, xmm1 // src_argb - src_argb1
movdqu [edx], xmm0
lea edx, [edx + 16]
sub ecx, 4
@@ -5056,20 +5050,20 @@ __declspec(naked) void ARGBSubtractRow_SSE2(const uint8_t* src_argb0,
#ifdef HAS_ARGBMULTIPLYROW_AVX2
// Multiply 2 rows of ARGB pixels together, 8 pixels at a time.
-__declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_argb0
- mov esi, [esp + 4 + 8] // src_argb1
+ mov eax, [esp + 4 + 4] // src_argb
+ mov esi, [esp + 4 + 8] // src_argb1
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
- vpxor ymm5, ymm5, ymm5 // constant 0
+ vpxor ymm5, ymm5, ymm5 // constant 0
convertloop:
- vmovdqu ymm1, [eax] // read 8 pixels from src_argb0
+ vmovdqu ymm1, [eax] // read 8 pixels from src_argb
lea eax, [eax + 32]
vmovdqu ymm3, [esi] // read 8 pixels from src_argb1
lea esi, [esi + 32]
@@ -5077,8 +5071,8 @@ __declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0,
vpunpckhbw ymm1, ymm1, ymm1 // high 4
vpunpcklbw ymm2, ymm3, ymm5 // low 4
vpunpckhbw ymm3, ymm3, ymm5 // high 4
- vpmulhuw ymm0, ymm0, ymm2 // src_argb0 * src_argb1 low 4
- vpmulhuw ymm1, ymm1, ymm3 // src_argb0 * src_argb1 high 4
+ vpmulhuw ymm0, ymm0, ymm2 // src_argb * src_argb1 low 4
+ vpmulhuw ymm1, ymm1, ymm3 // src_argb * src_argb1 high 4
vpackuswb ymm0, ymm0, ymm1
vmovdqu [edx], ymm0
lea edx, [edx + 32]
@@ -5094,19 +5088,19 @@ __declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0,
#ifdef HAS_ARGBADDROW_AVX2
// Add 2 rows of ARGB pixels together, 8 pixels at a time.
-__declspec(naked) void ARGBAddRow_AVX2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBAddRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_argb0
- mov esi, [esp + 4 + 8] // src_argb1
+ mov eax, [esp + 4 + 4] // src_argb
+ mov esi, [esp + 4 + 8] // src_argb1
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
convertloop:
- vmovdqu ymm0, [eax] // read 8 pixels from src_argb0
+ vmovdqu ymm0, [eax] // read 8 pixels from src_argb
lea eax, [eax + 32]
vpaddusb ymm0, ymm0, [esi] // add 8 pixels from src_argb1
lea esi, [esi + 32]
@@ -5124,21 +5118,21 @@ __declspec(naked) void ARGBAddRow_AVX2(const uint8_t* src_argb0,
#ifdef HAS_ARGBSUBTRACTROW_AVX2
// Subtract 2 rows of ARGB pixels together, 8 pixels at a time.
-__declspec(naked) void ARGBSubtractRow_AVX2(const uint8_t* src_argb0,
+__declspec(naked) void ARGBSubtractRow_AVX2(const uint8_t* src_argb,
const uint8_t* src_argb1,
uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_argb0
- mov esi, [esp + 4 + 8] // src_argb1
+ mov eax, [esp + 4 + 4] // src_argb
+ mov esi, [esp + 4 + 8] // src_argb1
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
convertloop:
- vmovdqu ymm0, [eax] // read 8 pixels from src_argb0
+ vmovdqu ymm0, [eax] // read 8 pixels from src_argb
lea eax, [eax + 32]
- vpsubusb ymm0, ymm0, [esi] // src_argb0 - src_argb1
+ vpsubusb ymm0, ymm0, [esi] // src_argb - src_argb1
lea esi, [esi + 32]
vmovdqu [edx], ymm0
lea edx, [edx + 32]
@@ -5165,8 +5159,8 @@ __declspec(naked) void SobelXRow_SSE2(const uint8_t* src_y0,
__asm {
push esi
push edi
- mov eax, [esp + 8 + 4] // src_y0
- mov esi, [esp + 8 + 8] // src_y1
+ mov eax, [esp + 8 + 4] // src_y0
+ mov esi, [esp + 8 + 8] // src_y1
mov edi, [esp + 8 + 12] // src_y2
mov edx, [esp + 8 + 16] // dst_sobelx
mov ecx, [esp + 8 + 20] // width
@@ -5176,17 +5170,17 @@ __declspec(naked) void SobelXRow_SSE2(const uint8_t* src_y0,
pxor xmm5, xmm5 // constant 0
convertloop:
- movq xmm0, qword ptr [eax] // read 8 pixels from src_y0[0]
+ movq xmm0, qword ptr [eax] // read 8 pixels from src_y0[0]
movq xmm1, qword ptr [eax + 2] // read 8 pixels from src_y0[2]
punpcklbw xmm0, xmm5
punpcklbw xmm1, xmm5
psubw xmm0, xmm1
- movq xmm1, qword ptr [eax + esi] // read 8 pixels from src_y1[0]
+ movq xmm1, qword ptr [eax + esi] // read 8 pixels from src_y1[0]
movq xmm2, qword ptr [eax + esi + 2] // read 8 pixels from src_y1[2]
punpcklbw xmm1, xmm5
punpcklbw xmm2, xmm5
psubw xmm1, xmm2
- movq xmm2, qword ptr [eax + edi] // read 8 pixels from src_y2[0]
+ movq xmm2, qword ptr [eax + edi] // read 8 pixels from src_y2[0]
movq xmm3, qword ptr [eax + edi + 2] // read 8 pixels from src_y2[2]
punpcklbw xmm2, xmm5
punpcklbw xmm3, xmm5
@@ -5221,8 +5215,8 @@ __declspec(naked) void SobelYRow_SSE2(const uint8_t* src_y0,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_y0
- mov esi, [esp + 4 + 8] // src_y1
+ mov eax, [esp + 4 + 4] // src_y0
+ mov esi, [esp + 4 + 8] // src_y1
mov edx, [esp + 4 + 12] // dst_sobely
mov ecx, [esp + 4 + 16] // width
sub esi, eax
@@ -5230,17 +5224,17 @@ __declspec(naked) void SobelYRow_SSE2(const uint8_t* src_y0,
pxor xmm5, xmm5 // constant 0
convertloop:
- movq xmm0, qword ptr [eax] // read 8 pixels from src_y0[0]
+ movq xmm0, qword ptr [eax] // read 8 pixels from src_y0[0]
movq xmm1, qword ptr [eax + esi] // read 8 pixels from src_y1[0]
punpcklbw xmm0, xmm5
punpcklbw xmm1, xmm5
psubw xmm0, xmm1
- movq xmm1, qword ptr [eax + 1] // read 8 pixels from src_y0[1]
+ movq xmm1, qword ptr [eax + 1] // read 8 pixels from src_y0[1]
movq xmm2, qword ptr [eax + esi + 1] // read 8 pixels from src_y1[1]
punpcklbw xmm1, xmm5
punpcklbw xmm2, xmm5
psubw xmm1, xmm2
- movq xmm2, qword ptr [eax + 2] // read 8 pixels from src_y0[2]
+ movq xmm2, qword ptr [eax + 2] // read 8 pixels from src_y0[2]
movq xmm3, qword ptr [eax + esi + 2] // read 8 pixels from src_y1[2]
punpcklbw xmm2, xmm5
punpcklbw xmm3, xmm5
@@ -5275,8 +5269,8 @@ __declspec(naked) void SobelRow_SSE2(const uint8_t* src_sobelx,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_sobelx
- mov esi, [esp + 4 + 8] // src_sobely
+ mov eax, [esp + 4 + 4] // src_sobelx
+ mov esi, [esp + 4 + 8] // src_sobely
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
sub esi, eax
@@ -5284,7 +5278,7 @@ __declspec(naked) void SobelRow_SSE2(const uint8_t* src_sobelx,
pslld xmm5, 24 // 0xff000000
convertloop:
- movdqu xmm0, [eax] // read 16 pixels src_sobelx
+ movdqu xmm0, [eax] // read 16 pixels src_sobelx
movdqu xmm1, [eax + esi] // read 16 pixels src_sobely
lea eax, [eax + 16]
paddusb xmm0, xmm1 // sobel = sobelx + sobely
@@ -5323,8 +5317,8 @@ __declspec(naked) void SobelToPlaneRow_SSE2(const uint8_t* src_sobelx,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_sobelx
- mov esi, [esp + 4 + 8] // src_sobely
+ mov eax, [esp + 4 + 4] // src_sobelx
+ mov esi, [esp + 4 + 8] // src_sobely
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
sub esi, eax
@@ -5357,15 +5351,15 @@ __declspec(naked) void SobelXYRow_SSE2(const uint8_t* src_sobelx,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] // src_sobelx
- mov esi, [esp + 4 + 8] // src_sobely
+ mov eax, [esp + 4 + 4] // src_sobelx
+ mov esi, [esp + 4 + 8] // src_sobely
mov edx, [esp + 4 + 12] // dst_argb
mov ecx, [esp + 4 + 16] // width
sub esi, eax
pcmpeqb xmm5, xmm5 // alpha 255
convertloop:
- movdqu xmm0, [eax] // read 16 pixels src_sobelx
+ movdqu xmm0, [eax] // read 16 pixels src_sobelx
movdqu xmm1, [eax + esi] // read 16 pixels src_sobely
lea eax, [eax + 16]
movdqa xmm2, xmm0
@@ -5535,7 +5529,7 @@ void CumulativeSumToAverageRow_SSE2(const int32_t* topleft,
add ecx, 4 - 1
jl l1b
- // 1 pixel loop
+ // 1 pixel loop
l1:
movdqu xmm0, [eax]
psubd xmm0, [eax + edx * 4]
@@ -5577,7 +5571,7 @@ void ComputeCumulativeSumRow_SSE2(const uint8_t* row,
test edx, 15
jne l4b
- // 4 pixel loop
+ // 4 pixel loop
l4:
movdqu xmm2, [eax] // 4 argb pixels 16 bytes.
lea eax, [eax + 16]
@@ -5623,7 +5617,7 @@ void ComputeCumulativeSumRow_SSE2(const uint8_t* row,
add ecx, 4 - 1
jl l1b
- // 1 pixel loop
+ // 1 pixel loop
l1:
movd xmm2, dword ptr [eax] // 1 argb pixel
lea eax, [eax + 4]
@@ -5657,7 +5651,7 @@ __declspec(naked) LIBYUV_API void ARGBAffineRow_SSE2(const uint8_t* src_argb,
mov esi, [esp + 16] // stride
mov edx, [esp + 20] // dst_argb
mov ecx, [esp + 24] // pointer to uv_dudv
- movq xmm2, qword ptr [ecx] // uv
+ movq xmm2, qword ptr [ecx] // uv
movq xmm7, qword ptr [ecx + 8] // dudv
mov ecx, [esp + 28] // width
shl esi, 16 // 4, stride
@@ -5666,7 +5660,7 @@ __declspec(naked) LIBYUV_API void ARGBAffineRow_SSE2(const uint8_t* src_argb,
sub ecx, 4
jl l4b
- // setup for 4 pixel loop
+ // setup for 4 pixel loop
pshufd xmm7, xmm7, 0x44 // dup dudv
pshufd xmm5, xmm5, 0 // dup 4, stride
movdqa xmm0, xmm2 // x0, y0, x1, y1
@@ -5678,16 +5672,16 @@ __declspec(naked) LIBYUV_API void ARGBAffineRow_SSE2(const uint8_t* src_argb,
addps xmm3, xmm4
addps xmm4, xmm4 // dudv *= 4
- // 4 pixel loop
+ // 4 pixel loop
l4:
cvttps2dq xmm0, xmm2 // x, y float to int first 2
cvttps2dq xmm1, xmm3 // x, y float to int next 2
packssdw xmm0, xmm1 // x, y as 8 shorts
pmaddwd xmm0, xmm5 // offsets = x * 4 + y * stride.
movd esi, xmm0
- pshufd xmm0, xmm0, 0x39 // shift right
+ pshufd xmm0, xmm0, 0x39 // shift right
movd edi, xmm0
- pshufd xmm0, xmm0, 0x39 // shift right
+ pshufd xmm0, xmm0, 0x39 // shift right
movd xmm1, [eax + esi] // read pixel 0
movd xmm6, [eax + edi] // read pixel 1
punpckldq xmm1, xmm6 // combine pixel 0 and 1
@@ -5739,8 +5733,8 @@ __declspec(naked) void InterpolateRow_AVX2(uint8_t* dst_ptr,
__asm {
push esi
push edi
- mov edi, [esp + 8 + 4] // dst_ptr
- mov esi, [esp + 8 + 8] // src_ptr
+ mov edi, [esp + 8 + 4] // dst_ptr
+ mov esi, [esp + 8 + 8] // src_ptr
mov edx, [esp + 8 + 12] // src_stride
mov ecx, [esp + 8 + 16] // dst_width
mov eax, [esp + 8 + 20] // source_y_fraction (0..255)
@@ -5749,7 +5743,7 @@ __declspec(naked) void InterpolateRow_AVX2(uint8_t* dst_ptr,
je xloop100 // 0 / 256. Blend 100 / 0.
sub edi, esi
cmp eax, 128
- je xloop50 // 128 /256 is 0.50. Blend 50 / 50.
+ je xloop50 // 128 /256 is 0.50. Blend 50 / 50.
vmovd xmm0, eax // high fraction 0..255
neg eax
@@ -5776,7 +5770,7 @@ __declspec(naked) void InterpolateRow_AVX2(uint8_t* dst_ptr,
vpaddw ymm0, ymm0, ymm4
vpsrlw ymm1, ymm1, 8
vpsrlw ymm0, ymm0, 8
- vpackuswb ymm0, ymm0, ymm1 // unmutates
+ vpackuswb ymm0, ymm0, ymm1 // unmutates
vmovdqu [esi + edi], ymm0
lea esi, [esi + 32]
sub ecx, 32
@@ -5817,17 +5811,17 @@ __declspec(naked) void InterpolateRow_SSSE3(uint8_t* dst_ptr,
push esi
push edi
- mov edi, [esp + 8 + 4] // dst_ptr
- mov esi, [esp + 8 + 8] // src_ptr
+ mov edi, [esp + 8 + 4] // dst_ptr
+ mov esi, [esp + 8 + 8] // src_ptr
mov edx, [esp + 8 + 12] // src_stride
mov ecx, [esp + 8 + 16] // dst_width
mov eax, [esp + 8 + 20] // source_y_fraction (0..255)
sub edi, esi
- // Dispatch to specialized filters if applicable.
+ // Dispatch to specialized filters if applicable.
cmp eax, 0
je xloop100 // 0 /256. Blend 100 / 0.
cmp eax, 128
- je xloop50 // 128 / 256 is 0.50. Blend 50 / 50.
+ je xloop50 // 128 / 256 is 0.50. Blend 50 / 50.
movd xmm0, eax // high fraction 0..255
neg eax
@@ -5846,7 +5840,7 @@ __declspec(naked) void InterpolateRow_SSSE3(uint8_t* dst_ptr,
movdqu xmm1, xmm0
punpcklbw xmm0, xmm2
punpckhbw xmm1, xmm2
- psubb xmm0, xmm4 // bias image by -128
+ psubb xmm0, xmm4 // bias image by -128
psubb xmm1, xmm4
movdqa xmm2, xmm5
movdqa xmm3, xmm5
@@ -5895,8 +5889,8 @@ __declspec(naked) void ARGBShuffleRow_SSSE3(const uint8_t* src_argb,
const uint8_t* shuffler,
int width) {
__asm {
- mov eax, [esp + 4] // src_argb
- mov edx, [esp + 8] // dst_argb
+ mov eax, [esp + 4] // src_argb
+ mov edx, [esp + 8] // dst_argb
mov ecx, [esp + 12] // shuffler
movdqu xmm5, [ecx]
mov ecx, [esp + 16] // width
@@ -5922,8 +5916,8 @@ __declspec(naked) void ARGBShuffleRow_AVX2(const uint8_t* src_argb,
const uint8_t* shuffler,
int width) {
__asm {
- mov eax, [esp + 4] // src_argb
- mov edx, [esp + 8] // dst_argb
+ mov eax, [esp + 4] // src_argb
+ mov edx, [esp + 8] // dst_argb
mov ecx, [esp + 12] // shuffler
vbroadcastf128 ymm5, [ecx] // same shuffle in high as low.
mov ecx, [esp + 16] // width
@@ -5960,18 +5954,18 @@ __declspec(naked) void I422ToYUY2Row_SSE2(const uint8_t* src_y,
__asm {
push esi
push edi
- mov eax, [esp + 8 + 4] // src_y
- mov esi, [esp + 8 + 8] // src_u
+ mov eax, [esp + 8 + 4] // src_y
+ mov esi, [esp + 8 + 8] // src_u
mov edx, [esp + 8 + 12] // src_v
mov edi, [esp + 8 + 16] // dst_frame
mov ecx, [esp + 8 + 20] // width
sub edx, esi
convertloop:
- movq xmm2, qword ptr [esi] // U
+ movq xmm2, qword ptr [esi] // U
movq xmm3, qword ptr [esi + edx] // V
lea esi, [esi + 8]
- punpcklbw xmm2, xmm3 // UV
+ punpcklbw xmm2, xmm3 // UV
movdqu xmm0, [eax] // Y
lea eax, [eax + 16]
movdqa xmm1, xmm0
@@ -5997,22 +5991,22 @@ __declspec(naked) void I422ToUYVYRow_SSE2(const uint8_t* src_y,
__asm {
push esi
push edi
- mov eax, [esp + 8 + 4] // src_y
- mov esi, [esp + 8 + 8] // src_u
+ mov eax, [esp + 8 + 4] // src_y
+ mov esi, [esp + 8 + 8] // src_u
mov edx, [esp + 8 + 12] // src_v
mov edi, [esp + 8 + 16] // dst_frame
mov ecx, [esp + 8 + 20] // width
sub edx, esi
convertloop:
- movq xmm2, qword ptr [esi] // U
+ movq xmm2, qword ptr [esi] // U
movq xmm3, qword ptr [esi + edx] // V
lea esi, [esi + 8]
- punpcklbw xmm2, xmm3 // UV
+ punpcklbw xmm2, xmm3 // UV
movdqu xmm0, [eax] // Y
movdqa xmm1, xmm2
lea eax, [eax + 16]
- punpcklbw xmm1, xmm0 // UYVY
+ punpcklbw xmm1, xmm0 // UYVY
punpckhbw xmm2, xmm0
movdqu [edi], xmm1
movdqu [edi + 16], xmm2
@@ -6039,10 +6033,10 @@ __declspec(naked) void ARGBPolynomialRow_SSE2(const uint8_t* src_argb,
mov ecx, [esp + 4 + 16] /* width */
pxor xmm3, xmm3 // 0 constant for zero extending bytes to ints.
- // 2 pixel loop.
+ // 2 pixel loop.
convertloop:
- // pmovzxbd xmm0, dword ptr [eax] // BGRA pixel
- // pmovzxbd xmm4, dword ptr [eax + 4] // BGRA pixel
+ // pmovzxbd xmm0, dword ptr [eax] // BGRA pixel
+ // pmovzxbd xmm4, dword ptr [eax + 4] // BGRA pixel
movq xmm0, qword ptr [eax] // BGRABGRA
lea eax, [eax + 8]
punpcklbw xmm0, xmm3
@@ -6091,8 +6085,8 @@ __declspec(naked) void ARGBPolynomialRow_AVX2(const uint8_t* src_argb,
const float* poly,
int width) {
__asm {
- mov eax, [esp + 4] /* src_argb */
- mov edx, [esp + 8] /* dst_argb */
+ mov eax, [esp + 4] /* src_argb */
+ mov edx, [esp + 8] /* dst_argb */
mov ecx, [esp + 12] /* poly */
vbroadcastf128 ymm4, [ecx] // C0
vbroadcastf128 ymm5, [ecx + 16] // C1
@@ -6131,8 +6125,8 @@ __declspec(naked) void HalfFloatRow_SSE2(const uint16_t* src,
float scale,
int width) {
__asm {
- mov eax, [esp + 4] /* src */
- mov edx, [esp + 8] /* dst */
+ mov eax, [esp + 4] /* src */
+ mov edx, [esp + 8] /* dst */
movd xmm4, dword ptr [esp + 12] /* scale */
mov ecx, [esp + 16] /* width */
mulss xmm4, kExpBias
@@ -6140,7 +6134,7 @@ __declspec(naked) void HalfFloatRow_SSE2(const uint16_t* src,
pxor xmm5, xmm5
sub edx, eax
- // 8 pixel loop.
+ // 8 pixel loop.
convertloop:
movdqu xmm2, xmmword ptr [eax] // 8 shorts
add eax, 16
@@ -6178,7 +6172,7 @@ __declspec(naked) void HalfFloatRow_AVX2(const uint16_t* src,
vpxor ymm5, ymm5, ymm5
sub edx, eax
- // 16 pixel loop.
+ // 16 pixel loop.
convertloop:
vmovdqu ymm2, [eax] // 16 shorts
add eax, 32
@@ -6188,7 +6182,7 @@ __declspec(naked) void HalfFloatRow_AVX2(const uint16_t* src,
vcvtdq2ps ymm2, ymm2
vmulps ymm3, ymm3, ymm4 // scale to adjust exponent for 5 bit range.
vmulps ymm2, ymm2, ymm4
- vpsrld ymm3, ymm3, 13 // float convert to 8 half floats truncate
+ vpsrld ymm3, ymm3, 13 // float convert to 8 half floats truncate
vpsrld ymm2, ymm2, 13
vpackssdw ymm2, ymm2, ymm3
vmovdqu [eax + edx - 32], ymm2
@@ -6206,22 +6200,22 @@ __declspec(naked) void HalfFloatRow_F16C(const uint16_t* src,
float scale,
int width) {
__asm {
- mov eax, [esp + 4] /* src */
- mov edx, [esp + 8] /* dst */
+ mov eax, [esp + 4] /* src */
+ mov edx, [esp + 8] /* dst */
vbroadcastss ymm4, [esp + 12] /* scale */
- mov ecx, [esp + 16] /* width */
+ mov ecx, [esp + 16] /* width */
sub edx, eax
- // 16 pixel loop.
+ // 16 pixel loop.
convertloop:
vpmovzxwd ymm2, xmmword ptr [eax] // 8 shorts -> 8 ints
vpmovzxwd ymm3, xmmword ptr [eax + 16] // 8 more shorts
add eax, 32
- vcvtdq2ps ymm2, ymm2 // convert 8 ints to floats
+ vcvtdq2ps ymm2, ymm2 // convert 8 ints to floats
vcvtdq2ps ymm3, ymm3
vmulps ymm2, ymm2, ymm4 // scale to normalized range 0 to 1
vmulps ymm3, ymm3, ymm4
- vcvtps2ph xmm2, ymm2, 3 // float convert to 8 half floats truncate
+ vcvtps2ph xmm2, ymm2, 3 // float convert to 8 half floats truncate
vcvtps2ph xmm3, ymm3, 3
vmovdqu [eax + edx + 32], xmm2
vmovdqu [eax + edx + 32 + 16], xmm3
@@ -6240,8 +6234,8 @@ __declspec(naked) void ARGBColorTableRow_X86(uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] /* dst_argb */
- mov esi, [esp + 4 + 8] /* table_argb */
+ mov eax, [esp + 4 + 4] /* dst_argb */
+ mov esi, [esp + 4 + 8] /* table_argb */
mov ecx, [esp + 4 + 12] /* width */
// 1 pixel loop.
@@ -6274,8 +6268,8 @@ __declspec(naked) void RGBColorTableRow_X86(uint8_t* dst_argb,
int width) {
__asm {
push esi
- mov eax, [esp + 4 + 4] /* dst_argb */
- mov esi, [esp + 4 + 8] /* table_argb */
+ mov eax, [esp + 4 + 4] /* dst_argb */
+ mov esi, [esp + 4 + 8] /* table_argb */
mov ecx, [esp + 4 + 12] /* width */
// 1 pixel loop.
@@ -6309,8 +6303,8 @@ __declspec(naked) void ARGBLumaColorTableRow_SSSE3(const uint8_t* src_argb,
__asm {
push esi
push edi
- mov eax, [esp + 8 + 4] /* src_argb */
- mov edi, [esp + 8 + 8] /* dst_argb */
+ mov eax, [esp + 8 + 4] /* src_argb */
+ mov edi, [esp + 8 + 8] /* dst_argb */
mov ecx, [esp + 8 + 12] /* width */
movd xmm2, dword ptr [esp + 8 + 16] // luma table
movd xmm3, dword ptr [esp + 8 + 20] // lumacoeff
@@ -6320,7 +6314,7 @@ __declspec(naked) void ARGBLumaColorTableRow_SSSE3(const uint8_t* src_argb,
psllw xmm4, 8
pxor xmm5, xmm5
- // 4 pixel loop.
+ // 4 pixel loop.
convertloop:
movdqu xmm0, xmmword ptr [eax] // generate luma ptr
pmaddubsw xmm0, xmm3
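
Aside (illustrative, not part of the patch): the rewritten YUVTORGB macro above
re-centers the packed UV bytes once (psubb kBiasUV128), forms the three UV dot
products with pmaddubsw, and folds the Y bias into the scaled Y term before the
final adds. A minimal scalar sketch of that per-pixel arithmetic, using
placeholder coefficient names rather than the real KUVTO*/KYTORGB table values:

#include <stdint.h>

static uint8_t Clamp255(int v) {
  return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

// Scalar model of one pixel of the reordered YUVTORGB step. ub/vb, ug/vg,
// ur/vr, ygain and ybias stand in for the KUVTOB/KUVTOG/KUVTOR, KYTORGB and
// KYBIASTORGB entries; the actual values depend on the YuvConstants in use.
static void YuvToRgbPixelSketch(uint8_t y, uint8_t u, uint8_t v,
                                int ub, int vb, int ug, int vg, int ur, int vr,
                                int ygain, int ybias,
                                uint8_t* b, uint8_t* g, uint8_t* r) {
  int u0 = (int)u - 128;  // psubb kBiasUV128
  int v0 = (int)v - 128;
  // pmulhuw with KYTORGB on the duplicated Y bytes, then paddw KYBIASTORGB.
  int y1 = (int)(((uint32_t)(y * 0x0101) * (uint32_t)ygain) >> 16) + ybias;
  *b = Clamp255((y1 + (ub * u0 + vb * v0)) >> 6);  // paddsw, psraw 6, packuswb
  *g = Clamp255((y1 - (ug * u0 + vg * v0)) >> 6);  // psubsw
  *r = Clamp255((y1 + (ur * u0 + vr * v0)) >> 6);
}
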
diff --git a/third_party/libyuv/source/scale.cc b/third_party/libyuv/source/scale.cc
index 4a5dc94aaa..03b0486f76 100644
--- a/third_party/libyuv/source/scale.cc
+++ b/third_party/libyuv/source/scale.cc
@@ -1446,7 +1446,8 @@ void ScalePlaneUp2_Bilinear(int src_width,
for (x = 0; x < src_height - 1; ++x) {
Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width);
src_ptr += src_stride;
- // TODO: Test performance of writing one row of destination at a time.
+ // TODO(fbarchard): Test performance of writing one row of destination at a
+ // time.
dst_ptr += 2 * dst_stride;
}
if (!(dst_height & 1)) {
@@ -1459,7 +1460,7 @@ void ScalePlaneUp2_Bilinear(int src_width,
// its original width, using linear interpolation.
// stride is in count of uint16_t.
// This is used to scale U and V planes of I210 to I410 and I212 to I412.
-void ScalePlaneUp2_16_Linear(int src_width,
+void ScalePlaneUp2_12_Linear(int src_width,
int src_height,
int dst_width,
int dst_height,
@@ -1476,21 +1477,21 @@ void ScalePlaneUp2_16_Linear(int src_width,
// This function can only scale up by 2 times horizontally.
assert(src_width == ((dst_width + 1) / 2));
-#ifdef HAS_SCALEROWUP2LINEAR_16_SSSE3
+#ifdef HAS_SCALEROWUP2LINEAR_12_SSSE3
if (TestCpuFlag(kCpuHasSSSE3)) {
- ScaleRowUp = ScaleRowUp2_Linear_16_Any_SSSE3;
+ ScaleRowUp = ScaleRowUp2_Linear_12_Any_SSSE3;
}
#endif
-#ifdef HAS_SCALEROWUP2LINEAR_16_AVX2
+#ifdef HAS_SCALEROWUP2LINEAR_12_AVX2
if (TestCpuFlag(kCpuHasAVX2)) {
- ScaleRowUp = ScaleRowUp2_Linear_16_Any_AVX2;
+ ScaleRowUp = ScaleRowUp2_Linear_12_Any_AVX2;
}
#endif
-#ifdef HAS_SCALEROWUP2LINEAR_16_NEON
+#ifdef HAS_SCALEROWUP2LINEAR_12_NEON
if (TestCpuFlag(kCpuHasNEON)) {
- ScaleRowUp = ScaleRowUp2_Linear_16_Any_NEON;
+ ScaleRowUp = ScaleRowUp2_Linear_12_Any_NEON;
}
#endif
@@ -1513,6 +1514,102 @@ void ScalePlaneUp2_16_Linear(int src_width,
// its original size, using bilinear interpolation.
// stride is in count of uint16_t.
// This is used to scale U and V planes of I010 to I410 and I012 to I412.
+void ScalePlaneUp2_12_Bilinear(int src_width,
+ int src_height,
+ int dst_width,
+ int dst_height,
+ int src_stride,
+ int dst_stride,
+ const uint16_t* src_ptr,
+ uint16_t* dst_ptr) {
+ void (*Scale2RowUp)(const uint16_t* src_ptr, ptrdiff_t src_stride,
+ uint16_t* dst_ptr, ptrdiff_t dst_stride, int dst_width) =
+ ScaleRowUp2_Bilinear_16_Any_C;
+ int x;
+
+ // This function can only scale up by 2 times.
+ assert(src_width == ((dst_width + 1) / 2));
+ assert(src_height == ((dst_height + 1) / 2));
+
+#ifdef HAS_SCALEROWUP2BILINEAR_12_SSSE3
+ if (TestCpuFlag(kCpuHasSSSE3)) {
+ Scale2RowUp = ScaleRowUp2_Bilinear_12_Any_SSSE3;
+ }
+#endif
+
+#ifdef HAS_SCALEROWUP2BILINEAR_12_AVX2
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ Scale2RowUp = ScaleRowUp2_Bilinear_12_Any_AVX2;
+ }
+#endif
+
+#ifdef HAS_SCALEROWUP2BILINEAR_12_NEON
+ if (TestCpuFlag(kCpuHasNEON)) {
+ Scale2RowUp = ScaleRowUp2_Bilinear_12_Any_NEON;
+ }
+#endif
+
+ Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width);
+ dst_ptr += dst_stride;
+ for (x = 0; x < src_height - 1; ++x) {
+ Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width);
+ src_ptr += src_stride;
+ dst_ptr += 2 * dst_stride;
+ }
+ if (!(dst_height & 1)) {
+ Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width);
+ }
+}
+
+void ScalePlaneUp2_16_Linear(int src_width,
+ int src_height,
+ int dst_width,
+ int dst_height,
+ int src_stride,
+ int dst_stride,
+ const uint16_t* src_ptr,
+ uint16_t* dst_ptr) {
+ void (*ScaleRowUp)(const uint16_t* src_ptr, uint16_t* dst_ptr,
+ int dst_width) = ScaleRowUp2_Linear_16_Any_C;
+ int i;
+ int y;
+ int dy;
+
+ // This function can only scale up by 2 times horizontally.
+ assert(src_width == ((dst_width + 1) / 2));
+
+#ifdef HAS_SCALEROWUP2LINEAR_16_SSE2
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ ScaleRowUp = ScaleRowUp2_Linear_16_Any_SSE2;
+ }
+#endif
+
+#ifdef HAS_SCALEROWUP2LINEAR_16_AVX2
+ if (TestCpuFlag(kCpuHasAVX2)) {
+ ScaleRowUp = ScaleRowUp2_Linear_16_Any_AVX2;
+ }
+#endif
+
+#ifdef HAS_SCALEROWUP2LINEAR_16_NEON
+ if (TestCpuFlag(kCpuHasNEON)) {
+ ScaleRowUp = ScaleRowUp2_Linear_16_Any_NEON;
+ }
+#endif
+
+ if (dst_height == 1) {
+ ScaleRowUp(src_ptr + ((src_height - 1) / 2) * src_stride, dst_ptr,
+ dst_width);
+ } else {
+ dy = FixedDiv(src_height - 1, dst_height - 1);
+ y = (1 << 15) - 1;
+ for (i = 0; i < dst_height; ++i) {
+ ScaleRowUp(src_ptr + (y >> 16) * src_stride, dst_ptr, dst_width);
+ dst_ptr += dst_stride;
+ y += dy;
+ }
+ }
+}
+
void ScalePlaneUp2_16_Bilinear(int src_width,
int src_height,
int dst_width,
@@ -1530,7 +1627,7 @@ void ScalePlaneUp2_16_Bilinear(int src_width,
assert(src_width == ((dst_width + 1) / 2));
assert(src_height == ((dst_height + 1) / 2));
-#ifdef HAS_SCALEROWUP2BILINEAR_16_SSSE3
+#ifdef HAS_SCALEROWUP2BILINEAR_16_SSE2
if (TestCpuFlag(kCpuHasSSSE3)) {
Scale2RowUp = ScaleRowUp2_Bilinear_16_Any_SSSE3;
}
@@ -1945,6 +2042,17 @@ void ScalePlane_16(const uint16_t* src,
dst_stride, src, dst);
return;
}
+ if ((dst_width + 1) / 2 == src_width && filtering == kFilterLinear) {
+ ScalePlaneUp2_16_Linear(src_width, src_height, dst_width, dst_height,
+ src_stride, dst_stride, src, dst);
+ return;
+ }
+ if ((dst_height + 1) / 2 == src_height && (dst_width + 1) / 2 == src_width &&
+ (filtering == kFilterBilinear || filtering == kFilterBox)) {
+ ScalePlaneUp2_16_Bilinear(src_width, src_height, dst_width, dst_height,
+ src_stride, dst_stride, src, dst);
+ return;
+ }
if (filtering && dst_height > src_height) {
ScalePlaneBilinearUp_16(src_width, src_height, dst_width, dst_height,
src_stride, dst_stride, src, dst, filtering);
@@ -1981,13 +2089,13 @@ void ScalePlane_12(const uint16_t* src,
}
if ((dst_width + 1) / 2 == src_width && filtering == kFilterLinear) {
- ScalePlaneUp2_16_Linear(src_width, src_height, dst_width, dst_height,
+ ScalePlaneUp2_12_Linear(src_width, src_height, dst_width, dst_height,
src_stride, dst_stride, src, dst);
return;
}
if ((dst_height + 1) / 2 == src_height && (dst_width + 1) / 2 == src_width &&
(filtering == kFilterBilinear || filtering == kFilterBox)) {
- ScalePlaneUp2_16_Bilinear(src_width, src_height, dst_width, dst_height,
+ ScalePlaneUp2_12_Bilinear(src_width, src_height, dst_width, dst_height,
src_stride, dst_stride, src, dst);
return;
}
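
Illustration (not part of the patch): the two checks added to ScalePlane_16
above accept destination dimensions of exactly 2*src or 2*src - 1, mirroring
the equivalent checks already present in ScalePlane_12. With hypothetical
plane sizes:

// 320x240 -> 640x480, kFilterBilinear or kFilterBox:
//   (640 + 1) / 2 == 320 && (480 + 1) / 2 == 240  -> ScalePlaneUp2_16_Bilinear
// 320x240 -> 639x240, kFilterLinear:
//   (639 + 1) / 2 == 320                          -> ScalePlaneUp2_16_Linear
// 320x240 -> 641x480 satisfies neither check, so the pre-existing
// ScalePlaneBilinearUp_16 path runs instead.
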
diff --git a/third_party/libyuv/source/scale_any.cc b/third_party/libyuv/source/scale_any.cc
index d30f583366..965749c415 100644
--- a/third_party/libyuv/source/scale_any.cc
+++ b/third_party/libyuv/source/scale_any.cc
@@ -656,14 +656,22 @@ SUH2LANY(ScaleRowUp2_Linear_Any_SSSE3,
uint8_t)
#endif
-#ifdef HAS_SCALEROWUP2LINEAR_16_SSSE3
-SUH2LANY(ScaleRowUp2_Linear_16_Any_SSSE3,
- ScaleRowUp2_Linear_16_SSSE3,
+#ifdef HAS_SCALEROWUP2LINEAR_12_SSSE3
+SUH2LANY(ScaleRowUp2_Linear_12_Any_SSSE3,
+ ScaleRowUp2_Linear_12_SSSE3,
ScaleRowUp2_Linear_16_C,
15,
uint16_t)
#endif
+#ifdef HAS_SCALEROWUP2LINEAR_16_SSE2
+SUH2LANY(ScaleRowUp2_Linear_16_Any_SSE2,
+ ScaleRowUp2_Linear_16_SSE2,
+ ScaleRowUp2_Linear_16_C,
+ 7,
+ uint16_t)
+#endif
+
#ifdef HAS_SCALEROWUP2LINEAR_AVX2
SUH2LANY(ScaleRowUp2_Linear_Any_AVX2,
ScaleRowUp2_Linear_AVX2,
@@ -672,11 +680,19 @@ SUH2LANY(ScaleRowUp2_Linear_Any_AVX2,
uint8_t)
#endif
+#ifdef HAS_SCALEROWUP2LINEAR_12_AVX2
+SUH2LANY(ScaleRowUp2_Linear_12_Any_AVX2,
+ ScaleRowUp2_Linear_12_AVX2,
+ ScaleRowUp2_Linear_16_C,
+ 31,
+ uint16_t)
+#endif
+
#ifdef HAS_SCALEROWUP2LINEAR_16_AVX2
SUH2LANY(ScaleRowUp2_Linear_16_Any_AVX2,
ScaleRowUp2_Linear_16_AVX2,
ScaleRowUp2_Linear_16_C,
- 31,
+ 15,
uint16_t)
#endif
@@ -688,6 +704,14 @@ SUH2LANY(ScaleRowUp2_Linear_Any_NEON,
uint8_t)
#endif
+#ifdef HAS_SCALEROWUP2LINEAR_12_NEON
+SUH2LANY(ScaleRowUp2_Linear_12_Any_NEON,
+ ScaleRowUp2_Linear_12_NEON,
+ ScaleRowUp2_Linear_16_C,
+ 15,
+ uint16_t)
+#endif
+
#ifdef HAS_SCALEROWUP2LINEAR_16_NEON
SUH2LANY(ScaleRowUp2_Linear_16_Any_NEON,
ScaleRowUp2_Linear_16_NEON,
@@ -744,14 +768,22 @@ SU2BLANY(ScaleRowUp2_Bilinear_Any_SSE2,
uint8_t)
#endif
-#ifdef HAS_SCALEROWUP2BILINEAR_16_SSSE3
-SU2BLANY(ScaleRowUp2_Bilinear_16_Any_SSSE3,
- ScaleRowUp2_Bilinear_16_SSSE3,
+#ifdef HAS_SCALEROWUP2BILINEAR_12_SSSE3
+SU2BLANY(ScaleRowUp2_Bilinear_12_Any_SSSE3,
+ ScaleRowUp2_Bilinear_12_SSSE3,
ScaleRowUp2_Bilinear_16_C,
15,
uint16_t)
#endif
+#ifdef HAS_SCALEROWUP2BILINEAR_16_SSE2
+SU2BLANY(ScaleRowUp2_Bilinear_16_Any_SSSE3,
+ ScaleRowUp2_Bilinear_16_SSE2,
+ ScaleRowUp2_Bilinear_16_C,
+ 7,
+ uint16_t)
+#endif
+
#ifdef HAS_SCALEROWUP2BILINEAR_SSSE3
SU2BLANY(ScaleRowUp2_Bilinear_Any_SSSE3,
ScaleRowUp2_Bilinear_SSSE3,
@@ -768,6 +800,14 @@ SU2BLANY(ScaleRowUp2_Bilinear_Any_AVX2,
uint8_t)
#endif
+#ifdef HAS_SCALEROWUP2BILINEAR_12_AVX2
+SU2BLANY(ScaleRowUp2_Bilinear_12_Any_AVX2,
+ ScaleRowUp2_Bilinear_12_AVX2,
+ ScaleRowUp2_Bilinear_16_C,
+ 15,
+ uint16_t)
+#endif
+
#ifdef HAS_SCALEROWUP2BILINEAR_16_AVX2
SU2BLANY(ScaleRowUp2_Bilinear_16_Any_AVX2,
ScaleRowUp2_Bilinear_16_AVX2,
@@ -784,11 +824,19 @@ SU2BLANY(ScaleRowUp2_Bilinear_Any_NEON,
uint8_t)
#endif
+#ifdef HAS_SCALEROWUP2BILINEAR_12_NEON
+SU2BLANY(ScaleRowUp2_Bilinear_12_Any_NEON,
+ ScaleRowUp2_Bilinear_12_NEON,
+ ScaleRowUp2_Bilinear_16_C,
+ 15,
+ uint16_t)
+#endif
+
#ifdef HAS_SCALEROWUP2BILINEAR_16_NEON
SU2BLANY(ScaleRowUp2_Bilinear_16_Any_NEON,
ScaleRowUp2_Bilinear_16_NEON,
ScaleRowUp2_Bilinear_16_C,
- 15,
+ 7,
uint16_t)
#endif
@@ -860,7 +908,7 @@ SBUH2LANY(ScaleUVRowUp2_Linear_16_Any_AVX2,
SBUH2LANY(ScaleUVRowUp2_Linear_Any_NEON,
ScaleUVRowUp2_Linear_NEON,
ScaleUVRowUp2_Linear_C,
- 7,
+ 15,
uint8_t)
#endif
@@ -868,7 +916,7 @@ SBUH2LANY(ScaleUVRowUp2_Linear_Any_NEON,
SBUH2LANY(ScaleUVRowUp2_Linear_16_Any_NEON,
ScaleUVRowUp2_Linear_16_NEON,
ScaleUVRowUp2_Linear_16_C,
- 7,
+ 15,
uint16_t)
#endif
@@ -966,7 +1014,7 @@ SBU2BLANY(ScaleUVRowUp2_Bilinear_Any_NEON,
SBU2BLANY(ScaleUVRowUp2_Bilinear_16_Any_NEON,
ScaleUVRowUp2_Bilinear_16_NEON,
ScaleUVRowUp2_Bilinear_16_C,
- 3,
+ 7,
uint16_t)
#endif
diff --git a/third_party/libyuv/source/scale_gcc.cc b/third_party/libyuv/source/scale_gcc.cc
index f03903f0be..279c5e4020 100644
--- a/third_party/libyuv/source/scale_gcc.cc
+++ b/third_party/libyuv/source/scale_gcc.cc
@@ -17,8 +17,7 @@ extern "C" {
#endif
// This module is for GCC x86 and x64.
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
// Offsets for source bytes 0 to 9
static const uvec8 kShuf0 = {0, 1, 3, 4, 5, 7, 8, 9,
@@ -950,8 +949,8 @@ void ScaleRowUp2_Bilinear_SSE2(const uint8_t* src_ptr,
}
#endif
-#ifdef HAS_SCALEROWUP2LINEAR_16_SSSE3
-void ScaleRowUp2_Linear_16_SSSE3(const uint16_t* src_ptr,
+#ifdef HAS_SCALEROWUP2LINEAR_12_SSSE3
+void ScaleRowUp2_Linear_12_SSSE3(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width) {
asm volatile(
@@ -1000,8 +999,8 @@ void ScaleRowUp2_Linear_16_SSSE3(const uint16_t* src_ptr,
}
#endif
-#ifdef HAS_SCALEROWUP2BILINEAR_16_SSSE3
-void ScaleRowUp2_Bilinear_16_SSSE3(const uint16_t* src_ptr,
+#ifdef HAS_SCALEROWUP2BILINEAR_12_SSSE3
+void ScaleRowUp2_Bilinear_12_SSSE3(const uint16_t* src_ptr,
ptrdiff_t src_stride,
uint16_t* dst_ptr,
ptrdiff_t dst_stride,
@@ -1045,11 +1044,11 @@ void ScaleRowUp2_Bilinear_16_SSSE3(const uint16_t* src_ptr,
"paddw %%xmm3,%%xmm5 \n" // near+far
"paddw %%xmm1,%%xmm1 \n" // 2*near
"paddw %%xmm3,%%xmm3 \n" // 2*near
- "paddw %%xmm4,%%xmm1 \n" // 3*near+far (1, lo)
- "paddw %%xmm5,%%xmm3 \n" // 3*near+far (1, hi)
+ "paddw %%xmm4,%%xmm1 \n" // 3*near+far (2, lo)
+ "paddw %%xmm5,%%xmm3 \n" // 3*near+far (2, hi)
- // xmm4 xmm1 xmm0 xmm2
- // xmm5 xmm2 xmm1 xmm3
+ // xmm0 xmm2
+ // xmm1 xmm3
"movdqa %%xmm0,%%xmm4 \n"
"movdqa %%xmm1,%%xmm5 \n"
@@ -1099,6 +1098,166 @@ void ScaleRowUp2_Bilinear_16_SSSE3(const uint16_t* src_ptr,
}
#endif
+#ifdef HAS_SCALEROWUP2LINEAR_16_SSE2
+void ScaleRowUp2_Linear_16_SSE2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width) {
+ asm volatile(
+ "pxor %%xmm5,%%xmm5 \n"
+ "pcmpeqd %%xmm4,%%xmm4 \n"
+ "psrld $31,%%xmm4 \n"
+ "pslld $1,%%xmm4 \n" // all 2
+
+ LABELALIGN
+ "1: \n"
+ "movq (%0),%%xmm0 \n" // 0123 (16b)
+ "movq 2(%0),%%xmm1 \n" // 1234 (16b)
+
+ "punpcklwd %%xmm5,%%xmm0 \n" // 0123 (32b)
+ "punpcklwd %%xmm5,%%xmm1 \n" // 1234 (32b)
+
+ "movdqa %%xmm0,%%xmm2 \n"
+ "movdqa %%xmm1,%%xmm3 \n"
+
+ "pshufd $0b10110001,%%xmm2,%%xmm2 \n" // 1032 (even, far)
+ "pshufd $0b10110001,%%xmm3,%%xmm3 \n" // 2143 (odd, far)
+
+ "paddd %%xmm4,%%xmm2 \n" // far+2 (lo)
+ "paddd %%xmm4,%%xmm3 \n" // far+2 (hi)
+ "paddd %%xmm0,%%xmm2 \n" // near+far+2 (lo)
+ "paddd %%xmm1,%%xmm3 \n" // near+far+2 (hi)
+ "paddd %%xmm0,%%xmm0 \n" // 2*near (lo)
+ "paddd %%xmm1,%%xmm1 \n" // 2*near (hi)
+ "paddd %%xmm2,%%xmm0 \n" // 3*near+far+2 (lo)
+ "paddd %%xmm3,%%xmm1 \n" // 3*near+far+2 (hi)
+
+ "psrld $2,%%xmm0 \n" // 3/4*near+1/4*far (lo)
+ "psrld $2,%%xmm1 \n" // 3/4*near+1/4*far (hi)
+ "packssdw %%xmm1,%%xmm0 \n"
+ "pshufd $0b11011000,%%xmm0,%%xmm0 \n"
+ "movdqu %%xmm0,(%1) \n"
+
+ "lea 0x8(%0),%0 \n"
+ "lea 0x10(%1),%1 \n" // 4 pixel to 8 pixel
+ "sub $0x8,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_ptr), // %0
+ "+r"(dst_ptr), // %1
+ "+r"(dst_width) // %2
+ :
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5");
+}
+#endif
+
+#ifdef HAS_SCALEROWUP2BILINEAR_16_SSE2
+void ScaleRowUp2_Bilinear_16_SSE2(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width) {
+ asm volatile(
+ "pxor %%xmm7,%%xmm7 \n"
+ "pcmpeqd %%xmm6,%%xmm6 \n"
+ "psrld $31,%%xmm6 \n"
+ "pslld $3,%%xmm6 \n" // all 8
+
+ LABELALIGN
+ "1: \n"
+ "movq (%0),%%xmm0 \n" // 0011 (16b, 1u1v)
+ "movq 4(%0),%%xmm1 \n" // 1122 (16b, 1u1v)
+ "punpcklwd %%xmm7,%%xmm0 \n" // 0011 (near) (32b, 1u1v)
+ "punpcklwd %%xmm7,%%xmm1 \n" // 1122 (near) (32b, 1u1v)
+ "movdqa %%xmm0,%%xmm2 \n"
+ "movdqa %%xmm1,%%xmm3 \n"
+ "pshufd $0b01001110,%%xmm2,%%xmm2 \n" // 1100 (far) (1, lo)
+ "pshufd $0b01001110,%%xmm3,%%xmm3 \n" // 2211 (far) (1, hi)
+ "paddd %%xmm0,%%xmm2 \n" // near+far (1, lo)
+ "paddd %%xmm1,%%xmm3 \n" // near+far (1, hi)
+ "paddd %%xmm0,%%xmm0 \n" // 2*near (1, lo)
+ "paddd %%xmm1,%%xmm1 \n" // 2*near (1, hi)
+ "paddd %%xmm2,%%xmm0 \n" // 3*near+far (1, lo)
+ "paddd %%xmm3,%%xmm1 \n" // 3*near+far (1, hi)
+
+ "movq (%0),%%xmm0 \n" // 0123 (16b)
+ "movq 2(%0),%%xmm1 \n" // 1234 (16b)
+ "punpcklwd %%xmm7,%%xmm0 \n" // 0123 (32b)
+ "punpcklwd %%xmm7,%%xmm1 \n" // 1234 (32b)
+ "movdqa %%xmm0,%%xmm2 \n"
+ "movdqa %%xmm1,%%xmm3 \n"
+ "pshufd $0b10110001,%%xmm2,%%xmm2 \n" // 1032 (even, far)
+ "pshufd $0b10110001,%%xmm3,%%xmm3 \n" // 2143 (odd, far)
+ "paddd %%xmm0,%%xmm2 \n" // near+far (lo)
+ "paddd %%xmm1,%%xmm3 \n" // near+far (hi)
+ "paddd %%xmm0,%%xmm0 \n" // 2*near (lo)
+ "paddd %%xmm1,%%xmm1 \n" // 2*near (hi)
+ "paddd %%xmm2,%%xmm0 \n" // 3*near+far (1, lo)
+ "paddd %%xmm3,%%xmm1 \n" // 3*near+far (1, hi)
+
+ "movq (%0,%3,2),%%xmm2 \n"
+ "movq 2(%0,%3,2),%%xmm3 \n"
+ "punpcklwd %%xmm7,%%xmm2 \n" // 0123 (32b)
+ "punpcklwd %%xmm7,%%xmm3 \n" // 1234 (32b)
+ "movdqa %%xmm2,%%xmm4 \n"
+ "movdqa %%xmm3,%%xmm5 \n"
+ "pshufd $0b10110001,%%xmm4,%%xmm4 \n" // 1032 (even, far)
+ "pshufd $0b10110001,%%xmm5,%%xmm5 \n" // 2143 (odd, far)
+ "paddd %%xmm2,%%xmm4 \n" // near+far (lo)
+ "paddd %%xmm3,%%xmm5 \n" // near+far (hi)
+ "paddd %%xmm2,%%xmm2 \n" // 2*near (lo)
+ "paddd %%xmm3,%%xmm3 \n" // 2*near (hi)
+ "paddd %%xmm4,%%xmm2 \n" // 3*near+far (2, lo)
+ "paddd %%xmm5,%%xmm3 \n" // 3*near+far (2, hi)
+
+ "movdqa %%xmm0,%%xmm4 \n"
+ "movdqa %%xmm2,%%xmm5 \n"
+ "paddd %%xmm0,%%xmm4 \n" // 6*near+2*far (1, lo)
+ "paddd %%xmm6,%%xmm5 \n" // 3*near+far+8 (2, lo)
+ "paddd %%xmm0,%%xmm4 \n" // 9*near+3*far (1, lo)
+ "paddd %%xmm5,%%xmm4 \n" // 9 3 3 1 + 8 (1, lo)
+ "psrld $4,%%xmm4 \n" // ^ div by 16 (1, lo)
+
+ "movdqa %%xmm2,%%xmm5 \n"
+ "paddd %%xmm2,%%xmm5 \n" // 6*near+2*far (2, lo)
+ "paddd %%xmm6,%%xmm0 \n" // 3*near+far+8 (1, lo)
+ "paddd %%xmm2,%%xmm5 \n" // 9*near+3*far (2, lo)
+ "paddd %%xmm0,%%xmm5 \n" // 9 3 3 1 + 8 (2, lo)
+ "psrld $4,%%xmm5 \n" // ^ div by 16 (2, lo)
+
+ "movdqa %%xmm1,%%xmm0 \n"
+ "movdqa %%xmm3,%%xmm2 \n"
+ "paddd %%xmm1,%%xmm0 \n" // 6*near+2*far (1, hi)
+ "paddd %%xmm6,%%xmm2 \n" // 3*near+far+8 (2, hi)
+ "paddd %%xmm1,%%xmm0 \n" // 9*near+3*far (1, hi)
+ "paddd %%xmm2,%%xmm0 \n" // 9 3 3 1 + 8 (1, hi)
+ "psrld $4,%%xmm0 \n" // ^ div by 16 (1, hi)
+
+ "movdqa %%xmm3,%%xmm2 \n"
+ "paddd %%xmm3,%%xmm2 \n" // 6*near+2*far (2, hi)
+ "paddd %%xmm6,%%xmm1 \n" // 3*near+far+8 (1, hi)
+ "paddd %%xmm3,%%xmm2 \n" // 9*near+3*far (2, hi)
+ "paddd %%xmm1,%%xmm2 \n" // 9 3 3 1 + 8 (2, hi)
+ "psrld $4,%%xmm2 \n" // ^ div by 16 (2, hi)
+
+ "packssdw %%xmm0,%%xmm4 \n"
+ "pshufd $0b11011000,%%xmm4,%%xmm4 \n"
+ "movdqu %%xmm4,(%1) \n" // store above
+ "packssdw %%xmm2,%%xmm5 \n"
+      "pshufd      $0b11011000,%%xmm5,%%xmm5 \n"
+ "movdqu %%xmm5,(%1,%4,2) \n" // store below
+
+ "lea 0x8(%0),%0 \n"
+ "lea 0x10(%1),%1 \n" // 4 pixel to 8 pixel
+ "sub $0x8,%2 \n"
+ "jg 1b \n"
+ : "+r"(src_ptr), // %0
+ "+r"(dst_ptr), // %1
+ "+r"(dst_width) // %2
+ : "r"((intptr_t)(src_stride)), // %3
+ "r"((intptr_t)(dst_stride)) // %4
+      : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6",
+        "xmm7");
+}
+#endif
+
#ifdef HAS_SCALEROWUP2LINEAR_SSSE3
void ScaleRowUp2_Linear_SSSE3(const uint8_t* src_ptr,
uint8_t* dst_ptr,
@@ -1352,8 +1511,8 @@ void ScaleRowUp2_Bilinear_AVX2(const uint8_t* src_ptr,
}
#endif
-#ifdef HAS_SCALEROWUP2LINEAR_16_AVX2
-void ScaleRowUp2_Linear_16_AVX2(const uint16_t* src_ptr,
+#ifdef HAS_SCALEROWUP2LINEAR_12_AVX2
+void ScaleRowUp2_Linear_12_AVX2(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width) {
asm volatile(
@@ -1402,8 +1561,8 @@ void ScaleRowUp2_Linear_16_AVX2(const uint16_t* src_ptr,
}
#endif
-#ifdef HAS_SCALEROWUP2BILINEAR_16_AVX2
-void ScaleRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr,
+#ifdef HAS_SCALEROWUP2BILINEAR_12_AVX2
+void ScaleRowUp2_Bilinear_12_AVX2(const uint16_t* src_ptr,
ptrdiff_t src_stride,
uint16_t* dst_ptr,
ptrdiff_t dst_stride,
@@ -1466,6 +1625,139 @@ void ScaleRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr,
}
#endif
+#ifdef HAS_SCALEROWUP2LINEAR_16_AVX2
+void ScaleRowUp2_Linear_16_AVX2(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width) {
+ asm volatile(
+ "vpcmpeqd %%ymm4,%%ymm4,%%ymm4 \n"
+ "vpsrld $31,%%ymm4,%%ymm4 \n"
+ "vpslld $1,%%ymm4,%%ymm4 \n" // all 2
+
+ LABELALIGN
+ "1: \n"
+ "vmovdqu (%0),%%xmm0 \n" // 01234567 (16b, 1u1v)
+ "vmovdqu 2(%0),%%xmm1 \n" // 12345678 (16b, 1u1v)
+
+ "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v)
+ "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v)
+
+ "vpshufd $0b10110001,%%ymm0,%%ymm2 \n" // 10325476 (lo, far)
+ "vpshufd $0b10110001,%%ymm1,%%ymm3 \n" // 21436587 (hi, far)
+
+ "vpaddd %%ymm4,%%ymm2,%%ymm2 \n" // far+2 (lo)
+ "vpaddd %%ymm4,%%ymm3,%%ymm3 \n" // far+2 (hi)
+ "vpaddd %%ymm0,%%ymm2,%%ymm2 \n" // near+far+2 (lo)
+ "vpaddd %%ymm1,%%ymm3,%%ymm3 \n" // near+far+2 (hi)
+ "vpaddd %%ymm0,%%ymm0,%%ymm0 \n" // 2*near (lo)
+ "vpaddd %%ymm1,%%ymm1,%%ymm1 \n" // 2*near (hi)
+ "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 3*near+far+2 (lo)
+ "vpaddd %%ymm1,%%ymm3,%%ymm1 \n" // 3*near+far+2 (hi)
+
+ "vpsrld $2,%%ymm0,%%ymm0 \n" // 3/4*near+1/4*far (lo)
+ "vpsrld $2,%%ymm1,%%ymm1 \n" // 3/4*near+1/4*far (hi)
+ "vpackusdw %%ymm1,%%ymm0,%%ymm0 \n"
+ "vpshufd $0b11011000,%%ymm0,%%ymm0 \n"
+ "vmovdqu %%ymm0,(%1) \n"
+
+ "lea 0x10(%0),%0 \n"
+ "lea 0x20(%1),%1 \n" // 8 pixel to 16 pixel
+ "sub $0x10,%2 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : "+r"(src_ptr), // %0
+ "+r"(dst_ptr), // %1
+ "+r"(dst_width) // %2
+ :
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4");
+}
+#endif
+
+#ifdef HAS_SCALEROWUP2BILINEAR_16_AVX2
+void ScaleRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width) {
+ asm volatile(
+ "vpcmpeqd %%ymm6,%%ymm6,%%ymm6 \n"
+ "vpsrld $31,%%ymm6,%%ymm6 \n"
+ "vpslld $3,%%ymm6,%%ymm6 \n" // all 8
+
+ LABELALIGN
+ "1: \n"
+
+ "vmovdqu (%0),%%xmm0 \n" // 01234567 (16b, 1u1v)
+ "vmovdqu 2(%0),%%xmm1 \n" // 12345678 (16b, 1u1v)
+ "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v)
+ "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v)
+ "vpshufd $0b10110001,%%ymm0,%%ymm2 \n" // 10325476 (lo, far)
+ "vpshufd $0b10110001,%%ymm1,%%ymm3 \n" // 21436587 (hi, far)
+ "vpaddd %%ymm0,%%ymm2,%%ymm2 \n" // near+far (lo)
+ "vpaddd %%ymm1,%%ymm3,%%ymm3 \n" // near+far (hi)
+ "vpaddd %%ymm0,%%ymm0,%%ymm0 \n" // 2*near (lo)
+ "vpaddd %%ymm1,%%ymm1,%%ymm1 \n" // 2*near (hi)
+ "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 3*near+far (1, lo)
+ "vpaddd %%ymm1,%%ymm3,%%ymm1 \n" // 3*near+far (1, hi)
+
+ "vmovdqu (%0,%3,2),%%xmm2 \n" // 01234567 (16b, 1u1v)
+ "vmovdqu 2(%0,%3,2),%%xmm3 \n" // 12345678 (16b, 1u1v)
+ "vpmovzxwd %%xmm2,%%ymm2 \n" // 01234567 (32b, 1u1v)
+ "vpmovzxwd %%xmm3,%%ymm3 \n" // 12345678 (32b, 1u1v)
+ "vpshufd $0b10110001,%%ymm2,%%ymm4 \n" // 10325476 (lo, far)
+ "vpshufd $0b10110001,%%ymm3,%%ymm5 \n" // 21436587 (hi, far)
+ "vpaddd %%ymm2,%%ymm4,%%ymm4 \n" // near+far (lo)
+ "vpaddd %%ymm3,%%ymm5,%%ymm5 \n" // near+far (hi)
+ "vpaddd %%ymm2,%%ymm2,%%ymm2 \n" // 2*near (lo)
+ "vpaddd %%ymm3,%%ymm3,%%ymm3 \n" // 2*near (hi)
+ "vpaddd %%ymm2,%%ymm4,%%ymm2 \n" // 3*near+far (2, lo)
+ "vpaddd %%ymm3,%%ymm5,%%ymm3 \n" // 3*near+far (2, hi)
+
+ "vpaddd %%ymm0,%%ymm0,%%ymm4 \n" // 6*near+2*far (1, lo)
+ "vpaddd %%ymm6,%%ymm2,%%ymm5 \n" // 3*near+far+8 (2, lo)
+ "vpaddd %%ymm4,%%ymm0,%%ymm4 \n" // 9*near+3*far (1, lo)
+ "vpaddd %%ymm4,%%ymm5,%%ymm4 \n" // 9 3 3 1 + 8 (1, lo)
+ "vpsrld $4,%%ymm4,%%ymm4 \n" // ^ div by 16 (1, lo)
+
+ "vpaddd %%ymm2,%%ymm2,%%ymm5 \n" // 6*near+2*far (2, lo)
+ "vpaddd %%ymm6,%%ymm0,%%ymm0 \n" // 3*near+far+8 (1, lo)
+ "vpaddd %%ymm5,%%ymm2,%%ymm5 \n" // 9*near+3*far (2, lo)
+ "vpaddd %%ymm5,%%ymm0,%%ymm5 \n" // 9 3 3 1 + 8 (2, lo)
+ "vpsrld $4,%%ymm5,%%ymm5 \n" // ^ div by 16 (2, lo)
+
+ "vpaddd %%ymm1,%%ymm1,%%ymm0 \n" // 6*near+2*far (1, hi)
+ "vpaddd %%ymm6,%%ymm3,%%ymm2 \n" // 3*near+far+8 (2, hi)
+ "vpaddd %%ymm0,%%ymm1,%%ymm0 \n" // 9*near+3*far (1, hi)
+ "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 9 3 3 1 + 8 (1, hi)
+ "vpsrld $4,%%ymm0,%%ymm0 \n" // ^ div by 16 (1, hi)
+
+ "vpaddd %%ymm3,%%ymm3,%%ymm2 \n" // 6*near+2*far (2, hi)
+ "vpaddd %%ymm6,%%ymm1,%%ymm1 \n" // 3*near+far+8 (1, hi)
+ "vpaddd %%ymm2,%%ymm3,%%ymm2 \n" // 9*near+3*far (2, hi)
+ "vpaddd %%ymm2,%%ymm1,%%ymm2 \n" // 9 3 3 1 + 8 (2, hi)
+ "vpsrld $4,%%ymm2,%%ymm2 \n" // ^ div by 16 (2, hi)
+
+ "vpackusdw %%ymm0,%%ymm4,%%ymm4 \n"
+ "vpshufd $0b11011000,%%ymm4,%%ymm4 \n"
+ "vmovdqu %%ymm4,(%1) \n" // store above
+ "vpackusdw %%ymm2,%%ymm5,%%ymm5 \n"
+ "vpshufd $0b11011000,%%ymm5,%%ymm5 \n"
+ "vmovdqu %%ymm5,(%1,%4,2) \n" // store below
+
+ "lea 0x10(%0),%0 \n"
+ "lea 0x20(%1),%1 \n" // 8 pixel to 16 pixel
+ "sub $0x10,%2 \n"
+ "jg 1b \n"
+ "vzeroupper \n"
+ : "+r"(src_ptr), // %0
+ "+r"(dst_ptr), // %1
+ "+r"(dst_width) // %2
+ : "r"((intptr_t)(src_stride)), // %3
+ "r"((intptr_t)(dst_stride)) // %4
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6");
+}
+#endif
+
// Reads 16xN bytes and produces 16 shorts at a time.
void ScaleAddRow_SSE2(const uint8_t* src_ptr,
uint16_t* dst_ptr,
@@ -2522,7 +2814,6 @@ void ScaleUVRowUp2_Linear_16_AVX2(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width) {
asm volatile(
- "vpxor %%xmm5,%%xmm5,%%xmm5 \n"
"vpcmpeqd %%ymm4,%%ymm4,%%ymm4 \n"
"vpsrld $31,%%ymm4,%%ymm4 \n"
"vpslld $1,%%ymm4,%%ymm4 \n" // all 2
@@ -2532,11 +2823,8 @@ void ScaleUVRowUp2_Linear_16_AVX2(const uint16_t* src_ptr,
"vmovdqu (%0),%%xmm0 \n" // 00112233 (16b, 1u1v)
"vmovdqu 4(%0),%%xmm1 \n" // 11223344 (16b, 1u1v)
- "vpermq $0b11011000,%%ymm0,%%ymm0 \n" // 0011000022330000
- "vpermq $0b11011000,%%ymm1,%%ymm1 \n" // 1122000033440000
-
- "vpunpcklwd %%ymm5,%%ymm0,%%ymm0 \n" // 00112233 (32b, 1u1v)
- "vpunpcklwd %%ymm5,%%ymm1,%%ymm1 \n" // 11223344 (32b, 1u1v)
+ "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v)
+ "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v)
"vpshufd $0b01001110,%%ymm0,%%ymm2 \n" // 11003322 (lo, far)
"vpshufd $0b01001110,%%ymm1,%%ymm3 \n" // 22114433 (hi, far)
@@ -2564,7 +2852,7 @@ void ScaleUVRowUp2_Linear_16_AVX2(const uint16_t* src_ptr,
"+r"(dst_ptr), // %1
"+r"(dst_width) // %2
:
- : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5");
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4");
}
#endif
@@ -2575,7 +2863,6 @@ void ScaleUVRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr,
ptrdiff_t dst_stride,
int dst_width) {
asm volatile(
- "vpxor %%xmm7,%%xmm7,%%xmm7 \n"
"vpcmpeqd %%ymm6,%%ymm6,%%ymm6 \n"
"vpsrld $31,%%ymm6,%%ymm6 \n"
"vpslld $3,%%ymm6,%%ymm6 \n" // all 8
@@ -2585,10 +2872,8 @@ void ScaleUVRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr,
"vmovdqu (%0),%%xmm0 \n" // 00112233 (16b, 1u1v)
"vmovdqu 4(%0),%%xmm1 \n" // 11223344 (16b, 1u1v)
- "vpermq $0b11011000,%%ymm0,%%ymm0 \n" // 0011000022330000
- "vpermq $0b11011000,%%ymm1,%%ymm1 \n" // 1122000033440000
- "vpunpcklwd %%ymm7,%%ymm0,%%ymm0 \n" // 00112233 (32b, 1u1v)
- "vpunpcklwd %%ymm7,%%ymm1,%%ymm1 \n" // 11223344 (32b, 1u1v)
+ "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v)
+ "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v)
"vpshufd $0b01001110,%%ymm0,%%ymm2 \n" // 11003322 (lo, far)
"vpshufd $0b01001110,%%ymm1,%%ymm3 \n" // 22114433 (hi, far)
"vpaddd %%ymm0,%%ymm2,%%ymm2 \n" // near+far (lo)
@@ -2600,10 +2885,8 @@ void ScaleUVRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr,
"vmovdqu (%0,%3,2),%%xmm2 \n" // 00112233 (16b, 1u1v)
"vmovdqu 4(%0,%3,2),%%xmm3 \n" // 11223344 (16b, 1u1v)
- "vpermq $0b11011000,%%ymm2,%%ymm2 \n" // 0011000022330000
- "vpermq $0b11011000,%%ymm3,%%ymm3 \n" // 1122000033440000
- "vpunpcklwd %%ymm7,%%ymm2,%%ymm2 \n" // 00112233 (32b, 1u1v)
- "vpunpcklwd %%ymm7,%%ymm3,%%ymm3 \n" // 11223344 (32b, 1u1v)
+      "vpmovzxwd   %%xmm2,%%ymm2                 \n"  // 00112233 (32b, 1u1v)
+      "vpmovzxwd   %%xmm3,%%ymm3                 \n"  // 11223344 (32b, 1u1v)
"vpshufd $0b01001110,%%ymm2,%%ymm4 \n" // 11003322 (lo, far)
"vpshufd $0b01001110,%%ymm3,%%ymm5 \n" // 22114433 (hi, far)
"vpaddd %%ymm2,%%ymm4,%%ymm4 \n" // near+far (lo)
@@ -2652,8 +2935,7 @@ void ScaleUVRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr,
"+r"(dst_width) // %2
: "r"((intptr_t)(src_stride)), // %3
"r"((intptr_t)(dst_stride)) // %4
- : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6",
- "xmm7");
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6");
}
#endif
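The two ScaleUVRowUp2_*_16_AVX2 hunks above swap the vpermq + vpunpcklwd-against-zero sequence for a single vpmovzxwd, which is why the dedicated zero register (ymm5 / ymm7) and its vpxor setup disappear from the clobber lists. An intrinsics sketch of the equivalence (illustrative only, not code from the patch):

#include <immintrin.h>
#include <stdint.h>

// Zero-extends eight uint16 lanes to uint32 in one instruction; the unpack
// form it replaces needed an extra register pre-zeroed with vpxor.
static __m256i ZeroExtend8x16(const uint16_t* p) {
  __m128i v = _mm_loadu_si128(reinterpret_cast<const __m128i*>(p));  // 8 x u16
  return _mm256_cvtepu16_epi32(v);  // vpmovzxwd xmm -> ymm, 8 x u32
}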
diff --git a/third_party/libyuv/source/scale_neon.cc b/third_party/libyuv/source/scale_neon.cc
index 41dba3e8ea..6a0d6e1b49 100644
--- a/third_party/libyuv/source/scale_neon.cc
+++ b/third_party/libyuv/source/scale_neon.cc
@@ -603,7 +603,7 @@ void ScaleRowUp2_Bilinear_NEON(const uint8_t* src_ptr,
);
}
-void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr,
+void ScaleRowUp2_Linear_12_NEON(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width) {
const uint16_t* src_temp = src_ptr + 1;
@@ -633,7 +633,7 @@ void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr,
);
}
-void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
+void ScaleRowUp2_Bilinear_12_NEON(const uint16_t* src_ptr,
ptrdiff_t src_stride,
uint16_t* dst_ptr,
ptrdiff_t dst_stride,
@@ -647,7 +647,6 @@ void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
"vmov.u16 q15, #3 \n"
"1: \n"
- "add %5, %0, #2 \n"
"vld1.16 {q0}, [%0]! \n" // 01234567 (16b)
"vld1.16 {q1}, [%5]! \n" // 12345678 (16b)
@@ -655,7 +654,6 @@ void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
"vmla.u16 q0, q1, q15 \n" // 3*near+far (odd)
"vmla.u16 q1, q2, q15 \n" // 3*near+far (even)
- "add %5, %1, #2 \n"
"vld1.16 {q2}, [%1]! \n" // 01234567 (16b)
"vld1.16 {q3}, [%6]! \n" // 12345678 (16b)
@@ -692,6 +690,102 @@ void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
);
}
+void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width) {
+ const uint16_t* src_temp = src_ptr + 1;
+ asm volatile(
+ "vmov.u16 d31, #3 \n"
+
+ "1: \n"
+ "vld1.16 {q0}, [%0]! \n" // 01234567 (16b)
+ "vld1.16 {q1}, [%3]! \n" // 12345678 (16b)
+
+ "vmovl.u16 q2, d0 \n" // 0123 (32b)
+ "vmovl.u16 q3, d1 \n" // 4567 (32b)
+ "vmovl.u16 q4, d2 \n" // 1234 (32b)
+ "vmovl.u16 q5, d3 \n" // 5678 (32b)
+
+ "vmlal.u16 q2, d2, d31 \n"
+ "vmlal.u16 q3, d3, d31 \n"
+ "vmlal.u16 q4, d0, d31 \n"
+ "vmlal.u16 q5, d1, d31 \n"
+
+ "vrshrn.u32 d0, q4, #2 \n"
+ "vrshrn.u32 d1, q5, #2 \n"
+ "vrshrn.u32 d2, q2, #2 \n"
+ "vrshrn.u32 d3, q3, #2 \n"
+
+ "vst2.16 {q0, q1}, [%1]! \n" // store
+ "subs %2, %2, #16 \n" // 8 sample -> 16 sample
+ "bgt 1b \n"
+ : "+r"(src_ptr), // %0
+ "+r"(dst_ptr), // %1
+ "+r"(dst_width), // %2
+ "+r"(src_temp) // %3
+ :
+      : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", "q15"  // Clobber List
+ );
+}
+
+void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width) {
+ const uint16_t* src_ptr1 = src_ptr + src_stride;
+ uint16_t* dst_ptr1 = dst_ptr + dst_stride;
+ const uint16_t* src_temp = src_ptr + 1;
+ const uint16_t* src_temp1 = src_ptr1 + 1;
+
+ asm volatile(
+ "vmov.u16 d31, #3 \n"
+ "vmov.u32 q14, #3 \n"
+
+ "1: \n"
+ "vld1.16 {d0}, [%0]! \n" // 0123 (16b)
+ "vld1.16 {d1}, [%5]! \n" // 1234 (16b)
+ "vmovl.u16 q2, d0 \n" // 0123 (32b)
+ "vmovl.u16 q3, d1 \n" // 1234 (32b)
+ "vmlal.u16 q2, d1, d31 \n"
+ "vmlal.u16 q3, d0, d31 \n"
+
+ "vld1.16 {d0}, [%1]! \n" // 0123 (16b)
+ "vld1.16 {d1}, [%6]! \n" // 1234 (16b)
+ "vmovl.u16 q4, d0 \n" // 0123 (32b)
+ "vmovl.u16 q5, d1 \n" // 1234 (32b)
+ "vmlal.u16 q4, d1, d31 \n"
+ "vmlal.u16 q5, d0, d31 \n"
+
+ "vmovq q0, q4 \n"
+ "vmovq q1, q5 \n"
+ "vmla.u32 q4, q2, q14 \n"
+ "vmla.u32 q5, q3, q14 \n"
+ "vmla.u32 q2, q0, q14 \n"
+ "vmla.u32 q3, q1, q14 \n"
+
+ "vrshrn.u32 d1, q4, #4 \n"
+ "vrshrn.u32 d0, q5, #4 \n"
+ "vrshrn.u32 d3, q2, #4 \n"
+ "vrshrn.u32 d2, q3, #4 \n"
+
+ "vst2.16 {d0, d1}, [%2]! \n" // store
+ "vst2.16 {d2, d3}, [%3]! \n" // store
+ "subs %4, %4, #8 \n" // 4 sample -> 8 sample
+ "bgt 1b \n"
+ : "+r"(src_ptr), // %0
+ "+r"(src_ptr1), // %1
+ "+r"(dst_ptr), // %2
+ "+r"(dst_ptr1), // %3
+ "+r"(dst_width), // %4
+ "+r"(src_temp), // %5
+ "+r"(src_temp1) // %6
+ :
+ : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", "q14",
+ "d31" // Clobber List
+ );
+}
+
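The renames above keep the old 16-bit-accumulator kernels as ScaleRowUp2_Linear_12_NEON / ScaleRowUp2_Bilinear_12_NEON and add true 16-bit variants that widen with vmovl/vmlal and narrow back with vrshrn. The split is plain overflow arithmetic: for full 16-bit samples the linear tap 3*near + far + 2 can reach roughly 2^18 and needs 32-bit intermediates, while 12-bit samples (max 4095) keep it below 2^16. A scalar sketch of the widened linear tap (illustrative, not patch code):

#include <stdint.h>

// Linear (3:1) tap computed by ScaleRowUp2_Linear_16_NEON above.
// 3*65535 + 65535 + 2 = 262142 needs more than 16 bits, hence the uint32_t
// accumulator; for 12-bit input the sum tops out at 16382, which is why the
// old 16-bit-math kernels survive as the *_12_NEON variants.
static inline uint16_t LinearTap16(uint16_t near_px, uint16_t far_px) {
  uint32_t sum = 3u * near_px + far_px + 2u;
  return (uint16_t)(sum >> 2);
}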
void ScaleUVRowUp2_Linear_NEON(const uint8_t* src_ptr,
uint8_t* dst_ptr,
int dst_width) {
diff --git a/third_party/libyuv/source/scale_neon64.cc b/third_party/libyuv/source/scale_neon64.cc
index 22fedcb5a4..8656fec7fa 100644
--- a/third_party/libyuv/source/scale_neon64.cc
+++ b/third_party/libyuv/source/scale_neon64.cc
@@ -630,7 +630,7 @@ void ScaleRowUp2_Bilinear_NEON(const uint8_t* src_ptr,
);
}
-void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr,
+void ScaleRowUp2_Linear_12_NEON(const uint16_t* src_ptr,
uint16_t* dst_ptr,
int dst_width) {
const uint16_t* src_temp = src_ptr + 1;
@@ -661,7 +661,7 @@ void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr,
);
}
-void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
+void ScaleRowUp2_Bilinear_12_NEON(const uint16_t* src_ptr,
ptrdiff_t src_stride,
uint16_t* dst_ptr,
ptrdiff_t dst_stride,
@@ -721,6 +721,106 @@ void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
);
}
+void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr,
+ uint16_t* dst_ptr,
+ int dst_width) {
+ const uint16_t* src_temp = src_ptr + 1;
+ asm volatile(
+ "movi v31.8h, #3 \n"
+
+ "1: \n"
+ "ld1 {v0.8h}, [%0], #16 \n" // 01234567 (16b)
+ "ld1 {v1.8h}, [%1], #16 \n" // 12345678 (16b)
+ "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead
+
+ "ushll v2.4s, v0.4h, #0 \n" // 0123 (32b)
+ "ushll2 v3.4s, v0.8h, #0 \n" // 4567 (32b)
+ "ushll v4.4s, v1.4h, #0 \n" // 1234 (32b)
+ "ushll2 v5.4s, v1.8h, #0 \n" // 5678 (32b)
+
+ "umlal v2.4s, v1.4h, v31.4h \n" // 3*near+far (1, odd)
+ "umlal2 v3.4s, v1.8h, v31.8h \n" // 3*near+far (2, odd)
+ "umlal v4.4s, v0.4h, v31.4h \n" // 3*near+far (1, even)
+ "umlal2 v5.4s, v0.8h, v31.8h \n" // 3*near+far (2, even)
+
+ "rshrn v0.4h, v4.4s, #2 \n" // 3/4*near+1/4*far
+ "rshrn2 v0.8h, v5.4s, #2 \n" // 3/4*near+1/4*far (even)
+ "rshrn v1.4h, v2.4s, #2 \n" // 3/4*near+1/4*far
+ "rshrn2 v1.8h, v3.4s, #2 \n" // 3/4*near+1/4*far (odd)
+
+ "st2 {v0.8h, v1.8h}, [%2], #32 \n" // store
+ "subs %w3, %w3, #16 \n" // 8 sample -> 16 sample
+ "b.gt 1b \n"
+ : "+r"(src_ptr), // %0
+ "+r"(src_temp), // %1
+ "+r"(dst_ptr), // %2
+ "+r"(dst_width) // %3
+ :
+      : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v31"  // Clobber List
+ );
+}
+
+void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
+ ptrdiff_t src_stride,
+ uint16_t* dst_ptr,
+ ptrdiff_t dst_stride,
+ int dst_width) {
+ const uint16_t* src_ptr1 = src_ptr + src_stride;
+ uint16_t* dst_ptr1 = dst_ptr + dst_stride;
+ const uint16_t* src_temp = src_ptr + 1;
+ const uint16_t* src_temp1 = src_ptr1 + 1;
+
+ asm volatile(
+ "movi v31.4h, #3 \n"
+ "movi v30.4s, #3 \n"
+
+ "1: \n"
+ "ldr d0, [%0], #8 \n" // 0123 (16b)
+ "ldr d1, [%2], #8 \n" // 1234 (16b)
+ "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead
+ "ushll v2.4s, v0.4h, #0 \n" // 0123 (32b)
+ "ushll v3.4s, v1.4h, #0 \n" // 1234 (32b)
+ "umlal v2.4s, v1.4h, v31.4h \n" // 3*near+far (1, odd)
+ "umlal v3.4s, v0.4h, v31.4h \n" // 3*near+far (1, even)
+
+ "ldr d0, [%1], #8 \n" // 0123 (16b)
+ "ldr d1, [%3], #8 \n" // 1234 (16b)
+ "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead
+ "ushll v4.4s, v0.4h, #0 \n" // 0123 (32b)
+ "ushll v5.4s, v1.4h, #0 \n" // 1234 (32b)
+ "umlal v4.4s, v1.4h, v31.4h \n" // 3*near+far (2, odd)
+ "umlal v5.4s, v0.4h, v31.4h \n" // 3*near+far (2, even)
+
+ "mov v0.16b, v4.16b \n"
+ "mov v1.16b, v5.16b \n"
+ "mla v4.4s, v2.4s, v30.4s \n" // 9 3 3 1 (1, odd)
+ "mla v5.4s, v3.4s, v30.4s \n" // 9 3 3 1 (1, even)
+ "mla v2.4s, v0.4s, v30.4s \n" // 9 3 3 1 (2, odd)
+ "mla v3.4s, v1.4s, v30.4s \n" // 9 3 3 1 (2, even)
+
+ "rshrn v1.4h, v4.4s, #4 \n" // 3/4*near+1/4*far
+ "rshrn v0.4h, v5.4s, #4 \n" // 3/4*near+1/4*far
+ "rshrn v5.4h, v2.4s, #4 \n" // 3/4*near+1/4*far
+ "rshrn v4.4h, v3.4s, #4 \n" // 3/4*near+1/4*far
+
+ "st2 {v0.4h, v1.4h}, [%4], #16 \n" // store 1
+ "st2 {v4.4h, v5.4h}, [%5], #16 \n" // store 2
+
+ "subs %w6, %w6, #8 \n" // 4 sample -> 8 sample
+ "b.gt 1b \n"
+ : "+r"(src_ptr), // %0
+ "+r"(src_ptr1), // %1
+ "+r"(src_temp), // %2
+ "+r"(src_temp1), // %3
+ "+r"(dst_ptr), // %4
+ "+r"(dst_ptr1), // %5
+ "+r"(dst_width) // %6
+ :
+ : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v30",
+ "v31" // Clobber List
+ );
+}
+
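Each of these kernels writes two destination rows per call, so the caller steps through the plane one source row at a time while advancing the destination by two rows, the same pattern the ScaleUVBilinearUp2 loop in the scale_uv.cc hunk further down uses. A sketch of such a driver for the new 16-bit kernel (edge rows and odd heights, which the real callers handle separately, are left out; not part of the patch):

#include <stddef.h>
#include <stdint.h>

// Drives a two-row kernel such as ScaleRowUp2_Bilinear_16_NEON (defined
// above) across a plane.  Mirrors the interior loop of ScaleUVBilinearUp2;
// the first and last destination rows are not produced here.
static void UpsamplePlane2x_16(const uint16_t* src, ptrdiff_t src_stride,
                               uint16_t* dst, ptrdiff_t dst_stride,
                               int src_height, int dst_width) {
  for (int y = 0; y < src_height - 1; ++y) {
    ScaleRowUp2_Bilinear_16_NEON(src, src_stride, dst, dst_stride, dst_width);
    src += src_stride;      // one source row per iteration
    dst += 2 * dst_stride;  // two destination rows per iteration
  }
}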
void ScaleUVRowUp2_Linear_NEON(const uint8_t* src_ptr,
uint8_t* dst_ptr,
int dst_width) {
@@ -888,8 +988,8 @@ void ScaleUVRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr,
"umlal v4.4s, v1.4h, v31.4h \n" // 3*near+far (2, odd)
"umlal v5.4s, v0.4h, v31.4h \n" // 3*near+far (2, even)
- "mov v0.4s, v4.4s \n"
- "mov v1.4s, v5.4s \n"
+ "mov v0.16b, v4.16b \n"
+ "mov v1.16b, v5.16b \n"
"mla v4.4s, v2.4s, v30.4s \n" // 9 3 3 1 (1, odd)
"mla v5.4s, v3.4s, v30.4s \n" // 9 3 3 1 (1, even)
"mla v2.4s, v0.4s, v30.4s \n" // 9 3 3 1 (2, odd)
diff --git a/third_party/libyuv/source/scale_uv.cc b/third_party/libyuv/source/scale_uv.cc
index 7b977912f9..d9a314453e 100644
--- a/third_party/libyuv/source/scale_uv.cc
+++ b/third_party/libyuv/source/scale_uv.cc
@@ -746,7 +746,8 @@ void ScaleUVBilinearUp2(int src_width,
for (x = 0; x < src_height - 1; ++x) {
Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width);
src_ptr += src_stride;
- // TODO: Test performance of writing one row of destination at a time.
+ // TODO(fbarchard): Test performance of writing one row of destination at a
+ // time.
dst_ptr += 2 * dst_stride;
}
if (!(dst_height & 1)) {
@@ -851,7 +852,8 @@ void ScaleUVBilinearUp2_16(int src_width,
for (x = 0; x < src_height - 1; ++x) {
Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width);
src_ptr += src_stride;
- // TODO: Test performance of writing one row of destination at a time.
+ // TODO(fbarchard): Test performance of writing one row of destination at a
+ // time.
dst_ptr += 2 * dst_stride;
}
if (!(dst_height & 1)) {
diff --git a/third_party/libyuv/source/scale_win.cc b/third_party/libyuv/source/scale_win.cc
index c5fc86f3e9..ea1f95c6c3 100644
--- a/third_party/libyuv/source/scale_win.cc
+++ b/third_party/libyuv/source/scale_win.cc
@@ -16,8 +16,9 @@ namespace libyuv {
extern "C" {
#endif
-// This module is for 32 bit Visual C x86 and clangcl
-#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER)
+// This module is for 32 bit Visual C x86
+#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \
+ !defined(__clang__) && defined(_M_IX86)
// Offsets for source bytes 0 to 9
static const uvec8 kShuf0 = {0, 1, 3, 4, 5, 7, 8, 9,
diff --git a/third_party/libyuv/unit_test/color_test.cc b/third_party/libyuv/unit_test/color_test.cc
index a81ab19a86..e2d037ff79 100644
--- a/third_party/libyuv/unit_test/color_test.cc
+++ b/third_party/libyuv/unit_test/color_test.cc
@@ -22,8 +22,7 @@ namespace libyuv {
// TODO(fbarchard): clang x86 has a higher accuracy YUV to RGB.
// Port to Visual C and other CPUs
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER)))
+#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))
#define ERROR_FULL 5
#define ERROR_J420 4
#else
@@ -32,7 +31,11 @@ namespace libyuv {
#endif
#define ERROR_R 1
#define ERROR_G 1
-#define ERROR_B 3
+#ifdef LIBYUV_UNLIMITED_DATA
+#define ERROR_B 1
+#else
+#define ERROR_B 18
+#endif
#define TESTCS(TESTNAME, YUVTOARGB, ARGBTOYUV, HS1, HS, HN, DIFF) \
TEST_F(LibYUVColorTest, TESTNAME) { \
@@ -498,7 +501,11 @@ TEST_F(LibYUVColorTest, TestYUV) {
YUVToRGB(240, 0, 0, &r1, &g1, &b1);
EXPECT_EQ(57, r1);
EXPECT_EQ(255, g1);
+#ifdef LIBYUV_UNLIMITED_DATA
+ EXPECT_EQ(3, b1);
+#else
EXPECT_EQ(5, b1);
+#endif
for (int i = 0; i < 256; ++i) {
YUVToRGBReference(i, 128, 128, &r0, &g0, &b0);
@@ -655,9 +662,9 @@ TEST_F(LibYUVColorTest, TestFullYUVJ) {
int y = RANDOM256(y2);
YUVJToRGBReference(y, u, v, &r0, &g0, &b0);
YUVJToRGB(y, u, v, &r1, &g1, &b1);
- EXPECT_NEAR(r0, r1, 1);
- EXPECT_NEAR(g0, g1, 1);
- EXPECT_NEAR(b0, b1, 1);
+ EXPECT_NEAR(r0, r1, ERROR_R);
+ EXPECT_NEAR(g0, g1, ERROR_G);
+ EXPECT_NEAR(b0, b1, ERROR_B);
++rh[r1 - r0 + 128];
++gh[g1 - g0 + 128];
++bh[b1 - b0 + 128];
@@ -687,8 +694,7 @@ TEST_F(LibYUVColorTest, TestFullYUVH) {
YUVHToRGB(y, u, v, &r1, &g1, &b1);
EXPECT_NEAR(r0, r1, ERROR_R);
EXPECT_NEAR(g0, g1, ERROR_G);
- // TODO(crbug.com/libyuv/862): Reduce the errors in the B channel.
- EXPECT_NEAR(b0, b1, 15);
+ EXPECT_NEAR(b0, b1, ERROR_B);
++rh[r1 - r0 + 128];
++gh[g1 - g0 + 128];
++bh[b1 - b0 + 128];
@@ -716,9 +722,9 @@ TEST_F(LibYUVColorTest, TestFullYUVF) {
int y = RANDOM256(y2);
YUVFToRGBReference(y, u, v, &r0, &g0, &b0);
YUVFToRGB(y, u, v, &r1, &g1, &b1);
- EXPECT_NEAR(r0, r1, 5);
- EXPECT_NEAR(g0, g1, 5);
- EXPECT_NEAR(b0, b1, 5);
+ EXPECT_NEAR(r0, r1, ERROR_R);
+ EXPECT_NEAR(g0, g1, ERROR_G);
+ EXPECT_NEAR(b0, b1, ERROR_B);
++rh[r1 - r0 + 128];
++gh[g1 - g0 + 128];
++bh[b1 - b0 + 128];
@@ -748,8 +754,7 @@ TEST_F(LibYUVColorTest, TestFullYUVU) {
YUVUToRGB(y, u, v, &r1, &g1, &b1);
EXPECT_NEAR(r0, r1, ERROR_R);
EXPECT_NEAR(g0, g1, ERROR_G);
- // TODO(crbug.com/libyuv/863): Reduce the errors in the B channel.
- EXPECT_NEAR(b0, b1, 18);
+ EXPECT_NEAR(b0, b1, ERROR_B);
++rh[r1 - r0 + 128];
++gh[g1 - g0 + 128];
++bh[b1 - b0 + 128];
diff --git a/third_party/libyuv/unit_test/compare_test.cc b/third_party/libyuv/unit_test/compare_test.cc
index bd99cdd3ac..c29562cb86 100644
--- a/third_party/libyuv/unit_test/compare_test.cc
+++ b/third_party/libyuv/unit_test/compare_test.cc
@@ -344,7 +344,7 @@ static const int kMaxOptCount = (1 << (32 - 3)) - 64; // 536870848
TEST_F(LibYUVCompareTest, TestHammingDistance_Opt) {
uint32_t h1 = 0;
- const int kMaxWidth = (benchmark_width_ * benchmark_height_ + 31) & ~31;
+ const int kMaxWidth = (benchmark_width_ * benchmark_height_ + 63) & ~63;
align_buffer_page_end(src_a, kMaxWidth);
align_buffer_page_end(src_b, kMaxWidth);
memset(src_a, 255u, kMaxWidth);
diff --git a/third_party/libyuv/unit_test/convert_test.cc b/third_party/libyuv/unit_test/convert_test.cc
index 8638a84c13..3855838381 100644
--- a/third_party/libyuv/unit_test/convert_test.cc
+++ b/third_party/libyuv/unit_test/convert_test.cc
@@ -55,14 +55,14 @@ namespace libyuv {
static_assert(SRC_BPC == 1 || SRC_BPC == 2, "SRC BPC unsupported"); \
static_assert(DST_BPC == 1 || DST_BPC == 2, "DST BPC unsupported"); \
static_assert(SRC_SUBSAMP_X == 1 || SRC_SUBSAMP_X == 2, \
- "SRC_SUBSAMP_X unsupported"); \
+ "SRC_SUBSAMP_X unsupported"); \
static_assert(SRC_SUBSAMP_Y == 1 || SRC_SUBSAMP_Y == 2, \
- "SRC_SUBSAMP_Y unsupported"); \
+ "SRC_SUBSAMP_Y unsupported"); \
static_assert(DST_SUBSAMP_X == 1 || DST_SUBSAMP_X == 2, \
- "DST_SUBSAMP_X unsupported"); \
+ "DST_SUBSAMP_X unsupported"); \
static_assert(DST_SUBSAMP_Y == 1 || DST_SUBSAMP_Y == 2, \
- "DST_SUBSAMP_Y unsupported"); \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ "DST_SUBSAMP_Y unsupported"); \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kSrcHalfWidth = SUBSAMPLE(kWidth, SRC_SUBSAMP_X); \
const int kSrcHalfHeight = SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); \
@@ -137,7 +137,7 @@ namespace libyuv {
DST_SUBSAMP_X, DST_SUBSAMP_Y, SRC_DEPTH) \
TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y, \
- benchmark_width_ - 4, _Any, +, 0, SRC_DEPTH) \
+ benchmark_width_ + 1, _Any, +, 0, SRC_DEPTH) \
TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y, \
benchmark_width_, _Unaligned, +, 1, SRC_DEPTH) \
@@ -183,8 +183,8 @@ TESTPLANARTOP(I412, uint16_t, 2, 1, 1, I444, uint8_t, 1, 1, 1, 12)
#define TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, \
SRC_SUBSAMP_Y, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
W1280, N, NEG, OFF, PN, OFF_U, OFF_V) \
- TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##_##PN##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##To##PN##N) { \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kSizeUV = \
SUBSAMPLE(kWidth, SRC_SUBSAMP_X) * SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); \
@@ -270,7 +270,7 @@ TESTPLANARTOP(I412, uint16_t, 2, 1, 1, I444, uint8_t, 1, 1, 1, 12)
SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, SUBSAMP_X, \
SUBSAMP_Y) \
TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
- FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4, \
+ FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_ + 1, \
_Any, +, 0, PN, OFF_U, OFF_V) \
TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_, \
@@ -318,7 +318,7 @@ int I400ToNV21(const uint8_t* src_y,
"DST_SUBSAMP_X unsupported"); \
static_assert(DST_SUBSAMP_Y == 1 || DST_SUBSAMP_Y == 2, \
"DST_SUBSAMP_Y unsupported"); \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kSrcHalfWidth = SUBSAMPLE(kWidth, SRC_SUBSAMP_X); \
const int kSrcHalfHeight = SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); \
@@ -386,7 +386,7 @@ int I400ToNV21(const uint8_t* src_y,
DST_SUBSAMP_X, DST_SUBSAMP_Y, SRC_DEPTH) \
TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, \
SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, \
- DST_SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0, SRC_DEPTH) \
+ DST_SUBSAMP_Y, benchmark_width_ + 1, _Any, +, 0, SRC_DEPTH) \
TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, \
SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, \
DST_SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1, \
@@ -424,7 +424,7 @@ TESTPLANARTOBP(I212, uint16_t, 2, 2, 1, P212, uint16_t, 2, 2, 1, 12)
"DST_SUBSAMP_X unsupported"); \
static_assert(DST_SUBSAMP_Y == 1 || DST_SUBSAMP_Y == 2, \
"DST_SUBSAMP_Y unsupported"); \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kSrcHalfWidth = SUBSAMPLE(kWidth, SRC_SUBSAMP_X); \
const int kSrcHalfHeight = SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); \
@@ -493,7 +493,7 @@ TESTPLANARTOBP(I212, uint16_t, 2, 2, 1, P212, uint16_t, 2, 2, 1, 12)
DST_SUBSAMP_X, DST_SUBSAMP_Y, SRC_DEPTH) \
TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, \
SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, \
- DST_SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0, 1, \
+ DST_SUBSAMP_Y, benchmark_width_ + 1, _Any, +, 0, 1, \
SRC_DEPTH) \
TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, \
SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, \
@@ -537,7 +537,7 @@ TESTBIPLANARTOBP(P216, uint16_t, 2, 2, 1, P416, uint16_t, 2, 1, 1, 12)
"DST_SUBSAMP_X unsupported"); \
static_assert(DST_SUBSAMP_Y == 1 || DST_SUBSAMP_Y == 2, \
"DST_SUBSAMP_Y unsupported"); \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kSrcHalfWidth = SUBSAMPLE(kWidth, SRC_SUBSAMP_X); \
const int kSrcHalfHeight = SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); \
@@ -606,7 +606,7 @@ TESTBIPLANARTOBP(P216, uint16_t, 2, 2, 1, P416, uint16_t, 2, 1, 1, 12)
DST_SUBSAMP_X, DST_SUBSAMP_Y, SRC_DEPTH) \
TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, \
SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, \
- DST_SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0, SRC_DEPTH) \
+ DST_SUBSAMP_Y, benchmark_width_ + 1, _Any, +, 0, SRC_DEPTH) \
TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, \
SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, \
DST_SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1, \
@@ -654,7 +654,7 @@ TESTBIPLANARTOP(NV21, uint8_t, 1, 2, 2, I420, uint8_t, 1, 2, 2, 8)
#define TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN, W1280, N, NEG, OFF) \
TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
@@ -702,7 +702,7 @@ TESTBIPLANARTOP(NV21, uint8_t, 1, 2, 2, I420, uint8_t, 1, 2, 2, 8)
#define TESTPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN) \
TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN, benchmark_width_ - 4, _Any, +, 0) \
+ YALIGN, benchmark_width_ + 1, _Any, +, 0) \
TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN, benchmark_width_, _Unaligned, +, 1) \
TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
@@ -769,12 +769,14 @@ TESTPLANARTOB(J420, 2, 2, J400, 1, 1, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTPLANARTOB(I420, 2, 2, AR30, 4, 4, 1)
TESTPLANARTOB(H420, 2, 2, AR30, 4, 4, 1)
+TESTPLANARTOB(I420, 2, 2, AB30, 4, 4, 1)
+TESTPLANARTOB(H420, 2, 2, AB30, 4, 4, 1)
#endif
#define TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN, W1280, N, NEG, OFF, ATTEN) \
TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
@@ -821,7 +823,7 @@ TESTPLANARTOB(H420, 2, 2, AR30, 4, 4, 1)
#define TESTQPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN) \
TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN, benchmark_width_ - 4, _Any, +, 0, 0) \
+ YALIGN, benchmark_width_ + 1, _Any, +, 0, 0) \
TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN, benchmark_width_, _Unaligned, +, 1, 0) \
TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
@@ -928,6 +930,8 @@ TESTQPLANARTOB(J420Alpha, 2, 2, ARGB, 4, 4, 1)
TESTQPLANARTOB(J420Alpha, 2, 2, ABGR, 4, 4, 1)
TESTQPLANARTOB(H420Alpha, 2, 2, ARGB, 4, 4, 1)
TESTQPLANARTOB(H420Alpha, 2, 2, ABGR, 4, 4, 1)
+TESTQPLANARTOB(F420Alpha, 2, 2, ARGB, 4, 4, 1)
+TESTQPLANARTOB(F420Alpha, 2, 2, ABGR, 4, 4, 1)
TESTQPLANARTOB(U420Alpha, 2, 2, ARGB, 4, 4, 1)
TESTQPLANARTOB(U420Alpha, 2, 2, ABGR, 4, 4, 1)
TESTQPLANARTOB(V420Alpha, 2, 2, ARGB, 4, 4, 1)
@@ -938,6 +942,8 @@ TESTQPLANARTOB(J422Alpha, 2, 1, ARGB, 4, 4, 1)
TESTQPLANARTOB(J422Alpha, 2, 1, ABGR, 4, 4, 1)
TESTQPLANARTOB(H422Alpha, 2, 1, ARGB, 4, 4, 1)
TESTQPLANARTOB(H422Alpha, 2, 1, ABGR, 4, 4, 1)
+TESTQPLANARTOB(F422Alpha, 2, 1, ARGB, 4, 4, 1)
+TESTQPLANARTOB(F422Alpha, 2, 1, ABGR, 4, 4, 1)
TESTQPLANARTOB(U422Alpha, 2, 1, ARGB, 4, 4, 1)
TESTQPLANARTOB(U422Alpha, 2, 1, ABGR, 4, 4, 1)
TESTQPLANARTOB(V422Alpha, 2, 1, ARGB, 4, 4, 1)
@@ -948,6 +954,8 @@ TESTQPLANARTOB(J444Alpha, 1, 1, ARGB, 4, 4, 1)
TESTQPLANARTOB(J444Alpha, 1, 1, ABGR, 4, 4, 1)
TESTQPLANARTOB(H444Alpha, 1, 1, ARGB, 4, 4, 1)
TESTQPLANARTOB(H444Alpha, 1, 1, ABGR, 4, 4, 1)
+TESTQPLANARTOB(F444Alpha, 1, 1, ARGB, 4, 4, 1)
+TESTQPLANARTOB(F444Alpha, 1, 1, ABGR, 4, 4, 1)
TESTQPLANARTOB(U444Alpha, 1, 1, ARGB, 4, 4, 1)
TESTQPLANARTOB(U444Alpha, 1, 1, ABGR, 4, 4, 1)
TESTQPLANARTOB(V444Alpha, 1, 1, ARGB, 4, 4, 1)
@@ -956,7 +964,7 @@ TESTQPLANARTOB(V444Alpha, 1, 1, ABGR, 4, 4, 1)
#define TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, \
BPP_B, W1280, N, NEG, OFF) \
TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kStrideB = kWidth * BPP_B; \
const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
@@ -1009,7 +1017,7 @@ TESTQPLANARTOB(V444Alpha, 1, 1, ABGR, 4, 4, 1)
#define TESTBIPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B) \
TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
- benchmark_width_ - 4, _Any, +, 0) \
+ benchmark_width_ + 1, _Any, +, 0) \
TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
benchmark_width_, _Unaligned, +, 1) \
TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
@@ -1064,7 +1072,7 @@ TESTBIPLANARTOB(NV12, 2, 2, RGB565, RGB565, 2)
#define TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
W1280, N, NEG, OFF) \
TEST_F(LibYUVConvertTest, FMT_A##To##FMT_PLANAR##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
const int kStride = (kStrideUV * SUBSAMP_X * 8 * BPP_A + 7) / 8; \
@@ -1111,7 +1119,7 @@ TESTBIPLANARTOB(NV12, 2, 2, RGB565, RGB565, 2)
#define TESTATOPLANAR(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y) \
TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
- benchmark_width_ - 4, _Any, +, 0) \
+ benchmark_width_ + 1, _Any, +, 0) \
TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
benchmark_width_, _Unaligned, +, 1) \
TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
@@ -1134,6 +1142,7 @@ TESTATOPLANAR(BGRA, 4, 1, I420, 2, 2)
TESTATOPLANAR(I400, 1, 1, I420, 2, 2)
TESTATOPLANAR(J400, 1, 1, J420, 2, 2)
TESTATOPLANAR(RAW, 3, 1, I420, 2, 2)
+TESTATOPLANAR(RAW, 3, 1, J420, 2, 2)
TESTATOPLANAR(RGB24, 3, 1, I420, 2, 2)
TESTATOPLANAR(RGB24, 3, 1, J420, 2, 2)
TESTATOPLANAR(RGBA, 4, 1, I420, 2, 2)
@@ -1145,7 +1154,7 @@ TESTATOPLANAR(YUY2, 2, 1, I422, 2, 1)
#define TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, \
SUBSAMP_Y, W1280, N, NEG, OFF) \
TEST_F(LibYUVConvertTest, FMT_A##To##FMT_PLANAR##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kStride = SUBSAMPLE(kWidth, SUB_A) * BPP_A; \
const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
@@ -1191,7 +1200,7 @@ TESTATOPLANAR(YUY2, 2, 1, I422, 2, 1)
#define TESTATOBIPLANAR(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y) \
TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
- benchmark_width_ - 4, _Any, +, 0) \
+ benchmark_width_ + 1, _Any, +, 0) \
TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
benchmark_width_, _Unaligned, +, 1) \
TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
@@ -1208,152 +1217,166 @@ TESTATOBIPLANAR(UYVY, 2, 4, NV12, 2, 2)
TESTATOBIPLANAR(AYUV, 1, 4, NV12, 2, 2)
TESTATOBIPLANAR(AYUV, 1, 4, NV21, 2, 2)
-#define TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B, W1280, N, NEG, OFF) \
- TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
- const int kHeight = benchmark_height_; \
- const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A; \
- const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B; \
- const int kStrideA = \
- (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A; \
- const int kStrideB = \
- (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B; \
- align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF); \
- align_buffer_page_end(dst_argb_c, kStrideB* kHeightB); \
- align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB); \
- for (int i = 0; i < kStrideA * kHeightA; ++i) { \
- src_argb[i + OFF] = (fastrand() & 0xff); \
- } \
- memset(dst_argb_c, 1, kStrideB* kHeightB); \
- memset(dst_argb_opt, 101, kStrideB* kHeightB); \
- MaskCpuFlags(disable_cpu_flags_); \
- FMT_A##To##FMT_B(src_argb + OFF, kStrideA, dst_argb_c, kStrideB, kWidth, \
- NEG kHeight); \
- MaskCpuFlags(benchmark_cpu_info_); \
- for (int i = 0; i < benchmark_iterations_; ++i) { \
- FMT_A##To##FMT_B(src_argb + OFF, kStrideA, dst_argb_opt, kStrideB, \
- kWidth, NEG kHeight); \
- } \
- for (int i = 0; i < kStrideB * kHeightB; ++i) { \
- EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]); \
- } \
- free_aligned_buffer_page_end(src_argb); \
- free_aligned_buffer_page_end(dst_argb_c); \
- free_aligned_buffer_page_end(dst_argb_opt); \
+#define TESTATOBI(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, TYPE_B, \
+ EPP_B, STRIDE_B, HEIGHT_B, W1280, N, NEG, OFF) \
+ TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##N) { \
+ const int kWidth = W1280; \
+ const int kHeight = benchmark_height_; \
+ const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A; \
+ const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B; \
+ const int kStrideA = \
+ (kWidth * EPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A; \
+ const int kStrideB = \
+ (kWidth * EPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B; \
+ align_buffer_page_end(src_argb, \
+ kStrideA* kHeightA*(int)sizeof(TYPE_A) + OFF); \
+ align_buffer_page_end(dst_argb_c, kStrideB* kHeightB*(int)sizeof(TYPE_B)); \
+ align_buffer_page_end(dst_argb_opt, \
+ kStrideB* kHeightB*(int)sizeof(TYPE_B)); \
+ for (int i = 0; i < kStrideA * kHeightA * (int)sizeof(TYPE_A); ++i) { \
+ src_argb[i + OFF] = (fastrand() & 0xff); \
+ } \
+ memset(dst_argb_c, 1, kStrideB* kHeightB); \
+ memset(dst_argb_opt, 101, kStrideB* kHeightB); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FMT_A##To##FMT_B((TYPE_A*)(src_argb + OFF), kStrideA, (TYPE_B*)dst_argb_c, \
+ kStrideB, kWidth, NEG kHeight); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FMT_A##To##FMT_B((TYPE_A*)(src_argb + OFF), kStrideA, \
+ (TYPE_B*)dst_argb_opt, kStrideB, kWidth, NEG kHeight); \
+ } \
+ for (int i = 0; i < kStrideB * kHeightB * (int)sizeof(TYPE_B); ++i) { \
+ EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]); \
+ } \
+ free_aligned_buffer_page_end(src_argb); \
+ free_aligned_buffer_page_end(dst_argb_c); \
+ free_aligned_buffer_page_end(dst_argb_opt); \
+ }
+
+#define TESTATOBRANDOM(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, \
+ TYPE_B, EPP_B, STRIDE_B, HEIGHT_B) \
+ TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##_Random) { \
+ for (int times = 0; times < benchmark_iterations_; ++times) { \
+ const int kWidth = (fastrand() & 63) + 1; \
+ const int kHeight = (fastrand() & 31) + 1; \
+ const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A; \
+ const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B; \
+ const int kStrideA = \
+ (kWidth * EPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A; \
+ const int kStrideB = \
+ (kWidth * EPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B; \
+ align_buffer_page_end(src_argb, kStrideA* kHeightA*(int)sizeof(TYPE_A)); \
+ align_buffer_page_end(dst_argb_c, \
+ kStrideB* kHeightB*(int)sizeof(TYPE_B)); \
+ align_buffer_page_end(dst_argb_opt, \
+ kStrideB* kHeightB*(int)sizeof(TYPE_B)); \
+ for (int i = 0; i < kStrideA * kHeightA * (int)sizeof(TYPE_A); ++i) { \
+ src_argb[i] = 0xfe; \
+ } \
+ memset(dst_argb_c, 123, kStrideB* kHeightB); \
+ memset(dst_argb_opt, 123, kStrideB* kHeightB); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FMT_A##To##FMT_B((TYPE_A*)src_argb, kStrideA, (TYPE_B*)dst_argb_c, \
+ kStrideB, kWidth, kHeight); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ FMT_A##To##FMT_B((TYPE_A*)src_argb, kStrideA, (TYPE_B*)dst_argb_opt, \
+ kStrideB, kWidth, kHeight); \
+ for (int i = 0; i < kStrideB * kHeightB * (int)sizeof(TYPE_B); ++i) { \
+ EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]); \
+ } \
+ free_aligned_buffer_page_end(src_argb); \
+ free_aligned_buffer_page_end(dst_argb_c); \
+ free_aligned_buffer_page_end(dst_argb_opt); \
+ } \
}
-#define TESTATOBRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, \
- STRIDE_B, HEIGHT_B) \
- TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##_Random) { \
- for (int times = 0; times < benchmark_iterations_; ++times) { \
- const int kWidth = (fastrand() & 63) + 1; \
- const int kHeight = (fastrand() & 31) + 1; \
- const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A; \
- const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B; \
- const int kStrideA = \
- (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A; \
- const int kStrideB = \
- (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B; \
- align_buffer_page_end(src_argb, kStrideA* kHeightA); \
- align_buffer_page_end(dst_argb_c, kStrideB* kHeightB); \
- align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB); \
- for (int i = 0; i < kStrideA * kHeightA; ++i) { \
- src_argb[i] = (fastrand() & 0xff); \
- } \
- memset(dst_argb_c, 123, kStrideB* kHeightB); \
- memset(dst_argb_opt, 123, kStrideB* kHeightB); \
- MaskCpuFlags(disable_cpu_flags_); \
- FMT_A##To##FMT_B(src_argb, kStrideA, dst_argb_c, kStrideB, kWidth, \
- kHeight); \
- MaskCpuFlags(benchmark_cpu_info_); \
- FMT_A##To##FMT_B(src_argb, kStrideA, dst_argb_opt, kStrideB, kWidth, \
- kHeight); \
- for (int i = 0; i < kStrideB * kHeightB; ++i) { \
- EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]); \
- } \
- free_aligned_buffer_page_end(src_argb); \
- free_aligned_buffer_page_end(dst_argb_c); \
- free_aligned_buffer_page_end(dst_argb_opt); \
- } \
- }
-
-#define TESTATOB(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B) \
- TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B, benchmark_width_ - 4, _Any, +, 0) \
- TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B, benchmark_width_, _Unaligned, +, 1) \
- TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B, benchmark_width_, _Invert, -, 0) \
- TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B, benchmark_width_, _Opt, +, 0) \
- TESTATOBRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B)
-
-TESTATOB(AB30, 4, 4, 1, ABGR, 4, 4, 1)
-TESTATOB(AB30, 4, 4, 1, ARGB, 4, 4, 1)
+#define TESTATOB(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, TYPE_B, \
+ EPP_B, STRIDE_B, HEIGHT_B) \
+ TESTATOBI(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, TYPE_B, EPP_B, \
+ STRIDE_B, HEIGHT_B, benchmark_width_ + 1, _Any, +, 0) \
+ TESTATOBI(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, TYPE_B, EPP_B, \
+ STRIDE_B, HEIGHT_B, benchmark_width_, _Unaligned, +, 1) \
+ TESTATOBI(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, TYPE_B, EPP_B, \
+ STRIDE_B, HEIGHT_B, benchmark_width_, _Invert, -, 0) \
+ TESTATOBI(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, TYPE_B, EPP_B, \
+ STRIDE_B, HEIGHT_B, benchmark_width_, _Opt, +, 0) \
+ TESTATOBRANDOM(FMT_A, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, FMT_B, TYPE_B, \
+ EPP_B, STRIDE_B, HEIGHT_B)
+
+TESTATOB(AB30, uint8_t, 4, 4, 1, ABGR, uint8_t, 4, 4, 1)
+TESTATOB(AB30, uint8_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
-TESTATOB(ABGR, 4, 4, 1, AR30, 4, 4, 1)
+TESTATOB(ABGR, uint8_t, 4, 4, 1, AR30, uint8_t, 4, 4, 1)
#endif
-TESTATOB(ABGR, 4, 4, 1, ARGB, 4, 4, 1)
+TESTATOB(ABGR, uint8_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
-TESTATOB(AR30, 4, 4, 1, AB30, 4, 4, 1)
+TESTATOB(AR30, uint8_t, 4, 4, 1, AB30, uint8_t, 4, 4, 1)
#endif
-TESTATOB(AR30, 4, 4, 1, ABGR, 4, 4, 1)
+TESTATOB(AR30, uint8_t, 4, 4, 1, ABGR, uint8_t, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
-TESTATOB(AR30, 4, 4, 1, AR30, 4, 4, 1)
-TESTATOB(AR30, 4, 4, 1, ARGB, 4, 4, 1)
+TESTATOB(AR30, uint8_t, 4, 4, 1, AR30, uint8_t, 4, 4, 1)
+TESTATOB(AR30, uint8_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
#endif
-TESTATOB(ARGB, 4, 4, 1, ABGR, 4, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, ABGR, uint8_t, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
-TESTATOB(ARGB, 4, 4, 1, AR30, 4, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, AR30, uint8_t, 4, 4, 1)
#endif
-TESTATOB(ARGB, 4, 4, 1, ARGB, 4, 4, 1)
-TESTATOB(ARGB, 4, 4, 1, ARGB1555, 2, 2, 1)
-TESTATOB(ARGB, 4, 4, 1, ARGB4444, 2, 2, 1)
-TESTATOB(ARGB, 4, 4, 1, ARGBMirror, 4, 4, 1)
-TESTATOB(ARGB, 4, 4, 1, BGRA, 4, 4, 1)
-TESTATOB(ARGB, 4, 4, 1, I400, 1, 1, 1)
-TESTATOB(ARGB, 4, 4, 1, J400, 1, 1, 1)
-TESTATOB(RGBA, 4, 4, 1, J400, 1, 1, 1)
-TESTATOB(ARGB, 4, 4, 1, RAW, 3, 3, 1)
-TESTATOB(ARGB, 4, 4, 1, RGB24, 3, 3, 1)
-TESTATOB(ABGR, 4, 4, 1, RAW, 3, 3, 1)
-TESTATOB(ABGR, 4, 4, 1, RGB24, 3, 3, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, ARGB1555, uint8_t, 2, 2, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, ARGB4444, uint8_t, 2, 2, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, ARGBMirror, uint8_t, 4, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, BGRA, uint8_t, 4, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, I400, uint8_t, 1, 1, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, J400, uint8_t, 1, 1, 1)
+TESTATOB(RGBA, uint8_t, 4, 4, 1, J400, uint8_t, 1, 1, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, RAW, uint8_t, 3, 3, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, RGB24, uint8_t, 3, 3, 1)
+TESTATOB(ABGR, uint8_t, 4, 4, 1, RAW, uint8_t, 3, 3, 1)
+TESTATOB(ABGR, uint8_t, 4, 4, 1, RGB24, uint8_t, 3, 3, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
-TESTATOB(ARGB, 4, 4, 1, RGB565, 2, 2, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, RGB565, uint8_t, 2, 2, 1)
#endif
-TESTATOB(ARGB, 4, 4, 1, RGBA, 4, 4, 1)
-TESTATOB(ARGB, 4, 4, 1, UYVY, 2, 4, 1)
-TESTATOB(ARGB, 4, 4, 1, YUY2, 2, 4, 1) // 4
-TESTATOB(ARGB1555, 2, 2, 1, ARGB, 4, 4, 1)
-TESTATOB(ARGB4444, 2, 2, 1, ARGB, 4, 4, 1)
-TESTATOB(BGRA, 4, 4, 1, ARGB, 4, 4, 1)
-TESTATOB(I400, 1, 1, 1, ARGB, 4, 4, 1)
-TESTATOB(I400, 1, 1, 1, I400, 1, 1, 1)
-TESTATOB(I400, 1, 1, 1, I400Mirror, 1, 1, 1)
-TESTATOB(J400, 1, 1, 1, ARGB, 4, 4, 1)
-TESTATOB(J400, 1, 1, 1, J400, 1, 1, 1)
-TESTATOB(RAW, 3, 3, 1, ARGB, 4, 4, 1)
-TESTATOB(RAW, 3, 3, 1, RGBA, 4, 4, 1)
-TESTATOB(RAW, 3, 3, 1, RGB24, 3, 3, 1)
-TESTATOB(RGB24, 3, 3, 1, ARGB, 4, 4, 1)
-TESTATOB(RGB24, 3, 3, 1, J400, 1, 1, 1)
-TESTATOB(RGB24, 3, 3, 1, RGB24Mirror, 3, 3, 1)
-TESTATOB(RAW, 3, 3, 1, J400, 1, 1, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, RGBA, uint8_t, 4, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, UYVY, uint8_t, 2, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, YUY2, uint8_t, 2, 4, 1) // 4
+TESTATOB(ARGB1555, uint8_t, 2, 2, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(ARGB4444, uint8_t, 2, 2, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(BGRA, uint8_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(I400, uint8_t, 1, 1, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(I400, uint8_t, 1, 1, 1, I400, uint8_t, 1, 1, 1)
+TESTATOB(I400, uint8_t, 1, 1, 1, I400Mirror, uint8_t, 1, 1, 1)
+TESTATOB(J400, uint8_t, 1, 1, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(J400, uint8_t, 1, 1, 1, J400, uint8_t, 1, 1, 1)
+TESTATOB(RAW, uint8_t, 3, 3, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(RAW, uint8_t, 3, 3, 1, RGBA, uint8_t, 4, 4, 1)
+TESTATOB(RAW, uint8_t, 3, 3, 1, RGB24, uint8_t, 3, 3, 1)
+TESTATOB(RGB24, uint8_t, 3, 3, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(RGB24, uint8_t, 3, 3, 1, J400, uint8_t, 1, 1, 1)
+TESTATOB(RGB24, uint8_t, 3, 3, 1, RGB24Mirror, uint8_t, 3, 3, 1)
+TESTATOB(RAW, uint8_t, 3, 3, 1, J400, uint8_t, 1, 1, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
-TESTATOB(RGB565, 2, 2, 1, ARGB, 4, 4, 1)
+TESTATOB(RGB565, uint8_t, 2, 2, 1, ARGB, uint8_t, 4, 4, 1)
#endif
-TESTATOB(RGBA, 4, 4, 1, ARGB, 4, 4, 1)
-TESTATOB(UYVY, 2, 4, 1, ARGB, 4, 4, 1)
-TESTATOB(YUY2, 2, 4, 1, ARGB, 4, 4, 1)
-TESTATOB(YUY2, 2, 4, 1, Y, 1, 1, 1)
+TESTATOB(RGBA, uint8_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(UYVY, uint8_t, 2, 4, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(YUY2, uint8_t, 2, 4, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(YUY2, uint8_t, 2, 4, 1, Y, uint8_t, 1, 1, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, AR64, uint16_t, 4, 4, 1)
+TESTATOB(ARGB, uint8_t, 4, 4, 1, AB64, uint16_t, 4, 4, 1)
+TESTATOB(ABGR, uint8_t, 4, 4, 1, AR64, uint16_t, 4, 4, 1)
+TESTATOB(ABGR, uint8_t, 4, 4, 1, AB64, uint16_t, 4, 4, 1)
+TESTATOB(AR64, uint16_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(AB64, uint16_t, 4, 4, 1, ARGB, uint8_t, 4, 4, 1)
+TESTATOB(AR64, uint16_t, 4, 4, 1, ABGR, uint8_t, 4, 4, 1)
+TESTATOB(AB64, uint16_t, 4, 4, 1, ABGR, uint8_t, 4, 4, 1)
+TESTATOB(AR64, uint16_t, 4, 4, 1, AB64, uint16_t, 4, 4, 1)
+TESTATOB(AB64, uint16_t, 4, 4, 1, AR64, uint16_t, 4, 4, 1)
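The reworked TESTATOB above carries an element type next to the per-pixel element count, so the same macro now covers both the 8-bit formats and the new 16-bit AR64/AB64 ones: strides are counted in elements and every byte count is scaled by sizeof the element type. The size arithmetic it performs, pulled out as a standalone helper (illustrative only):

#include <stddef.h>

// Buffer sizing used by the updated TESTATOB/TESTATOBRANDOM macros: round
// the stride up in elements, then scale by the element size, so an AR64
// (uint16_t) surface takes twice the bytes of an ARGB (uint8_t) one.
static size_t TestPlaneBytes(int width, int epp, int stride_align, int height,
                             size_t elem_size) {
  int stride = (width * epp + stride_align - 1) / stride_align * stride_align;
  return (size_t)stride * (size_t)height * elem_size;
}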
#define TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
HEIGHT_B, W1280, N, NEG, OFF) \
TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##Dither##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A; \
const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B; \
@@ -1423,7 +1446,7 @@ TESTATOB(YUY2, 2, 4, 1, Y, 1, 1, 1)
#define TESTATOBD(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
HEIGHT_B) \
TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
- HEIGHT_B, benchmark_width_ - 4, _Any, +, 0) \
+ HEIGHT_B, benchmark_width_ + 1, _Any, +, 0) \
TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
HEIGHT_B, benchmark_width_, _Unaligned, +, 1) \
TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
@@ -1437,35 +1460,39 @@ TESTATOB(YUY2, 2, 4, 1, Y, 1, 1, 1)
TESTATOBD(ARGB, 4, 4, 1, RGB565, 2, 2, 1)
#endif
-#define TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, W1280, N, NEG, OFF) \
+#define TESTSYMI(FMT_ATOB, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, W1280, N, NEG, \
+ OFF) \
TEST_F(LibYUVConvertTest, FMT_ATOB##_Symetric##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A; \
const int kStrideA = \
- (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A; \
- align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF); \
- align_buffer_page_end(dst_argb_c, kStrideA* kHeightA); \
- align_buffer_page_end(dst_argb_opt, kStrideA* kHeightA); \
- for (int i = 0; i < kStrideA * kHeightA; ++i) { \
+ (kWidth * EPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A; \
+ align_buffer_page_end(src_argb, \
+ kStrideA* kHeightA*(int)sizeof(TYPE_A) + OFF); \
+ align_buffer_page_end(dst_argb_c, kStrideA* kHeightA*(int)sizeof(TYPE_A)); \
+ align_buffer_page_end(dst_argb_opt, \
+ kStrideA* kHeightA*(int)sizeof(TYPE_A)); \
+ for (int i = 0; i < kStrideA * kHeightA * (int)sizeof(TYPE_A); ++i) { \
src_argb[i + OFF] = (fastrand() & 0xff); \
} \
memset(dst_argb_c, 1, kStrideA* kHeightA); \
memset(dst_argb_opt, 101, kStrideA* kHeightA); \
MaskCpuFlags(disable_cpu_flags_); \
- FMT_ATOB(src_argb + OFF, kStrideA, dst_argb_c, kStrideA, kWidth, \
- NEG kHeight); \
+ FMT_ATOB((TYPE_A*)(src_argb + OFF), kStrideA, (TYPE_A*)dst_argb_c, \
+ kStrideA, kWidth, NEG kHeight); \
MaskCpuFlags(benchmark_cpu_info_); \
for (int i = 0; i < benchmark_iterations_; ++i) { \
- FMT_ATOB(src_argb + OFF, kStrideA, dst_argb_opt, kStrideA, kWidth, \
- NEG kHeight); \
+ FMT_ATOB((TYPE_A*)(src_argb + OFF), kStrideA, (TYPE_A*)dst_argb_opt, \
+ kStrideA, kWidth, NEG kHeight); \
} \
MaskCpuFlags(disable_cpu_flags_); \
- FMT_ATOB(dst_argb_c, kStrideA, dst_argb_c, kStrideA, kWidth, NEG kHeight); \
+ FMT_ATOB((TYPE_A*)dst_argb_c, kStrideA, (TYPE_A*)dst_argb_c, kStrideA, \
+ kWidth, NEG kHeight); \
MaskCpuFlags(benchmark_cpu_info_); \
- FMT_ATOB(dst_argb_opt, kStrideA, dst_argb_opt, kStrideA, kWidth, \
- NEG kHeight); \
- for (int i = 0; i < kStrideA * kHeightA; ++i) { \
+ FMT_ATOB((TYPE_A*)dst_argb_opt, kStrideA, (TYPE_A*)dst_argb_opt, kStrideA, \
+ kWidth, NEG kHeight); \
+ for (int i = 0; i < kStrideA * kHeightA * (int)sizeof(TYPE_A); ++i) { \
EXPECT_EQ(src_argb[i + OFF], dst_argb_opt[i]); \
EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]); \
} \
@@ -1474,18 +1501,20 @@ TESTATOBD(ARGB, 4, 4, 1, RGB565, 2, 2, 1)
free_aligned_buffer_page_end(dst_argb_opt); \
}
-#define TESTSYM(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A) \
- TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_ - 4, _Any, +, \
- 0) \
- TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, _Unaligned, \
- +, 1) \
- TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, _Opt, +, 0)
+#define TESTSYM(FMT_ATOB, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A) \
+ TESTSYMI(FMT_ATOB, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, benchmark_width_ + 1, \
+ _Any, +, 0) \
+ TESTSYMI(FMT_ATOB, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, \
+ _Unaligned, +, 1) \
+ TESTSYMI(FMT_ATOB, TYPE_A, EPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, \
+ _Opt, +, 0)
-TESTSYM(ARGBToARGB, 4, 4, 1)
-TESTSYM(ARGBToBGRA, 4, 4, 1)
-TESTSYM(ARGBToABGR, 4, 4, 1)
-TESTSYM(BGRAToARGB, 4, 4, 1)
-TESTSYM(ABGRToARGB, 4, 4, 1)
+TESTSYM(ARGBToARGB, uint8_t, 4, 4, 1)
+TESTSYM(ARGBToBGRA, uint8_t, 4, 4, 1)
+TESTSYM(ARGBToABGR, uint8_t, 4, 4, 1)
+TESTSYM(BGRAToARGB, uint8_t, 4, 4, 1)
+TESTSYM(ABGRToARGB, uint8_t, 4, 4, 1)
+TESTSYM(AB64ToAR64, uint16_t, 4, 4, 1)
TEST_F(LibYUVConvertTest, Test565) {
SIMD_ALIGNED(uint8_t orig_pixels[256][4]);
@@ -2349,7 +2378,11 @@ TEST_F(LibYUVConvertTest, TestMJPGToARGB) {
// Test result matches known hash value.
uint32_t dst_argb_hash = HashDjb2(dst_argb, width * height, 5381);
+#ifdef LIBYUV_UNLIMITED_DATA
+ EXPECT_EQ(dst_argb_hash, 3900633302u);
+#else
EXPECT_EQ(dst_argb_hash, 2355976473u);
+#endif
free_aligned_buffer_page_end(dst_argb);
}
@@ -2658,7 +2691,7 @@ TEST_F(LibYUVConvertTest, TestDither) {
#define TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN, W1280, N, NEG, OFF, FMT_C, BPP_C) \
TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##Dither##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
@@ -2711,7 +2744,7 @@ TEST_F(LibYUVConvertTest, TestDither) {
#define TESTPLANARTOBD(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN, FMT_C, BPP_C) \
TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN, benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C) \
+ YALIGN, benchmark_width_ + 1, _Any, +, 0, FMT_C, BPP_C) \
TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
YALIGN, benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C) \
TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
@@ -2784,11 +2817,12 @@ TESTPTOB(TestYUY2ToNV12, YUY2ToI420, YUY2ToNV12)
TESTPTOB(TestUYVYToNV12, UYVYToI420, UYVYToNV12)
// Transitive tests. A to B to C is same as A to C.
+// Benchmarks A to B to C for comparison with 1 step, benchmarked elsewhere.
#define TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
W1280, N, NEG, OFF, FMT_C, BPP_C) \
- TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##_##FMT_C##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##To##FMT_C##N) { \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B; \
const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
@@ -2805,23 +2839,23 @@ TESTPTOB(TestUYVYToNV12, UYVYToI420, UYVYToNV12)
src_v[i + OFF] = (fastrand() & 0xff); \
} \
memset(dst_argb_b + OFF, 1, kStrideB * kHeight); \
- for (int i = 0; i < benchmark_iterations_; ++i) { \
- FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
- src_v + OFF, kStrideUV, dst_argb_b + OFF, \
- kStrideB, kWidth, NEG kHeight); \
- } \
+ FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
+ src_v + OFF, kStrideUV, dst_argb_b + OFF, kStrideB, \
+ kWidth, NEG kHeight); \
/* Convert to a 3rd format in 1 step and 2 steps and compare */ \
const int kStrideC = kWidth * BPP_C; \
align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF); \
align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF); \
memset(dst_argb_c + OFF, 2, kStrideC * kHeight); \
memset(dst_argb_bc + OFF, 3, kStrideC * kHeight); \
- FMT_PLANAR##To##FMT_C(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
- src_v + OFF, kStrideUV, dst_argb_c + OFF, kStrideC, \
- kWidth, NEG kHeight); \
- /* Convert B to C */ \
- FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC, \
- kWidth, kHeight); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FMT_PLANAR##To##FMT_C(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
+ src_v + OFF, kStrideUV, dst_argb_c + OFF, \
+ kStrideC, kWidth, NEG kHeight); \
+ /* Convert B to C */ \
+ FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, \
+ kStrideC, kWidth, kHeight); \
+ } \
for (int i = 0; i < kStrideC * kHeight; ++i) { \
EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_bc[i + OFF]); \
} \
@@ -2836,7 +2870,7 @@ TESTPTOB(TestUYVYToNV12, UYVYToI420, UYVYToNV12)
#define TESTPLANARTOE(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
FMT_C, BPP_C) \
TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
- benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C) \
+ benchmark_width_ + 1, _Any, +, 0, FMT_C, BPP_C) \
TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C) \
TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
@@ -2844,26 +2878,28 @@ TESTPTOB(TestUYVYToNV12, UYVYToI420, UYVYToNV12)
TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
benchmark_width_, _Opt, +, 0, FMT_C, BPP_C)
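The entries below exercise the transitive property stated in the "Transitive tests" comment above: converting A to C in one step has to match converting A to B and then B to C. A standalone sketch of that check for the I420 / ARGB / ABGR case, using public libyuv conversions (the header names and caller-supplied buffers are assumptions of this sketch; the real macro also randomizes inputs, benchmarks the two-step path and masks CPU flags):

#include <stdint.h>
#include <string.h>
#include "libyuv/convert_argb.h"       // assumed location of I420ToARGB/ABGR
#include "libyuv/convert_from_argb.h"  // assumed location of ARGBToABGR

// One-step I420->ABGR must equal two-step I420->ARGB->ABGR, byte for byte.
static bool I420ToAbgrIsTransitive(const uint8_t* y, int ys, const uint8_t* u,
                                   int us, const uint8_t* v, int vs,
                                   uint8_t* tmp_argb, uint8_t* abgr_1step,
                                   uint8_t* abgr_2step, int w, int h) {
  libyuv::I420ToABGR(y, ys, u, us, v, vs, abgr_1step, w * 4, w, h);
  libyuv::I420ToARGB(y, ys, u, us, v, vs, tmp_argb, w * 4, w, h);
  libyuv::ARGBToABGR(tmp_argb, w * 4, abgr_2step, w * 4, w, h);
  return memcmp(abgr_1step, abgr_2step, (size_t)w * 4 * h) == 0;
}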
-TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTPLANARTOE(I420, 2, 2, ABGR, 1, 4, ARGB, 4)
-TESTPLANARTOE(J420, 2, 2, ARGB, 1, 4, ARGB, 4)
-TESTPLANARTOE(J420, 2, 2, ABGR, 1, 4, ARGB, 4)
-TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, ARGB, 4)
-TESTPLANARTOE(H420, 2, 2, ABGR, 1, 4, ARGB, 4)
-TESTPLANARTOE(U420, 2, 2, ARGB, 1, 4, ARGB, 4)
-TESTPLANARTOE(U420, 2, 2, ABGR, 1, 4, ARGB, 4)
+TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ABGR, 4)
+TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RAW, 3)
+TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RGB24, 3)
TESTPLANARTOE(I420, 2, 2, BGRA, 1, 4, ARGB, 4)
-TESTPLANARTOE(I420, 2, 2, RGBA, 1, 4, ARGB, 4)
-TESTPLANARTOE(I420, 2, 2, RGB24, 1, 3, ARGB, 4)
+TESTPLANARTOE(I420, 2, 2, RAW, 1, 3, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RAW, 1, 3, RGB24, 3)
+TESTPLANARTOE(I420, 2, 2, RGB24, 1, 3, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RGB24, 1, 3, RAW, 3)
-TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RAW, 3)
-TESTPLANARTOE(I420, 2, 2, RAW, 1, 3, ARGB, 4)
-TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, ARGB, 4)
-TESTPLANARTOE(H420, 2, 2, RAW, 1, 3, RGB24, 3)
-TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, RAW, 3)
+TESTPLANARTOE(I420, 2, 2, RGBA, 1, 4, ARGB, 4)
+TESTPLANARTOE(H420, 2, 2, ABGR, 1, 4, ARGB, 4)
+TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, RAW, 3)
+TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, RGB24, 3)
TESTPLANARTOE(H420, 2, 2, RAW, 1, 3, ARGB, 4)
+TESTPLANARTOE(H420, 2, 2, RAW, 1, 3, RGB24, 3)
+TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, ARGB, 4)
+TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, RAW, 3)
+TESTPLANARTOE(J420, 2, 2, ABGR, 1, 4, ARGB, 4)
+TESTPLANARTOE(J420, 2, 2, ARGB, 1, 4, ARGB, 4)
+TESTPLANARTOE(U420, 2, 2, ABGR, 1, 4, ARGB, 4)
+TESTPLANARTOE(U420, 2, 2, ARGB, 1, 4, ARGB, 4)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RGB565, 2)
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ARGB1555, 2)
@@ -2899,8 +2935,8 @@ TESTPLANARTOE(I422, 2, 1, UYVY, 2, 4, ARGB, 4)
#define TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
W1280, N, NEG, OFF, FMT_C, BPP_C, ATTEN) \
- TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##_##FMT_C##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##To##FMT_C##N) { \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B; \
const int kSizeUV = \
@@ -2919,25 +2955,25 @@ TESTPLANARTOE(I422, 2, 1, UYVY, 2, 4, ARGB, 4)
src_v[i + OFF] = (fastrand() & 0xff); \
} \
memset(dst_argb_b + OFF, 1, kStrideB * kHeight); \
- for (int i = 0; i < benchmark_iterations_; ++i) { \
- FMT_PLANAR##To##FMT_B( \
- src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), \
- src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth, \
- dst_argb_b + OFF, kStrideB, kWidth, NEG kHeight, ATTEN); \
- } \
+ FMT_PLANAR##To##FMT_B( \
+ src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), \
+ src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth, \
+ dst_argb_b + OFF, kStrideB, kWidth, NEG kHeight, ATTEN); \
/* Convert to a 3rd format in 1 step and 2 steps and compare */ \
const int kStrideC = kWidth * BPP_C; \
align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF); \
align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF); \
memset(dst_argb_c + OFF, 2, kStrideC * kHeight); \
memset(dst_argb_bc + OFF, 3, kStrideC * kHeight); \
- FMT_PLANAR##To##FMT_C( \
- src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), \
- src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth, \
- dst_argb_c + OFF, kStrideC, kWidth, NEG kHeight, ATTEN); \
- /* Convert B to C */ \
- FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC, \
- kWidth, kHeight); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FMT_PLANAR##To##FMT_C( \
+ src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), \
+ src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth, \
+ dst_argb_c + OFF, kStrideC, kWidth, NEG kHeight, ATTEN); \
+ /* Convert B to C */ \
+ FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, \
+ kStrideC, kWidth, kHeight); \
+ } \
for (int i = 0; i < kStrideC * kHeight; ++i) { \
EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_bc[i + OFF]); \
} \
@@ -2953,7 +2989,7 @@ TESTPLANARTOE(I422, 2, 1, UYVY, 2, 4, ARGB, 4)
#define TESTQPLANARTOE(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
FMT_C, BPP_C) \
TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
- benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C, 0) \
+ benchmark_width_ + 1, _Any, +, 0, FMT_C, BPP_C, 0) \
TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C, 0) \
TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
@@ -3000,8 +3036,8 @@ TESTQPLANARTOE(V444Alpha, 1, 1, ABGR, 1, 4, ARGB, 4)
#define TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, W1280, N, NEG, \
OFF, FMT_C, BPP_C) \
- TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##_##FMT_C##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##To##FMT_C##N) { \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kStrideA = SUBSAMPLE(kWidth, SUB_A) * BPP_A; \
const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B; \
@@ -3009,21 +3045,21 @@ TESTQPLANARTOE(V444Alpha, 1, 1, ABGR, 1, 4, ARGB, 4)
align_buffer_page_end(dst_argb_b, kStrideB* kHeight + OFF); \
MemRandomize(src_argb_a + OFF, kStrideA * kHeight); \
memset(dst_argb_b + OFF, 1, kStrideB * kHeight); \
- for (int i = 0; i < benchmark_iterations_; ++i) { \
- FMT_A##To##FMT_B(src_argb_a + OFF, kStrideA, dst_argb_b + OFF, kStrideB, \
- kWidth, NEG kHeight); \
- } \
+ FMT_A##To##FMT_B(src_argb_a + OFF, kStrideA, dst_argb_b + OFF, kStrideB, \
+ kWidth, NEG kHeight); \
/* Convert to a 3rd format in 1 step and 2 steps and compare */ \
const int kStrideC = kWidth * BPP_C; \
align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF); \
align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF); \
memset(dst_argb_c + OFF, 2, kStrideC * kHeight); \
memset(dst_argb_bc + OFF, 3, kStrideC * kHeight); \
- FMT_A##To##FMT_C(src_argb_a + OFF, kStrideA, dst_argb_c + OFF, kStrideC, \
- kWidth, NEG kHeight); \
- /* Convert B to C */ \
- FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC, \
- kWidth, kHeight); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FMT_A##To##FMT_C(src_argb_a + OFF, kStrideA, dst_argb_c + OFF, kStrideC, \
+ kWidth, NEG kHeight); \
+ /* Convert B to C */ \
+ FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, \
+ kStrideC, kWidth, kHeight); \
+ } \
for (int i = 0; i < kStrideC * kHeight; i += 4) { \
EXPECT_EQ(dst_argb_c[i + OFF + 0], dst_argb_bc[i + OFF + 0]); \
EXPECT_EQ(dst_argb_c[i + OFF + 1], dst_argb_bc[i + OFF + 1]); \
@@ -3038,7 +3074,7 @@ TESTQPLANARTOE(V444Alpha, 1, 1, ABGR, 1, 4, ARGB, 4)
#define TESTPLANETOE(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, FMT_C, BPP_C) \
TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, \
- benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C) \
+ benchmark_width_ + 1, _Any, +, 0, FMT_C, BPP_C) \
TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_, \
_Unaligned, +, 1, FMT_C, BPP_C) \
TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_, \
@@ -3161,91 +3197,457 @@ TEST_F(LibYUVConvertTest, ABGRToAR30Row_Opt) {
}
#endif // HAS_ABGRTOAR30ROW_AVX2
+// Provide matrix wrappers for 12 bit YUV
+#define I012ToARGB(a, b, c, d, e, f, g, h, i, j) \
+ I012ToARGBMatrix(a, b, c, d, e, f, g, h, &kYuvI601Constants, i, j)
+#define I012ToAR30(a, b, c, d, e, f, g, h, i, j) \
+ I012ToAR30Matrix(a, b, c, d, e, f, g, h, &kYuvI601Constants, i, j)
+
+#define I410ToARGB(a, b, c, d, e, f, g, h, i, j) \
+ I410ToARGBMatrix(a, b, c, d, e, f, g, h, &kYuvI601Constants, i, j)
+#define I410ToABGR(a, b, c, d, e, f, g, h, i, j) \
+ I410ToABGRMatrix(a, b, c, d, e, f, g, h, &kYuvI601Constants, i, j)
+#define H410ToARGB(a, b, c, d, e, f, g, h, i, j) \
+ I410ToARGBMatrix(a, b, c, d, e, f, g, h, &kYuvH709Constants, i, j)
+#define H410ToABGR(a, b, c, d, e, f, g, h, i, j) \
+ I410ToABGRMatrix(a, b, c, d, e, f, g, h, &kYuvH709Constants, i, j)
+#define U410ToARGB(a, b, c, d, e, f, g, h, i, j) \
+ I410ToARGBMatrix(a, b, c, d, e, f, g, h, &kYuv2020Constants, i, j)
+#define U410ToABGR(a, b, c, d, e, f, g, h, i, j) \
+ I410ToABGRMatrix(a, b, c, d, e, f, g, h, &kYuv2020Constants, i, j)
+#define I410ToAR30(a, b, c, d, e, f, g, h, i, j) \
+ I410ToAR30Matrix(a, b, c, d, e, f, g, h, &kYuvI601Constants, i, j)
+#define I410ToAB30(a, b, c, d, e, f, g, h, i, j) \
+ I410ToAB30Matrix(a, b, c, d, e, f, g, h, &kYuvI601Constants, i, j)
+#define H410ToAR30(a, b, c, d, e, f, g, h, i, j) \
+ I410ToAR30Matrix(a, b, c, d, e, f, g, h, &kYuvH709Constants, i, j)
+#define H410ToAB30(a, b, c, d, e, f, g, h, i, j) \
+ I410ToAB30Matrix(a, b, c, d, e, f, g, h, &kYuvH709Constants, i, j)
+#define U410ToAR30(a, b, c, d, e, f, g, h, i, j) \
+ I410ToAR30Matrix(a, b, c, d, e, f, g, h, &kYuv2020Constants, i, j)
+#define U410ToAB30(a, b, c, d, e, f, g, h, i, j) \
+ I410ToAB30Matrix(a, b, c, d, e, f, g, h, &kYuv2020Constants, i, j)
+
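These wrappers exist so the test macros below can token-paste names of the form FMT_PLANAR##To##FMT_B while still exercising the matrix-based entry points. As a minimal sketch of what one wrapper expands to (argument names here are illustrative, not taken from the headers):

    // I012ToARGB(y, y_stride, u, u_stride, v, v_stride, dst, dst_stride, w, h)
    // expands to the matrix call with the BT.601 constants baked in:
    I012ToARGBMatrix(y, y_stride, u, u_stride, v, v_stride, dst, dst_stride,
                     &kYuvI601Constants, w, h);
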
// TODO(fbarchard): Fix clamping issue affected by U channel.
-#define TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, \
- ALIGN, YALIGN, W1280, N, NEG, SOFF, DOFF) \
- TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
- const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
- const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
- const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
- const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y); \
- const int kBpc = 2; \
- align_buffer_page_end(src_y, kWidth* kHeight* kBpc + SOFF); \
- align_buffer_page_end(src_u, kSizeUV* kBpc + SOFF); \
- align_buffer_page_end(src_v, kSizeUV* kBpc + SOFF); \
- align_buffer_page_end(dst_argb_c, kStrideB* kHeight + DOFF); \
- align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + DOFF); \
- for (int i = 0; i < kWidth * kHeight; ++i) { \
- reinterpret_cast<uint16_t*>(src_y + SOFF)[i] = (fastrand() & 0x3ff); \
- } \
- for (int i = 0; i < kSizeUV; ++i) { \
- reinterpret_cast<uint16_t*>(src_u + SOFF)[i] = (fastrand() & 0x3ff); \
- reinterpret_cast<uint16_t*>(src_v + SOFF)[i] = (fastrand() & 0x3ff); \
- } \
- memset(dst_argb_c + DOFF, 1, kStrideB * kHeight); \
- memset(dst_argb_opt + DOFF, 101, kStrideB * kHeight); \
- MaskCpuFlags(disable_cpu_flags_); \
- FMT_PLANAR##To##FMT_B( \
- reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth, \
- reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV, \
- reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV, \
- dst_argb_c + DOFF, kStrideB, kWidth, NEG kHeight); \
- MaskCpuFlags(benchmark_cpu_info_); \
- for (int i = 0; i < benchmark_iterations_; ++i) { \
- FMT_PLANAR##To##FMT_B( \
- reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth, \
- reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV, \
- reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV, \
- dst_argb_opt + DOFF, kStrideB, kWidth, NEG kHeight); \
- } \
- for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) { \
- EXPECT_EQ(dst_argb_c[i + DOFF], dst_argb_opt[i + DOFF]); \
- } \
- free_aligned_buffer_page_end(src_y); \
- free_aligned_buffer_page_end(src_u); \
- free_aligned_buffer_page_end(src_v); \
- free_aligned_buffer_page_end(dst_argb_c); \
- free_aligned_buffer_page_end(dst_argb_opt); \
- }
-
-#define TESTPLANAR16TOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN) \
- TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN, benchmark_width_ - 4, _Any, +, 0, 0) \
- TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN, benchmark_width_, _Unaligned, +, 1, 1) \
- TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN, benchmark_width_, _Invert, -, 0, 0) \
- TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
- YALIGN, benchmark_width_, _Opt, +, 0, 0)
-
-TESTPLANAR16TOB(I010, 2, 2, ARGB, 4, 4, 1)
-TESTPLANAR16TOB(I010, 2, 2, ABGR, 4, 4, 1)
-TESTPLANAR16TOB(H010, 2, 2, ARGB, 4, 4, 1)
-TESTPLANAR16TOB(H010, 2, 2, ABGR, 4, 4, 1)
-TESTPLANAR16TOB(U010, 2, 2, ARGB, 4, 4, 1)
-TESTPLANAR16TOB(U010, 2, 2, ABGR, 4, 4, 1)
-TESTPLANAR16TOB(I210, 2, 1, ARGB, 4, 4, 1)
-TESTPLANAR16TOB(I210, 2, 1, ABGR, 4, 4, 1)
-TESTPLANAR16TOB(H210, 2, 1, ARGB, 4, 4, 1)
-TESTPLANAR16TOB(H210, 2, 1, ABGR, 4, 4, 1)
-TESTPLANAR16TOB(U210, 2, 1, ARGB, 4, 4, 1)
-TESTPLANAR16TOB(U210, 2, 1, ABGR, 4, 4, 1)
+#define TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_MASK, FMT_B, \
+ BPP_B, ALIGN, YALIGN, W1280, N, NEG, SOFF, DOFF) \
+ TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
+ const int kWidth = W1280; \
+ const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
+ const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
+ const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
+ const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y); \
+ const int kBpc = 2; \
+ align_buffer_page_end(src_y, kWidth* kHeight* kBpc + SOFF); \
+ align_buffer_page_end(src_u, kSizeUV* kBpc + SOFF); \
+ align_buffer_page_end(src_v, kSizeUV* kBpc + SOFF); \
+ align_buffer_page_end(dst_argb_c, kStrideB* kHeight + DOFF); \
+ align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + DOFF); \
+ for (int i = 0; i < kWidth * kHeight; ++i) { \
+ reinterpret_cast<uint16_t*>(src_y + SOFF)[i] = (fastrand() & FMT_MASK); \
+ } \
+ for (int i = 0; i < kSizeUV; ++i) { \
+ reinterpret_cast<uint16_t*>(src_u + SOFF)[i] = (fastrand() & FMT_MASK); \
+ reinterpret_cast<uint16_t*>(src_v + SOFF)[i] = (fastrand() & FMT_MASK); \
+ } \
+ memset(dst_argb_c + DOFF, 1, kStrideB * kHeight); \
+ memset(dst_argb_opt + DOFF, 101, kStrideB * kHeight); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FMT_PLANAR##To##FMT_B( \
+ reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth, \
+ reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV, \
+ reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV, \
+ dst_argb_c + DOFF, kStrideB, kWidth, NEG kHeight); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FMT_PLANAR##To##FMT_B( \
+ reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth, \
+ reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV, \
+ reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV, \
+ dst_argb_opt + DOFF, kStrideB, kWidth, NEG kHeight); \
+ } \
+ for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) { \
+ EXPECT_EQ(dst_argb_c[i + DOFF], dst_argb_opt[i + DOFF]); \
+ } \
+ free_aligned_buffer_page_end(src_y); \
+ free_aligned_buffer_page_end(src_u); \
+ free_aligned_buffer_page_end(src_v); \
+ free_aligned_buffer_page_end(dst_argb_c); \
+ free_aligned_buffer_page_end(dst_argb_opt); \
+ }
+
+#define TESTPLANAR16TOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_MASK, FMT_B, \
+ BPP_B, ALIGN, YALIGN) \
+ TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_MASK, FMT_B, BPP_B, \
+ ALIGN, YALIGN, benchmark_width_ + 1, _Any, +, 0, 0) \
+ TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_MASK, FMT_B, BPP_B, \
+ ALIGN, YALIGN, benchmark_width_, _Unaligned, +, 1, 1) \
+ TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_MASK, FMT_B, BPP_B, \
+ ALIGN, YALIGN, benchmark_width_, _Invert, -, 0, 0) \
+ TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_MASK, FMT_B, BPP_B, \
+ ALIGN, YALIGN, benchmark_width_, _Opt, +, 0, 0)
+
+// These conversions are only optimized for x86
+#if defined(ENABLE_SLOW_TESTS) || defined(__x86_64__) || defined(__i386__)
+TESTPLANAR16TOB(I010, 2, 2, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(I010, 2, 2, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(H010, 2, 2, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(H010, 2, 2, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(U010, 2, 2, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(U010, 2, 2, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(I210, 2, 1, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(I210, 2, 1, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(H210, 2, 1, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(H210, 2, 1, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(U210, 2, 1, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(U210, 2, 1, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(I410, 1, 1, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(I410, 1, 1, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(H410, 1, 1, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(H410, 1, 1, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(U410, 1, 1, 0x3ff, ARGB, 4, 4, 1)
+TESTPLANAR16TOB(U410, 1, 1, 0x3ff, ABGR, 4, 4, 1)
+TESTPLANAR16TOB(I012, 2, 2, 0xfff, ARGB, 4, 4, 1)
+
#ifdef LITTLE_ENDIAN_ONLY_TEST
-TESTPLANAR16TOB(I010, 2, 2, AR30, 4, 4, 1)
-TESTPLANAR16TOB(I010, 2, 2, AB30, 4, 4, 1)
-TESTPLANAR16TOB(H010, 2, 2, AR30, 4, 4, 1)
-TESTPLANAR16TOB(H010, 2, 2, AB30, 4, 4, 1)
-TESTPLANAR16TOB(U010, 2, 2, AR30, 4, 4, 1)
-TESTPLANAR16TOB(U010, 2, 2, AB30, 4, 4, 1)
-TESTPLANAR16TOB(I210, 2, 1, AR30, 4, 4, 1)
-TESTPLANAR16TOB(I210, 2, 1, AB30, 4, 4, 1)
-TESTPLANAR16TOB(H210, 2, 1, AR30, 4, 4, 1)
-TESTPLANAR16TOB(H210, 2, 1, AB30, 4, 4, 1)
-TESTPLANAR16TOB(U210, 2, 1, AR30, 4, 4, 1)
-TESTPLANAR16TOB(U210, 2, 1, AB30, 4, 4, 1)
-#endif
+TESTPLANAR16TOB(I010, 2, 2, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(I010, 2, 2, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(H010, 2, 2, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(H010, 2, 2, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(U010, 2, 2, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(U010, 2, 2, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(I210, 2, 1, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(I210, 2, 1, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(H210, 2, 1, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(H210, 2, 1, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(U210, 2, 1, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(U210, 2, 1, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(I410, 1, 1, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(I410, 1, 1, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(H410, 1, 1, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(H410, 1, 1, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(U410, 1, 1, 0x3ff, AR30, 4, 4, 1)
+TESTPLANAR16TOB(U410, 1, 1, 0x3ff, AB30, 4, 4, 1)
+TESTPLANAR16TOB(I012, 2, 2, 0xfff, AR30, 4, 4, 1)
+#endif // LITTLE_ENDIAN_ONLY_TEST
+#endif // ENABLE_SLOW_TESTS
+
+#define TESTQPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, \
+ ALIGN, YALIGN, W1280, N, NEG, OFF, ATTEN, S_DEPTH) \
+ TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
+ const int kWidth = W1280; \
+ const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
+ const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
+ const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
+ const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y); \
+ const int kBpc = 2; \
+ align_buffer_page_end(src_y, kWidth* kHeight* kBpc + OFF); \
+ align_buffer_page_end(src_u, kSizeUV* kBpc + OFF); \
+ align_buffer_page_end(src_v, kSizeUV* kBpc + OFF); \
+ align_buffer_page_end(src_a, kWidth* kHeight* kBpc + OFF); \
+ align_buffer_page_end(dst_argb_c, kStrideB* kHeight + OFF); \
+ align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + OFF); \
+ for (int i = 0; i < kWidth * kHeight; ++i) { \
+ reinterpret_cast<uint16_t*>(src_y + OFF)[i] = \
+ (fastrand() & ((1 << S_DEPTH) - 1)); \
+ reinterpret_cast<uint16_t*>(src_a + OFF)[i] = \
+ (fastrand() & ((1 << S_DEPTH) - 1)); \
+ } \
+ for (int i = 0; i < kSizeUV; ++i) { \
+ reinterpret_cast<uint16_t*>(src_u + OFF)[i] = \
+ (fastrand() & ((1 << S_DEPTH) - 1)); \
+ reinterpret_cast<uint16_t*>(src_v + OFF)[i] = \
+ (fastrand() & ((1 << S_DEPTH) - 1)); \
+ } \
+ memset(dst_argb_c + OFF, 1, kStrideB * kHeight); \
+ memset(dst_argb_opt + OFF, 101, kStrideB * kHeight); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FMT_PLANAR##To##FMT_B(reinterpret_cast<uint16_t*>(src_y + OFF), kWidth, \
+ reinterpret_cast<uint16_t*>(src_u + OFF), kStrideUV, \
+ reinterpret_cast<uint16_t*>(src_v + OFF), kStrideUV, \
+ reinterpret_cast<uint16_t*>(src_a + OFF), kWidth, \
+ dst_argb_c + OFF, kStrideB, kWidth, NEG kHeight, \
+ ATTEN); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FMT_PLANAR##To##FMT_B( \
+ reinterpret_cast<uint16_t*>(src_y + OFF), kWidth, \
+ reinterpret_cast<uint16_t*>(src_u + OFF), kStrideUV, \
+ reinterpret_cast<uint16_t*>(src_v + OFF), kStrideUV, \
+ reinterpret_cast<uint16_t*>(src_a + OFF), kWidth, \
+ dst_argb_opt + OFF, kStrideB, kWidth, NEG kHeight, ATTEN); \
+ } \
+ for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) { \
+ EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_opt[i + OFF]); \
+ } \
+ free_aligned_buffer_page_end(src_y); \
+ free_aligned_buffer_page_end(src_u); \
+ free_aligned_buffer_page_end(src_v); \
+ free_aligned_buffer_page_end(src_a); \
+ free_aligned_buffer_page_end(dst_argb_c); \
+ free_aligned_buffer_page_end(dst_argb_opt); \
+ }
+
+#define TESTQPLANAR16TOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, \
+ ALIGN, YALIGN, S_DEPTH) \
+ TESTQPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_ + 1, _Any, +, 0, 0, S_DEPTH) \
+ TESTQPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_, _Unaligned, +, 1, 0, S_DEPTH) \
+ TESTQPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_, _Invert, -, 0, 0, S_DEPTH) \
+ TESTQPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_, _Opt, +, 0, 0, S_DEPTH) \
+ TESTQPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_, _Premult, +, 0, 1, S_DEPTH)
+
+#define I010AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvI601Constants, k, \
+ l, m)
+#define I010AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvI601Constants, k, \
+ l, m)
+#define J010AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
+ l, m)
+#define J010AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
+ l, m)
+#define F010AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvF709Constants, k, \
+ l, m)
+#define F010AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvF709Constants, k, \
+ l, m)
+#define H010AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
+ l, m)
+#define H010AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
+ l, m)
+#define U010AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
+ l, m)
+#define U010AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
+ l, m)
+#define V010AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvV2020Constants, k, \
+ l, m)
+#define V010AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvV2020Constants, k, \
+ l, m)
+#define I210AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvI601Constants, k, \
+ l, m)
+#define I210AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvI601Constants, k, \
+ l, m)
+#define J210AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
+ l, m)
+#define J210AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
+ l, m)
+#define F210AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvF709Constants, k, \
+ l, m)
+#define F210AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvF709Constants, k, \
+ l, m)
+#define H210AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
+ l, m)
+#define H210AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
+ l, m)
+#define U210AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
+ l, m)
+#define U210AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
+ l, m)
+#define V210AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvV2020Constants, k, \
+ l, m)
+#define V210AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvV2020Constants, k, \
+ l, m)
+#define I410AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvI601Constants, k, \
+ l, m)
+#define I410AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvI601Constants, k, \
+ l, m)
+#define J410AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
+ l, m)
+#define J410AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
+ l, m)
+#define F410AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvF709Constants, k, \
+ l, m)
+#define F410AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvF709Constants, k, \
+ l, m)
+#define H410AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
+ l, m)
+#define H410AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
+ l, m)
+#define U410AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
+ l, m)
+#define U410AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
+ l, m)
+#define V410AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvV2020Constants, k, \
+ l, m)
+#define V410AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvV2020Constants, k, \
+ l, m)
+
+// These conversions are only optimized for x86
+#if defined(ENABLE_SLOW_TESTS) || defined(__x86_64__) || defined(__i386__)
+TESTQPLANAR16TOB(I010Alpha, 2, 2, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(I010Alpha, 2, 2, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(J010Alpha, 2, 2, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(J010Alpha, 2, 2, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(H010Alpha, 2, 2, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(H010Alpha, 2, 2, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(F010Alpha, 2, 2, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(F010Alpha, 2, 2, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(U010Alpha, 2, 2, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(U010Alpha, 2, 2, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(V010Alpha, 2, 2, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(V010Alpha, 2, 2, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(I210Alpha, 2, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(I210Alpha, 2, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(J210Alpha, 2, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(J210Alpha, 2, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(H210Alpha, 2, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(H210Alpha, 2, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(F210Alpha, 2, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(F210Alpha, 2, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(U210Alpha, 2, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(U210Alpha, 2, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(V210Alpha, 2, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(V210Alpha, 2, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(I410Alpha, 1, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(I410Alpha, 1, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(J410Alpha, 1, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(J410Alpha, 1, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(H410Alpha, 1, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(H410Alpha, 1, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(F410Alpha, 1, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(F410Alpha, 1, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(U410Alpha, 1, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(U410Alpha, 1, 1, ABGR, 4, 4, 1, 10)
+TESTQPLANAR16TOB(V410Alpha, 1, 1, ARGB, 4, 4, 1, 10)
+TESTQPLANAR16TOB(V410Alpha, 1, 1, ABGR, 4, 4, 1, 10)
+#endif // ENABLE_SLOW_TESTS
+
+#define TESTBIPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, \
+ ALIGN, YALIGN, W1280, N, NEG, SOFF, DOFF, S_DEPTH) \
+ TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
+ const int kWidth = W1280; \
+ const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
+ const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
+ const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X) * 2; \
+ const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y) * 2; \
+ const int kBpc = 2; \
+ align_buffer_page_end(src_y, kWidth* kHeight* kBpc + SOFF); \
+ align_buffer_page_end(src_uv, kSizeUV* kBpc + SOFF); \
+ align_buffer_page_end(dst_argb_c, kStrideB* kHeight + DOFF); \
+ align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + DOFF); \
+ for (int i = 0; i < kWidth * kHeight; ++i) { \
+ reinterpret_cast<uint16_t*>(src_y + SOFF)[i] = \
+ (fastrand() & (((uint16_t)(-1)) << (16 - S_DEPTH))); \
+ } \
+ for (int i = 0; i < kSizeUV; ++i) { \
+ reinterpret_cast<uint16_t*>(src_uv + SOFF)[i] = \
+ (fastrand() & (((uint16_t)(-1)) << (16 - S_DEPTH))); \
+ } \
+ memset(dst_argb_c + DOFF, 1, kStrideB * kHeight); \
+ memset(dst_argb_opt + DOFF, 101, kStrideB * kHeight); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FMT_PLANAR##To##FMT_B(reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth, \
+ reinterpret_cast<uint16_t*>(src_uv + SOFF), \
+ kStrideUV, dst_argb_c + DOFF, kStrideB, kWidth, \
+ NEG kHeight); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FMT_PLANAR##To##FMT_B(reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth, \
+ reinterpret_cast<uint16_t*>(src_uv + SOFF), \
+ kStrideUV, dst_argb_opt + DOFF, kStrideB, kWidth, \
+ NEG kHeight); \
+ } \
+ for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) { \
+ EXPECT_EQ(dst_argb_c[i + DOFF], dst_argb_opt[i + DOFF]); \
+ } \
+ free_aligned_buffer_page_end(src_y); \
+ free_aligned_buffer_page_end(src_uv); \
+ free_aligned_buffer_page_end(dst_argb_c); \
+ free_aligned_buffer_page_end(dst_argb_opt); \
+ }
+
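A quick note on the source masks, since the shift above can look odd: the planar 16-bit macros earlier in this file fill samples with fastrand() & ((1 << S_DEPTH) - 1), i.e. LSB-aligned values (0..1023 for 10-bit I010-style input), whereas this bi-planar macro uses fastrand() & (((uint16_t)(-1)) << (16 - S_DEPTH)), which after the store into uint16_t keeps only the top S_DEPTH bits, the MSB-aligned layout that P010/P012/P016 use. Worked out per depth (effective 16-bit masks after truncation):

    // S_DEPTH = 10: 0xFFFF << 6 -> effective mask 0xFFC0 (low 6 bits zero)
    // S_DEPTH = 12: 0xFFFF << 4 -> effective mask 0xFFF0 (low 4 bits zero)
    // S_DEPTH = 16: 0xFFFF << 0 -> effective mask 0xFFFF (full 16-bit range)
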
+#define TESTBIPLANAR16TOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, \
+ ALIGN, YALIGN, S_DEPTH) \
+ TESTBIPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_ + 1, _Any, +, 0, 0, S_DEPTH) \
+ TESTBIPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_, _Unaligned, +, 1, 1, S_DEPTH) \
+ TESTBIPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_, _Invert, -, 0, 0, S_DEPTH) \
+ TESTBIPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
+ YALIGN, benchmark_width_, _Opt, +, 0, 0, S_DEPTH)
+
+#define P010ToARGB(a, b, c, d, e, f, g, h) \
+ P010ToARGBMatrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P210ToARGB(a, b, c, d, e, f, g, h) \
+ P210ToARGBMatrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P010ToAR30(a, b, c, d, e, f, g, h) \
+ P010ToAR30Matrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P210ToAR30(a, b, c, d, e, f, g, h) \
+ P210ToAR30Matrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+
+#define P012ToARGB(a, b, c, d, e, f, g, h) \
+ P012ToARGBMatrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P212ToARGB(a, b, c, d, e, f, g, h) \
+ P212ToARGBMatrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P012ToAR30(a, b, c, d, e, f, g, h) \
+ P012ToAR30Matrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P212ToAR30(a, b, c, d, e, f, g, h) \
+ P212ToAR30Matrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+
+#define P016ToARGB(a, b, c, d, e, f, g, h) \
+ P016ToARGBMatrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P216ToARGB(a, b, c, d, e, f, g, h) \
+ P216ToARGBMatrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P016ToAR30(a, b, c, d, e, f, g, h) \
+ P016ToAR30Matrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+#define P216ToAR30(a, b, c, d, e, f, g, h) \
+ P216ToAR30Matrix(a, b, c, d, e, f, &kYuvH709Constants, g, h)
+
+#if defined(ENABLE_SLOW_TESTS) || defined(__x86_64__) || defined(__i386__)
+TESTBIPLANAR16TOB(P010, 2, 2, ARGB, 4, 4, 1, 10)
+TESTBIPLANAR16TOB(P210, 2, 1, ARGB, 4, 4, 1, 10)
+TESTBIPLANAR16TOB(P012, 2, 2, ARGB, 4, 4, 1, 12)
+TESTBIPLANAR16TOB(P212, 2, 1, ARGB, 4, 4, 1, 12)
+TESTBIPLANAR16TOB(P016, 2, 2, ARGB, 4, 4, 1, 16)
+TESTBIPLANAR16TOB(P216, 2, 1, ARGB, 4, 4, 1, 16)
+#ifdef LITTLE_ENDIAN_ONLY_TEST
+TESTBIPLANAR16TOB(P010, 2, 2, AR30, 4, 4, 1, 10)
+TESTBIPLANAR16TOB(P210, 2, 1, AR30, 4, 4, 1, 10)
+TESTBIPLANAR16TOB(P012, 2, 2, AR30, 4, 4, 1, 12)
+TESTBIPLANAR16TOB(P212, 2, 1, AR30, 4, 4, 1, 12)
+TESTBIPLANAR16TOB(P016, 2, 2, AR30, 4, 4, 1, 16)
+TESTBIPLANAR16TOB(P216, 2, 1, AR30, 4, 4, 1, 16)
+#endif // LITTLE_ENDIAN_ONLY_TEST
+#endif // defined(ENABLE_SLOW_TESTS)
static int Clamp(int y) {
if (y < 0) {
diff --git a/third_party/libyuv/unit_test/cpu_test.cc b/third_party/libyuv/unit_test/cpu_test.cc
index 7264de0801..4035cf2bbc 100644
--- a/third_party/libyuv/unit_test/cpu_test.cc
+++ b/third_party/libyuv/unit_test/cpu_test.cc
@@ -72,26 +72,98 @@ TEST_F(LibYUVBaseTest, TestCpuHas) {
#endif
}
-TEST_F(LibYUVBaseTest, TestCpuCompilerEnabled) {
-#if defined(__aarch64__)
- printf("Arm64 build\n");
+TEST_F(LibYUVBaseTest, TestCompilerMacros) {
+ // Tests all macros used in public headers.
+#ifdef __ATOMIC_RELAXED
+ printf("__ATOMIC_RELAXED %d\n", __ATOMIC_RELAXED);
#endif
-#if defined(__aarch64__) || defined(__ARM_NEON__) || defined(LIBYUV_NEON)
- printf("Neon build enabled\n");
+#ifdef __cplusplus
+ printf("__cplusplus %ld\n", __cplusplus);
#endif
-#if defined(__x86_64__) || defined(_M_X64)
- printf("x64 build\n");
+#ifdef __clang_major__
+ printf("__clang_major__ %d\n", __clang_major__);
+#endif
+#ifdef __clang_minor__
+ printf("__clang_minor__ %d\n", __clang_minor__);
+#endif
+#ifdef __GNUC__
+ printf("__GNUC__ %d\n", __GNUC__);
+#endif
+#ifdef __GNUC_MINOR__
+ printf("__GNUC_MINOR__ %d\n", __GNUC_MINOR__);
+#endif
+#ifdef __i386__
+ printf("__i386__ %d\n", __i386__);
+#endif
+#ifdef __mips
+ printf("__mips %d\n", __mips);
+#endif
+#ifdef __mips_isa_rev
+ printf("__mips_isa_rev %d\n", __mips_isa_rev);
+#endif
+#ifdef __x86_64__
+ printf("__x86_64__ %d\n", __x86_64__);
#endif
#ifdef _MSC_VER
printf("_MSC_VER %d\n", _MSC_VER);
#endif
-#if !defined(LIBYUV_DISABLE_X86) && \
- (defined(GCC_HAS_AVX2) || defined(CLANG_HAS_AVX2) || \
- defined(VISUALC_HAS_AVX2))
- printf("Has AVX2 1\n");
-#else
- printf("Has AVX2 0\n");
-// If compiler does not support AVX2, the following function not expected:
+#ifdef __aarch64__
+ printf("__aarch64__ %d\n", __aarch64__);
+#endif
+#ifdef __APPLE__
+ printf("__APPLE__ %d\n", __APPLE__);
+#endif
+#ifdef __arm__
+ printf("__arm__ %d\n", __arm__);
+#endif
+#ifdef __clang__
+ printf("__clang__ %d\n", __clang__);
+#endif
+#ifdef __CLR_VER
+ printf("__CLR_VER %d\n", __CLR_VER);
+#endif
+#ifdef __CYGWIN__
+ printf("__CYGWIN__ %d\n", __CYGWIN__);
+#endif
+#ifdef __llvm__
+ printf("__llvm__ %d\n", __llvm__);
+#endif
+#ifdef __mips_msa
+ printf("__mips_msa %d\n", __mips_msa);
+#endif
+#ifdef __native_client__
+ printf("__native_client__ %d\n", __native_client__);
+#endif
+#ifdef __pic__
+ printf("__pic__ %d\n", __pic__);
+#endif
+#ifdef __pnacl__
+ printf("__pnacl__ %d\n", __pnacl__);
+#endif
+#ifdef _M_IX86
+ printf("_M_IX86 %d\n", _M_IX86);
+#endif
+#ifdef _M_X64
+ printf("_M_X64 %d\n", _M_X64);
+#endif
+#ifdef _MIPS_ARCH_LOONGSON3A
+ printf("_MIPS_ARCH_LOONGSON3A %d\n", _MIPS_ARCH_LOONGSON3A);
+#endif
+#ifdef _WIN32
+ printf("_WIN32 %d\n", _WIN32);
+#endif
+#ifdef GG_LONGLONG
+ printf("GG_LONGLONG %d\n", GG_LONGLONG);
+#endif
+#ifdef INT_TYPES_DEFINED
+ printf("INT_TYPES_DEFINED\n");
+#endif
+#ifdef __has_feature
+ printf("__has_feature\n");
+#if __has_feature(memory_sanitizer)
+ printf("__has_feature(memory_sanitizer) %d\n",
+ __has_feature(memory_sanitizer));
+#endif
#endif
}
diff --git a/third_party/libyuv/unit_test/planar_test.cc b/third_party/libyuv/unit_test/planar_test.cc
index fd1755cdca..5c60842136 100644
--- a/third_party/libyuv/unit_test/planar_test.cc
+++ b/third_party/libyuv/unit_test/planar_test.cc
@@ -155,7 +155,7 @@ static int TestAttenuateI(int width,
}
TEST_F(LibYUVPlanarTest, ARGBAttenuate_Any) {
- int max_diff = TestAttenuateI(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestAttenuateI(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0);
EXPECT_LE(max_diff, 2);
@@ -228,7 +228,7 @@ static int TestUnattenuateI(int width,
}
TEST_F(LibYUVPlanarTest, ARGBUnattenuate_Any) {
- int max_diff = TestUnattenuateI(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestUnattenuateI(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0);
EXPECT_LE(max_diff, 2);
@@ -1076,7 +1076,7 @@ TEST_F(LibYUVPlanarTest, TestInterpolatePlane) {
#define TESTTERP(FMT_A, BPP_A, STRIDE_A, FMT_B, BPP_B, STRIDE_B, W1280, TERP, \
N, NEG, OFF) \
TEST_F(LibYUVPlanarTest, ARGBInterpolate##TERP##N) { \
- const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
+ const int kWidth = W1280; \
const int kHeight = benchmark_height_; \
const int kStrideA = \
(kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A; \
@@ -1108,7 +1108,7 @@ TEST_F(LibYUVPlanarTest, TestInterpolatePlane) {
}
#define TESTINTERPOLATE(TERP) \
- TESTTERP(ARGB, 4, 1, ARGB, 4, 1, benchmark_width_ - 1, TERP, _Any, +, 0) \
+ TESTTERP(ARGB, 4, 1, ARGB, 4, 1, benchmark_width_ + 1, TERP, _Any, +, 0) \
TESTTERP(ARGB, 4, 1, ARGB, 4, 1, benchmark_width_, TERP, _Unaligned, +, 1) \
TESTTERP(ARGB, 4, 1, ARGB, 4, 1, benchmark_width_, TERP, _Invert, -, 0) \
TESTTERP(ARGB, 4, 1, ARGB, 4, 1, benchmark_width_, TERP, _Opt, +, 0)
@@ -1174,7 +1174,7 @@ static int TestBlend(int width,
TEST_F(LibYUVPlanarTest, ARGBBlend_Any) {
int max_diff =
- TestBlend(benchmark_width_ - 4, benchmark_height_, benchmark_iterations_,
+ TestBlend(benchmark_width_ + 1, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0, 1);
EXPECT_LE(max_diff, 1);
}
@@ -1280,7 +1280,7 @@ TEST_F(LibYUVPlanarTest, BlendPlane_Unaligned) {
disable_cpu_flags_, benchmark_cpu_info_, +1, 1);
}
TEST_F(LibYUVPlanarTest, BlendPlane_Any) {
- TestBlendPlane(benchmark_width_ - 4, benchmark_height_, benchmark_iterations_,
+ TestBlendPlane(benchmark_width_ + 1, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 1);
}
TEST_F(LibYUVPlanarTest, BlendPlane_Invert) {
@@ -1375,7 +1375,7 @@ TEST_F(LibYUVPlanarTest, I420Blend_Unaligned) {
// TODO(fbarchard): DISABLED because _Any uses C. Avoid C and re-enable.
TEST_F(LibYUVPlanarTest, DISABLED_I420Blend_Any) {
- TestI420Blend(benchmark_width_ - 4, benchmark_height_, benchmark_iterations_,
+ TestI420Blend(benchmark_width_ + 1, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0);
}
TEST_F(LibYUVPlanarTest, I420Blend_Invert) {
@@ -1524,7 +1524,7 @@ static int TestMultiply(int width,
}
TEST_F(LibYUVPlanarTest, ARGBMultiply_Any) {
- int max_diff = TestMultiply(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestMultiply(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0);
EXPECT_LE(max_diff, 1);
@@ -1599,7 +1599,7 @@ static int TestAdd(int width,
TEST_F(LibYUVPlanarTest, ARGBAdd_Any) {
int max_diff =
- TestAdd(benchmark_width_ - 1, benchmark_height_, benchmark_iterations_,
+ TestAdd(benchmark_width_ + 1, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0);
EXPECT_LE(max_diff, 1);
}
@@ -1672,7 +1672,7 @@ static int TestSubtract(int width,
}
TEST_F(LibYUVPlanarTest, ARGBSubtract_Any) {
- int max_diff = TestSubtract(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestSubtract(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0);
EXPECT_LE(max_diff, 1);
@@ -1745,7 +1745,7 @@ static int TestSobel(int width,
TEST_F(LibYUVPlanarTest, ARGBSobel_Any) {
int max_diff =
- TestSobel(benchmark_width_ - 1, benchmark_height_, benchmark_iterations_,
+ TestSobel(benchmark_width_ + 1, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0);
EXPECT_EQ(0, max_diff);
}
@@ -1818,7 +1818,7 @@ static int TestSobelToPlane(int width,
}
TEST_F(LibYUVPlanarTest, ARGBSobelToPlane_Any) {
- int max_diff = TestSobelToPlane(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestSobelToPlane(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0);
EXPECT_EQ(0, max_diff);
@@ -1890,7 +1890,7 @@ static int TestSobelXY(int width,
}
TEST_F(LibYUVPlanarTest, ARGBSobelXY_Any) {
- int max_diff = TestSobelXY(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestSobelXY(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0);
EXPECT_EQ(0, max_diff);
@@ -1966,29 +1966,35 @@ static int TestBlur(int width,
return max_diff;
}
+#if defined(ENABLE_SLOW_TESTS) || defined(__x86_64__) || defined(__i386__)
+#define DISABLED_ARM(name) name
+#else
+#define DISABLED_ARM(name) DISABLED_##name
+#endif
+
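The DISABLED_ARM() wrapper leans on gtest's naming convention: a test whose name starts with DISABLED_ is still compiled and registered, but skipped at run time unless --gtest_also_run_disabled_tests is passed. So on an arm build without ENABLE_SLOW_TESTS, for example:

    // TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlur_Opt))
    // expands to
    // TEST_F(LibYUVPlanarTest, DISABLED_ARGBBlur_Opt)  // registered, skipped by default
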
static const int kBlurSize = 55;
-TEST_F(LibYUVPlanarTest, ARGBBlur_Any) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlur_Any)) {
int max_diff =
- TestBlur(benchmark_width_ - 1, benchmark_height_, benchmark_iterations_,
+ TestBlur(benchmark_width_ + 1, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0, kBlurSize);
EXPECT_LE(max_diff, 1);
}
-TEST_F(LibYUVPlanarTest, ARGBBlur_Unaligned) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlur_Unaligned)) {
int max_diff =
TestBlur(benchmark_width_, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 1, kBlurSize);
EXPECT_LE(max_diff, 1);
}
-TEST_F(LibYUVPlanarTest, ARGBBlur_Invert) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlur_Invert)) {
int max_diff =
TestBlur(benchmark_width_, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, -1, 0, kBlurSize);
EXPECT_LE(max_diff, 1);
}
-TEST_F(LibYUVPlanarTest, ARGBBlur_Opt) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlur_Opt)) {
int max_diff =
TestBlur(benchmark_width_, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0, kBlurSize);
@@ -1996,35 +2002,35 @@ TEST_F(LibYUVPlanarTest, ARGBBlur_Opt) {
}
static const int kBlurSmallSize = 5;
-TEST_F(LibYUVPlanarTest, ARGBBlurSmall_Any) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlurSmall_Any)) {
int max_diff =
- TestBlur(benchmark_width_ - 1, benchmark_height_, benchmark_iterations_,
+ TestBlur(benchmark_width_ + 1, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0, kBlurSmallSize);
EXPECT_LE(max_diff, 1);
}
-TEST_F(LibYUVPlanarTest, ARGBBlurSmall_Unaligned) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlurSmall_Unaligned)) {
int max_diff =
TestBlur(benchmark_width_, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 1, kBlurSmallSize);
EXPECT_LE(max_diff, 1);
}
-TEST_F(LibYUVPlanarTest, ARGBBlurSmall_Invert) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlurSmall_Invert)) {
int max_diff =
TestBlur(benchmark_width_, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, -1, 0, kBlurSmallSize);
EXPECT_LE(max_diff, 1);
}
-TEST_F(LibYUVPlanarTest, ARGBBlurSmall_Opt) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(ARGBBlurSmall_Opt)) {
int max_diff =
TestBlur(benchmark_width_, benchmark_height_, benchmark_iterations_,
disable_cpu_flags_, benchmark_cpu_info_, +1, 0, kBlurSmallSize);
EXPECT_LE(max_diff, 1);
}
-TEST_F(LibYUVPlanarTest, TestARGBPolynomial) {
+TEST_F(LibYUVPlanarTest, DISABLED_ARM(TestARGBPolynomial)) {
SIMD_ALIGNED(uint8_t orig_pixels[1280][4]);
SIMD_ALIGNED(uint8_t dst_pixels_opt[1280][4]);
SIMD_ALIGNED(uint8_t dst_pixels_c[1280][4]);
@@ -2398,8 +2404,7 @@ TEST_F(LibYUVPlanarTest, TestARGBCopyAlpha) {
}
TEST_F(LibYUVPlanarTest, TestARGBExtractAlpha) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 4);
align_buffer_page_end(dst_pixels_opt, kPixels);
align_buffer_page_end(dst_pixels_c, kPixels);
@@ -2427,8 +2432,7 @@ TEST_F(LibYUVPlanarTest, TestARGBExtractAlpha) {
}
TEST_F(LibYUVPlanarTest, TestARGBCopyYToAlpha) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(orig_pixels, kPixels);
align_buffer_page_end(dst_pixels_opt, kPixels * 4);
align_buffer_page_end(dst_pixels_c, kPixels * 4);
@@ -2505,7 +2509,7 @@ static int TestARGBRect(int width,
}
TEST_F(LibYUVPlanarTest, ARGBRect_Any) {
- int max_diff = TestARGBRect(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestARGBRect(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0, 4);
EXPECT_EQ(0, max_diff);
@@ -2533,7 +2537,7 @@ TEST_F(LibYUVPlanarTest, ARGBRect_Opt) {
}
TEST_F(LibYUVPlanarTest, SetPlane_Any) {
- int max_diff = TestARGBRect(benchmark_width_ - 1, benchmark_height_,
+ int max_diff = TestARGBRect(benchmark_width_ + 1, benchmark_height_,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_, +1, 0, 1);
EXPECT_EQ(0, max_diff);
@@ -2561,35 +2565,25 @@ TEST_F(LibYUVPlanarTest, SetPlane_Opt) {
}
TEST_F(LibYUVPlanarTest, MergeUVPlane_Opt) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
- align_buffer_page_end(src_pixels, kPixels * 2);
- align_buffer_page_end(tmp_pixels_u, kPixels);
- align_buffer_page_end(tmp_pixels_v, kPixels);
+ const int kPixels = benchmark_width_ * benchmark_height_;
+ align_buffer_page_end(src_pixels_u, kPixels);
+ align_buffer_page_end(src_pixels_v, kPixels);
align_buffer_page_end(dst_pixels_opt, kPixels * 2);
align_buffer_page_end(dst_pixels_c, kPixels * 2);
- MemRandomize(src_pixels, kPixels * 2);
- MemRandomize(tmp_pixels_u, kPixels);
- MemRandomize(tmp_pixels_v, kPixels);
+ MemRandomize(src_pixels_u, kPixels);
+ MemRandomize(src_pixels_v, kPixels);
MemRandomize(dst_pixels_opt, kPixels * 2);
MemRandomize(dst_pixels_c, kPixels * 2);
MaskCpuFlags(disable_cpu_flags_);
- SplitUVPlane(src_pixels, benchmark_width_ * 2, tmp_pixels_u, benchmark_width_,
- tmp_pixels_v, benchmark_width_, benchmark_width_,
- benchmark_height_);
- MergeUVPlane(tmp_pixels_u, benchmark_width_, tmp_pixels_v, benchmark_width_,
+ MergeUVPlane(src_pixels_u, benchmark_width_, src_pixels_v, benchmark_width_,
dst_pixels_c, benchmark_width_ * 2, benchmark_width_,
benchmark_height_);
MaskCpuFlags(benchmark_cpu_info_);
- SplitUVPlane(src_pixels, benchmark_width_ * 2, tmp_pixels_u, benchmark_width_,
- tmp_pixels_v, benchmark_width_, benchmark_width_,
- benchmark_height_);
-
for (int i = 0; i < benchmark_iterations_; ++i) {
- MergeUVPlane(tmp_pixels_u, benchmark_width_, tmp_pixels_v, benchmark_width_,
+ MergeUVPlane(src_pixels_u, benchmark_width_, src_pixels_v, benchmark_width_,
dst_pixels_opt, benchmark_width_ * 2, benchmark_width_,
benchmark_height_);
}
@@ -2598,60 +2592,127 @@ TEST_F(LibYUVPlanarTest, MergeUVPlane_Opt) {
EXPECT_EQ(dst_pixels_c[i], dst_pixels_opt[i]);
}
- free_aligned_buffer_page_end(src_pixels);
- free_aligned_buffer_page_end(tmp_pixels_u);
- free_aligned_buffer_page_end(tmp_pixels_v);
+ free_aligned_buffer_page_end(src_pixels_u);
+ free_aligned_buffer_page_end(src_pixels_v);
+ free_aligned_buffer_page_end(dst_pixels_opt);
+ free_aligned_buffer_page_end(dst_pixels_c);
+}
+
+// 16 bit channel split and merge
+TEST_F(LibYUVPlanarTest, MergeUVPlane_16_Opt) {
+ const int kPixels = benchmark_width_ * benchmark_height_;
+ align_buffer_page_end(src_pixels_u, kPixels * 2);
+ align_buffer_page_end(src_pixels_v, kPixels * 2);
+ align_buffer_page_end(dst_pixels_opt, kPixels * 2 * 2);
+ align_buffer_page_end(dst_pixels_c, kPixels * 2 * 2);
+ MemRandomize(src_pixels_u, kPixels * 2);
+ MemRandomize(src_pixels_v, kPixels * 2);
+ MemRandomize(dst_pixels_opt, kPixels * 2 * 2);
+ MemRandomize(dst_pixels_c, kPixels * 2 * 2);
+
+ MaskCpuFlags(disable_cpu_flags_);
+ MergeUVPlane_16((const uint16_t*)src_pixels_u, benchmark_width_,
+ (const uint16_t*)src_pixels_v, benchmark_width_,
+ (uint16_t*)dst_pixels_c, benchmark_width_ * 2,
+ benchmark_width_, benchmark_height_, 12);
+ MaskCpuFlags(benchmark_cpu_info_);
+
+ for (int i = 0; i < benchmark_iterations_; ++i) {
+ MergeUVPlane_16((const uint16_t*)src_pixels_u, benchmark_width_,
+ (const uint16_t*)src_pixels_v, benchmark_width_,
+ (uint16_t*)dst_pixels_opt, benchmark_width_ * 2,
+ benchmark_width_, benchmark_height_, 12);
+ }
+
+ for (int i = 0; i < kPixels * 2 * 2; ++i) {
+ EXPECT_EQ(dst_pixels_c[i], dst_pixels_opt[i]);
+ }
+ free_aligned_buffer_page_end(src_pixels_u);
+ free_aligned_buffer_page_end(src_pixels_v);
free_aligned_buffer_page_end(dst_pixels_opt);
free_aligned_buffer_page_end(dst_pixels_c);
}
TEST_F(LibYUVPlanarTest, SplitUVPlane_Opt) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 2);
- align_buffer_page_end(tmp_pixels_u, kPixels);
- align_buffer_page_end(tmp_pixels_v, kPixels);
- align_buffer_page_end(dst_pixels_opt, kPixels * 2);
- align_buffer_page_end(dst_pixels_c, kPixels * 2);
+ align_buffer_page_end(dst_pixels_u_c, kPixels);
+ align_buffer_page_end(dst_pixels_v_c, kPixels);
+ align_buffer_page_end(dst_pixels_u_opt, kPixels);
+ align_buffer_page_end(dst_pixels_v_opt, kPixels);
MemRandomize(src_pixels, kPixels * 2);
- MemRandomize(tmp_pixels_u, kPixels);
- MemRandomize(tmp_pixels_v, kPixels);
- MemRandomize(dst_pixels_opt, kPixels * 2);
- MemRandomize(dst_pixels_c, kPixels * 2);
+ MemRandomize(dst_pixels_u_c, kPixels);
+ MemRandomize(dst_pixels_v_c, kPixels);
+ MemRandomize(dst_pixels_u_opt, kPixels);
+ MemRandomize(dst_pixels_v_opt, kPixels);
MaskCpuFlags(disable_cpu_flags_);
- SplitUVPlane(src_pixels, benchmark_width_ * 2, tmp_pixels_u, benchmark_width_,
- tmp_pixels_v, benchmark_width_, benchmark_width_,
- benchmark_height_);
- MergeUVPlane(tmp_pixels_u, benchmark_width_, tmp_pixels_v, benchmark_width_,
- dst_pixels_c, benchmark_width_ * 2, benchmark_width_,
- benchmark_height_);
+ SplitUVPlane(src_pixels, benchmark_width_ * 2, dst_pixels_u_c,
+ benchmark_width_, dst_pixels_v_c, benchmark_width_,
+ benchmark_width_, benchmark_height_);
MaskCpuFlags(benchmark_cpu_info_);
for (int i = 0; i < benchmark_iterations_; ++i) {
- SplitUVPlane(src_pixels, benchmark_width_ * 2, tmp_pixels_u,
- benchmark_width_, tmp_pixels_v, benchmark_width_,
+ SplitUVPlane(src_pixels, benchmark_width_ * 2, dst_pixels_u_opt,
+ benchmark_width_, dst_pixels_v_opt, benchmark_width_,
benchmark_width_, benchmark_height_);
}
- MergeUVPlane(tmp_pixels_u, benchmark_width_, tmp_pixels_v, benchmark_width_,
- dst_pixels_opt, benchmark_width_ * 2, benchmark_width_,
- benchmark_height_);
- for (int i = 0; i < kPixels * 2; ++i) {
- EXPECT_EQ(dst_pixels_c[i], dst_pixels_opt[i]);
+ for (int i = 0; i < kPixels; ++i) {
+ EXPECT_EQ(dst_pixels_u_c[i], dst_pixels_u_opt[i]);
+ EXPECT_EQ(dst_pixels_v_c[i], dst_pixels_v_opt[i]);
}
free_aligned_buffer_page_end(src_pixels);
- free_aligned_buffer_page_end(tmp_pixels_u);
- free_aligned_buffer_page_end(tmp_pixels_v);
- free_aligned_buffer_page_end(dst_pixels_opt);
- free_aligned_buffer_page_end(dst_pixels_c);
+ free_aligned_buffer_page_end(dst_pixels_u_c);
+ free_aligned_buffer_page_end(dst_pixels_v_c);
+ free_aligned_buffer_page_end(dst_pixels_u_opt);
+ free_aligned_buffer_page_end(dst_pixels_v_opt);
+}
+
+// 16 bit channel split
+TEST_F(LibYUVPlanarTest, SplitUVPlane_16_Opt) {
+ const int kPixels = benchmark_width_ * benchmark_height_;
+ align_buffer_page_end(src_pixels, kPixels * 2 * 2);
+ align_buffer_page_end(dst_pixels_u_c, kPixels * 2);
+ align_buffer_page_end(dst_pixels_v_c, kPixels * 2);
+ align_buffer_page_end(dst_pixels_u_opt, kPixels * 2);
+ align_buffer_page_end(dst_pixels_v_opt, kPixels * 2);
+ MemRandomize(src_pixels, kPixels * 2 * 2);
+ MemRandomize(dst_pixels_u_c, kPixels * 2);
+ MemRandomize(dst_pixels_v_c, kPixels * 2);
+ MemRandomize(dst_pixels_u_opt, kPixels * 2);
+ MemRandomize(dst_pixels_v_opt, kPixels * 2);
+
+ MaskCpuFlags(disable_cpu_flags_);
+ SplitUVPlane_16((const uint16_t*)src_pixels, benchmark_width_ * 2,
+ (uint16_t*)dst_pixels_u_c, benchmark_width_,
+ (uint16_t*)dst_pixels_v_c, benchmark_width_, benchmark_width_,
+ benchmark_height_, 10);
+ MaskCpuFlags(benchmark_cpu_info_);
+
+ for (int i = 0; i < benchmark_iterations_; ++i) {
+ SplitUVPlane_16((const uint16_t*)src_pixels, benchmark_width_ * 2,
+ (uint16_t*)dst_pixels_u_opt, benchmark_width_,
+ (uint16_t*)dst_pixels_v_opt, benchmark_width_,
+ benchmark_width_, benchmark_height_, 10);
+ }
+
+ for (int i = 0; i < kPixels * 2; ++i) {
+ EXPECT_EQ(dst_pixels_u_c[i], dst_pixels_u_opt[i]);
+ EXPECT_EQ(dst_pixels_v_c[i], dst_pixels_v_opt[i]);
+ }
+ free_aligned_buffer_page_end(src_pixels);
+ free_aligned_buffer_page_end(dst_pixels_u_c);
+ free_aligned_buffer_page_end(dst_pixels_v_c);
+ free_aligned_buffer_page_end(dst_pixels_u_opt);
+ free_aligned_buffer_page_end(dst_pixels_v_opt);
}
TEST_F(LibYUVPlanarTest, SwapUVPlane_Opt) {
// Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 2);
align_buffer_page_end(dst_pixels_opt, kPixels * 2);
align_buffer_page_end(dst_pixels_c, kPixels * 2);
@@ -2681,7 +2742,7 @@ TEST_F(LibYUVPlanarTest, SwapUVPlane_Opt) {
TEST_F(LibYUVPlanarTest, MergeRGBPlane_Opt) {
// Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 3);
align_buffer_page_end(tmp_pixels_r, kPixels);
align_buffer_page_end(tmp_pixels_g, kPixels);
@@ -2730,7 +2791,7 @@ TEST_F(LibYUVPlanarTest, MergeRGBPlane_Opt) {
TEST_F(LibYUVPlanarTest, SplitRGBPlane_Opt) {
// Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 3);
align_buffer_page_end(tmp_pixels_r, kPixels);
align_buffer_page_end(tmp_pixels_g, kPixels);
@@ -2777,8 +2838,7 @@ TEST_F(LibYUVPlanarTest, SplitRGBPlane_Opt) {
}
TEST_F(LibYUVPlanarTest, MergeARGBPlane_Opt) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 4);
align_buffer_page_end(tmp_pixels_r, kPixels);
align_buffer_page_end(tmp_pixels_g, kPixels);
@@ -2832,8 +2892,7 @@ TEST_F(LibYUVPlanarTest, MergeARGBPlane_Opt) {
}
TEST_F(LibYUVPlanarTest, SplitARGBPlane_Opt) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 4);
align_buffer_page_end(tmp_pixels_r, kPixels);
align_buffer_page_end(tmp_pixels_g, kPixels);
@@ -2887,8 +2946,7 @@ TEST_F(LibYUVPlanarTest, SplitARGBPlane_Opt) {
}
TEST_F(LibYUVPlanarTest, MergeXRGBPlane_Opt) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 4);
align_buffer_page_end(tmp_pixels_r, kPixels);
align_buffer_page_end(tmp_pixels_g, kPixels);
@@ -2938,8 +2996,7 @@ TEST_F(LibYUVPlanarTest, MergeXRGBPlane_Opt) {
}
TEST_F(LibYUVPlanarTest, SplitXRGBPlane_Opt) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels, kPixels * 4);
align_buffer_page_end(tmp_pixels_r, kPixels);
align_buffer_page_end(tmp_pixels_g, kPixels);
@@ -2987,11 +3044,166 @@ TEST_F(LibYUVPlanarTest, SplitXRGBPlane_Opt) {
free_aligned_buffer_page_end(dst_pixels_c);
}
+// Merge 4 channels
+#define TESTQPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, W1280, N, NEG, OFF) \
+ TEST_F(LibYUVPlanarTest, FUNC##Plane_##DEPTH##N) { \
+ const int kWidth = W1280; \
+ const int kPixels = kWidth * benchmark_height_; \
+ align_buffer_page_end(src_memory_r, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(src_memory_g, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(src_memory_b, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(src_memory_a, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(dst_memory_c, kPixels * 4 * sizeof(DTYPE)); \
+ align_buffer_page_end(dst_memory_opt, kPixels * 4 * sizeof(DTYPE)); \
+ MemRandomize(src_memory_r, kPixels * sizeof(STYPE) + OFF); \
+ MemRandomize(src_memory_g, kPixels * sizeof(STYPE) + OFF); \
+ MemRandomize(src_memory_b, kPixels * sizeof(STYPE) + OFF); \
+ MemRandomize(src_memory_a, kPixels * sizeof(STYPE) + OFF); \
+ memset(dst_memory_c, 0, kPixels * 4 * sizeof(DTYPE)); \
+ memset(dst_memory_opt, 0, kPixels * 4 * sizeof(DTYPE)); \
+ STYPE* src_pixels_r = reinterpret_cast<STYPE*>(src_memory_r + OFF); \
+ STYPE* src_pixels_g = reinterpret_cast<STYPE*>(src_memory_g + OFF); \
+ STYPE* src_pixels_b = reinterpret_cast<STYPE*>(src_memory_b + OFF); \
+ STYPE* src_pixels_a = reinterpret_cast<STYPE*>(src_memory_a + OFF); \
+ DTYPE* dst_pixels_c = reinterpret_cast<DTYPE*>(dst_memory_c); \
+ DTYPE* dst_pixels_opt = reinterpret_cast<DTYPE*>(dst_memory_opt); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FUNC##Plane(src_pixels_r, kWidth, src_pixels_g, kWidth, src_pixels_b, \
+ kWidth, src_pixels_a, kWidth, dst_pixels_c, kWidth * 4, \
+ kWidth, NEG benchmark_height_, DEPTH); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FUNC##Plane(src_pixels_r, kWidth, src_pixels_g, kWidth, src_pixels_b, \
+ kWidth, src_pixels_a, kWidth, dst_pixels_opt, kWidth * 4, \
+ kWidth, NEG benchmark_height_, DEPTH); \
+ } \
+ for (int i = 0; i < kPixels * 4; ++i) { \
+ EXPECT_EQ(dst_pixels_c[i], dst_pixels_opt[i]); \
+ } \
+ free_aligned_buffer_page_end(src_memory_r); \
+ free_aligned_buffer_page_end(src_memory_g); \
+ free_aligned_buffer_page_end(src_memory_b); \
+ free_aligned_buffer_page_end(src_memory_a); \
+ free_aligned_buffer_page_end(dst_memory_c); \
+ free_aligned_buffer_page_end(dst_memory_opt); \
+ }
+
+// Merge 3 channel RGB into 4 channel XRGB with opaque alpha
+#define TESTQPLANAROTOPI(FUNC, STYPE, DTYPE, DEPTH, W1280, N, NEG, OFF) \
+ TEST_F(LibYUVPlanarTest, FUNC##Plane_Opaque_##DEPTH##N) { \
+ const int kWidth = W1280; \
+ const int kPixels = kWidth * benchmark_height_; \
+ align_buffer_page_end(src_memory_r, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(src_memory_g, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(src_memory_b, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(dst_memory_c, kPixels * 4 * sizeof(DTYPE)); \
+ align_buffer_page_end(dst_memory_opt, kPixels * 4 * sizeof(DTYPE)); \
+ MemRandomize(src_memory_r, kPixels * sizeof(STYPE) + OFF); \
+ MemRandomize(src_memory_g, kPixels * sizeof(STYPE) + OFF); \
+ MemRandomize(src_memory_b, kPixels * sizeof(STYPE) + OFF); \
+ memset(dst_memory_c, 0, kPixels * 4 * sizeof(DTYPE)); \
+ memset(dst_memory_opt, 0, kPixels * 4 * sizeof(DTYPE)); \
+ STYPE* src_pixels_r = reinterpret_cast<STYPE*>(src_memory_r + OFF); \
+ STYPE* src_pixels_g = reinterpret_cast<STYPE*>(src_memory_g + OFF); \
+ STYPE* src_pixels_b = reinterpret_cast<STYPE*>(src_memory_b + OFF); \
+ DTYPE* dst_pixels_c = reinterpret_cast<DTYPE*>(dst_memory_c); \
+ DTYPE* dst_pixels_opt = reinterpret_cast<DTYPE*>(dst_memory_opt); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FUNC##Plane(src_pixels_r, kWidth, src_pixels_g, kWidth, src_pixels_b, \
+ kWidth, NULL, 0, dst_pixels_c, kWidth * 4, kWidth, \
+ NEG benchmark_height_, DEPTH); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FUNC##Plane(src_pixels_r, kWidth, src_pixels_g, kWidth, src_pixels_b, \
+ kWidth, NULL, 0, dst_pixels_opt, kWidth * 4, kWidth, \
+ NEG benchmark_height_, DEPTH); \
+ } \
+ for (int i = 0; i < kPixels * 4; ++i) { \
+ EXPECT_EQ(dst_pixels_c[i], dst_pixels_opt[i]); \
+ } \
+ free_aligned_buffer_page_end(src_memory_r); \
+ free_aligned_buffer_page_end(src_memory_g); \
+ free_aligned_buffer_page_end(src_memory_b); \
+ free_aligned_buffer_page_end(dst_memory_c); \
+ free_aligned_buffer_page_end(dst_memory_opt); \
+ }
+
+#define TESTQPLANARTOP(FUNC, STYPE, DTYPE, DEPTH) \
+ TESTQPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_ + 1, _Any, +, 0) \
+ TESTQPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Unaligned, +, \
+ 1) \
+ TESTQPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Invert, -, 0) \
+ TESTQPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Opt, +, 0) \
+ TESTQPLANAROTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_ + 1, _Any, +, \
+ 0) \
+ TESTQPLANAROTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Unaligned, +, \
+ 1) \
+ TESTQPLANAROTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Invert, -, 0) \
+ TESTQPLANAROTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Opt, +, 0)
+
+TESTQPLANARTOP(MergeAR64, uint16_t, uint16_t, 10)
+TESTQPLANARTOP(MergeAR64, uint16_t, uint16_t, 12)
+TESTQPLANARTOP(MergeAR64, uint16_t, uint16_t, 16)
+TESTQPLANARTOP(MergeARGB16To8, uint16_t, uint8_t, 10)
+TESTQPLANARTOP(MergeARGB16To8, uint16_t, uint8_t, 12)
+TESTQPLANARTOP(MergeARGB16To8, uint16_t, uint8_t, 16)
+
+#define TESTTPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, W1280, N, NEG, OFF) \
+ TEST_F(LibYUVPlanarTest, FUNC##Plane_##DEPTH##N) { \
+ const int kWidth = W1280; \
+ const int kPixels = kWidth * benchmark_height_; \
+ align_buffer_page_end(src_memory_r, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(src_memory_g, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(src_memory_b, kPixels * sizeof(STYPE) + OFF); \
+ align_buffer_page_end(dst_memory_c, kPixels * 4 * sizeof(DTYPE)); \
+ align_buffer_page_end(dst_memory_opt, kPixels * 4 * sizeof(DTYPE)); \
+ MemRandomize(src_memory_r, kPixels * sizeof(STYPE) + OFF); \
+ MemRandomize(src_memory_g, kPixels * sizeof(STYPE) + OFF); \
+ MemRandomize(src_memory_b, kPixels * sizeof(STYPE) + OFF); \
+ STYPE* src_pixels_r = reinterpret_cast<STYPE*>(src_memory_r + OFF); \
+ STYPE* src_pixels_g = reinterpret_cast<STYPE*>(src_memory_g + OFF); \
+ STYPE* src_pixels_b = reinterpret_cast<STYPE*>(src_memory_b + OFF); \
+ DTYPE* dst_pixels_c = reinterpret_cast<DTYPE*>(dst_memory_c); \
+ DTYPE* dst_pixels_opt = reinterpret_cast<DTYPE*>(dst_memory_opt); \
+ memset(dst_pixels_c, 1, kPixels * 4 * sizeof(DTYPE)); \
+ memset(dst_pixels_opt, 2, kPixels * 4 * sizeof(DTYPE)); \
+ MaskCpuFlags(disable_cpu_flags_); \
+ FUNC##Plane(src_pixels_r, kWidth, src_pixels_g, kWidth, src_pixels_b, \
+ kWidth, dst_pixels_c, kWidth * 4, kWidth, \
+ NEG benchmark_height_, DEPTH); \
+ MaskCpuFlags(benchmark_cpu_info_); \
+ for (int i = 0; i < benchmark_iterations_; ++i) { \
+ FUNC##Plane(src_pixels_r, kWidth, src_pixels_g, kWidth, src_pixels_b, \
+ kWidth, dst_pixels_opt, kWidth * 4, kWidth, \
+ NEG benchmark_height_, DEPTH); \
+ } \
+ for (int i = 0; i < kPixels * 4; ++i) { \
+ EXPECT_EQ(dst_pixels_c[i], dst_pixels_opt[i]); \
+ } \
+ free_aligned_buffer_page_end(src_memory_r); \
+ free_aligned_buffer_page_end(src_memory_g); \
+ free_aligned_buffer_page_end(src_memory_b); \
+ free_aligned_buffer_page_end(dst_memory_c); \
+ free_aligned_buffer_page_end(dst_memory_opt); \
+ }
+
+#define TESTTPLANARTOP(FUNC, STYPE, DTYPE, DEPTH) \
+ TESTTPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_ + 1, _Any, +, 0) \
+ TESTTPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Unaligned, +, \
+ 1) \
+ TESTTPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Invert, -, 0) \
+ TESTTPLANARTOPI(FUNC, STYPE, DTYPE, DEPTH, benchmark_width_, _Opt, +, 0)
+
+TESTTPLANARTOP(MergeXR30, uint16_t, uint8_t, 10)
+TESTTPLANARTOP(MergeXR30, uint16_t, uint8_t, 12)
+TESTTPLANARTOP(MergeXR30, uint16_t, uint8_t, 16)
+
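The tri-planar variant above drops the optional fourth plane: MergeXR30 packs three high bit depth planes into XR30, a 32-bit pixel with 10 bits per color channel, which is why the destination stride is kWidth * 4 even though DTYPE is uint8_t. A minimal sketch of the call TESTTPLANARTOPI generates for the _Opt case at depth 10, argument order taken from the macro body:

  // Sketch only: three uint16_t planes in, packed 32-bit XR30 pixels out.
  MergeXR30Plane(src_pixels_r, kWidth, src_pixels_g, kWidth, src_pixels_b,
                 kWidth, dst_pixels_opt, kWidth * 4, kWidth,
                 benchmark_height_, 10);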
// TODO(fbarchard): improve test for platforms and cpu detect
#ifdef HAS_MERGEUVROW_16_AVX2
TEST_F(LibYUVPlanarTest, MergeUVRow_16_Opt) {
// Round count up to multiple of 16
const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+
align_buffer_page_end(src_pixels_u, kPixels * 2);
align_buffer_page_end(src_pixels_v, kPixels * 2);
align_buffer_page_end(dst_pixels_uv_opt, kPixels * 2 * 2);
@@ -3035,8 +3247,9 @@ TEST_F(LibYUVPlanarTest, MergeUVRow_16_Opt) {
// TODO(fbarchard): Improve test for more platforms.
#ifdef HAS_MULTIPLYROW_16_AVX2
TEST_F(LibYUVPlanarTest, MultiplyRow_16_Opt) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ // Round count up to multiple of 32
+ const int kPixels = (benchmark_width_ * benchmark_height_ + 31) & ~31;
+
align_buffer_page_end(src_pixels_y, kPixels * 2);
align_buffer_page_end(dst_pixels_y_opt, kPixels * 2);
align_buffer_page_end(dst_pixels_y_c, kPixels * 2);
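The (n + 31) & ~31 expression above is the usual power-of-two round-up trick: adding alignment - 1 and masking off the low bits rounds n up to the next multiple of 32, presumably matching how many elements the AVX2 row kernel consumes per iteration (the older + 15 / ~15 form rounded to 16). A tiny self-contained check:

  // Round n up to the next multiple of 32 (n >= 0); the same idea works for any power of two.
  static inline int RoundUpTo32(int n) { return (n + 31) & ~31; }
  // RoundUpTo32(1) == 32, RoundUpTo32(32) == 32, RoundUpTo32(33) == 64.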
@@ -3072,8 +3285,7 @@ TEST_F(LibYUVPlanarTest, MultiplyRow_16_Opt) {
#endif // HAS_MULTIPLYROW_16_AVX2
TEST_F(LibYUVPlanarTest, Convert16To8Plane) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels_y, kPixels * 2);
align_buffer_page_end(dst_pixels_y_opt, kPixels);
align_buffer_page_end(dst_pixels_y_c, kPixels);
@@ -3152,8 +3364,7 @@ TEST_F(LibYUVPlanarTest, Convert16To8Row_Opt) {
#endif // ENABLE_ROW_TESTS
TEST_F(LibYUVPlanarTest, Convert8To16Plane) {
- // Round count up to multiple of 16
- const int kPixels = (benchmark_width_ * benchmark_height_ + 15) & ~15;
+ const int kPixels = benchmark_width_ * benchmark_height_;
align_buffer_page_end(src_pixels_y, kPixels);
align_buffer_page_end(dst_pixels_y_opt, kPixels * 2);
align_buffer_page_end(dst_pixels_y_c, kPixels * 2);
diff --git a/third_party/libyuv/unit_test/rotate_argb_test.cc b/third_party/libyuv/unit_test/rotate_argb_test.cc
index 3208b66a2a..01ed69ca55 100644
--- a/third_party/libyuv/unit_test/rotate_argb_test.cc
+++ b/third_party/libyuv/unit_test/rotate_argb_test.cc
@@ -156,29 +156,29 @@ TEST_F(LibYUVRotateTest, RotatePlane270_Opt) {
}
TEST_F(LibYUVRotateTest, DISABLED_RotatePlane0_Odd) {
- TestRotatePlane(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate0,
+ TestRotatePlane(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate0,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_RotatePlane90_Odd) {
- TestRotatePlane(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate90,
+ TestRotatePlane(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate90,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_RotatePlane180_Odd) {
- TestRotatePlane(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate180,
+ TestRotatePlane(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate180,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_RotatePlane270_Odd) {
- TestRotatePlane(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate270,
+ TestRotatePlane(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate270,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
diff --git a/third_party/libyuv/unit_test/rotate_test.cc b/third_party/libyuv/unit_test/rotate_test.cc
index 61941e63e0..1bab584fa1 100644
--- a/third_party/libyuv/unit_test/rotate_test.cc
+++ b/third_party/libyuv/unit_test/rotate_test.cc
@@ -108,29 +108,29 @@ TEST_F(LibYUVRotateTest, I420Rotate270_Opt) {
// Odd width tests work but disabled because they use C code and can be
// tested by passing an odd width command line or environment variable.
TEST_F(LibYUVRotateTest, DISABLED_I420Rotate0_Odd) {
- I420TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate0,
+ I420TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate0,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_I420Rotate90_Odd) {
- I420TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate90,
+ I420TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate90,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_I420Rotate180_Odd) {
- I420TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate180,
+ I420TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate180,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_I420Rotate270_Odd) {
- I420TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate270,
+ I420TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate270,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
@@ -225,29 +225,29 @@ TEST_F(LibYUVRotateTest, I444Rotate270_Opt) {
// Odd width tests work but disabled because they use C code and can be
// tested by passing an odd width command line or environment variable.
TEST_F(LibYUVRotateTest, DISABLED_I444Rotate0_Odd) {
- I444TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate0,
+ I444TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate0,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_I444Rotate90_Odd) {
- I444TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate90,
+ I444TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate90,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_I444Rotate180_Odd) {
- I444TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate180,
+ I444TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate180,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_I444Rotate270_Odd) {
- I444TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate270,
+ I444TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate270,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
@@ -340,29 +340,29 @@ TEST_F(LibYUVRotateTest, NV12Rotate270_Opt) {
}
TEST_F(LibYUVRotateTest, DISABLED_NV12Rotate0_Odd) {
- NV12TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate0,
+ NV12TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate0,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_NV12Rotate90_Odd) {
- NV12TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate90,
+ NV12TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate90,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_NV12Rotate180_Odd) {
- NV12TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_width_ - 3, benchmark_height_ - 1, kRotate180,
+ NV12TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_width_ + 1, benchmark_height_ + 1, kRotate180,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
TEST_F(LibYUVRotateTest, DISABLED_NV12Rotate270_Odd) {
- NV12TestRotate(benchmark_width_ - 3, benchmark_height_ - 1,
- benchmark_height_ - 1, benchmark_width_ - 3, kRotate270,
+ NV12TestRotate(benchmark_width_ + 1, benchmark_height_ + 1,
+ benchmark_height_ + 1, benchmark_width_ + 1, kRotate270,
benchmark_iterations_, disable_cpu_flags_,
benchmark_cpu_info_);
}
diff --git a/third_party/libyuv/unit_test/scale_argb_test.cc b/third_party/libyuv/unit_test/scale_argb_test.cc
index ac9766124f..48ad75eafd 100644
--- a/third_party/libyuv/unit_test/scale_argb_test.cc
+++ b/third_party/libyuv/unit_test/scale_argb_test.cc
@@ -114,8 +114,8 @@ static int ARGBTestFilter(int src_width,
return max_diff;
}
-static const int kTileX = 8;
-static const int kTileY = 8;
+static const int kTileX = 64;
+static const int kTileY = 64;
static int TileARGBScale(const uint8_t* src_argb,
int src_stride_argb,
@@ -232,7 +232,7 @@ static int ARGBClipTestFilter(int src_width,
#define DX(x, nom, denom) static_cast<int>((Abs(x) / nom) * nom)
#define SX(x, nom, denom) static_cast<int>((x / nom) * denom)
-#define TEST_FACTOR1(name, filter, nom, denom, max_diff) \
+#define TEST_FACTOR1(DISABLED_, name, filter, nom, denom, max_diff) \
TEST_F(LibYUVScaleTest, ARGBScaleDownBy##name##_##filter) { \
int diff = ARGBTestFilter( \
SX(benchmark_width_, nom, denom), SX(benchmark_height_, nom, denom), \
@@ -241,7 +241,7 @@ static int ARGBClipTestFilter(int src_width,
benchmark_cpu_info_); \
EXPECT_LE(diff, max_diff); \
} \
- TEST_F(LibYUVScaleTest, ARGBScaleDownClipBy##name##_##filter) { \
+ TEST_F(LibYUVScaleTest, DISABLED_##ARGBScaleDownClipBy##name##_##filter) { \
int diff = ARGBClipTestFilter( \
SX(benchmark_width_, nom, denom), SX(benchmark_height_, nom, denom), \
DX(benchmark_width_, nom, denom), DX(benchmark_height_, nom, denom), \
@@ -251,11 +251,19 @@ static int ARGBClipTestFilter(int src_width,
// Test a scale factor with all 4 filters. Expect unfiltered to be exact, but
// filtered results may differ slightly, since SSSE3, Neon and C use different
// fixed point implementations.
-#define TEST_FACTOR(name, nom, denom) \
- TEST_FACTOR1(name, None, nom, denom, 0) \
- TEST_FACTOR1(name, Linear, nom, denom, 3) \
- TEST_FACTOR1(name, Bilinear, nom, denom, 3) \
- TEST_FACTOR1(name, Box, nom, denom, 3)
+#ifdef ENABLE_SLOW_TESTS
+#define TEST_FACTOR(name, nom, denom) \
+ TEST_FACTOR1(, name, None, nom, denom, 0) \
+ TEST_FACTOR1(, name, Linear, nom, denom, 3) \
+ TEST_FACTOR1(, name, Bilinear, nom, denom, 3) \
+ TEST_FACTOR1(, name, Box, nom, denom, 3)
+#else
+#define TEST_FACTOR(name, nom, denom) \
+ TEST_FACTOR1(DISABLED_, name, None, nom, denom, 0) \
+ TEST_FACTOR1(DISABLED_, name, Linear, nom, denom, 3) \
+ TEST_FACTOR1(DISABLED_, name, Bilinear, nom, denom, 3) \
+ TEST_FACTOR1(DISABLED_, name, Box, nom, denom, 3)
+#endif
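The leading macro parameter (empty or DISABLED_) is pasted straight into the googletest test name, so without ENABLE_SLOW_TESTS the clip-scaling tests are still registered but carry the DISABLED_ prefix, which gtest skips unless --gtest_also_run_disabled_tests is passed (optionally with a --gtest_filter naming them). A sketch of the expansion for the Box filter of the 1/2 factor below, under that reading of the macro:

  // With ENABLE_SLOW_TESTS:    TEST_F(LibYUVScaleTest, ARGBScaleDownClipBy2_Box) { ... }
  // Without ENABLE_SLOW_TESTS: TEST_F(LibYUVScaleTest, DISABLED_ARGBScaleDownClipBy2_Box) { ... }
  // The non-clip ARGBScaleDownBy2_Box test never receives the prefix and always runs.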
TEST_FACTOR(2, 1, 2)
TEST_FACTOR(4, 1, 4)
@@ -268,7 +276,7 @@ TEST_FACTOR(3, 1, 3)
#undef SX
#undef DX
-#define TEST_SCALETO1(name, width, height, filter, max_diff) \
+#define TEST_SCALETO1(DISABLED_, name, width, height, filter, max_diff) \
TEST_F(LibYUVScaleTest, name##To##width##x##height##_##filter) { \
int diff = ARGBTestFilter(benchmark_width_, benchmark_height_, width, \
height, kFilter##filter, benchmark_iterations_, \
@@ -282,13 +290,15 @@ TEST_FACTOR(3, 1, 3)
benchmark_cpu_info_); \
EXPECT_LE(diff, max_diff); \
} \
- TEST_F(LibYUVScaleTest, name##ClipTo##width##x##height##_##filter) { \
+ TEST_F(LibYUVScaleTest, \
+ DISABLED_##name##ClipTo##width##x##height##_##filter) { \
int diff = \
ARGBClipTestFilter(benchmark_width_, benchmark_height_, width, height, \
kFilter##filter, benchmark_iterations_); \
EXPECT_LE(diff, max_diff); \
} \
- TEST_F(LibYUVScaleTest, name##ClipFrom##width##x##height##_##filter) { \
+ TEST_F(LibYUVScaleTest, \
+ DISABLED_##name##ClipFrom##width##x##height##_##filter) { \
int diff = ARGBClipTestFilter(width, height, Abs(benchmark_width_), \
Abs(benchmark_height_), kFilter##filter, \
benchmark_iterations_); \
@@ -296,13 +306,20 @@ TEST_FACTOR(3, 1, 3)
}
// Test scale to a specified size with all 4 filters.
-#define TEST_SCALETO(name, width, height) \
- TEST_SCALETO1(name, width, height, None, 0) \
- TEST_SCALETO1(name, width, height, Linear, 3) \
- TEST_SCALETO1(name, width, height, Bilinear, 3)
+#ifdef ENABLE_SLOW_TESTS
+#define TEST_SCALETO(name, width, height) \
+ TEST_SCALETO1(, name, width, height, None, 0) \
+ TEST_SCALETO1(, name, width, height, Linear, 3) \
+ TEST_SCALETO1(, name, width, height, Bilinear, 3)
+#else
+#define TEST_SCALETO(name, width, height) \
+ TEST_SCALETO1(DISABLED_, name, width, height, None, 0) \
+ TEST_SCALETO1(DISABLED_, name, width, height, Linear, 3) \
+ TEST_SCALETO1(DISABLED_, name, width, height, Bilinear, 3)
+#endif
TEST_SCALETO(ARGBScale, 1, 1)
-TEST_SCALETO(ARGBScale, 256, 144) /* 128x72 * 2 */
+TEST_SCALETO(ARGBScale, 256, 144) /* 128x72 * 2 */
TEST_SCALETO(ARGBScale, 320, 240)
TEST_SCALETO(ARGBScale, 569, 480)
TEST_SCALETO(ARGBScale, 640, 360)
diff --git a/third_party/libyuv/unit_test/scale_test.cc b/third_party/libyuv/unit_test/scale_test.cc
index d24806a661..6da6b574d1 100644
--- a/third_party/libyuv/unit_test/scale_test.cc
+++ b/third_party/libyuv/unit_test/scale_test.cc
@@ -259,6 +259,123 @@ static int I420TestFilter_12(int src_width,
return max_diff;
}
+// Test 8 bit scaling (C reference, CPU features masked off) against 16 bit
+// scaling run with the benchmark CPU flags; return maximum pixel difference.
+// 0 = exact.
+static int I420TestFilter_16(int src_width,
+ int src_height,
+ int dst_width,
+ int dst_height,
+ FilterMode f,
+ int benchmark_iterations,
+ int disable_cpu_flags,
+ int benchmark_cpu_info) {
+ if (!SizeValid(src_width, src_height, dst_width, dst_height)) {
+ return 0;
+ }
+
+ int i;
+ int src_width_uv = (Abs(src_width) + 1) >> 1;
+ int src_height_uv = (Abs(src_height) + 1) >> 1;
+
+ int64_t src_y_plane_size = (Abs(src_width)) * (Abs(src_height));
+ int64_t src_uv_plane_size = (src_width_uv) * (src_height_uv);
+
+ int src_stride_y = Abs(src_width);
+ int src_stride_uv = src_width_uv;
+
+ align_buffer_page_end(src_y, src_y_plane_size);
+ align_buffer_page_end(src_u, src_uv_plane_size);
+ align_buffer_page_end(src_v, src_uv_plane_size);
+ align_buffer_page_end(src_y_16, src_y_plane_size * 2);
+ align_buffer_page_end(src_u_16, src_uv_plane_size * 2);
+ align_buffer_page_end(src_v_16, src_uv_plane_size * 2);
+ if (!src_y || !src_u || !src_v || !src_y_16 || !src_u_16 || !src_v_16) {
+ printf("Skipped. Alloc failed " FILELINESTR(__FILE__, __LINE__) "\n");
+ return 0;
+ }
+ uint16_t* p_src_y_16 = reinterpret_cast<uint16_t*>(src_y_16);
+ uint16_t* p_src_u_16 = reinterpret_cast<uint16_t*>(src_u_16);
+ uint16_t* p_src_v_16 = reinterpret_cast<uint16_t*>(src_v_16);
+
+ MemRandomize(src_y, src_y_plane_size);
+ MemRandomize(src_u, src_uv_plane_size);
+ MemRandomize(src_v, src_uv_plane_size);
+
+ for (i = 0; i < src_y_plane_size; ++i) {
+ p_src_y_16[i] = src_y[i];
+ }
+ for (i = 0; i < src_uv_plane_size; ++i) {
+ p_src_u_16[i] = src_u[i];
+ p_src_v_16[i] = src_v[i];
+ }
+
+ int dst_width_uv = (dst_width + 1) >> 1;
+ int dst_height_uv = (dst_height + 1) >> 1;
+
+ int dst_y_plane_size = (dst_width) * (dst_height);
+ int dst_uv_plane_size = (dst_width_uv) * (dst_height_uv);
+
+ int dst_stride_y = dst_width;
+ int dst_stride_uv = dst_width_uv;
+
+ align_buffer_page_end(dst_y_8, dst_y_plane_size);
+ align_buffer_page_end(dst_u_8, dst_uv_plane_size);
+ align_buffer_page_end(dst_v_8, dst_uv_plane_size);
+ align_buffer_page_end(dst_y_16, dst_y_plane_size * 2);
+ align_buffer_page_end(dst_u_16, dst_uv_plane_size * 2);
+ align_buffer_page_end(dst_v_16, dst_uv_plane_size * 2);
+
+ uint16_t* p_dst_y_16 = reinterpret_cast<uint16_t*>(dst_y_16);
+ uint16_t* p_dst_u_16 = reinterpret_cast<uint16_t*>(dst_u_16);
+ uint16_t* p_dst_v_16 = reinterpret_cast<uint16_t*>(dst_v_16);
+
+ MaskCpuFlags(disable_cpu_flags); // Disable all CPU optimization.
+ I420Scale(src_y, src_stride_y, src_u, src_stride_uv, src_v, src_stride_uv,
+ src_width, src_height, dst_y_8, dst_stride_y, dst_u_8,
+ dst_stride_uv, dst_v_8, dst_stride_uv, dst_width, dst_height, f);
+ MaskCpuFlags(benchmark_cpu_info); // Enable all CPU optimization.
+ for (i = 0; i < benchmark_iterations; ++i) {
+ I420Scale_16(p_src_y_16, src_stride_y, p_src_u_16, src_stride_uv,
+ p_src_v_16, src_stride_uv, src_width, src_height, p_dst_y_16,
+ dst_stride_y, p_dst_u_16, dst_stride_uv, p_dst_v_16,
+ dst_stride_uv, dst_width, dst_height, f);
+ }
+
+ // Expect an exact match.
+ int max_diff = 0;
+ for (i = 0; i < dst_y_plane_size; ++i) {
+ int abs_diff = Abs(dst_y_8[i] - p_dst_y_16[i]);
+ if (abs_diff > max_diff) {
+ max_diff = abs_diff;
+ }
+ }
+ for (i = 0; i < dst_uv_plane_size; ++i) {
+ int abs_diff = Abs(dst_u_8[i] - p_dst_u_16[i]);
+ if (abs_diff > max_diff) {
+ max_diff = abs_diff;
+ }
+ abs_diff = Abs(dst_v_8[i] - p_dst_v_16[i]);
+ if (abs_diff > max_diff) {
+ max_diff = abs_diff;
+ }
+ }
+
+ free_aligned_buffer_page_end(dst_y_8);
+ free_aligned_buffer_page_end(dst_u_8);
+ free_aligned_buffer_page_end(dst_v_8);
+ free_aligned_buffer_page_end(dst_y_16);
+ free_aligned_buffer_page_end(dst_u_16);
+ free_aligned_buffer_page_end(dst_v_16);
+ free_aligned_buffer_page_end(src_y);
+ free_aligned_buffer_page_end(src_u);
+ free_aligned_buffer_page_end(src_v);
+ free_aligned_buffer_page_end(src_y_16);
+ free_aligned_buffer_page_end(src_u_16);
+ free_aligned_buffer_page_end(src_v_16);
+
+ return max_diff;
+}
+
// Test scaling with C vs Opt and return maximum pixel difference. 0 = exact.
static int I444TestFilter(int src_width,
int src_height,
@@ -494,6 +611,123 @@ static int I444TestFilter_12(int src_width,
return max_diff;
}
+// Test 8 bit scaling (C reference, CPU features masked off) against 16 bit
+// scaling run with the benchmark CPU flags; return maximum pixel difference.
+// 0 = exact.
+static int I444TestFilter_16(int src_width,
+ int src_height,
+ int dst_width,
+ int dst_height,
+ FilterMode f,
+ int benchmark_iterations,
+ int disable_cpu_flags,
+ int benchmark_cpu_info) {
+ if (!SizeValid(src_width, src_height, dst_width, dst_height)) {
+ return 0;
+ }
+
+ int i;
+ int src_width_uv = Abs(src_width);
+ int src_height_uv = Abs(src_height);
+
+ int64_t src_y_plane_size = (Abs(src_width)) * (Abs(src_height));
+ int64_t src_uv_plane_size = (src_width_uv) * (src_height_uv);
+
+ int src_stride_y = Abs(src_width);
+ int src_stride_uv = src_width_uv;
+
+ align_buffer_page_end(src_y, src_y_plane_size);
+ align_buffer_page_end(src_u, src_uv_plane_size);
+ align_buffer_page_end(src_v, src_uv_plane_size);
+ align_buffer_page_end(src_y_16, src_y_plane_size * 2);
+ align_buffer_page_end(src_u_16, src_uv_plane_size * 2);
+ align_buffer_page_end(src_v_16, src_uv_plane_size * 2);
+ if (!src_y || !src_u || !src_v || !src_y_16 || !src_u_16 || !src_v_16) {
+ printf("Skipped. Alloc failed " FILELINESTR(__FILE__, __LINE__) "\n");
+ return 0;
+ }
+ uint16_t* p_src_y_16 = reinterpret_cast<uint16_t*>(src_y_16);
+ uint16_t* p_src_u_16 = reinterpret_cast<uint16_t*>(src_u_16);
+ uint16_t* p_src_v_16 = reinterpret_cast<uint16_t*>(src_v_16);
+
+ MemRandomize(src_y, src_y_plane_size);
+ MemRandomize(src_u, src_uv_plane_size);
+ MemRandomize(src_v, src_uv_plane_size);
+
+ for (i = 0; i < src_y_plane_size; ++i) {
+ p_src_y_16[i] = src_y[i];
+ }
+ for (i = 0; i < src_uv_plane_size; ++i) {
+ p_src_u_16[i] = src_u[i];
+ p_src_v_16[i] = src_v[i];
+ }
+
+ int dst_width_uv = dst_width;
+ int dst_height_uv = dst_height;
+
+ int dst_y_plane_size = (dst_width) * (dst_height);
+ int dst_uv_plane_size = (dst_width_uv) * (dst_height_uv);
+
+ int dst_stride_y = dst_width;
+ int dst_stride_uv = dst_width_uv;
+
+ align_buffer_page_end(dst_y_8, dst_y_plane_size);
+ align_buffer_page_end(dst_u_8, dst_uv_plane_size);
+ align_buffer_page_end(dst_v_8, dst_uv_plane_size);
+ align_buffer_page_end(dst_y_16, dst_y_plane_size * 2);
+ align_buffer_page_end(dst_u_16, dst_uv_plane_size * 2);
+ align_buffer_page_end(dst_v_16, dst_uv_plane_size * 2);
+
+ uint16_t* p_dst_y_16 = reinterpret_cast<uint16_t*>(dst_y_16);
+ uint16_t* p_dst_u_16 = reinterpret_cast<uint16_t*>(dst_u_16);
+ uint16_t* p_dst_v_16 = reinterpret_cast<uint16_t*>(dst_v_16);
+
+ MaskCpuFlags(disable_cpu_flags); // Disable all CPU optimization.
+ I444Scale(src_y, src_stride_y, src_u, src_stride_uv, src_v, src_stride_uv,
+ src_width, src_height, dst_y_8, dst_stride_y, dst_u_8,
+ dst_stride_uv, dst_v_8, dst_stride_uv, dst_width, dst_height, f);
+ MaskCpuFlags(benchmark_cpu_info); // Enable all CPU optimization.
+ for (i = 0; i < benchmark_iterations; ++i) {
+ I444Scale_16(p_src_y_16, src_stride_y, p_src_u_16, src_stride_uv,
+ p_src_v_16, src_stride_uv, src_width, src_height, p_dst_y_16,
+ dst_stride_y, p_dst_u_16, dst_stride_uv, p_dst_v_16,
+ dst_stride_uv, dst_width, dst_height, f);
+ }
+
+ // Expect an exact match.
+ int max_diff = 0;
+ for (i = 0; i < dst_y_plane_size; ++i) {
+ int abs_diff = Abs(dst_y_8[i] - p_dst_y_16[i]);
+ if (abs_diff > max_diff) {
+ max_diff = abs_diff;
+ }
+ }
+ for (i = 0; i < dst_uv_plane_size; ++i) {
+ int abs_diff = Abs(dst_u_8[i] - p_dst_u_16[i]);
+ if (abs_diff > max_diff) {
+ max_diff = abs_diff;
+ }
+ abs_diff = Abs(dst_v_8[i] - p_dst_v_16[i]);
+ if (abs_diff > max_diff) {
+ max_diff = abs_diff;
+ }
+ }
+
+ free_aligned_buffer_page_end(dst_y_8);
+ free_aligned_buffer_page_end(dst_u_8);
+ free_aligned_buffer_page_end(dst_v_8);
+ free_aligned_buffer_page_end(dst_y_16);
+ free_aligned_buffer_page_end(dst_u_16);
+ free_aligned_buffer_page_end(dst_v_16);
+ free_aligned_buffer_page_end(src_y);
+ free_aligned_buffer_page_end(src_u);
+ free_aligned_buffer_page_end(src_v);
+ free_aligned_buffer_page_end(src_y_16);
+ free_aligned_buffer_page_end(src_u_16);
+ free_aligned_buffer_page_end(src_v_16);
+
+ return max_diff;
+}
+
// Test scaling with C vs Opt and return maximum pixel difference. 0 = exact.
static int NV12TestFilter(int src_width,
int src_height,
@@ -700,6 +934,20 @@ TEST_FACTOR(3, 1, 3, 0)
benchmark_iterations_, disable_cpu_flags_, benchmark_cpu_info_); \
EXPECT_LE(diff, max_diff); \
} \
+ TEST_F(LibYUVScaleTest, \
+ DISABLED_##I420##name##To##width##x##height##_##filter##_16) { \
+ int diff = I420TestFilter_16( \
+ benchmark_width_, benchmark_height_, width, height, kFilter##filter, \
+ benchmark_iterations_, disable_cpu_flags_, benchmark_cpu_info_); \
+ EXPECT_LE(diff, max_diff); \
+ } \
+ TEST_F(LibYUVScaleTest, \
+ DISABLED_##I444##name##To##width##x##height##_##filter##_16) { \
+ int diff = I444TestFilter_16( \
+ benchmark_width_, benchmark_height_, width, height, kFilter##filter, \
+ benchmark_iterations_, disable_cpu_flags_, benchmark_cpu_info_); \
+ EXPECT_LE(diff, max_diff); \
+ } \
TEST_F(LibYUVScaleTest, NV12##name##To##width##x##height##_##filter) { \
int diff = NV12TestFilter(benchmark_width_, benchmark_height_, width, \
height, kFilter##filter, benchmark_iterations_, \
@@ -736,6 +984,22 @@ TEST_FACTOR(3, 1, 3, 0)
benchmark_cpu_info_); \
EXPECT_LE(diff, max_diff); \
} \
+ TEST_F(LibYUVScaleTest, \
+ DISABLED_##I420##name##From##width##x##height##_##filter##_16) { \
+ int diff = I420TestFilter_16(width, height, Abs(benchmark_width_), \
+ Abs(benchmark_height_), kFilter##filter, \
+ benchmark_iterations_, disable_cpu_flags_, \
+ benchmark_cpu_info_); \
+ EXPECT_LE(diff, max_diff); \
+ } \
+ TEST_F(LibYUVScaleTest, \
+ DISABLED_##I444##name##From##width##x##height##_##filter##_16) { \
+ int diff = I444TestFilter_16(width, height, Abs(benchmark_width_), \
+ Abs(benchmark_height_), kFilter##filter, \
+ benchmark_iterations_, disable_cpu_flags_, \
+ benchmark_cpu_info_); \
+ EXPECT_LE(diff, max_diff); \
+ } \
TEST_F(LibYUVScaleTest, NV12##name##From##width##x##height##_##filter) { \
int diff = NV12TestFilter(width, height, Abs(benchmark_width_), \
Abs(benchmark_height_), kFilter##filter, \
@@ -761,7 +1025,7 @@ TEST_FACTOR(3, 1, 3, 0)
#endif
TEST_SCALETO(Scale, 1, 1)
-TEST_SCALETO(Scale, 256, 144) /* 128x72 * 2 */
+TEST_SCALETO(Scale, 256, 144) /* 128x72 * 2 */
TEST_SCALETO(Scale, 320, 240)
TEST_SCALETO(Scale, 569, 480)
TEST_SCALETO(Scale, 640, 360)
@@ -801,6 +1065,20 @@ TEST_SCALETO(Scale, 1920, 1080)
disable_cpu_flags_, benchmark_cpu_info_); \
EXPECT_LE(diff, max_diff); \
} \
+ TEST_F(LibYUVScaleTest, DISABLED_##I420##name##SwapXY_##filter##_16) { \
+ int diff = I420TestFilter_16(benchmark_width_, benchmark_height_, \
+ benchmark_height_, benchmark_width_, \
+ kFilter##filter, benchmark_iterations_, \
+ disable_cpu_flags_, benchmark_cpu_info_); \
+ EXPECT_LE(diff, max_diff); \
+ } \
+ TEST_F(LibYUVScaleTest, DISABLED_##I444##name##SwapXY_##filter##_16) { \
+ int diff = I444TestFilter_16(benchmark_width_, benchmark_height_, \
+ benchmark_height_, benchmark_width_, \
+ kFilter##filter, benchmark_iterations_, \
+ disable_cpu_flags_, benchmark_cpu_info_); \
+ EXPECT_LE(diff, max_diff); \
+ } \
TEST_F(LibYUVScaleTest, NV12##name##SwapXY_##filter) { \
int diff = NV12TestFilter(benchmark_width_, benchmark_height_, \
benchmark_height_, benchmark_width_, \
diff --git a/third_party/libyuv/unit_test/scale_uv_test.cc b/third_party/libyuv/unit_test/scale_uv_test.cc
index 59eeee3043..6e4649f84d 100644
--- a/third_party/libyuv/unit_test/scale_uv_test.cc
+++ b/third_party/libyuv/unit_test/scale_uv_test.cc
@@ -166,7 +166,7 @@ TEST_FACTOR(3, 1, 3)
TEST_SCALETO1(name, width, height, Bilinear, 3)
TEST_SCALETO(UVScale, 1, 1)
-TEST_SCALETO(UVScale, 256, 144) /* 128x72 * 2 */
+TEST_SCALETO(UVScale, 256, 144) /* 128x72 * 2 */
TEST_SCALETO(UVScale, 320, 240)
TEST_SCALETO(UVScale, 569, 480)
TEST_SCALETO(UVScale, 640, 360)
diff --git a/third_party/libyuv/unit_test/unit_test.cc b/third_party/libyuv/unit_test/unit_test.cc
index 85e3b7170f..e6dbc3eed6 100644
--- a/third_party/libyuv/unit_test/unit_test.cc
+++ b/third_party/libyuv/unit_test/unit_test.cc
@@ -26,9 +26,13 @@ unsigned int fastrand_seed = 0xfb;
ABSL_FLAG(int32_t, libyuv_width, 0, "width of test image.");
ABSL_FLAG(int32_t, libyuv_height, 0, "height of test image.");
ABSL_FLAG(int32_t, libyuv_repeat, 0, "number of times to repeat test.");
-ABSL_FLAG(int32_t, libyuv_flags, 0,
+ABSL_FLAG(int32_t,
+ libyuv_flags,
+ 0,
"cpu flags for reference code. 1 = C, -1 = SIMD");
-ABSL_FLAG(int32_t, libyuv_cpu_info, 0,
+ABSL_FLAG(int32_t,
+ libyuv_cpu_info,
+ 0,
"cpu flags for benchmark code. 1 = C, -1 = SIMD");
#else
// Disable command line parameters if absl/flags disabled.
diff --git a/third_party/libyuv/unit_test/unit_test.h b/third_party/libyuv/unit_test/unit_test.h
index 87907fa160..580832addc 100644
--- a/third_party/libyuv/unit_test/unit_test.h
+++ b/third_party/libyuv/unit_test/unit_test.h
@@ -11,7 +11,7 @@
#ifndef UNIT_TEST_UNIT_TEST_H_ // NOLINT
#define UNIT_TEST_UNIT_TEST_H_
-#ifdef WIN32
+#ifdef _WIN32
#include <windows.h>
#else
#include <sys/resource.h>
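The switch from WIN32 to _WIN32 is the portable spelling: _WIN32 is predefined by the compiler itself (MSVC, clang-cl, MinGW, for both 32- and 64-bit targets), whereas plain WIN32 is only defined by certain build systems or indirectly via <windows.h>, so a guard on WIN32 can silently fall through to the POSIX branch. A quick stand-alone probe:

  // Prints "windows" whenever the compiler targets Windows, regardless of
  // whether the build system ever defined WIN32.
  #include <cstdio>
  int main() {
  #ifdef _WIN32
    std::puts("windows");
  #else
    std::puts("not windows");
  #endif
  }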
diff --git a/third_party/libyuv/unit_test/video_common_test.cc b/third_party/libyuv/unit_test/video_common_test.cc
index 6c6a384d41..36728ea900 100644
--- a/third_party/libyuv/unit_test/video_common_test.cc
+++ b/third_party/libyuv/unit_test/video_common_test.cc
@@ -29,7 +29,7 @@ static bool TestValidFourCC(uint32_t fourcc, int bpp) {
!TestValidChar((fourcc >> 24) & 0xff)) {
return false;
}
- if (bpp < 0 || bpp > 32) {
+ if (bpp < 0 || bpp > 64) {
return false;
}
return true;
@@ -72,6 +72,8 @@ TEST_F(LibYUVBaseTest, TestFourCC) {
EXPECT_TRUE(TestValidFourCC(FOURCC_ABGR, FOURCC_BPP_ABGR));
EXPECT_TRUE(TestValidFourCC(FOURCC_AR30, FOURCC_BPP_AR30));
EXPECT_TRUE(TestValidFourCC(FOURCC_AB30, FOURCC_BPP_AB30));
+ EXPECT_TRUE(TestValidFourCC(FOURCC_AR64, FOURCC_BPP_AR64));
+ EXPECT_TRUE(TestValidFourCC(FOURCC_AB64, FOURCC_BPP_AB64));
EXPECT_TRUE(TestValidFourCC(FOURCC_24BG, FOURCC_BPP_24BG));
EXPECT_TRUE(TestValidFourCC(FOURCC_RAW, FOURCC_BPP_RAW));
EXPECT_TRUE(TestValidFourCC(FOURCC_RGBA, FOURCC_BPP_RGBA));
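The relaxed bpp bound matches the new formats: AR64/AB64 carry four 16-bit channels, i.e. 4 x 16 = 64 bits per pixel, so the old ceiling of 32 would have rejected them. A small sanity sketch, on the assumption that the FOURCC_BPP_* constants are defined as bits per pixel like the existing ones:

  // 4 channels x 16 bits each = 64 bpp for AR64/AB64.
  static_assert(4 * 16 == 64, "AR64/AB64 channel arithmetic");
  // Assumed, not verified here: FOURCC_BPP_AR64 == 64 && FOURCC_BPP_AB64 == 64.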
diff --git a/third_party/opus/src/celt/celt_lpc.c b/third_party/opus/src/celt/celt_lpc.c
index 457e7ed0d2..242e6df55e 100644
--- a/third_party/opus/src/celt/celt_lpc.c
+++ b/third_party/opus/src/celt/celt_lpc.c
@@ -50,7 +50,11 @@ int p
#endif
OPUS_CLEAR(lpc, p);
+#ifdef FIXED_POINT
if (ac[0] != 0)
+#else
+ if (ac[0] > 1e-10f)
+#endif
{
for (i = 0; i < p; i++) {
/* Sum up this iteration's reflection coefficient */
@@ -73,10 +77,10 @@ int p
error = error - MULT32_32_Q31(MULT32_32_Q31(r,r),error);
/* Bail out once we get 30 dB gain */
#ifdef FIXED_POINT
- if (error<SHR32(ac[0],10))
+ if (error<=SHR32(ac[0],10))
break;
#else
- if (error<.001f*ac[0])
+ if (error<=.001f*ac[0])
break;
#endif
}
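Two separate fixes land in this hunk. The float-only guard skips the recursion when the zero-lag autocorrelation is effectively zero (ac[0] <= 1e-10f), leaving the cleared LPC coefficients in place instead of dividing by a vanishing error energy. The relaxed comparisons make the early exit also trigger when the prediction error exactly reaches the threshold; SHR32(ac[0], 10) is ac[0] / 1024, and 10 * log10(1024) is about 30.1 dB, which is where the "30 dB gain" in the comment comes from. A quick numeric check of that figure:

  // Sketch: the fixed-point bail-out threshold expressed in dB.
  #include <cmath>
  #include <cstdio>
  int main() {
    const double ratio = 1024.0;  // SHR32(ac[0], 10) == ac[0] / 1024
    std::printf("%.1f dB\n", 10.0 * std::log10(ratio));  // prints 30.1
  }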
diff --git a/third_party/opus/src/src/opus_encoder.c b/third_party/opus/src/src/opus_encoder.c
index 321bb2bb1e..253fe9e880 100644
--- a/third_party/opus/src/src/opus_encoder.c
+++ b/third_party/opus/src/src/opus_encoder.c
@@ -900,10 +900,10 @@ static int decide_dtx_mode(opus_int activity, /* indicates if this fr
{
if (!activity)
{
- /* The number of consecutive DTX frames should be within the allowed bounds.
- Note that the allowed bound is defined in the Silk headers and assumes 20 ms
- frames. As this function can be called with any frame length, a conversion to
- miliseconds is done before the comparisons. */
+ /* The number of consecutive DTX frames should be within the allowed bounds.
+ Note that the allowed bound is defined in the SILK headers and assumes 20 ms
+ frames. As this function can be called with any frame length, a conversion to
+ milliseconds is done before the comparisons. */
(*nb_no_activity_ms_Q1) += frame_size_ms_Q1;
if (*nb_no_activity_ms_Q1 > NB_SPEECH_FRAMES_BEFORE_DTX*20*2)
{
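The _Q1 suffix is the key to the bound in the comparison above: both the accumulated frame_size_ms_Q1 and NB_SPEECH_FRAMES_BEFORE_DTX * 20 * 2 are in half-millisecond (Q1) units, the * 20 being the 20 ms frame size the SILK-side constant assumes and the * 2 the Q1 scaling. A hedged sketch of the same check in whole milliseconds, with placeholder values rather than the real SILK constants:

  #include <cstdio>
  int main() {
    const int kFramesBeforeDtx = 10;               // stand-in for NB_SPEECH_FRAMES_BEFORE_DTX
    const int allowed_ms = kFramesBeforeDtx * 20;  // frames -> ms, assuming 20 ms frames
    const int no_activity_ms_q1 = 21 * 40;         // 21 frames of 20 ms, in 0.5 ms (Q1) units
    std::printf("past hang-over: %d\n", (no_activity_ms_q1 >> 1) > allowed_ms);  // prints 1
  }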
diff --git a/third_party/pffft/pffft_unittest.cc b/third_party/pffft/pffft_unittest.cc
index 559723434e..c2bf184191 100644
--- a/third_party/pffft/pffft_unittest.cc
+++ b/third_party/pffft/pffft_unittest.cc
@@ -68,7 +68,7 @@ void PffftValidate(int fft_size, bool complex_fft) {
}
for (k = 0; k < num_floats; ++k) {
- ref_max = std::max(ref_max, fabs(ref[k]));
+ ref_max = std::max<float>(ref_max, (float) fabs(ref[k]));
}
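The explicit std::max<float> is needed because fabs returns double: std::max deduces a single template type from both arguments, so mixing the float accumulator with a double argument fails to compile. A minimal illustration of the two shapes:

  #include <algorithm>
  #include <cmath>
  float Accumulate(float ref_max, double v) {
    // return std::max(ref_max, std::fabs(v));  // error: no matching max(float, double)
    return std::max<float>(ref_max, static_cast<float>(std::fabs(v)));  // compiles, stays float
  }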
// Pass 0: non canonical ordering of transform coefficients.
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_constants.h b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_constants.h
index b8e2f2d581..be2028eb27 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_constants.h
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_constants.h
@@ -34,7 +34,7 @@
#if defined(__FreeBSD__) && !defined(__Userspace__)
#include <sys/cdefs.h>
-__FBSDID("$FreeBSD: head/sys/netinet/sctp_constants.h 365071 2020-09-01 21:19:14Z mjg $");
+__FBSDID("$FreeBSD$");
#endif
#ifndef _NETINET_SCTP_CONSTANTS_H_
@@ -610,7 +610,7 @@ extern void getwintimeofday(struct timeval *tv);
#define SCTP_RTO_UPPER_BOUND (60000) /* 60 sec in ms */
#define SCTP_RTO_LOWER_BOUND (1000) /* 1 sec in ms */
-#define SCTP_RTO_INITIAL (3000) /* 3 sec in ms */
+#define SCTP_RTO_INITIAL (1000) /* 1 sec in ms */
#define SCTP_INP_KILL_TIMEOUT 20 /* number of ms to retry kill of inpcb */
#define SCTP_ASOC_KILL_TIMEOUT 10 /* number of ms to retry kill of inpcb */
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_input.c b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_input.c
index f3c3644855..fb6e4c23eb 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_input.c
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_input.c
@@ -108,57 +108,12 @@ sctp_handle_init(struct mbuf *m, int iphlen, int offset,
if (stcb == NULL) {
SCTP_INP_RLOCK(inp);
}
- /* validate length */
- if (ntohs(cp->ch.chunk_length) < sizeof(struct sctp_init_chunk)) {
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(inp, stcb, m, iphlen, src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, port);
- if (stcb)
- *abort_no_unlock = 1;
- goto outnow;
- }
- /* validate parameters */
+ /* Validate parameters */
init = &cp->init;
- if (init->initiate_tag == 0) {
- /* protocol error... send abort */
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(inp, stcb, m, iphlen, src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, port);
- if (stcb)
- *abort_no_unlock = 1;
- goto outnow;
- }
- if (ntohl(init->a_rwnd) < SCTP_MIN_RWND) {
- /* invalid parameter... send abort */
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(inp, stcb, m, iphlen, src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, port);
- if (stcb)
- *abort_no_unlock = 1;
- goto outnow;
- }
- if (init->num_inbound_streams == 0) {
- /* protocol error... send abort */
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(inp, stcb, m, iphlen, src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, port);
- if (stcb)
- *abort_no_unlock = 1;
- goto outnow;
- }
- if (init->num_outbound_streams == 0) {
+ if ((ntohl(init->initiate_tag) == 0) ||
+ (ntohl(init->a_rwnd) < SCTP_MIN_RWND) ||
+ (ntohs(init->num_inbound_streams) == 0) ||
+ (ntohs(init->num_outbound_streams) == 0)) {
/* protocol error... send abort */
op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
sctp_abort_association(inp, stcb, m, iphlen, src, dst, sh, op_err,
@@ -514,26 +469,34 @@ sctp_process_init_ack(struct mbuf *m, int iphlen, int offset,
asoc = &stcb->asoc;
asoc->peer_supports_nat = (uint8_t)nat_friendly;
/* process the peer's parameters in the INIT-ACK */
- retval = sctp_process_init((struct sctp_init_chunk *)cp, stcb);
- if (retval < 0) {
+ if (sctp_process_init((struct sctp_init_chunk *)cp, stcb) < 0) {
if (op_err != NULL) {
sctp_m_freem(op_err);
}
- return (retval);
+ op_err = sctp_generate_cause(SCTP_CAUSE_OUT_OF_RESC, "");
+ SCTPDBG(SCTP_DEBUG_INPUT1, "sctp_process_init() failed\n");
+ sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
+ src, dst, sh, op_err,
+#if defined(__FreeBSD__) && !defined(__Userspace__)
+ mflowtype, mflowid,
+#endif
+ vrf_id, net->port);
+ *abort_no_unlock = 1;
+ return (-1);
}
initack_limit = offset + ntohs(cp->ch.chunk_length);
/* load all addresses */
if ((retval = sctp_load_addresses_from_init(stcb, m,
- (offset + sizeof(struct sctp_init_chunk)), initack_limit,
- src, dst, NULL, stcb->asoc.port))) {
+ offset + sizeof(struct sctp_init_chunk),
+ initack_limit, src, dst, NULL, stcb->asoc.port)) < 0) {
if (op_err != NULL) {
sctp_m_freem(op_err);
}
op_err = sctp_generate_cause(SCTP_BASE_SYSCTL(sctp_diag_info_code),
"Problem with address parameters");
SCTPDBG(SCTP_DEBUG_INPUT1,
- "Load addresses from INIT causes an abort %d\n",
- retval);
+ "Load addresses from INIT causes an abort %d\n",
+ retval);
sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
src, dst, sh, op_err,
#if defined(__FreeBSD__) && !defined(__Userspace__)
@@ -1420,57 +1383,12 @@ sctp_handle_init_ack(struct mbuf *m, int iphlen, int offset,
"sctp_handle_init_ack: TCB is null\n");
return (-1);
}
- if (ntohs(cp->ch.chunk_length) < sizeof(struct sctp_init_ack_chunk)) {
- /* Invalid length */
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
- src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, net->port);
- *abort_no_unlock = 1;
- return (-1);
- }
init_ack = &cp->init;
- /* validate parameters */
- if (init_ack->initiate_tag == 0) {
- /* protocol error... send an abort */
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
- src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, net->port);
- *abort_no_unlock = 1;
- return (-1);
- }
- if (ntohl(init_ack->a_rwnd) < SCTP_MIN_RWND) {
- /* protocol error... send an abort */
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
- src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, net->port);
- *abort_no_unlock = 1;
- return (-1);
- }
- if (init_ack->num_inbound_streams == 0) {
- /* protocol error... send an abort */
- op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
- sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
- src, dst, sh, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
-#endif
- vrf_id, net->port);
- *abort_no_unlock = 1;
- return (-1);
- }
- if (init_ack->num_outbound_streams == 0) {
+ /* Validate parameters. */
+ if ((ntohl(init_ack->initiate_tag) == 0) ||
+ (ntohl(init_ack->a_rwnd) < SCTP_MIN_RWND) ||
+ (ntohs(init_ack->num_inbound_streams) == 0) ||
+ (ntohs(init_ack->num_outbound_streams) == 0)) {
/* protocol error... send an abort */
op_err = sctp_generate_cause(SCTP_CAUSE_INVALID_PARAM, "");
sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
@@ -1624,6 +1542,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
vrf_id, net->port);
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 2;
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
/*
@@ -1638,9 +1557,11 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
(uint8_t *) & init_buf);
if (init_cp == NULL) {
/* could not pull a INIT chunk in cookie */
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
if (init_cp->ch.chunk_type != SCTP_INITIATION) {
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
/*
@@ -1653,9 +1574,11 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
(uint8_t *) & initack_buf);
if (initack_cp == NULL) {
/* could not pull INIT-ACK chunk in cookie */
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
if (initack_cp->ch.chunk_type != SCTP_INITIATION_ACK) {
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
if ((ntohl(initack_cp->init.initiate_tag) == asoc->my_vtag) &&
@@ -1681,6 +1604,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
*/
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 17;
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
switch (SCTP_GET_STATE(stcb)) {
@@ -1693,10 +1617,17 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
* have the right seq no's.
*/
/* First we must process the INIT !! */
- retval = sctp_process_init(init_cp, stcb);
- if (retval < 0) {
+ if (sctp_process_init(init_cp, stcb) < 0) {
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 3;
+ op_err = sctp_generate_cause(SCTP_CAUSE_OUT_OF_RESC, "");
+ SCTPDBG(SCTP_DEBUG_INPUT1, "sctp_process_init() failed\n");
+ sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
+ src, dst, sh, op_err,
+#if defined(__FreeBSD__) && !defined(__Userspace__)
+ mflowtype, mflowid,
+#endif
+ vrf_id, net->port);
return (NULL);
}
/* we have already processed the INIT so no problem */
@@ -1741,6 +1672,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
SCTP_TCB_LOCK(stcb);
atomic_add_int(&stcb->asoc.refcnt, -1);
if (stcb->asoc.state & SCTP_STATE_CLOSED_SOCKET) {
+ SCTP_TCB_UNLOCK(stcb);
SCTP_SOCKET_UNLOCK(so, 1);
return (NULL);
}
@@ -1776,16 +1708,22 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
break;
} /* end switch */
sctp_stop_all_cookie_timers(stcb);
- /*
- * We ignore the return code here.. not sure if we should
- * somehow abort.. but we do have an existing asoc. This
- * really should not fail.
- */
- if (sctp_load_addresses_from_init(stcb, m,
- init_offset + sizeof(struct sctp_init_chunk),
- initack_offset, src, dst, init_src, stcb->asoc.port)) {
+ if ((retval = sctp_load_addresses_from_init(stcb, m,
+ init_offset + sizeof(struct sctp_init_chunk),
+ initack_offset, src, dst, init_src, stcb->asoc.port)) < 0) {
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 4;
+ op_err = sctp_generate_cause(SCTP_BASE_SYSCTL(sctp_diag_info_code),
+ "Problem with address parameters");
+ SCTPDBG(SCTP_DEBUG_INPUT1,
+ "Load addresses from INIT causes an abort %d\n",
+ retval);
+ sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
+ src, dst, sh, op_err,
+#if defined(__FreeBSD__) && !defined(__Userspace__)
+ mflowtype, mflowid,
+#endif
+ vrf_id, net->port);
return (NULL);
}
/* respond with a COOKIE-ACK */
@@ -1805,6 +1743,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
*/
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 6;
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
/* If nat support, and the below and stcb is established,
@@ -1830,6 +1769,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
mflowtype, mflowid, inp->fibnum,
#endif
vrf_id, port);
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
if ((ntohl(initack_cp->init.initiate_tag) == asoc->my_vtag) &&
@@ -1859,6 +1799,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 7;
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
if (how_indx < sizeof(asoc->cookie_how))
@@ -1901,17 +1842,35 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
}
}
/* process the INIT info (peer's info) */
- retval = sctp_process_init(init_cp, stcb);
- if (retval < 0) {
+ if (sctp_process_init(init_cp, stcb) < 0) {
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 9;
+ op_err = sctp_generate_cause(SCTP_CAUSE_OUT_OF_RESC, "");
+ SCTPDBG(SCTP_DEBUG_INPUT1, "sctp_process_init() failed\n");
+ sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
+ src, dst, sh, op_err,
+#if defined(__FreeBSD__) && !defined(__Userspace__)
+ mflowtype, mflowid,
+#endif
+ vrf_id, net->port);
return (NULL);
}
- if (sctp_load_addresses_from_init(stcb, m,
- init_offset + sizeof(struct sctp_init_chunk),
- initack_offset, src, dst, init_src, stcb->asoc.port)) {
+ if ((retval = sctp_load_addresses_from_init(stcb, m,
+ init_offset + sizeof(struct sctp_init_chunk),
+ initack_offset, src, dst, init_src, stcb->asoc.port)) < 0) {
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 10;
+ op_err = sctp_generate_cause(SCTP_BASE_SYSCTL(sctp_diag_info_code),
+ "Problem with address parameters");
+ SCTPDBG(SCTP_DEBUG_INPUT1,
+ "Load addresses from INIT causes an abort %d\n",
+ retval);
+ sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
+ src, dst, sh, op_err,
+#if defined(__FreeBSD__) && !defined(__Userspace__)
+ mflowtype, mflowid,
+#endif
+ vrf_id, net->port);
return (NULL);
}
if ((SCTP_GET_STATE(stcb) == SCTP_STATE_COOKIE_WAIT) ||
@@ -1933,6 +1892,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
SCTP_TCB_LOCK(stcb);
atomic_add_int(&stcb->asoc.refcnt, -1);
if (stcb->asoc.state & SCTP_STATE_CLOSED_SOCKET) {
+ SCTP_TCB_UNLOCK(stcb);
SCTP_SOCKET_UNLOCK(so, 1);
return (NULL);
}
@@ -1985,19 +1945,25 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
#endif
if (asoc->peer_supports_nat) {
+ struct sctp_tcb *local_stcb;
+
/* This is a gross gross hack.
* Just call the cookie_new code since we
* are allowing a duplicate association.
* I hope this works...
*/
- return (sctp_process_cookie_new(m, iphlen, offset, src, dst,
- sh, cookie, cookie_len,
- inp, netp, init_src,notification,
- auth_skipped, auth_offset, auth_len,
+ local_stcb = sctp_process_cookie_new(m, iphlen, offset, src, dst,
+ sh, cookie, cookie_len,
+ inp, netp, init_src,notification,
+ auth_skipped, auth_offset, auth_len,
#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid,
+ mflowtype, mflowid,
#endif
- vrf_id, port));
+ vrf_id, port);
+ if (local_stcb == NULL) {
+ SCTP_TCB_UNLOCK(stcb);
+ }
+ return (local_stcb);
}
/*
* case A in Section 5.2.4 Table 2: XXMM (peer restarted)
@@ -2005,11 +1971,7 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
/* temp code */
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 12;
- sctp_timer_stop(SCTP_TIMER_TYPE_INIT, inp, stcb, net,
- SCTP_FROM_SCTP_INPUT + SCTP_LOC_16);
- sctp_timer_stop(SCTP_TIMER_TYPE_HEARTBEAT, inp, stcb, net,
- SCTP_FROM_SCTP_INPUT + SCTP_LOC_17);
-
+ sctp_stop_association_timers(stcb, false);
/* notify upper layer */
*notification = SCTP_NOTIFY_ASSOC_RESTART;
atomic_add_int(&stcb->asoc.refcnt, 1);
@@ -2042,6 +2004,10 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
asoc->str_reset_seq_in = asoc->init_seq_number;
asoc->advanced_peer_ack_point = asoc->last_acked_seq;
asoc->send_sack = 1;
+ asoc->data_pkts_seen = 0;
+ asoc->last_data_chunk_from = NULL;
+ asoc->last_control_chunk_from = NULL;
+ asoc->last_net_cmt_send_started = NULL;
if (asoc->mapping_array) {
memset(asoc->mapping_array, 0,
asoc->mapping_array_size);
@@ -2106,6 +2072,9 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
SCTP_ZONE_FREE(SCTP_BASE_INFO(ipi_zone_chunk), chk);
SCTP_DECR_CHK_COUNT();
}
+ asoc->ctrl_queue_cnt = 0;
+ asoc->str_reset = NULL;
+ asoc->stream_reset_outstanding = 0;
TAILQ_FOREACH_SAFE(chk, &asoc->asconf_send_queue, sctp_next, nchk) {
TAILQ_REMOVE(&asoc->asconf_send_queue, chk, sctp_next);
if (chk->data) {
@@ -2154,11 +2123,17 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
asoc->total_flight = 0;
asoc->total_flight_count = 0;
/* process the INIT info (peer's info) */
- retval = sctp_process_init(init_cp, stcb);
- if (retval < 0) {
+ if (sctp_process_init(init_cp, stcb) < 0) {
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 13;
-
+ op_err = sctp_generate_cause(SCTP_CAUSE_OUT_OF_RESC, "");
+ SCTPDBG(SCTP_DEBUG_INPUT1, "sctp_process_init() failed\n");
+ sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
+ src, dst, sh, op_err,
+#if defined(__FreeBSD__) && !defined(__Userspace__)
+ mflowtype, mflowid,
+#endif
+ vrf_id, net->port);
return (NULL);
}
/*
@@ -2167,26 +2142,38 @@ sctp_process_cookie_existing(struct mbuf *m, int iphlen, int offset,
*/
net->hb_responded = 1;
- if (sctp_load_addresses_from_init(stcb, m,
- init_offset + sizeof(struct sctp_init_chunk),
- initack_offset, src, dst, init_src, stcb->asoc.port)) {
+ if ((retval = sctp_load_addresses_from_init(stcb, m,
+ init_offset + sizeof(struct sctp_init_chunk),
+ initack_offset, src, dst, init_src, stcb->asoc.port)) < 0) {
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 14;
-
+ op_err = sctp_generate_cause(SCTP_BASE_SYSCTL(sctp_diag_info_code),
+ "Problem with address parameters");
+ SCTPDBG(SCTP_DEBUG_INPUT1,
+ "Load addresses from INIT causes an abort %d\n",
+ retval);
+ sctp_abort_association(stcb->sctp_ep, stcb, m, iphlen,
+ src, dst, sh, op_err,
+#if defined(__FreeBSD__) && !defined(__Userspace__)
+ mflowtype, mflowid,
+#endif
+ vrf_id, net->port);
return (NULL);
}
/* respond with a COOKIE-ACK */
- sctp_stop_all_cookie_timers(stcb);
- sctp_toss_old_cookies(stcb, asoc);
sctp_send_cookie_ack(stcb);
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 15;
-
+ if (sctp_is_feature_on(inp, SCTP_PCB_FLAGS_AUTOCLOSE) &&
+ (asoc->sctp_autoclose_ticks > 0)) {
+ sctp_timer_start(SCTP_TIMER_TYPE_AUTOCLOSE, inp, stcb, NULL);
+ }
return (stcb);
}
if (how_indx < sizeof(asoc->cookie_how))
asoc->cookie_how[how_indx] = 16;
/* all other cases... */
+ SCTP_TCB_UNLOCK(stcb);
return (NULL);
}
@@ -2360,8 +2347,7 @@ sctp_process_cookie_new(struct mbuf *m, int iphlen, int offset,
asoc->advanced_peer_ack_point = asoc->last_acked_seq;
/* process the INIT info (peer's info) */
- retval = sctp_process_init(init_cp, stcb);
- if (retval < 0) {
+ if (sctp_process_init(init_cp, stcb) < 0) {
#if defined(__APPLE__) && !defined(__Userspace__)
atomic_add_int(&stcb->asoc.refcnt, 1);
SCTP_TCB_UNLOCK(stcb);
@@ -2377,9 +2363,9 @@ sctp_process_cookie_new(struct mbuf *m, int iphlen, int offset,
return (NULL);
}
/* load all addresses */
- if (sctp_load_addresses_from_init(stcb, m,
- init_offset + sizeof(struct sctp_init_chunk), initack_offset,
- src, dst, init_src, port)) {
+ if ((retval = sctp_load_addresses_from_init(stcb, m,
+ init_offset + sizeof(struct sctp_init_chunk),
+ initack_offset, src, dst, init_src, port)) < 0) {
#if defined(__APPLE__) && !defined(__Userspace__)
atomic_add_int(&stcb->asoc.refcnt, 1);
SCTP_TCB_UNLOCK(stcb);
@@ -2956,12 +2942,15 @@ sctp_handle_cookie_echo(struct mbuf *m, int iphlen, int offset,
had_a_existing_tcb = 1;
*stcb = sctp_process_cookie_existing(m, iphlen, offset,
src, dst, sh,
- cookie, cookie_len, *inp_p, *stcb, netp, to,
- &notification, auth_skipped, auth_offset, auth_len,
+ cookie, cookie_len, *inp_p, *stcb, netp, to,
+ &notification, auth_skipped, auth_offset, auth_len,
#if defined(__FreeBSD__) && !defined(__Userspace__)
mflowtype, mflowid,
#endif
vrf_id, port);
+ if (*stcb == NULL) {
+ *locked_tcb = NULL;
+ }
}
if (*stcb == NULL) {
@@ -3847,19 +3836,16 @@ sctp_find_stream_reset(struct sctp_tcb *stcb, uint32_t seq, struct sctp_tmit_chu
int len, clen;
asoc = &stcb->asoc;
- if (TAILQ_EMPTY(&stcb->asoc.control_send_queue)) {
- asoc->stream_reset_outstanding = 0;
- return (NULL);
- }
- if (stcb->asoc.str_reset == NULL) {
+ chk = asoc->str_reset;
+ if (TAILQ_EMPTY(&asoc->control_send_queue) ||
+ (chk == NULL)) {
asoc->stream_reset_outstanding = 0;
return (NULL);
}
- chk = stcb->asoc.str_reset;
if (chk->data == NULL) {
return (NULL);
}
- if (bchk) {
+ if (bchk != NULL) {
/* he wants a copy of the chk pointer */
*bchk = chk;
}
@@ -4798,6 +4784,7 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
int ret;
int abort_no_unlock = 0;
int ecne_seen = 0;
+ int abort_flag;
/*
* How big should this be, and should it be alloc'd? Lets try the
* d-mtu-ceiling for now (2k) and that should hopefully work ...
@@ -4962,29 +4949,6 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
}
return (NULL);
}
- } else if (ch->chunk_type == SCTP_SHUTDOWN_ACK) {
- if (vtag_in != asoc->my_vtag) {
- /*
- * this could be a stale SHUTDOWN-ACK or the
- * peer never got the SHUTDOWN-COMPLETE and
- * is still hung; we have started a new asoc
- * but it won't complete until the shutdown
- * is completed
- */
- if (stcb != NULL) {
- SCTP_TCB_UNLOCK(stcb);
- }
- SCTP_SNPRINTF(msg, sizeof(msg), "OOTB, %s:%d at %s", __FILE__, __LINE__, __func__);
- op_err = sctp_generate_cause(SCTP_BASE_SYSCTL(sctp_diag_info_code),
- msg);
- sctp_handle_ootb(m, iphlen, *offset, src, dst,
- sh, inp, op_err,
-#if defined(__FreeBSD__) && !defined(__Userspace__)
- mflowtype, mflowid, fibnum,
-#endif
- vrf_id, port);
- return (NULL);
- }
} else {
/* for all other chunks, vtag must match */
if (vtag_in != asoc->my_vtag) {
@@ -5047,10 +5011,7 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
chunk_buf);
if (ch == NULL) {
*offset = length;
- if (stcb != NULL) {
- SCTP_TCB_UNLOCK(stcb);
- }
- return (NULL);
+ return (stcb);
}
num_chunks++;
@@ -5084,12 +5045,12 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
/* The INIT chunk must be the only chunk. */
if ((num_chunks > 1) ||
(length - *offset > (int)SCTP_SIZE32(chk_length))) {
- /* RFC 4960 requires that no ABORT is sent */
+ /*
+ * RFC 4960bis requires stopping the
+ * processing of the packet.
+ */
*offset = length;
- if (stcb != NULL) {
- SCTP_TCB_UNLOCK(stcb);
- }
- return (NULL);
+ return (stcb);
}
/* Honor our resource limit. */
if (chk_length > SCTP_LARGEST_INIT_ACCEPTED) {
@@ -5296,20 +5257,19 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
if ((stcb != NULL) && (netp != NULL) && (*netp != NULL)) {
SCTP_STAT_INCR(sctps_recvheartbeat);
sctp_send_heartbeat_ack(stcb, m, *offset,
- chk_length, *netp);
+ chk_length, *netp);
}
break;
case SCTP_HEARTBEAT_ACK:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_HEARTBEAT_ACK\n");
if ((stcb == NULL) || (chk_length != sizeof(struct sctp_heartbeat_chunk))) {
/* Its not ours */
- *offset = length;
- return (stcb);
+ break;
}
SCTP_STAT_INCR(sctps_recvheartbeatack);
if ((netp != NULL) && (*netp != NULL)) {
sctp_handle_heartbeat_ack((struct sctp_heartbeat_chunk *)ch,
- stcb, *netp);
+ stcb, *netp);
}
break;
case SCTP_ABORT_ASSOCIATION:
@@ -5330,14 +5290,12 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_SHUTDOWN, stcb %p\n",
(void *)stcb);
if ((stcb == NULL) || (chk_length != sizeof(struct sctp_shutdown_chunk))) {
- *offset = length;
- return (stcb);
+ break;
}
if ((netp != NULL) && (*netp != NULL)) {
- int abort_flag = 0;
-
+ abort_flag = 0;
sctp_handle_shutdown((struct sctp_shutdown_chunk *)ch,
- stcb, *netp, &abort_flag);
+ stcb, *netp, &abort_flag);
if (abort_flag) {
*offset = length;
return (NULL);
@@ -5346,11 +5304,12 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
break;
case SCTP_SHUTDOWN_ACK:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_SHUTDOWN_ACK, stcb %p\n", (void *)stcb);
- if ((stcb != NULL) && (netp != NULL) && (*netp != NULL)) {
+ if ((chk_length == sizeof(struct sctp_shutdown_ack_chunk)) &&
+ (stcb != NULL) && (netp != NULL) && (*netp != NULL)) {
sctp_handle_shutdown_ack((struct sctp_shutdown_ack_chunk *)ch, stcb, *netp);
+ *offset = length;
+ return (NULL);
}
- *offset = length;
- return (NULL);
break;
case SCTP_OPERATION_ERROR:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_OP_ERR\n");
@@ -5494,7 +5453,7 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
case SCTP_COOKIE_ACK:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_COOKIE_ACK, stcb %p\n", (void *)stcb);
if ((stcb == NULL) || chk_length != sizeof(struct sctp_cookie_ack_chunk)) {
- return (stcb);
+ break;
}
if (inp->sctp_flags & SCTP_PCB_FLAGS_SOCKET_GONE) {
/* We are not interested anymore */
@@ -5524,26 +5483,29 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
break;
case SCTP_ECN_ECHO:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_ECN_ECHO\n");
- if ((stcb == NULL) || (chk_length != sizeof(struct sctp_ecne_chunk))) {
- /* Its not ours */
- *offset = length;
- return (stcb);
+ if (stcb == NULL) {
+ break;
}
if (stcb->asoc.ecn_supported == 0) {
goto unknown_chunk;
}
+ if (chk_length != sizeof(struct sctp_ecne_chunk)) {
+ break;
+ }
sctp_handle_ecn_echo((struct sctp_ecne_chunk *)ch, stcb);
ecne_seen = 1;
break;
case SCTP_ECN_CWR:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_ECN_CWR\n");
- if ((stcb == NULL) || (chk_length != sizeof(struct sctp_cwr_chunk))) {
- *offset = length;
- return (stcb);
+ if (stcb == NULL) {
+ break;
}
if (stcb->asoc.ecn_supported == 0) {
goto unknown_chunk;
}
+ if (chk_length != sizeof(struct sctp_cwr_chunk)) {
+ break;
+ }
sctp_handle_ecn_cwr((struct sctp_cwr_chunk *)ch, stcb, *netp);
break;
case SCTP_SHUTDOWN_COMPLETE:
@@ -5554,12 +5516,13 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
*offset = length;
return (stcb);
}
- if ((stcb != NULL) && (netp != NULL) && (*netp != NULL)) {
+ if ((chk_length == sizeof(struct sctp_shutdown_complete_chunk)) &&
+ (stcb != NULL) && (netp != NULL) && (*netp != NULL)) {
sctp_handle_shutdown_complete((struct sctp_shutdown_complete_chunk *)ch,
- stcb, *netp);
+ stcb, *netp);
+ *offset = length;
+ return (NULL);
}
- *offset = length;
- return (NULL);
break;
case SCTP_ASCONF:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_ASCONF\n");
@@ -5568,32 +5531,33 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
goto unknown_chunk;
}
sctp_handle_asconf(m, *offset, src,
- (struct sctp_asconf_chunk *)ch, stcb, asconf_cnt == 0);
+ (struct sctp_asconf_chunk *)ch, stcb, asconf_cnt == 0);
asconf_cnt++;
}
break;
case SCTP_ASCONF_ACK:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_ASCONF_ACK\n");
+ if (stcb == NULL) {
+ break;
+ }
+ if (stcb->asoc.asconf_supported == 0) {
+ goto unknown_chunk;
+ }
if (chk_length < sizeof(struct sctp_asconf_ack_chunk)) {
- /* Its not ours */
- *offset = length;
- return (stcb);
+ break;
}
- if ((stcb != NULL) && (netp != NULL) && (*netp != NULL)) {
- if (stcb->asoc.asconf_supported == 0) {
- goto unknown_chunk;
- }
+ if ((netp != NULL) && (*netp != NULL)) {
/* He's alive so give him credit */
if (SCTP_BASE_SYSCTL(sctp_logging_level) & SCTP_THRESHOLD_LOGGING) {
sctp_misc_ints(SCTP_THRESHOLD_CLEAR,
- stcb->asoc.overall_error_count,
- 0,
- SCTP_FROM_SCTP_INPUT,
- __LINE__);
+ stcb->asoc.overall_error_count,
+ 0,
+ SCTP_FROM_SCTP_INPUT,
+ __LINE__);
}
stcb->asoc.overall_error_count = 0;
sctp_handle_asconf_ack(m, *offset,
- (struct sctp_asconf_ack_chunk *)ch, stcb, *netp, &abort_no_unlock);
+ (struct sctp_asconf_ack_chunk *)ch, stcb, *netp, &abort_no_unlock);
if (abort_no_unlock)
return (NULL);
}
@@ -5602,72 +5566,70 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
case SCTP_IFORWARD_CUM_TSN:
SCTPDBG(SCTP_DEBUG_INPUT3, "%s\n",
ch->chunk_type == SCTP_FORWARD_CUM_TSN ? "FORWARD_TSN" : "I_FORWARD_TSN");
+ if (stcb == NULL) {
+ break;
+ }
+ if (stcb->asoc.prsctp_supported == 0) {
+ goto unknown_chunk;
+ }
if (chk_length < sizeof(struct sctp_forward_tsn_chunk)) {
- /* Its not ours */
- *offset = length;
- return (stcb);
+ break;
}
-
- if (stcb != NULL) {
- int abort_flag = 0;
-
- if (stcb->asoc.prsctp_supported == 0) {
- goto unknown_chunk;
- }
- if (((stcb->asoc.idata_supported == 1) && (ch->chunk_type == SCTP_FORWARD_CUM_TSN)) ||
- ((stcb->asoc.idata_supported == 0) && (ch->chunk_type == SCTP_IFORWARD_CUM_TSN))) {
- if (ch->chunk_type == SCTP_FORWARD_CUM_TSN) {
- SCTP_SNPRINTF(msg, sizeof(msg), "%s", "FORWARD-TSN chunk received when I-FORWARD-TSN was negotiated");
- } else {
- SCTP_SNPRINTF(msg, sizeof(msg), "%s", "I-FORWARD-TSN chunk received when FORWARD-TSN was negotiated");
- }
- op_err = sctp_generate_cause(SCTP_CAUSE_PROTOCOL_VIOLATION, msg);
- sctp_abort_an_association(inp, stcb, op_err, SCTP_SO_NOT_LOCKED);
- *offset = length;
- return (NULL);
+ if (((stcb->asoc.idata_supported == 1) && (ch->chunk_type == SCTP_FORWARD_CUM_TSN)) ||
+ ((stcb->asoc.idata_supported == 0) && (ch->chunk_type == SCTP_IFORWARD_CUM_TSN))) {
+ if (ch->chunk_type == SCTP_FORWARD_CUM_TSN) {
+ SCTP_SNPRINTF(msg, sizeof(msg), "%s", "FORWARD-TSN chunk received when I-FORWARD-TSN was negotiated");
+ } else {
+ SCTP_SNPRINTF(msg, sizeof(msg), "%s", "I-FORWARD-TSN chunk received when FORWARD-TSN was negotiated");
}
- *fwd_tsn_seen = 1;
- if (inp->sctp_flags & SCTP_PCB_FLAGS_SOCKET_GONE) {
- /* We are not interested anymore */
+ op_err = sctp_generate_cause(SCTP_CAUSE_PROTOCOL_VIOLATION, msg);
+ sctp_abort_an_association(inp, stcb, op_err, SCTP_SO_NOT_LOCKED);
+ *offset = length;
+ return (NULL);
+ }
+ *fwd_tsn_seen = 1;
+ if (inp->sctp_flags & SCTP_PCB_FLAGS_SOCKET_GONE) {
+ /* We are not interested anymore */
#if defined(__APPLE__) && !defined(__Userspace__)
- so = SCTP_INP_SO(inp);
- atomic_add_int(&stcb->asoc.refcnt, 1);
- SCTP_TCB_UNLOCK(stcb);
- SCTP_SOCKET_LOCK(so, 1);
- SCTP_TCB_LOCK(stcb);
- atomic_subtract_int(&stcb->asoc.refcnt, 1);
+ so = SCTP_INP_SO(inp);
+ atomic_add_int(&stcb->asoc.refcnt, 1);
+ SCTP_TCB_UNLOCK(stcb);
+ SCTP_SOCKET_LOCK(so, 1);
+ SCTP_TCB_LOCK(stcb);
+ atomic_subtract_int(&stcb->asoc.refcnt, 1);
#endif
- (void)sctp_free_assoc(inp, stcb, SCTP_NORMAL_PROC,
- SCTP_FROM_SCTP_INPUT + SCTP_LOC_31);
+ (void)sctp_free_assoc(inp, stcb, SCTP_NORMAL_PROC,
+ SCTP_FROM_SCTP_INPUT + SCTP_LOC_31);
#if defined(__APPLE__) && !defined(__Userspace__)
- SCTP_SOCKET_UNLOCK(so, 1);
+ SCTP_SOCKET_UNLOCK(so, 1);
#endif
- *offset = length;
- return (NULL);
- }
- /*
- * For sending a SACK this looks like DATA
- * chunks.
- */
- stcb->asoc.last_data_chunk_from = stcb->asoc.last_control_chunk_from;
- sctp_handle_forward_tsn(stcb,
- (struct sctp_forward_tsn_chunk *)ch, &abort_flag, m, *offset);
- if (abort_flag) {
- *offset = length;
- return (NULL);
- }
+ *offset = length;
+ return (NULL);
+ }
+ /*
+ * For sending a SACK this looks like DATA
+ * chunks.
+ */
+ stcb->asoc.last_data_chunk_from = stcb->asoc.last_control_chunk_from;
+ abort_flag = 0;
+ sctp_handle_forward_tsn(stcb,
+ (struct sctp_forward_tsn_chunk *)ch, &abort_flag, m, *offset);
+ if (abort_flag) {
+ *offset = length;
+ return (NULL);
}
break;
case SCTP_STREAM_RESET:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_STREAM_RESET\n");
- if ((stcb == NULL) || (chk_length < sizeof(struct sctp_stream_reset_tsn_req))) {
- /* Its not ours */
- *offset = length;
- return (stcb);
+ if (stcb == NULL) {
+ break;
}
if (stcb->asoc.reconfig_supported == 0) {
goto unknown_chunk;
}
+ if (chk_length < sizeof(struct sctp_stream_reset_tsn_req)) {
+ break;
+ }
if (sctp_handle_stream_reset(stcb, m, *offset, ch)) {
/* stop processing */
*offset = length;
@@ -5676,20 +5638,19 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
break;
case SCTP_PACKET_DROPPED:
SCTPDBG(SCTP_DEBUG_INPUT3, "SCTP_PACKET_DROPPED\n");
- /* re-get it all please */
+ if (stcb == NULL) {
+ break;
+ }
+ if (stcb->asoc.pktdrop_supported == 0) {
+ goto unknown_chunk;
+ }
if (chk_length < sizeof(struct sctp_pktdrop_chunk)) {
- /* Its not ours */
- *offset = length;
- return (stcb);
+ break;
}
-
- if ((stcb != NULL) && (netp != NULL) && (*netp != NULL)) {
- if (stcb->asoc.pktdrop_supported == 0) {
- goto unknown_chunk;
- }
+ if ((netp != NULL) && (*netp != NULL)) {
sctp_handle_packet_dropped((struct sctp_pktdrop_chunk *)ch,
- stcb, *netp,
- min(chk_length, contiguous));
+ stcb, *netp,
+ min(chk_length, contiguous));
}
break;
case SCTP_AUTHENTICATION:
@@ -5702,21 +5663,21 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
auth_skipped = 1;
}
/* skip this chunk (temporarily) */
- goto next_chunk;
+ break;
}
if (stcb->asoc.auth_supported == 0) {
goto unknown_chunk;
}
if ((chk_length < (sizeof(struct sctp_auth_chunk))) ||
(chk_length > (sizeof(struct sctp_auth_chunk) +
- SCTP_AUTH_DIGEST_LEN_MAX))) {
+ SCTP_AUTH_DIGEST_LEN_MAX))) {
/* Its not ours */
*offset = length;
return (stcb);
}
if (got_auth == 1) {
/* skip this chunk... it's already auth'd */
- goto next_chunk;
+ break;
}
got_auth = 1;
if (sctp_handle_auth(stcb, (struct sctp_auth_chunk *)ch, m, *offset)) {
@@ -5777,7 +5738,7 @@ sctp_process_control(struct mbuf *m, int iphlen, int *offset, int length,
break;
}
ch = (struct sctp_chunkhdr *)sctp_m_getptr(m, *offset,
- sizeof(struct sctp_chunkhdr), chunk_buf);
+ sizeof(struct sctp_chunkhdr), chunk_buf);
if (ch == NULL) {
*offset = length;
return (stcb);
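
Note on the recurring change in the sctp_input.c hunks above: early exits of the form `*offset = length; return (stcb);` are replaced by `break`, so a malformed or unexpected chunk only skips that one chunk while the loop keeps walking the rest of the packet. A minimal, self-contained sketch of that control flow (illustrative names and sizes, not the usrsctp API):

```cpp
#include <cstddef>
#include <cstdint>

// Walks TLV-style chunks in a buffer. A chunk that fails its own validation
// is skipped with `break` (which only exits the switch), so the loop still
// advances to the next chunk instead of returning early for the whole packet.
void ProcessChunks(const uint8_t* buf, size_t len) {
  constexpr size_t kExpectedSize = 8;  // illustrative fixed chunk size
  size_t offset = 0;
  while (offset + 4 <= len) {  // 4-byte chunk header: type, flags, length
    uint8_t type = buf[offset];
    uint16_t chk_length = static_cast<uint16_t>((buf[offset + 2] << 8) |
                                                buf[offset + 3]);
    if (chk_length < 4 || offset + chk_length > len) {
      return;  // Broken framing: stop processing the packet entirely.
    }
    switch (type) {
      case 1:  // some fixed-size chunk type (illustrative)
        if (chk_length != kExpectedSize) {
          break;  // Wrong size: skip this chunk, keep processing the rest.
        }
        // ... handle the chunk ...
        break;
      default:
        break;  // Unknown chunk type: skip it.
    }
    offset += (chk_length + 3u) & ~3u;  // Advance to the next (padded) chunk.
  }
}
```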
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_os_userspace.h b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_os_userspace.h
index 6c3348ad9f..46b618110c 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_os_userspace.h
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_os_userspace.h
@@ -886,7 +886,7 @@ int sctp_userspace_get_mtu_from_ifn(uint32_t if_index, int af);
#define SCTP_GATHER_MTU_FROM_ROUTE(sctp_ifa, sa, rt) ((rt != NULL) ? rt->rt_rmx.rmx_mtu : 0)
-#define SCTP_GATHER_MTU_FROM_INTFC(sctp_ifn) sctp_userspace_get_mtu_from_ifn(if_nametoindex(((struct ifaddrs *) (sctp_ifn))->ifa_name), AF_INET)
+#define SCTP_GATHER_MTU_FROM_INTFC(sctp_ifn) (sctp_ifn->ifn_mtu)
#define SCTP_SET_MTU_OF_ROUTE(sa, rt, mtu) do { \
if (rt != NULL) \
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_pcb.c b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_pcb.c
index d30019b22a..d1e84daad5 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_pcb.c
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_pcb.c
@@ -7533,7 +7533,7 @@ sctp_load_addresses_from_init(struct sctp_tcb *stcb, struct mbuf *m,
break;
}
phdr = sctp_get_next_param(m, offset, &param_buf,
- sizeof(param_buf));
+ sizeof(param_buf));
}
/* Now check to see if we need to purge any addresses */
TAILQ_FOREACH_SAFE(net, &stcb->asoc.nets, sctp_next, nnet) {
@@ -7543,11 +7543,15 @@ sctp_load_addresses_from_init(struct sctp_tcb *stcb, struct mbuf *m,
/* remove and free it */
stcb->asoc.numnets--;
TAILQ_REMOVE(&stcb->asoc.nets, net, sctp_next);
- sctp_free_remote_addr(net);
+ if (net == stcb->asoc.alternate) {
+ sctp_free_remote_addr(stcb->asoc.alternate);
+ stcb->asoc.alternate = NULL;
+ }
if (net == stcb->asoc.primary_destination) {
stcb->asoc.primary_destination = NULL;
sctp_select_primary_destination(stcb);
}
+ sctp_free_remote_addr(net);
}
}
if ((stcb->asoc.ecn_supported == 1) &&
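
The sctp_pcb.c hunk above reorders the cleanup so the association's cached `alternate` pointer is released and cleared before the net's own reference is dropped, preventing a dangling alias. A toy sketch of that ordering (illustrative types, not the usrsctp structures):

```cpp
// Clear or re-select any cached alias before dropping the container's
// reference, so no stale pointer can outlive the object it names.
struct Net { int refcnt = 1; };
struct Assoc {
  Net* alternate = nullptr;
  Net* primary = nullptr;
};

void Release(Net* n) { if (--n->refcnt == 0) delete n; }

void RemoveNet(Assoc& a, Net* net) {
  if (net == a.alternate) {   // Drop the cached alias first...
    Release(a.alternate);
    a.alternate = nullptr;
  }
  if (net == a.primary) {
    a.primary = nullptr;      // ...and let a new primary be selected.
  }
  Release(net);               // Only now give up the list's own reference.
}

int main() {
  Assoc a;
  Net* n = new Net;           // refcnt == 1: the list's reference
  a.alternate = n;
  n->refcnt++;                // refcnt == 2: the cached alternate alias
  RemoveNet(a, n);            // both references released, in the safe order
  return 0;
}
```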
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.c b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.c
index db0e7533ff..8472c3a1c0 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.c
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.c
@@ -80,7 +80,25 @@ sctp_sha1_final(unsigned char *digest, struct sctp_sha1_context *ctx)
{
SHA1_Final(digest, &ctx->sha_ctx);
}
+#elif defined(SCTP_USE_MBEDTLS_SHA1)
+void
+sctp_sha1_init(struct sctp_sha1_context *ctx)
+{
+ mbedtls_sha1_init(&ctx->sha1_ctx);
+ mbedtls_sha1_starts_ret(&ctx->sha1_ctx);
+}
+void
+sctp_sha1_update(struct sctp_sha1_context *ctx, const unsigned char *ptr, unsigned int siz)
+{
+ mbedtls_sha1_update_ret(&ctx->sha1_ctx, ptr, siz);
+}
+
+void
+sctp_sha1_final(unsigned char *digest, struct sctp_sha1_context *ctx)
+{
+ mbedtls_sha1_finish_ret(&ctx->sha1_ctx, digest);
+}
#else
#include <string.h>
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.h b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.h
index d535ee4639..9ff4ff7bdc 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.h
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_sha1.h
@@ -46,6 +46,8 @@ __FBSDID("$FreeBSD$");
#include <pk11pub.h>
#elif defined(SCTP_USE_OPENSSL_SHA1)
#include <openssl/sha.h>
+#elif defined(SCTP_USE_MBEDTLS_SHA1)
+#include <mbedtls/sha1.h>
#endif
struct sctp_sha1_context {
@@ -53,6 +55,8 @@ struct sctp_sha1_context {
struct PK11Context *pk11_ctx;
#elif defined(SCTP_USE_OPENSSL_SHA1)
SHA_CTX sha_ctx;
+#elif defined(SCTP_USE_MBEDTLS_SHA1)
+ mbedtls_sha1_context sha1_ctx;
#else
unsigned int A;
unsigned int B;
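
For reference, a standalone sketch of the mbedTLS 2.x calls the new SCTP_USE_MBEDTLS_SHA1 backend wraps (the `*_ret` variants shown here were renamed in mbedTLS 3.x); error returns are ignored for brevity:

```cpp
#include <mbedtls/sha1.h>
#include <cstdio>
#include <cstring>

int main() {
  const char* msg = "abc";
  unsigned char digest[20];

  mbedtls_sha1_context ctx;
  mbedtls_sha1_init(&ctx);
  mbedtls_sha1_starts_ret(&ctx);
  mbedtls_sha1_update_ret(&ctx,
                          reinterpret_cast<const unsigned char*>(msg),
                          std::strlen(msg));
  mbedtls_sha1_finish_ret(&ctx, digest);
  mbedtls_sha1_free(&ctx);  // The wrapper in the patch above omits this call.

  for (unsigned char b : digest) std::printf("%02x", b);
  std::printf("\n");  // a9993e364706816aba3e25717850c26c9cd0d89d
  return 0;
}
```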
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_userspace.c b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_userspace.c
index ba64aaff77..41aff19e08 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_userspace.c
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_userspace.c
@@ -98,23 +98,42 @@ sctp_userspace_set_threadname(const char *name)
int
sctp_userspace_get_mtu_from_ifn(uint32_t if_index, int af)
{
+#if defined(INET) || defined(INET6)
struct ifreq ifr;
int fd;
+#endif
+ int mtu;
- memset(&ifr, 0, sizeof(struct ifreq));
- if (if_indextoname(if_index, ifr.ifr_name) != NULL) {
- /* TODO can I use the raw socket here and not have to open a new one with each query? */
- if ((fd = socket(af, SOCK_DGRAM, 0)) < 0)
- return (0);
- if (ioctl(fd, SIOCGIFMTU, &ifr) < 0) {
+ switch (af) {
+#if defined(INET)
+ case AF_INET:
+#endif
+#if defined(INET6)
+ case AF_INET6:
+#endif
+#if defined(INET) || defined(INET6)
+ memset(&ifr, 0, sizeof(struct ifreq));
+ mtu = 0;
+ if (if_indextoname(if_index, ifr.ifr_name) != NULL) {
+ /* TODO can I use the raw socket here and not have to open a new one with each query? */
+ if ((fd = socket(af, SOCK_DGRAM, 0)) < 0) {
+ break;
+ }
+ if (ioctl(fd, SIOCGIFMTU, &ifr) >= 0) {
+ mtu = ifr.ifr_mtu;
+ }
close(fd);
- return (0);
}
- close(fd);
- return ifr.ifr_mtu;
- } else {
- return (0);
+ break;
+#endif
+ case AF_CONN:
+ mtu = 1280;
+ break;
+ default:
+ mtu = 0;
+ break;
}
+ return (mtu);
}
#endif
@@ -143,41 +162,60 @@ timingsafe_bcmp(const void *b1, const void *b2, size_t n)
int
sctp_userspace_get_mtu_from_ifn(uint32_t if_index, int af)
{
+#if defined(INET) || defined(INET6)
PIP_ADAPTER_ADDRESSES pAdapterAddrs, pAdapt;
DWORD AdapterAddrsSize, Err;
- int ret;
+#endif
+ int mtu;
- ret = 0;
- AdapterAddrsSize = 0;
- pAdapterAddrs = NULL;
- if ((Err = GetAdaptersAddresses(AF_UNSPEC, 0, NULL, NULL, &AdapterAddrsSize)) != 0) {
- if ((Err != ERROR_BUFFER_OVERFLOW) && (Err != ERROR_INSUFFICIENT_BUFFER)) {
- SCTPDBG(SCTP_DEBUG_USR, "GetAdaptersAddresses() sizing failed with error code %d, AdapterAddrsSize = %d\n", Err, AdapterAddrsSize);
- ret = -1;
+ switch (af) {
+#if defined(INET)
+ case AF_INET:
+#endif
+#if defined(INET6)
+ case AF_INET6:
+#endif
+#if defined(INET) || defined(INET6)
+ mtu = 0;
+ AdapterAddrsSize = 0;
+ pAdapterAddrs = NULL;
+ if ((Err = GetAdaptersAddresses(AF_UNSPEC, 0, NULL, NULL, &AdapterAddrsSize)) != 0) {
+ if ((Err != ERROR_BUFFER_OVERFLOW) && (Err != ERROR_INSUFFICIENT_BUFFER)) {
+ SCTPDBG(SCTP_DEBUG_USR, "GetAdaptersAddresses() sizing failed with error code %d, AdapterAddrsSize = %d\n", Err, AdapterAddrsSize);
+ mtu = -1;
+ goto cleanup;
+ }
+ }
+ if ((pAdapterAddrs = (PIP_ADAPTER_ADDRESSES) GlobalAlloc(GPTR, AdapterAddrsSize)) == NULL) {
+ SCTPDBG(SCTP_DEBUG_USR, "Memory allocation error!\n");
+ mtu = -1;
goto cleanup;
}
- }
- if ((pAdapterAddrs = (PIP_ADAPTER_ADDRESSES) GlobalAlloc(GPTR, AdapterAddrsSize)) == NULL) {
- SCTPDBG(SCTP_DEBUG_USR, "Memory allocation error!\n");
- ret = -1;
- goto cleanup;
- }
- if ((Err = GetAdaptersAddresses(AF_UNSPEC, 0, NULL, pAdapterAddrs, &AdapterAddrsSize)) != ERROR_SUCCESS) {
- SCTPDBG(SCTP_DEBUG_USR, "GetAdaptersAddresses() failed with error code %d\n", Err);
- ret = -1;
- goto cleanup;
- }
- for (pAdapt = pAdapterAddrs; pAdapt; pAdapt = pAdapt->Next) {
- if (pAdapt->IfIndex == if_index) {
- ret = pAdapt->Mtu;
- break;
+ if ((Err = GetAdaptersAddresses(AF_UNSPEC, 0, NULL, pAdapterAddrs, &AdapterAddrsSize)) != ERROR_SUCCESS) {
+ SCTPDBG(SCTP_DEBUG_USR, "GetAdaptersAddresses() failed with error code %d\n", Err);
+ mtu = -1;
+ goto cleanup;
}
+ for (pAdapt = pAdapterAddrs; pAdapt; pAdapt = pAdapt->Next) {
+ if (pAdapt->IfIndex == if_index) {
+ mtu = pAdapt->Mtu;
+ break;
+ }
+ }
+ cleanup:
+ if (pAdapterAddrs != NULL) {
+ GlobalFree(pAdapterAddrs);
+ }
+ break;
+#endif
+ case AF_CONN:
+ mtu = 1280;
+ break;
+ default:
+ mtu = 0;
+ break;
}
-cleanup:
- if (pAdapterAddrs != NULL) {
- GlobalFree(pAdapterAddrs);
- }
- return (ret);
+ return (mtu);
}
void
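
The rewritten sctp_userspace_get_mtu_from_ifn() now switches on the address family and returns a fixed 1280 bytes (the IPv6 minimum MTU) for AF_CONN, usrsctp's userspace transport that has no kernel interface. A self-contained version of the POSIX branch, which maps an interface index to a name and asks the kernel for its MTU via SIOCGIFMTU:

```cpp
#include <cstdio>
#include <cstring>
#include <net/if.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <unistd.h>

// Returns the interface MTU, or 0 if the index is unknown or the query fails.
static int mtu_from_ifindex(unsigned int if_index, int af) {
  struct ifreq ifr;
  std::memset(&ifr, 0, sizeof(ifr));
  if (if_indextoname(if_index, ifr.ifr_name) == nullptr) {
    return 0;
  }
  int fd = socket(af, SOCK_DGRAM, 0);
  if (fd < 0) {
    return 0;
  }
  int mtu = 0;
  if (ioctl(fd, SIOCGIFMTU, &ifr) >= 0) {
    mtu = ifr.ifr_mtu;
  }
  close(fd);
  return mtu;
}

int main() {
  std::printf("MTU of ifindex 1: %d\n", mtu_from_ifindex(1, AF_INET));
  return 0;
}
```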
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_usrreq.c b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_usrreq.c
index e5fba96717..e8cf78017a 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_usrreq.c
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctp_usrreq.c
@@ -34,7 +34,7 @@
#if defined(__FreeBSD__) && !defined(__Userspace__)
#include <sys/cdefs.h>
-__FBSDID("$FreeBSD: head/sys/netinet/sctp_usrreq.c 366750 2020-10-16 10:44:48Z tuexen $");
+__FBSDID("$FreeBSD$");
#endif
#include <netinet/sctp_os.h>
@@ -974,29 +974,29 @@ sctp_sendm(struct socket *so, int flags, struct mbuf *m, struct sockaddr *addr,
((inp->sctp_flags & SCTP_PCB_FLAGS_CONNECTED) ||
(inp->sctp_flags & SCTP_PCB_FLAGS_TCPTYPE))) {
goto connected_type;
- } else if (addr == NULL) {
+ }
+
+ error = 0;
+ if (addr == NULL) {
SCTP_LTRACE_ERR_RET_PKT(m, inp, NULL, NULL, SCTP_FROM_SCTP_USRREQ, EDESTADDRREQ);
error = EDESTADDRREQ;
- sctp_m_freem(m);
- if (control) {
- sctp_m_freem(control);
- control = NULL;
- }
- return (error);
+ } else if (addr->sa_family != AF_INET) {
+ SCTP_LTRACE_ERR_RET_PKT(m, inp, NULL, NULL, SCTP_FROM_SCTP_USRREQ, EAFNOSUPPORT);
+ error = EAFNOSUPPORT;
+#if defined(HAVE_SA_LEN)
+ } else if (addr->sa_len != sizeof(struct sockaddr_in)) {
+ SCTP_LTRACE_ERR_RET_PKT(m, inp, NULL, NULL, SCTP_FROM_SCTP_USRREQ, EINVAL);
+ error = EINVAL;
+#endif
}
-#ifdef INET6
- if (addr->sa_family != AF_INET) {
- /* must be a v4 address! */
- SCTP_LTRACE_ERR_RET_PKT(m, inp, NULL, NULL, SCTP_FROM_SCTP_USRREQ, EDESTADDRREQ);
+ if (error != 0) {
sctp_m_freem(m);
if (control) {
sctp_m_freem(control);
control = NULL;
}
- error = EDESTADDRREQ;
return (error);
}
-#endif /* INET6 */
connected_type:
/* now what about control */
if (control) {
@@ -6112,6 +6112,7 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
return (EINVAL);
}
if ((paddrp->spp_flags & SPP_PMTUD_DISABLE) &&
+ (paddrp->spp_pathmtu > 0) &&
((paddrp->spp_pathmtu < SCTP_SMALLEST_PMTU) ||
(paddrp->spp_pathmtu > SCTP_LARGEST_PMTU))) {
if (stcb)
@@ -6156,28 +6157,30 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
SCTP_FROM_SCTP_USRREQ + SCTP_LOC_11);
}
net->dest_state |= SCTP_ADDR_NO_PMTUD;
- net->mtu = paddrp->spp_pathmtu;
- switch (net->ro._l_addr.sa.sa_family) {
+ if (paddrp->spp_pathmtu > 0) {
+ net->mtu = paddrp->spp_pathmtu;
+ switch (net->ro._l_addr.sa.sa_family) {
#ifdef INET
- case AF_INET:
- net->mtu += SCTP_MIN_V4_OVERHEAD;
- break;
+ case AF_INET:
+ net->mtu += SCTP_MIN_V4_OVERHEAD;
+ break;
#endif
#ifdef INET6
- case AF_INET6:
- net->mtu += SCTP_MIN_OVERHEAD;
- break;
+ case AF_INET6:
+ net->mtu += SCTP_MIN_OVERHEAD;
+ break;
#endif
#if defined(__Userspace__)
- case AF_CONN:
- net->mtu += sizeof(struct sctphdr);
- break;
+ case AF_CONN:
+ net->mtu += sizeof(struct sctphdr);
+ break;
#endif
- default:
- break;
- }
- if (net->mtu < stcb->asoc.smallest_mtu) {
- sctp_pathmtu_adjustment(stcb, net->mtu);
+ default:
+ break;
+ }
+ if (net->mtu < stcb->asoc.smallest_mtu) {
+ sctp_pathmtu_adjustment(stcb, net->mtu);
+ }
}
}
if (paddrp->spp_flags & SPP_PMTUD_ENABLE) {
@@ -6186,7 +6189,7 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
}
net->dest_state &= ~SCTP_ADDR_NO_PMTUD;
}
- if (paddrp->spp_pathmaxrxt) {
+ if (paddrp->spp_pathmaxrxt > 0) {
if (net->dest_state & SCTP_ADDR_PF) {
if (net->error_count > paddrp->spp_pathmaxrxt) {
net->dest_state &= ~SCTP_ADDR_PF;
@@ -6229,7 +6232,7 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
#endif
} else {
/************************ASSOC ONLY -- NO NET SPECIFIC SET ******************/
- if (paddrp->spp_pathmaxrxt != 0) {
+ if (paddrp->spp_pathmaxrxt > 0) {
stcb->asoc.def_net_failure = paddrp->spp_pathmaxrxt;
TAILQ_FOREACH(net, &stcb->asoc.nets, sctp_next) {
if (net->dest_state & SCTP_ADDR_PF) {
@@ -6261,7 +6264,6 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
net->failure_threshold = paddrp->spp_pathmaxrxt;
}
}
-
if (paddrp->spp_flags & SPP_HB_ENABLE) {
if (paddrp->spp_hbinterval != 0) {
stcb->asoc.heart_beat_delay = paddrp->spp_hbinterval;
@@ -6304,31 +6306,35 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
SCTP_FROM_SCTP_USRREQ + SCTP_LOC_16);
}
net->dest_state |= SCTP_ADDR_NO_PMTUD;
- net->mtu = paddrp->spp_pathmtu;
- switch (net->ro._l_addr.sa.sa_family) {
+ if (paddrp->spp_pathmtu > 0) {
+ net->mtu = paddrp->spp_pathmtu;
+ switch (net->ro._l_addr.sa.sa_family) {
#ifdef INET
- case AF_INET:
- net->mtu += SCTP_MIN_V4_OVERHEAD;
- break;
+ case AF_INET:
+ net->mtu += SCTP_MIN_V4_OVERHEAD;
+ break;
#endif
#ifdef INET6
- case AF_INET6:
- net->mtu += SCTP_MIN_OVERHEAD;
- break;
+ case AF_INET6:
+ net->mtu += SCTP_MIN_OVERHEAD;
+ break;
#endif
#if defined(__Userspace__)
- case AF_CONN:
- net->mtu += sizeof(struct sctphdr);
- break;
+ case AF_CONN:
+ net->mtu += sizeof(struct sctphdr);
+ break;
#endif
- default:
- break;
- }
- if (net->mtu < stcb->asoc.smallest_mtu) {
- sctp_pathmtu_adjustment(stcb, net->mtu);
+ default:
+ break;
+ }
+ if (net->mtu < stcb->asoc.smallest_mtu) {
+ sctp_pathmtu_adjustment(stcb, net->mtu);
+ }
}
}
- stcb->asoc.default_mtu = paddrp->spp_pathmtu;
+ if (paddrp->spp_pathmtu > 0) {
+ stcb->asoc.default_mtu = paddrp->spp_pathmtu;
+ }
sctp_stcb_feature_on(inp, stcb, SCTP_PCB_FLAGS_DO_NOT_PMTUD);
}
if (paddrp->spp_flags & SPP_PMTUD_ENABLE) {
@@ -6374,7 +6380,7 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
* For the TOS/FLOWLABEL stuff you set it
* with the options on the socket
*/
- if (paddrp->spp_pathmaxrxt != 0) {
+ if (paddrp->spp_pathmaxrxt > 0) {
inp->sctp_ep.def_net_failure = paddrp->spp_pathmaxrxt;
}
@@ -6400,7 +6406,9 @@ sctp_setopt(struct socket *so, int optname, void *optval, size_t optsize,
inp->sctp_ep.default_mtu = 0;
sctp_feature_off(inp, SCTP_PCB_FLAGS_DO_NOT_PMTUD);
} else if (paddrp->spp_flags & SPP_PMTUD_DISABLE) {
- inp->sctp_ep.default_mtu = paddrp->spp_pathmtu;
+ if (paddrp->spp_pathmtu > 0) {
+ inp->sctp_ep.default_mtu = paddrp->spp_pathmtu;
+ }
sctp_feature_on(inp, SCTP_PCB_FLAGS_DO_NOT_PMTUD);
}
if (paddrp->spp_flags & SPP_DSCP) {
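
The option handling above now ignores a zero spp_pathmtu and only applies values inside [SCTP_SMALLEST_PMTU, SCTP_LARGEST_PMTU]. For context, a hedged sketch of how an application requests a pinned path MTU with PMTU discovery disabled, using the RFC 6458 socket API (assumes a platform that ships `<netinet/sctp.h>` and these field/flag names, which match the ones used in the hunk; not a usrsctp-specific call):

```cpp
#include <cstdint>
#include <cstring>
#include <netinet/in.h>
#include <netinet/sctp.h>
#include <sys/socket.h>

// Disable path-MTU discovery and pin the path MTU. A zeroed spp_address
// applies the setting association/endpoint wide rather than to one peer.
// Per the change above, a spp_pathmtu of 0 would now leave the MTU untouched.
int pin_path_mtu(int sctp_fd, uint32_t mtu_bytes) {
  struct sctp_paddrparams params;
  std::memset(&params, 0, sizeof(params));
  params.spp_flags = SPP_PMTUD_DISABLE;
  params.spp_pathmtu = mtu_bytes;  // e.g. 1200
  return setsockopt(sctp_fd, IPPROTO_SCTP, SCTP_PEER_ADDR_PARAMS,
                    &params, sizeof(params));
}
```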
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctputil.c b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctputil.c
index 79838e40da..639b36f307 100755
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctputil.c
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet/sctputil.c
@@ -4837,7 +4837,7 @@ sctp_handle_ootb(struct mbuf *m, int iphlen, int offset,
* if there is return 1, else return 0.
*/
int
-sctp_is_there_an_abort_here(struct mbuf *m, int iphlen, uint32_t * vtagfill)
+sctp_is_there_an_abort_here(struct mbuf *m, int iphlen, uint32_t *vtag)
{
struct sctp_chunkhdr *ch;
struct sctp_init_chunk *init_chk, chunk_buf;
@@ -4858,12 +4858,13 @@ sctp_is_there_an_abort_here(struct mbuf *m, int iphlen, uint32_t * vtagfill)
/* yep, tell them */
return (1);
}
- if (ch->chunk_type == SCTP_INITIATION) {
+ if ((ch->chunk_type == SCTP_INITIATION) ||
+ (ch->chunk_type == SCTP_INITIATION_ACK)) {
/* need to update the Vtag */
init_chk = (struct sctp_init_chunk *)sctp_m_getptr(m,
- offset, sizeof(*init_chk), (uint8_t *) & chunk_buf);
+ offset, sizeof(struct sctp_init_chunk), (uint8_t *) & chunk_buf);
if (init_chk != NULL) {
- *vtagfill = ntohl(init_chk->init.initiate_tag);
+ *vtag = ntohl(init_chk->init.initiate_tag);
}
}
/* Nope, move to the next chunk */
diff --git a/third_party/usrsctp/usrsctplib/usrsctplib/netinet6/sctp6_usrreq.c b/third_party/usrsctp/usrsctplib/usrsctplib/netinet6/sctp6_usrreq.c
index 5a931dd5a2..aa0c0051a5 100644
--- a/third_party/usrsctp/usrsctplib/usrsctplib/netinet6/sctp6_usrreq.c
+++ b/third_party/usrsctp/usrsctplib/usrsctplib/netinet6/sctp6_usrreq.c
@@ -34,7 +34,7 @@
#if defined(__FreeBSD__) && !defined(__Userspace__)
#include <sys/cdefs.h>
-__FBSDID("$FreeBSD: head/sys/netinet6/sctp6_usrreq.c 365071 2020-09-01 21:19:14Z mjg $");
+__FBSDID("$FreeBSD$");
#endif
#include <netinet/sctp_os.h>
@@ -259,13 +259,14 @@ sctp6_input(struct mbuf **i_pak, int *offp, int proto)
if (IN6_IS_ADDR_MULTICAST(&ip6->ip6_dst)) {
goto out;
}
- ecn_bits = ((ntohl(ip6->ip6_flow) >> 20) & 0x000000ff);
#if defined(__FreeBSD__)
+ ecn_bits = IPV6_TRAFFIC_CLASS(ip6);
if (m->m_pkthdr.csum_flags & CSUM_SCTP_VALID) {
SCTP_STAT_INCR(sctps_recvhwcrc);
compute_crc = 0;
} else {
#else
+ ecn_bits = ((ntohl(ip6->ip6_flow) >> 20) & 0x000000ff);
if (SCTP_BASE_SYSCTL(sctp_no_csum_on_loopback) &&
(IN6_ARE_ADDR_EQUAL(&src.sin6_addr, &dst.sin6_addr))) {
SCTP_STAT_INCR(sctps_recvhwcrc);
@@ -654,9 +655,10 @@ out:
return (error);
}
-SYSCTL_PROC(_net_inet6_sctp6, OID_AUTO, getcred, CTLTYPE_OPAQUE | CTLFLAG_RW,
- 0, 0,
- sctp6_getcred, "S,ucred", "Get the ucred of a SCTP6 connection");
+SYSCTL_PROC(_net_inet6_sctp6, OID_AUTO, getcred,
+ CTLTYPE_OPAQUE | CTLFLAG_RW | CTLFLAG_NEEDGIANT,
+ 0, 0, sctp6_getcred, "S,ucred",
+ "Get the ucred of a SCTP6 connection");
#endif
/* This is the same as the sctp_abort() could be made common */
@@ -1007,6 +1009,46 @@ sctp6_send(struct socket *so, int flags, struct mbuf *m, struct mbuf *nam,
SCTP_LTRACE_ERR_RET(inp, NULL, NULL, SCTP_FROM_SCTP6_USRREQ, EDESTADDRREQ);
return (EDESTADDRREQ);
}
+ switch (addr->sa_family) {
+#ifdef INET
+ case AF_INET:
+#if defined(HAVE_SA_LEN)
+ if (addr->sa_len != sizeof(struct sockaddr_in)) {
+ if (control) {
+ SCTP_RELEASE_PKT(control);
+ control = NULL;
+ }
+ SCTP_RELEASE_PKT(m);
+ SCTP_LTRACE_ERR_RET(inp, NULL, NULL, SCTP_FROM_SCTP6_USRREQ, EINVAL);
+ return (EINVAL);
+ }
+#endif
+ break;
+#endif
+#ifdef INET6
+ case AF_INET6:
+#if defined(HAVE_SA_LEN)
+ if (addr->sa_len != sizeof(struct sockaddr_in6)) {
+ if (control) {
+ SCTP_RELEASE_PKT(control);
+ control = NULL;
+ }
+ SCTP_RELEASE_PKT(m);
+ SCTP_LTRACE_ERR_RET(inp, NULL, NULL, SCTP_FROM_SCTP6_USRREQ, EINVAL);
+ return (EINVAL);
+ }
+#endif
+ break;
+#endif
+ default:
+ if (control) {
+ SCTP_RELEASE_PKT(control);
+ control = NULL;
+ }
+ SCTP_RELEASE_PKT(m);
+ SCTP_LTRACE_ERR_RET(inp, NULL, NULL, SCTP_FROM_SCTP6_USRREQ, EINVAL);
+ return (EINVAL);
+ }
#ifdef INET
sin6 = (struct sockaddr_in6 *)addr;
if (SCTP_IPV6_V6ONLY(inp)) {
@@ -1015,10 +1057,20 @@ sctp6_send(struct socket *so, int flags, struct mbuf *m, struct mbuf *nam,
* v4 addr or v4-mapped addr
*/
if (addr->sa_family == AF_INET) {
+ if (control) {
+ SCTP_RELEASE_PKT(control);
+ control = NULL;
+ }
+ SCTP_RELEASE_PKT(m);
SCTP_LTRACE_ERR_RET(inp, NULL, NULL, SCTP_FROM_SCTP6_USRREQ, EINVAL);
return (EINVAL);
}
if (IN6_IS_ADDR_V4MAPPED(&sin6->sin6_addr)) {
+ if (control) {
+ SCTP_RELEASE_PKT(control);
+ control = NULL;
+ }
+ SCTP_RELEASE_PKT(m);
SCTP_LTRACE_ERR_RET(inp, NULL, NULL, SCTP_FROM_SCTP6_USRREQ, EINVAL);
return (EINVAL);
}
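
On the ecn_bits change in sctp6_input() above: the first 32 bits of an IPv6 header are version (4) | traffic class (8) | flow label (20), so the non-FreeBSD branch extracts the traffic class (whose two least-significant bits carry ECN) by shifting, while FreeBSD's IPV6_TRAFFIC_CLASS() macro does the same. A small worked example of the bit layout:

```cpp
#include <cstdint>
#include <cstdio>

static uint8_t traffic_class(uint32_t ip6_flow_host_order) {
  // version(4) | traffic class(8) | flow label(20)
  return static_cast<uint8_t>((ip6_flow_host_order >> 20) & 0xff);
}

int main() {
  // version = 6, traffic class = 0x02 (ECT(0)), flow label = 0x12345
  uint32_t first_word = (6u << 28) | (0x02u << 20) | 0x12345u;
  std::printf("traffic class = 0x%02x, ecn = %u\n",
              traffic_class(first_word), traffic_class(first_word) & 0x3u);
  return 0;
}
```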
diff --git a/tools_webrtc/get_landmines.py b/tools_webrtc/get_landmines.py
index 3b5965fce4..764f053f2a 100755
--- a/tools_webrtc/get_landmines.py
+++ b/tools_webrtc/get_landmines.py
@@ -55,6 +55,7 @@ def print_landmines(): # pylint: disable=invalid-name
print 'Clobber to change neteq_rtpplay type to executable'
print 'Clobber to remove .xctest files.'
print 'Clobber to remove .xctest files (take 2).'
+ print 'Switching rtc_executable to rtc_test'
def main():
diff --git a/tools_webrtc/libs/generate_licenses.py b/tools_webrtc/libs/generate_licenses.py
index 6ed4e37147..cbb1514d3c 100755
--- a/tools_webrtc/libs/generate_licenses.py
+++ b/tools_webrtc/libs/generate_licenses.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2016 The WebRTC project authors. All Rights Reserved.
#
@@ -23,12 +23,16 @@ Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
import sys
import argparse
-import cgi
import json
import logging
import os
import re
import subprocess
+try:
+ # python 3.2+
+ from html import escape
+except ImportError:
+ from cgi import escape
# Third_party library to licences mapping. Keys are names of the libraries
# (right after the `third_party/` prefix)
@@ -42,6 +46,7 @@ LIB_TO_LICENSES_DICT = {
],
'bazel': ['third_party/bazel/LICENSE'],
'boringssl': ['third_party/boringssl/src/LICENSE'],
+ 'crc32c': ['third_party/crc32c/src/LICENSE'],
'errorprone': [
'third_party/android_deps/libs/'
'com_google_errorprone_error_prone_core/LICENSE'
@@ -181,7 +186,7 @@ class LicenseBuilder(object):
target,
]
logging.debug('Running: %r', cmd)
- output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT)
+ output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8')
logging.debug('Output: %s', output_json)
return output_json
@@ -208,7 +213,7 @@ class LicenseBuilder(object):
self.common_licenses_dict.keys())
if missing_licenses:
error_msg = 'Missing licenses for following third_party targets: %s' % \
- ', '.join(missing_licenses)
+ ', '.join(sorted(missing_licenses))
logging.error(error_msg)
raise Exception(error_msg)
@@ -233,7 +238,7 @@ class LicenseBuilder(object):
for path in self.common_licenses_dict[license_lib]:
license_path = os.path.join(WEBRTC_ROOT, path)
with open(license_path, 'r') as license_file:
- license_text = cgi.escape(license_file.read(), quote=True)
+ license_text = escape(license_file.read(), quote=True)
output_license_file.write(license_text)
output_license_file.write('\n')
output_license_file.write('```\n\n')
diff --git a/tools_webrtc/libs/generate_licenses_test.py b/tools_webrtc/libs/generate_licenses_test.py
index 51acb89881..ebef78e132 100755
--- a/tools_webrtc/libs/generate_licenses_test.py
+++ b/tools_webrtc/libs/generate_licenses_test.py
@@ -10,7 +10,12 @@
# be found in the AUTHORS file in the root of the source tree.
import unittest
-import mock
+try:
+ # python 3.3+
+ from unittest.mock import patch
+except ImportError:
+ # From site-package
+ from mock import patch
from generate_licenses import LicenseBuilder
@@ -32,21 +37,21 @@ class TestLicenseBuilder(unittest.TestCase):
"""
def testParseLibraryName(self):
- self.assertEquals(
+ self.assertEqual(
LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'),
'libname1')
- self.assertEquals(
+ self.assertEqual(
LicenseBuilder._ParseLibraryName(
'//a/b/third_party/libname2:c(d)'), 'libname2')
- self.assertEquals(
+ self.assertEqual(
LicenseBuilder._ParseLibraryName(
'//a/b/third_party/libname3/c:d(e)'), 'libname3')
- self.assertEquals(
+ self.assertEqual(
LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None)
def testParseLibrarySimpleMatch(self):
builder = LicenseBuilder([], [], {}, {})
- self.assertEquals(builder._ParseLibrary('//a/b/third_party/libname:c'),
+ self.assertEqual(builder._ParseLibrary('//a/b/third_party/libname:c'),
'libname')
def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self):
@@ -54,7 +59,7 @@ class TestLicenseBuilder(unittest.TestCase):
'libname:foo.*': ['path/to/LICENSE'],
}
builder = LicenseBuilder([], [], lib_dict, {})
- self.assertEquals(
+ self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname:bar_java'),
'libname')
@@ -63,7 +68,7 @@ class TestLicenseBuilder(unittest.TestCase):
'libname:foo.*': ['path/to/LICENSE'],
}
builder = LicenseBuilder([], [], {}, lib_regex_dict)
- self.assertEquals(
+ self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'),
'libname:foo.*')
@@ -72,7 +77,7 @@ class TestLicenseBuilder(unittest.TestCase):
'libname/foo:bar.*': ['path/to/LICENSE'],
}
builder = LicenseBuilder([], [], {}, lib_regex_dict)
- self.assertEquals(
+ self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'),
'libname/foo:bar.*')
@@ -81,29 +86,29 @@ class TestLicenseBuilder(unittest.TestCase):
'libname/foo.*bar.*': ['path/to/LICENSE'],
}
builder = LicenseBuilder([], [], {}, lib_regex_dict)
- self.assertEquals(
+ self.assertEqual(
builder._ParseLibrary(
'//a/b/third_party/libname/fooHAHA:bar_java'),
'libname/foo.*bar.*')
- @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
+ @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGetThirdPartyLibrariesWithoutRegex(self):
builder = LicenseBuilder([], [], {}, {})
- self.assertEquals(
+ self.assertEqual(
builder._GetThirdPartyLibraries('out/arm', 'target1'),
set(['libname1', 'libname2', 'libname3']))
- @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
+ @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGetThirdPartyLibrariesWithRegex(self):
lib_regex_dict = {
'libname2:c.*': ['path/to/LICENSE'],
}
builder = LicenseBuilder([], [], {}, lib_regex_dict)
- self.assertEquals(
+ self.assertEqual(
builder._GetThirdPartyLibraries('out/arm', 'target1'),
set(['libname1', 'libname2:c.*', 'libname3']))
- @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
+ @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGenerateLicenseTextFailIfUnknownLibrary(self):
lib_dict = {
'simple_library': ['path/to/LICENSE'],
@@ -113,8 +118,8 @@ class TestLicenseBuilder(unittest.TestCase):
with self.assertRaises(Exception) as context:
builder.GenerateLicenseText('dummy/dir')
- self.assertEquals(
- context.exception.message,
+ self.assertEqual(
+ context.exception.args[0],
'Missing licenses for following third_party targets: '
'libname1, libname2, libname3')
diff --git a/tools_webrtc/mb/mb_config.pyl b/tools_webrtc/mb/mb_config.pyl
index 13159a3d94..a330315cc0 100644
--- a/tools_webrtc/mb/mb_config.pyl
+++ b/tools_webrtc/mb/mb_config.pyl
@@ -33,6 +33,7 @@
'Mac64 Release': 'release_bot_x64',
'Mac64 Builder': 'pure_release_bot_x64',
'Mac Asan': 'mac_asan_clang_release_bot_x64',
+ 'MacARM64 M1 Release': 'release_bot_arm64',
# Linux
'Linux32 Debug': 'no_h264_debug_bot_x86',
@@ -91,7 +92,6 @@
'Win64 Debug (Clang)': 'win_clang_debug_bot_x64',
'Win64 Release (Clang)': 'win_clang_release_bot_x64',
'Win64 ASan': 'win_asan_clang_release_bot_x64',
- 'Win64 UWP': 'win_uwp_release_bot_x64',
'Win (more configs)': {
'bwe_test_logging':
'bwe_test_logging_x86',
@@ -233,7 +233,6 @@
'win_asan': 'win_asan_clang_release_bot_x64',
'win_x64_clang_dbg_win8': 'win_clang_debug_bot_x64',
'win_x64_clang_dbg_win10': 'win_clang_debug_bot_x64',
- 'win_x64_uwp': 'win_uwp_release_bot_x64',
'win_x86_more_configs': {
'bwe_test_logging':
'bwe_test_logging_x86',
@@ -340,10 +339,6 @@
'asan', 'clang', 'full_symbols', 'openh264', 'release_bot', 'x64',
'win_fastlink',
],
- 'win_uwp_release_bot_x64': [
- # UWP passes compiler flags that are not supported by goma.
- 'no_clang', 'openh264', 'x64', 'winuwp', 'release_bot_no_goma'
- ],
# Mac
'mac_asan_clang_release_bot_x64': [
@@ -615,10 +610,6 @@
'gn_args': 'rtc_enable_sctp=false',
},
- 'winuwp': {
- 'gn_args': 'target_os="winuwp"',
- },
-
'win_undef_unicode': {
'gn_args': 'rtc_win_undef_unicode=true',
},
diff --git a/tools_webrtc/msan/suppressions.txt b/tools_webrtc/msan/suppressions.txt
index ce8b14292e..47a0dff16f 100644
--- a/tools_webrtc/msan/suppressions.txt
+++ b/tools_webrtc/msan/suppressions.txt
@@ -4,8 +4,8 @@
#
# Please think twice before you add or remove these rules.
-# This is a stripped down copy of Chromium's blacklist.txt, to enable
-# adding WebRTC-specific blacklist entries.
+# This is a stripped down copy of Chromium's ignorelist.txt, to enable
+# adding WebRTC-specific ignorelist entries.
# Uninit in zlib. http://crbug.com/116277
fun:*MOZ_Z_deflate*
diff --git a/tools_webrtc/ubsan/suppressions.txt b/tools_webrtc/ubsan/suppressions.txt
index 50b66e915a..dc76f38c20 100644
--- a/tools_webrtc/ubsan/suppressions.txt
+++ b/tools_webrtc/ubsan/suppressions.txt
@@ -1,7 +1,7 @@
#############################################################################
-# UBSan blacklist.
+# UBSan ignorelist.
#
-# This is a WebRTC-specific replacement of Chromium's blacklist.txt.
+# This is a WebRTC-specific replacement of Chromium's ignorelist.txt.
# Only exceptions for third party libraries go here. WebRTC's code should use
# the RTC_NO_SANITIZE macro. Please think twice before adding new exceptions.
diff --git a/tools_webrtc/ubsan/vptr_suppressions.txt b/tools_webrtc/ubsan/vptr_suppressions.txt
index 739de36659..617ba88f98 100644
--- a/tools_webrtc/ubsan/vptr_suppressions.txt
+++ b/tools_webrtc/ubsan/vptr_suppressions.txt
@@ -1,5 +1,5 @@
#############################################################################
-# UBSan vptr blacklist.
+# UBSan vptr ignorelist.
# Function and type based blacklisting use a mangled name, and it is especially
# tricky to represent C++ types. For now, any possible changes by name manglings
# are simply represented as wildcard expressions of regexp, and thus it might be
@@ -8,7 +8,7 @@
# Please think twice before you add or remove these rules.
#
# This is a stripped down copy of Chromium's vptr_blacklist.txt, to enable
-# adding WebRTC-specific blacklist entries.
+# adding WebRTC-specific ignorelist entries.
#############################################################################
# Using raw pointer values.
diff --git a/video/BUILD.gn b/video/BUILD.gn
index c524212353..7743aba944 100644
--- a/video/BUILD.gn
+++ b/video/BUILD.gn
@@ -67,6 +67,7 @@ rtc_library("video") {
"../api/crypto:options",
"../api/rtc_event_log",
"../api/task_queue",
+ "../api/units:time_delta",
"../api/units:timestamp",
"../api/video:encoded_image",
"../api/video:recordable_encoded_frame",
@@ -701,6 +702,7 @@ if (rtc_include_tests) {
"../modules/video_coding:webrtc_multiplex",
"../modules/video_coding:webrtc_vp8",
"../modules/video_coding:webrtc_vp9",
+ "../modules/video_coding:webrtc_vp9_helpers",
"../modules/video_coding/codecs/av1:libaom_av1_encoder",
"../rtc_base",
"../rtc_base:checks",
diff --git a/video/adaptation/balanced_constraint.cc b/video/adaptation/balanced_constraint.cc
index 3de81e72e0..ec0b8e41d5 100644
--- a/video/adaptation/balanced_constraint.cc
+++ b/video/adaptation/balanced_constraint.cc
@@ -41,16 +41,16 @@ bool BalancedConstraint::IsAdaptationUpAllowed(
// exceed bitrate constraints.
if (degradation_preference_provider_->degradation_preference() ==
DegradationPreference::BALANCED) {
+ int frame_size_pixels = input_state.single_active_stream_pixels().value_or(
+ input_state.frame_size_pixels().value());
if (!balanced_settings_.CanAdaptUp(
- input_state.video_codec_type(),
- input_state.frame_size_pixels().value(),
+ input_state.video_codec_type(), frame_size_pixels,
encoder_target_bitrate_bps_.value_or(0))) {
return false;
}
if (DidIncreaseResolution(restrictions_before, restrictions_after) &&
!balanced_settings_.CanAdaptUpResolution(
- input_state.video_codec_type(),
- input_state.frame_size_pixels().value(),
+ input_state.video_codec_type(), frame_size_pixels,
encoder_target_bitrate_bps_.value_or(0))) {
return false;
}
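
The balanced_constraint.cc change (and the same fallback in video_stream_encoder_resource_manager.cc further down) prefers the pixel count of the single active simulcast stream and falls back to the full input frame size when that value is absent. A minimal sketch of the optional-fallback pattern with std::optional (illustrative names and default, not the WebRTC types):

```cpp
#include <cstdio>
#include <optional>

int EffectivePixels(std::optional<int> single_active_stream_pixels,
                    std::optional<int> frame_size_pixels) {
  constexpr int kDefaultPixels = 176 * 144;  // illustrative default
  return single_active_stream_pixels.value_or(
      frame_size_pixels.value_or(kDefaultPixels));
}

int main() {
  std::printf("%d\n", EffectivePixels(std::nullopt, 1280 * 720));  // 921600
  std::printf("%d\n", EffectivePixels(640 * 360, 1280 * 720));     // 230400
  return 0;
}
```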
diff --git a/video/adaptation/encode_usage_resource.cc b/video/adaptation/encode_usage_resource.cc
index 8fe7450a0c..c42c63f4b7 100644
--- a/video/adaptation/encode_usage_resource.cc
+++ b/video/adaptation/encode_usage_resource.cc
@@ -21,7 +21,7 @@ namespace webrtc {
// static
rtc::scoped_refptr<EncodeUsageResource> EncodeUsageResource::Create(
std::unique_ptr<OveruseFrameDetector> overuse_detector) {
- return new rtc::RefCountedObject<EncodeUsageResource>(
+ return rtc::make_ref_counted<EncodeUsageResource>(
std::move(overuse_detector));
}
diff --git a/video/adaptation/overuse_frame_detector_unittest.cc b/video/adaptation/overuse_frame_detector_unittest.cc
index d4bf910faa..37ad974a4c 100644
--- a/video/adaptation/overuse_frame_detector_unittest.cc
+++ b/video/adaptation/overuse_frame_detector_unittest.cc
@@ -455,6 +455,8 @@ TEST_F(OveruseFrameDetectorTest, RunOnTqNormalUsage) {
EXPECT_TRUE(event.Wait(10000));
}
+// TODO(crbug.com/webrtc/12846): investigate why the test fails on MAC bots.
+#if !defined(WEBRTC_MAC)
TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) {
const int kCapturerMaxFrameRate = 30;
const int kEncodeMaxFrameRate = 20; // Maximum fps the encoder can sustain.
@@ -490,6 +492,7 @@ TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) {
processing_time_us);
overuse_detector_->CheckForOveruse(observer_);
}
+#endif
TEST_F(OveruseFrameDetectorTest, RespectsMinFramerate) {
const int kMinFrameRate = 7; // Minimum fps allowed by current detector impl.
@@ -835,7 +838,7 @@ TEST_F(OveruseFrameDetectorTest2, ConvergesSlowly) {
// Should have started to approach correct load of 15%, but not very far.
EXPECT_LT(UsagePercent(), InitialUsage());
- EXPECT_GT(UsagePercent(), (InitialUsage() * 3 + 15) / 4);
+ EXPECT_GT(UsagePercent(), (InitialUsage() * 3 + 8) / 4);
// Run for roughly 10s more, should now be closer.
InsertAndSendFramesWithInterval(300, kFrameIntervalUs, kWidth, kHeight,
diff --git a/video/adaptation/pixel_limit_resource.cc b/video/adaptation/pixel_limit_resource.cc
index e1df141f70..789dac2c0a 100644
--- a/video/adaptation/pixel_limit_resource.cc
+++ b/video/adaptation/pixel_limit_resource.cc
@@ -28,8 +28,8 @@ constexpr TimeDelta kResourceUsageCheckIntervalMs = TimeDelta::Seconds(5);
rtc::scoped_refptr<PixelLimitResource> PixelLimitResource::Create(
TaskQueueBase* task_queue,
VideoStreamInputStateProvider* input_state_provider) {
- return new rtc::RefCountedObject<PixelLimitResource>(task_queue,
- input_state_provider);
+ return rtc::make_ref_counted<PixelLimitResource>(task_queue,
+ input_state_provider);
}
PixelLimitResource::PixelLimitResource(
diff --git a/video/adaptation/quality_scaler_resource.cc b/video/adaptation/quality_scaler_resource.cc
index c438488182..c455252d45 100644
--- a/video/adaptation/quality_scaler_resource.cc
+++ b/video/adaptation/quality_scaler_resource.cc
@@ -22,7 +22,7 @@ namespace webrtc {
// static
rtc::scoped_refptr<QualityScalerResource> QualityScalerResource::Create() {
- return new rtc::RefCountedObject<QualityScalerResource>();
+ return rtc::make_ref_counted<QualityScalerResource>();
}
QualityScalerResource::QualityScalerResource()
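
Several hunks in this change migrate `new rtc::RefCountedObject<T>(args)` to `rtc::make_ref_counted<T>(args)`, which hides the concrete ref-counted wrapper and perfect-forwards the constructor arguments. A minimal sketch of that factory shape (it mirrors the idea, not the actual rtc:: implementation or return type):

```cpp
#include <cstdio>
#include <utility>

template <typename T>
class RefCountedObject : public T {
 public:
  template <typename... Args>
  explicit RefCountedObject(Args&&... args) : T(std::forward<Args>(args)...) {}
  void AddRef() { ++ref_count_; }
  void Release() {
    if (--ref_count_ == 0) delete this;
  }

 private:
  int ref_count_ = 0;
};

// Factory: callers never name the wrapper type, only the payload type.
template <typename T, typename... Args>
RefCountedObject<T>* make_ref_counted(Args&&... args) {
  auto* obj = new RefCountedObject<T>(std::forward<Args>(args)...);
  obj->AddRef();  // Caller receives one reference.
  return obj;
}

struct Resource {
  explicit Resource(int id) : id(id) {}
  int id;
};

int main() {
  auto* r = make_ref_counted<Resource>(42);
  std::printf("resource id = %d\n", r->id);
  r->Release();  // Drops the last reference and deletes the object.
  return 0;
}
```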
diff --git a/video/adaptation/video_stream_encoder_resource_manager.cc b/video/adaptation/video_stream_encoder_resource_manager.cc
index 1c2e5839f2..2705bf9af7 100644
--- a/video/adaptation/video_stream_encoder_resource_manager.cc
+++ b/video/adaptation/video_stream_encoder_resource_manager.cc
@@ -32,6 +32,7 @@
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
#include "video/adaptation/quality_scaler_resource.h"
@@ -257,6 +258,9 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager(
quality_rampup_experiment_(
QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)),
encoder_settings_(absl::nullopt) {
+ TRACE_EVENT0(
+ "webrtc",
+ "VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager");
RTC_CHECK(degradation_preference_provider_);
RTC_CHECK(encoder_stats_observer_);
}
@@ -494,7 +498,7 @@ void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() {
quality_scaler_resource_, bandwidth,
DataRate::BitsPerSec(encoder_target_bitrate_bps_.value_or(0)),
DataRate::KilobitsPerSec(encoder_settings_->video_codec().maxBitrate),
- LastInputFrameSizeOrDefault());
+ LastFrameSizeOrDefault());
}
}
@@ -551,7 +555,7 @@ void VideoStreamEncoderResourceManager::ConfigureQualityScaler(
absl::optional<VideoEncoder::QpThresholds> thresholds =
balanced_settings_.GetQpThresholds(
GetVideoCodecTypeOrGeneric(encoder_settings_),
- LastInputFrameSizeOrDefault());
+ LastFrameSizeOrDefault());
if (thresholds) {
quality_scaler_resource_->SetQpThresholds(*thresholds);
}
@@ -591,10 +595,13 @@ CpuOveruseOptions VideoStreamEncoderResourceManager::GetCpuOveruseOptions()
return options;
}
-int VideoStreamEncoderResourceManager::LastInputFrameSizeOrDefault() const {
+int VideoStreamEncoderResourceManager::LastFrameSizeOrDefault() const {
RTC_DCHECK_RUN_ON(encoder_queue_);
- return input_state_provider_->InputState().frame_size_pixels().value_or(
- kDefaultInputPixelsWidth * kDefaultInputPixelsHeight);
+ return input_state_provider_->InputState()
+ .single_active_stream_pixels()
+ .value_or(
+ input_state_provider_->InputState().frame_size_pixels().value_or(
+ kDefaultInputPixelsWidth * kDefaultInputPixelsHeight));
}
void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated(
diff --git a/video/adaptation/video_stream_encoder_resource_manager.h b/video/adaptation/video_stream_encoder_resource_manager.h
index 2e7060c604..e7174d2344 100644
--- a/video/adaptation/video_stream_encoder_resource_manager.h
+++ b/video/adaptation/video_stream_encoder_resource_manager.h
@@ -66,7 +66,7 @@ extern const int kDefaultInputPixelsHeight;
// resources.
//
// The manager is also involved with various mitigations not part of the
-// ResourceAdaptationProcessor code such as the inital frame dropping.
+// ResourceAdaptationProcessor code such as the initial frame dropping.
class VideoStreamEncoderResourceManager
: public VideoSourceRestrictionsListener,
public ResourceLimitationsListener,
@@ -156,7 +156,7 @@ class VideoStreamEncoderResourceManager
rtc::scoped_refptr<Resource> resource) const;
CpuOveruseOptions GetCpuOveruseOptions() const;
- int LastInputFrameSizeOrDefault() const;
+ int LastFrameSizeOrDefault() const;
// Calculates an up-to-date value of the target frame rate and informs the
// |encode_usage_resource_| of the new value.
diff --git a/video/buffered_frame_decryptor_unittest.cc b/video/buffered_frame_decryptor_unittest.cc
index 5ede4feaab..2f8a183ba1 100644
--- a/video/buffered_frame_decryptor_unittest.cc
+++ b/video/buffered_frame_decryptor_unittest.cc
@@ -86,7 +86,7 @@ class BufferedFrameDecryptorTest : public ::testing::Test,
decrypted_frame_call_count_ = 0;
decryption_status_change_count_ = 0;
seq_num_ = 0;
- mock_frame_decryptor_ = new rtc::RefCountedObject<MockFrameDecryptor>();
+ mock_frame_decryptor_ = rtc::make_ref_counted<MockFrameDecryptor>();
buffered_frame_decryptor_ =
std::make_unique<BufferedFrameDecryptor>(this, this);
buffered_frame_decryptor_->SetFrameDecryptor(mock_frame_decryptor_.get());
diff --git a/video/encoder_rtcp_feedback.cc b/video/encoder_rtcp_feedback.cc
index b81ff6120f..17095a0a0c 100644
--- a/video/encoder_rtcp_feedback.cc
+++ b/video/encoder_rtcp_feedback.cc
@@ -10,6 +10,9 @@
#include "video/encoder_rtcp_feedback.h"
+#include <algorithm>
+#include <utility>
+
#include "absl/types/optional.h"
#include "api/video_codecs/video_encoder.h"
#include "rtc_base/checks.h"
@@ -21,47 +24,36 @@ namespace {
constexpr int kMinKeyframeSendIntervalMs = 300;
} // namespace
-EncoderRtcpFeedback::EncoderRtcpFeedback(Clock* clock,
- const std::vector<uint32_t>& ssrcs,
- VideoStreamEncoderInterface* encoder)
+EncoderRtcpFeedback::EncoderRtcpFeedback(
+ Clock* clock,
+ const std::vector<uint32_t>& ssrcs,
+ VideoStreamEncoderInterface* encoder,
+ std::function<std::vector<RtpSequenceNumberMap::Info>(
+ uint32_t ssrc,
+ const std::vector<uint16_t>& seq_nums)> get_packet_infos)
: clock_(clock),
ssrcs_(ssrcs),
- rtp_video_sender_(nullptr),
+ get_packet_infos_(std::move(get_packet_infos)),
video_stream_encoder_(encoder),
- time_last_intra_request_ms_(-1),
- min_keyframe_send_interval_ms_(
- KeyframeIntervalSettings::ParseFromFieldTrials()
- .MinKeyframeSendIntervalMs()
- .value_or(kMinKeyframeSendIntervalMs)) {
+ time_last_packet_delivery_queue_(Timestamp::Millis(0)),
+ min_keyframe_send_interval_(
+ TimeDelta::Millis(KeyframeIntervalSettings::ParseFromFieldTrials()
+ .MinKeyframeSendIntervalMs()
+ .value_or(kMinKeyframeSendIntervalMs))) {
RTC_DCHECK(!ssrcs.empty());
+ packet_delivery_queue_.Detach();
}
-void EncoderRtcpFeedback::SetRtpVideoSender(
- const RtpVideoSenderInterface* rtp_video_sender) {
- RTC_DCHECK(rtp_video_sender);
- RTC_DCHECK(!rtp_video_sender_);
- rtp_video_sender_ = rtp_video_sender;
-}
+// Called via Call::DeliverRtcp.
+void EncoderRtcpFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+ RTC_DCHECK_RUN_ON(&packet_delivery_queue_);
+ RTC_DCHECK(std::find(ssrcs_.begin(), ssrcs_.end(), ssrc) != ssrcs_.end());
-bool EncoderRtcpFeedback::HasSsrc(uint32_t ssrc) {
- for (uint32_t registered_ssrc : ssrcs_) {
- if (registered_ssrc == ssrc) {
- return true;
- }
- }
- return false;
-}
+ const Timestamp now = clock_->CurrentTime();
+ if (time_last_packet_delivery_queue_ + min_keyframe_send_interval_ > now)
+ return;
-void EncoderRtcpFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
- RTC_DCHECK(HasSsrc(ssrc));
- {
- int64_t now_ms = clock_->TimeInMilliseconds();
- MutexLock lock(&mutex_);
- if (time_last_intra_request_ms_ + min_keyframe_send_interval_ms_ > now_ms) {
- return;
- }
- time_last_intra_request_ms_ = now_ms;
- }
+ time_last_packet_delivery_queue_ = now;
// Always produce key frame for all streams.
video_stream_encoder_->SendKeyFrame();
@@ -72,12 +64,12 @@ void EncoderRtcpFeedback::OnReceivedLossNotification(
uint16_t seq_num_of_last_decodable,
uint16_t seq_num_of_last_received,
bool decodability_flag) {
- RTC_DCHECK(rtp_video_sender_) << "Object initialization incomplete.";
+ RTC_DCHECK(get_packet_infos_) << "Object initialization incomplete.";
const std::vector<uint16_t> seq_nums = {seq_num_of_last_decodable,
seq_num_of_last_received};
const std::vector<RtpSequenceNumberMap::Info> infos =
- rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums);
+ get_packet_infos_(ssrc, seq_nums);
if (infos.empty()) {
return;
}
diff --git a/video/encoder_rtcp_feedback.h b/video/encoder_rtcp_feedback.h
index 3bd1cb91f0..2aadcc34e7 100644
--- a/video/encoder_rtcp_feedback.h
+++ b/video/encoder_rtcp_feedback.h
@@ -10,12 +10,16 @@
#ifndef VIDEO_ENCODER_RTCP_FEEDBACK_H_
#define VIDEO_ENCODER_RTCP_FEEDBACK_H_
+#include <functional>
#include <vector>
+#include "api/sequence_checker.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
#include "api/video/video_stream_encoder_interface.h"
#include "call/rtp_video_sender_interface.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -27,13 +31,15 @@ class VideoStreamEncoderInterface;
class EncoderRtcpFeedback : public RtcpIntraFrameObserver,
public RtcpLossNotificationObserver {
public:
- EncoderRtcpFeedback(Clock* clock,
- const std::vector<uint32_t>& ssrcs,
- VideoStreamEncoderInterface* encoder);
+ EncoderRtcpFeedback(
+ Clock* clock,
+ const std::vector<uint32_t>& ssrcs,
+ VideoStreamEncoderInterface* encoder,
+ std::function<std::vector<RtpSequenceNumberMap::Info>(
+ uint32_t ssrc,
+ const std::vector<uint16_t>& seq_nums)> get_packet_infos);
~EncoderRtcpFeedback() override = default;
- void SetRtpVideoSender(const RtpVideoSenderInterface* rtp_video_sender);
-
void OnReceivedIntraFrameRequest(uint32_t ssrc) override;
// Implements RtcpLossNotificationObserver.
@@ -43,17 +49,19 @@ class EncoderRtcpFeedback : public RtcpIntraFrameObserver,
bool decodability_flag) override;
private:
- bool HasSsrc(uint32_t ssrc);
-
Clock* const clock_;
const std::vector<uint32_t> ssrcs_;
- const RtpVideoSenderInterface* rtp_video_sender_;
+ const std::function<std::vector<RtpSequenceNumberMap::Info>(
+ uint32_t ssrc,
+ const std::vector<uint16_t>& seq_nums)>
+ get_packet_infos_;
VideoStreamEncoderInterface* const video_stream_encoder_;
- Mutex mutex_;
- int64_t time_last_intra_request_ms_ RTC_GUARDED_BY(mutex_);
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_delivery_queue_;
+ Timestamp time_last_packet_delivery_queue_
+ RTC_GUARDED_BY(packet_delivery_queue_);
- const int min_keyframe_send_interval_ms_;
+ const TimeDelta min_keyframe_send_interval_;
};
} // namespace webrtc
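
The EncoderRtcpFeedback refactor above does two things: it injects a std::function to look up sent-packet infos instead of holding an RtpVideoSenderInterface pointer, and it rate-limits keyframe requests on a single sequence with a minimum interval instead of a mutex-guarded timestamp. A simplified, self-contained sketch of both ideas (std::chrono stands in for the WebRTC Clock/Timestamp types; names are illustrative):

```cpp
#include <chrono>
#include <cstdio>
#include <functional>
#include <utility>

class KeyFrameRequestFilter {
 public:
  KeyFrameRequestFilter(std::function<void()> send_key_frame,
                        std::chrono::milliseconds min_interval)
      : send_key_frame_(std::move(send_key_frame)),
        min_interval_(min_interval),
        last_request_(std::chrono::steady_clock::time_point::min()) {}

  // Called on one task queue only (the SequenceChecker in the real class
  // enforces this), so no mutex is needed around last_request_.
  void OnReceivedIntraFrameRequest() {
    auto now = std::chrono::steady_clock::now();
    if (last_request_ + min_interval_ > now) {
      return;  // Too soon since the last keyframe request; drop it.
    }
    last_request_ = now;
    send_key_frame_();
  }

 private:
  const std::function<void()> send_key_frame_;  // injected dependency
  const std::chrono::milliseconds min_interval_;
  std::chrono::steady_clock::time_point last_request_;
};

int main() {
  int key_frames = 0;
  KeyFrameRequestFilter filter([&] { ++key_frames; },
                               std::chrono::milliseconds(300));
  filter.OnReceivedIntraFrameRequest();  // Sent.
  filter.OnReceivedIntraFrameRequest();  // Suppressed (< 300 ms apart).
  std::printf("key frames sent: %d\n", key_frames);  // 1
  return 0;
}
```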
diff --git a/video/encoder_rtcp_feedback_unittest.cc b/video/encoder_rtcp_feedback_unittest.cc
index 81ac22b6c6..4cbb747e51 100644
--- a/video/encoder_rtcp_feedback_unittest.cc
+++ b/video/encoder_rtcp_feedback_unittest.cc
@@ -26,7 +26,8 @@ class VieKeyRequestTest : public ::testing::Test {
encoder_rtcp_feedback_(
&simulated_clock_,
std::vector<uint32_t>(1, VieKeyRequestTest::kSsrc),
- &encoder_) {}
+ &encoder_,
+ nullptr) {}
protected:
const uint32_t kSsrc = 1234;
diff --git a/video/end_to_end_tests/config_tests.cc b/video/end_to_end_tests/config_tests.cc
index bf63e2a51f..1bd897cb34 100644
--- a/video/end_to_end_tests/config_tests.cc
+++ b/video/end_to_end_tests/config_tests.cc
@@ -104,7 +104,7 @@ TEST_F(ConfigEndToEndTest, VerifyDefaultFlexfecReceiveConfigParameters) {
FlexfecReceiveStream::Config default_receive_config(&rtcp_send_transport);
EXPECT_EQ(-1, default_receive_config.payload_type)
<< "Enabling FlexFEC requires rtpmap: flexfec negotiation.";
- EXPECT_EQ(0U, default_receive_config.remote_ssrc)
+ EXPECT_EQ(0U, default_receive_config.rtp.remote_ssrc)
<< "Enabling FlexFEC requires ssrc-group: FEC-FR negotiation.";
EXPECT_TRUE(default_receive_config.protected_media_ssrcs.empty())
<< "Enabling FlexFEC requires ssrc-group: FEC-FR negotiation.";
diff --git a/video/end_to_end_tests/fec_tests.cc b/video/end_to_end_tests/fec_tests.cc
index 0d4ddac5a4..77ad9eb666 100644
--- a/video/end_to_end_tests/fec_tests.cc
+++ b/video/end_to_end_tests/fec_tests.cc
@@ -314,7 +314,7 @@ class FlexfecRenderObserver : public test::EndToEndTest,
void ModifyFlexfecConfigs(
std::vector<FlexfecReceiveStream::Config>* receive_configs) override {
- (*receive_configs)[0].local_ssrc = kFlexfecLocalSsrc;
+ (*receive_configs)[0].rtp.local_ssrc = kFlexfecLocalSsrc;
}
void PerformTest() override {
diff --git a/video/end_to_end_tests/network_state_tests.cc b/video/end_to_end_tests/network_state_tests.cc
index 9abde3bb32..4e0e86f987 100644
--- a/video/end_to_end_tests/network_state_tests.cc
+++ b/video/end_to_end_tests/network_state_tests.cc
@@ -10,13 +10,19 @@
#include <memory>
+#include "api/media_types.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/task_queue/task_queue_factory.h"
#include "api/test/simulated_network.h"
#include "api/video_codecs/video_encoder.h"
#include "call/fake_network_pipe.h"
#include "call/simulated_network.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "rtc_base/location.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "system_wrappers/include/sleep.h"
#include "test/call_test.h"
#include "test/fake_encoder.h"
@@ -166,7 +172,10 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
explicit NetworkStateTest(TaskQueueBase* task_queue)
: EndToEndTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
- task_queue_(task_queue),
+ e2e_test_task_queue_(task_queue),
+ task_queue_(CreateDefaultTaskQueueFactory()->CreateTaskQueue(
+ "NetworkStateTest",
+ TaskQueueFactory::Priority::NORMAL)),
sender_call_(nullptr),
receiver_call_(nullptr),
encoder_factory_(this),
@@ -219,26 +228,36 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
send_config->encoder_settings.encoder_factory = &encoder_factory_;
}
+ void SignalChannelNetworkState(Call* call,
+ MediaType media_type,
+ NetworkState network_state) {
+ SendTask(RTC_FROM_HERE, e2e_test_task_queue_,
+ [call, media_type, network_state] {
+ call->SignalChannelNetworkState(media_type, network_state);
+ });
+ }
+
void PerformTest() override {
EXPECT_TRUE(encoded_frames_.Wait(kDefaultTimeoutMs))
<< "No frames received by the encoder.";
- SendTask(RTC_FROM_HERE, task_queue_, [this]() {
+ SendTask(RTC_FROM_HERE, task_queue_.get(), [this]() {
// Wait for packets from both sender/receiver.
WaitForPacketsOrSilence(false, false);
// Sender-side network down for audio; there should be no effect on
// video
- sender_call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkDown);
+ SignalChannelNetworkState(sender_call_, MediaType::AUDIO, kNetworkDown);
+
WaitForPacketsOrSilence(false, false);
// Receiver-side network down for audio; no change expected
- receiver_call_->SignalChannelNetworkState(MediaType::AUDIO,
- kNetworkDown);
+ SignalChannelNetworkState(receiver_call_, MediaType::AUDIO,
+ kNetworkDown);
WaitForPacketsOrSilence(false, false);
// Sender-side network down.
- sender_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkDown);
+ SignalChannelNetworkState(sender_call_, MediaType::VIDEO, kNetworkDown);
{
MutexLock lock(&test_mutex_);
// After network goes down we shouldn't be encoding more frames.
@@ -248,14 +267,14 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
WaitForPacketsOrSilence(true, false);
// Receiver-side network down.
- receiver_call_->SignalChannelNetworkState(MediaType::VIDEO,
- kNetworkDown);
+ SignalChannelNetworkState(receiver_call_, MediaType::VIDEO,
+ kNetworkDown);
WaitForPacketsOrSilence(true, true);
// Network up for audio for both sides; video is still not expected to
// start
- sender_call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
- receiver_call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
+ SignalChannelNetworkState(sender_call_, MediaType::AUDIO, kNetworkUp);
+ SignalChannelNetworkState(receiver_call_, MediaType::AUDIO, kNetworkUp);
WaitForPacketsOrSilence(true, true);
// Network back up again for both.
@@ -265,8 +284,8 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
// network.
sender_state_ = kNetworkUp;
}
- sender_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
- receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+ SignalChannelNetworkState(sender_call_, MediaType::VIDEO, kNetworkUp);
+ SignalChannelNetworkState(receiver_call_, MediaType::VIDEO, kNetworkUp);
WaitForPacketsOrSilence(false, false);
// TODO(skvlad): add tests to verify that the audio streams are stopped
@@ -340,7 +359,8 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
}
}
- TaskQueueBase* const task_queue_;
+ TaskQueueBase* const e2e_test_task_queue_;
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
Mutex test_mutex_;
rtc::Event encoded_frames_;
rtc::Event packet_event_;
diff --git a/video/end_to_end_tests/resolution_bitrate_limits_tests.cc b/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
index 16eee8cb99..d46c40cd1e 100644
--- a/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
+++ b/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
@@ -27,8 +27,9 @@ void SetEncoderSpecific(VideoEncoderConfig* encoder_config,
if (type == kVideoCodecVP9) {
VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings();
vp9.numberOfSpatialLayers = num_spatial_layers;
- encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9);
+ encoder_config->encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9);
}
}
@@ -119,7 +120,7 @@ class InitEncodeTest : public test::EndToEndTest,
const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_);
encoder_config->codec_type = codec_type;
encoder_config->video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
payload_name_, /*max qp*/ 0, /*screencast*/ false,
/*screenshare enabled*/ false);
encoder_config->max_bitrate_bps = -1;
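
This hunk is one of many in the patch that replace "new rtc::RefCountedObject<T>(...)" with the rtc::make_ref_counted<T>(...) factory, so call sites no longer name the wrapper type or write a bare new. A toy standalone sketch of that idiom (ToyRefCounted, make_toy_ref_counted and Vp9Settings are invented stand-ins; the real helper's return type and thread-safe counting are not modeled here):

// Toy sketch of the factory idiom, not the real rtc::RefCountedObject or
// rtc::make_ref_counted; single-threaded reference counting only.
#include <cstdio>
#include <utility>

template <typename T>
class ToyRefCounted : public T {
 public:
  template <typename... Args>
  explicit ToyRefCounted(Args&&... args) : T(std::forward<Args>(args)...) {}
  void AddRef() { ++ref_count_; }
  void Release() {
    if (--ref_count_ == 0)
      delete this;
  }

 private:
  int ref_count_ = 0;
};

// Mirrors the shape of rtc::make_ref_counted<T>(...): the caller never spells
// out the wrapper type and never touches `new` directly.
template <typename T, typename... Args>
ToyRefCounted<T>* make_toy_ref_counted(Args&&... args) {
  auto* object = new ToyRefCounted<T>(std::forward<Args>(args)...);
  object->AddRef();
  return object;
}

struct Vp9Settings {
  explicit Vp9Settings(int layers) : spatial_layers(layers) {}
  int spatial_layers;
};

int main() {
  auto* settings = make_toy_ref_counted<Vp9Settings>(3);
  std::printf("%d spatial layers\n", settings->spatial_layers);
  settings->Release();  // Drops the last reference and frees the object.
  return 0;
}
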
diff --git a/video/end_to_end_tests/rtp_rtcp_tests.cc b/video/end_to_end_tests/rtp_rtcp_tests.cc
index 76018027d6..a698328dad 100644
--- a/video/end_to_end_tests/rtp_rtcp_tests.cc
+++ b/video/end_to_end_tests/rtp_rtcp_tests.cc
@@ -316,7 +316,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation(
}
GetVideoEncoderConfig()->video_stream_factory =
- new rtc::RefCountedObject<VideoStreamFactory>();
+ rtc::make_ref_counted<VideoStreamFactory>();
// Use the same total bitrates when sending a single stream to avoid
// lowering the bitrate estimate and requiring a subsequent rampup.
one_stream = GetVideoEncoderConfig()->Copy();
@@ -537,12 +537,13 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) {
receive_transport.get());
flexfec_receive_config.payload_type =
GetVideoSendConfig()->rtp.flexfec.payload_type;
- flexfec_receive_config.remote_ssrc = GetVideoSendConfig()->rtp.flexfec.ssrc;
+ flexfec_receive_config.rtp.remote_ssrc =
+ GetVideoSendConfig()->rtp.flexfec.ssrc;
flexfec_receive_config.protected_media_ssrcs =
GetVideoSendConfig()->rtp.flexfec.protected_media_ssrcs;
- flexfec_receive_config.local_ssrc = kReceiverLocalVideoSsrc;
- flexfec_receive_config.transport_cc = true;
- flexfec_receive_config.rtp_header_extensions.emplace_back(
+ flexfec_receive_config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
+ flexfec_receive_config.rtp.transport_cc = true;
+ flexfec_receive_config.rtp.extensions.emplace_back(
RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberExtensionId);
flexfec_receive_configs_.push_back(flexfec_receive_config);
diff --git a/video/end_to_end_tests/ssrc_tests.cc b/video/end_to_end_tests/ssrc_tests.cc
index cedae3934d..0c26311e92 100644
--- a/video/end_to_end_tests/ssrc_tests.cc
+++ b/video/end_to_end_tests/ssrc_tests.cc
@@ -132,13 +132,15 @@ void SsrcEndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
public:
SendsSetSsrcs(const uint32_t* ssrcs,
size_t num_ssrcs,
- bool send_single_ssrc_first)
+ bool send_single_ssrc_first,
+ TaskQueueBase* task_queue)
: EndToEndTest(kDefaultTimeoutMs),
num_ssrcs_(num_ssrcs),
send_single_ssrc_first_(send_single_ssrc_first),
ssrcs_to_observe_(num_ssrcs),
expect_single_ssrc_(send_single_ssrc_first),
- send_stream_(nullptr) {
+ send_stream_(nullptr),
+ task_queue_(task_queue) {
for (size_t i = 0; i < num_ssrcs; ++i)
valid_ssrcs_[ssrcs[i]] = true;
}
@@ -200,8 +202,10 @@ void SsrcEndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
if (send_single_ssrc_first_) {
// Set full simulcast and continue with the rest of the SSRCs.
- send_stream_->ReconfigureVideoEncoder(
- std::move(video_encoder_config_all_streams_));
+ SendTask(RTC_FROM_HERE, task_queue_, [&]() {
+ send_stream_->ReconfigureVideoEncoder(
+ std::move(video_encoder_config_all_streams_));
+ });
EXPECT_TRUE(Wait()) << "Timed out while waiting on additional SSRCs.";
}
}
@@ -218,7 +222,8 @@ void SsrcEndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
VideoSendStream* send_stream_;
VideoEncoderConfig video_encoder_config_all_streams_;
- } test(kVideoSendSsrcs, num_ssrcs, send_single_ssrc_first);
+ TaskQueueBase* task_queue_;
+ } test(kVideoSendSsrcs, num_ssrcs, send_single_ssrc_first, task_queue());
RunBaseTest(&test);
}
diff --git a/video/end_to_end_tests/stats_tests.cc b/video/end_to_end_tests/stats_tests.cc
index ae0532b9a3..605f40e8f3 100644
--- a/video/end_to_end_tests/stats_tests.cc
+++ b/video/end_to_end_tests/stats_tests.cc
@@ -17,7 +17,7 @@
#include "api/test/video/function_video_encoder_factory.h"
#include "call/fake_network_pipe.h"
#include "call/simulated_network.h"
-#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/synchronization/mutex.h"
@@ -71,12 +71,11 @@ TEST_F(StatsEndToEndTest, GetStats) {
Action OnSendRtp(const uint8_t* packet, size_t length) override {
// Drop every 25th packet => 4% loss.
static const int kPacketLossFrac = 25;
- RTPHeader header;
- RtpUtility::RtpHeaderParser parser(packet, length);
- if (parser.Parse(&header) &&
- expected_send_ssrcs_.find(header.ssrc) !=
+ RtpPacket header;
+ if (header.Parse(packet, length) &&
+ expected_send_ssrcs_.find(header.Ssrc()) !=
expected_send_ssrcs_.end() &&
- header.sequenceNumber % kPacketLossFrac == 0) {
+ header.SequenceNumber() % kPacketLossFrac == 0) {
return DROP_PACKET;
}
check_stats_event_.Set();
@@ -154,7 +153,10 @@ TEST_F(StatsEndToEndTest, GetStats) {
bool CheckSendStats() {
RTC_DCHECK(send_stream_);
- VideoSendStream::Stats stats = send_stream_->GetStats();
+
+ VideoSendStream::Stats stats;
+ SendTask(RTC_FROM_HERE, task_queue_,
+ [&]() { stats = send_stream_->GetStats(); });
size_t expected_num_streams =
kNumSimulcastStreams + expected_send_ssrcs_.size();
@@ -179,9 +181,7 @@ TEST_F(StatsEndToEndTest, GetStats) {
const VideoSendStream::StreamStats& stream_stats = kv.second;
send_stats_filled_[CompoundKey("StatisticsUpdated", kv.first)] |=
- stream_stats.rtcp_stats.packets_lost != 0 ||
- stream_stats.rtcp_stats.extended_highest_sequence_number != 0 ||
- stream_stats.rtcp_stats.fraction_lost != 0;
+ stream_stats.report_block_data.has_value();
send_stats_filled_[CompoundKey("DataCountersUpdated", kv.first)] |=
stream_stats.rtp_stats.fec.packets != 0 ||
@@ -612,11 +612,9 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) {
Action OnSendRtp(const uint8_t* packet, size_t length) override {
MutexLock lock(&mutex_);
if (++sent_rtp_packets_ == kPacketNumberToDrop) {
- std::unique_ptr<RtpHeaderParser> parser(
- RtpHeaderParser::CreateForTest());
- RTPHeader header;
- EXPECT_TRUE(parser->Parse(packet, length, &header));
- dropped_rtp_packet_ = header.sequenceNumber;
+ RtpPacket header;
+ EXPECT_TRUE(header.Parse(packet, length));
+ dropped_rtp_packet_ = header.SequenceNumber();
return DROP_PACKET;
}
task_queue_->PostTask(std::unique_ptr<QueuedTask>(this));
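
The stats tests above now read the SSRC and sequence number via RtpPacket::Parse() instead of the removed RtpUtility::RtpHeaderParser. Both fields sit at fixed offsets in the 12-byte RTP fixed header, which a standalone sketch can read directly (illustrative only; no extension, padding or CSRC handling, and ParseMinimalRtpHeader is a made-up name):

// Illustrative only; the test above uses webrtc::RtpPacket instead. Reads the
// sequence number (bytes 2-3) and SSRC (bytes 8-11) of the RTP fixed header.
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct MinimalRtpHeader {
  uint16_t sequence_number;
  uint32_t ssrc;
};

bool ParseMinimalRtpHeader(const uint8_t* packet,
                           size_t length,
                           MinimalRtpHeader* out) {
  if (length < 12 || (packet[0] >> 6) != 2)  // Too short, or not RTP version 2.
    return false;
  out->sequence_number = static_cast<uint16_t>(packet[2] << 8 | packet[3]);
  out->ssrc = static_cast<uint32_t>(packet[8]) << 24 |
              static_cast<uint32_t>(packet[9]) << 16 |
              static_cast<uint32_t>(packet[10]) << 8 | packet[11];
  return true;
}

int main() {
  // Version 2, no padding/extension/CSRCs, payload type 96, sequence number 25,
  // SSRC 0x12345678.
  const uint8_t packet[12] = {0x80, 0x60, 0x00, 0x19, 0x00, 0x00,
                              0x00, 0x00, 0x12, 0x34, 0x56, 0x78};
  MinimalRtpHeader header;
  if (ParseMinimalRtpHeader(packet, sizeof(packet), &header)) {
    // A filter like OnSendRtp() above would drop this packet: 25 % 25 == 0.
    std::printf("seq=%u ssrc=0x%08x\n",
                static_cast<unsigned>(header.sequence_number),
                static_cast<unsigned>(header.ssrc));
  }
  return 0;
}
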
diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc
index 0e604cd765..8a0f3b3867 100644
--- a/video/frame_encode_metadata_writer.cc
+++ b/video/frame_encode_metadata_writer.cc
@@ -217,7 +217,7 @@ void FrameEncodeMetadataWriter::UpdateBitstream(
buffer, encoded_image->ColorSpace());
encoded_image->SetEncodedData(
- new rtc::RefCountedObject<EncodedImageBufferWrapper>(
+ rtc::make_ref_counted<EncodedImageBufferWrapper>(
std::move(modified_buffer)));
}
diff --git a/video/g3doc/adaptation.md b/video/g3doc/adaptation.md
index e2dc337928..084a0fd3aa 100644
--- a/video/g3doc/adaptation.md
+++ b/video/g3doc/adaptation.md
@@ -1,5 +1,4 @@
<?% config.freshness.reviewed = '2021-04-13' %?>
-
<?% config.freshness.owner = 'eshr' %?>
# Video Adaptation
diff --git a/video/quality_scaling_tests.cc b/video/quality_scaling_tests.cc
index 0da9b69042..9837517b78 100644
--- a/video/quality_scaling_tests.cc
+++ b/video/quality_scaling_tests.cc
@@ -22,11 +22,12 @@
namespace webrtc {
namespace {
-constexpr int kWidth = 1280;
-constexpr int kHeight = 720;
+constexpr int kInitialWidth = 1280;
+constexpr int kInitialHeight = 720;
constexpr int kLowStartBps = 100000;
constexpr int kHighStartBps = 1000000;
-constexpr size_t kTimeoutMs = 10000; // Some tests are expected to time out.
+constexpr int kDefaultVgaMinStartBps = 500000; // From video_stream_encoder.cc
+constexpr int kTimeoutMs = 10000; // Some tests are expected to time out.
void SetEncoderSpecific(VideoEncoderConfig* encoder_config,
VideoCodecType type,
@@ -35,26 +36,22 @@ void SetEncoderSpecific(VideoEncoderConfig* encoder_config,
if (type == kVideoCodecVP8) {
VideoCodecVP8 vp8 = VideoEncoder::GetDefaultVp8Settings();
vp8.automaticResizeOn = automatic_resize;
- encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
- VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8);
+ encoder_config->encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+ vp8);
} else if (type == kVideoCodecVP9) {
VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings();
vp9.automaticResizeOn = automatic_resize;
vp9.numberOfSpatialLayers = num_spatial_layers;
- encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9);
+ encoder_config->encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9);
}
}
} // namespace
class QualityScalingTest : public test::CallTest {
protected:
- void RunTest(const std::string& payload_name,
- const std::vector<bool>& streams_active,
- int start_bps,
- bool automatic_resize,
- bool expect_adaptation);
-
const std::string kPrefix = "WebRTC-Video-QualityScaling/Enabled-";
const std::string kEnd = ",0,0,0.9995,0.9999,1/";
const absl::optional<VideoEncoder::ResolutionBitrateLimits>
@@ -69,153 +66,254 @@ class QualityScalingTest : public test::CallTest {
640 * 360);
};
-void QualityScalingTest::RunTest(const std::string& payload_name,
- const std::vector<bool>& streams_active,
- int start_bps,
- bool automatic_resize,
- bool expect_adaptation) {
- class ScalingObserver
- : public test::SendTest,
- public test::FrameGeneratorCapturer::SinkWantsObserver {
- public:
- ScalingObserver(const std::string& payload_name,
- const std::vector<bool>& streams_active,
- int start_bps,
- bool automatic_resize,
- bool expect_adaptation)
- : SendTest(expect_adaptation ? kDefaultTimeoutMs : kTimeoutMs),
- encoder_factory_([](const SdpVideoFormat& format)
- -> std::unique_ptr<VideoEncoder> {
- if (format.name == "VP8")
- return VP8Encoder::Create();
- if (format.name == "VP9")
- return VP9Encoder::Create();
- if (format.name == "H264")
- return H264Encoder::Create(cricket::VideoCodec("H264"));
- RTC_NOTREACHED() << format.name;
- return nullptr;
- }),
- payload_name_(payload_name),
- streams_active_(streams_active),
- start_bps_(start_bps),
- automatic_resize_(automatic_resize),
- expect_adaptation_(expect_adaptation) {}
-
- private:
- void OnFrameGeneratorCapturerCreated(
- test::FrameGeneratorCapturer* frame_generator_capturer) override {
- frame_generator_capturer->SetSinkWantsObserver(this);
- // Set initial resolution.
- frame_generator_capturer->ChangeResolution(kWidth, kHeight);
- }
+class ScalingObserver : public test::SendTest {
+ protected:
+ ScalingObserver(const std::string& payload_name,
+ const std::vector<bool>& streams_active,
+ int start_bps,
+ bool automatic_resize,
+ bool expect_scaling)
+ : SendTest(expect_scaling ? kTimeoutMs * 4 : kTimeoutMs),
+ encoder_factory_(
+ [](const SdpVideoFormat& format) -> std::unique_ptr<VideoEncoder> {
+ if (format.name == "VP8")
+ return VP8Encoder::Create();
+ if (format.name == "VP9")
+ return VP9Encoder::Create();
+ if (format.name == "H264")
+ return H264Encoder::Create(cricket::VideoCodec("H264"));
+ RTC_NOTREACHED() << format.name;
+ return nullptr;
+ }),
+ payload_name_(payload_name),
+ streams_active_(streams_active),
+ start_bps_(start_bps),
+ automatic_resize_(automatic_resize),
+ expect_scaling_(expect_scaling) {}
+
+ DegradationPreference degradation_preference_ =
+ DegradationPreference::MAINTAIN_FRAMERATE;
+
+ private:
+ void ModifySenderBitrateConfig(BitrateConstraints* bitrate_config) override {
+ bitrate_config->start_bitrate_bps = start_bps_;
+ }
- // Called when FrameGeneratorCapturer::AddOrUpdateSink is called.
- void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
- const rtc::VideoSinkWants& wants) override {
- if (wants.max_pixel_count < kWidth * kHeight)
- observation_complete_.Set();
+ void ModifyVideoDegradationPreference(
+ DegradationPreference* degradation_preference) override {
+ *degradation_preference = degradation_preference_;
+ }
+
+ size_t GetNumVideoStreams() const override {
+ return (payload_name_ == "VP9") ? 1 : streams_active_.size();
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->rtp.payload_name = payload_name_;
+ send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType;
+ encoder_config->video_format.name = payload_name_;
+ const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_);
+ encoder_config->codec_type = codec_type;
+ encoder_config->max_bitrate_bps =
+ std::max(start_bps_, encoder_config->max_bitrate_bps);
+ if (payload_name_ == "VP9") {
+ // Simulcast layers indicates which spatial layers are active.
+ encoder_config->simulcast_layers.resize(streams_active_.size());
+ encoder_config->simulcast_layers[0].max_bitrate_bps =
+ encoder_config->max_bitrate_bps;
}
- void ModifySenderBitrateConfig(
- BitrateConstraints* bitrate_config) override {
- bitrate_config->start_bitrate_bps = start_bps_;
+ double scale_factor = 1.0;
+ for (int i = streams_active_.size() - 1; i >= 0; --i) {
+ VideoStream& stream = encoder_config->simulcast_layers[i];
+ stream.active = streams_active_[i];
+ stream.scale_resolution_down_by = scale_factor;
+ scale_factor *= (payload_name_ == "VP9") ? 1.0 : 2.0;
}
+ SetEncoderSpecific(encoder_config, codec_type, automatic_resize_,
+ streams_active_.size());
+ }
- size_t GetNumVideoStreams() const override {
- return (payload_name_ == "VP9") ? 1 : streams_active_.size();
- }
+ void PerformTest() override { EXPECT_EQ(expect_scaling_, Wait()); }
- void ModifyVideoConfigs(
- VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
- send_config->encoder_settings.encoder_factory = &encoder_factory_;
- send_config->rtp.payload_name = payload_name_;
- send_config->rtp.payload_type = kVideoSendPayloadType;
- encoder_config->video_format.name = payload_name_;
- const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_);
- encoder_config->codec_type = codec_type;
- encoder_config->max_bitrate_bps =
- std::max(start_bps_, encoder_config->max_bitrate_bps);
- if (payload_name_ == "VP9") {
- // Simulcast layers indicates which spatial layers are active.
- encoder_config->simulcast_layers.resize(streams_active_.size());
- encoder_config->simulcast_layers[0].max_bitrate_bps =
- encoder_config->max_bitrate_bps;
- }
- double scale_factor = 1.0;
- for (int i = streams_active_.size() - 1; i >= 0; --i) {
- VideoStream& stream = encoder_config->simulcast_layers[i];
- stream.active = streams_active_[i];
- stream.scale_resolution_down_by = scale_factor;
- scale_factor *= (payload_name_ == "VP9") ? 1.0 : 2.0;
- }
- SetEncoderSpecific(encoder_config, codec_type, automatic_resize_,
- streams_active_.size());
- }
+ test::FunctionVideoEncoderFactory encoder_factory_;
+ const std::string payload_name_;
+ const std::vector<bool> streams_active_;
+ const int start_bps_;
+ const bool automatic_resize_;
+ const bool expect_scaling_;
+};
- void PerformTest() override {
- EXPECT_EQ(expect_adaptation_, Wait())
- << "Timed out while waiting for a scale down.";
- }
+class DownscalingObserver
+ : public ScalingObserver,
+ public test::FrameGeneratorCapturer::SinkWantsObserver {
+ public:
+ DownscalingObserver(const std::string& payload_name,
+ const std::vector<bool>& streams_active,
+ int start_bps,
+ bool automatic_resize,
+ bool expect_downscale)
+ : ScalingObserver(payload_name,
+ streams_active,
+ start_bps,
+ automatic_resize,
+ expect_downscale) {}
+
+ private:
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
+ frame_generator_capturer->SetSinkWantsObserver(this);
+ frame_generator_capturer->ChangeResolution(kInitialWidth, kInitialHeight);
+ }
- test::FunctionVideoEncoderFactory encoder_factory_;
- const std::string payload_name_;
- const std::vector<bool> streams_active_;
- const int start_bps_;
- const bool automatic_resize_;
- const bool expect_adaptation_;
- } test(payload_name, streams_active, start_bps, automatic_resize,
- expect_adaptation);
+ void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override {
+ if (wants.max_pixel_count < kInitialWidth * kInitialHeight)
+ observation_complete_.Set();
+ }
+};
- RunBaseTest(&test);
-}
+class UpscalingObserver
+ : public ScalingObserver,
+ public test::FrameGeneratorCapturer::SinkWantsObserver {
+ public:
+ UpscalingObserver(const std::string& payload_name,
+ const std::vector<bool>& streams_active,
+ int start_bps,
+ bool automatic_resize,
+ bool expect_upscale)
+ : ScalingObserver(payload_name,
+ streams_active,
+ start_bps,
+ automatic_resize,
+ expect_upscale) {}
+
+ void SetDegradationPreference(DegradationPreference preference) {
+ degradation_preference_ = preference;
+ }
+
+ private:
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
+ frame_generator_capturer->SetSinkWantsObserver(this);
+ frame_generator_capturer->ChangeResolution(kInitialWidth, kInitialHeight);
+ }
+
+ void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override {
+ if (wants.max_pixel_count > last_wants_.max_pixel_count) {
+ if (wants.max_pixel_count == std::numeric_limits<int>::max())
+ observation_complete_.Set();
+ }
+ last_wants_ = wants;
+ }
+
+ rtc::VideoSinkWants last_wants_;
+};
TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp8) {
// qp_low:1, qp_high:1 -> kHighQp
test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
- RunTest("VP8", {true}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP8", /*streams_active=*/{true}, kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp8) {
// qp_low:1, qp_high:1 -> kHighQp
test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
- RunTest("VP8", {true}, kHighStartBps,
- /*automatic_resize=*/false, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{true}, kHighStartBps,
+ /*automatic_resize=*/false,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForNormalQp_Vp8) {
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {true}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{true}, kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp8) {
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {true}, kLowStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP8", /*streams_active=*/{true}, kLowStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
+}
+
+TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrateAndThenUp) {
+ // qp_low:127, qp_high:127 -> kLowQp
+ test::ScopedFieldTrials field_trials(
+ kPrefix + "127,127,0,0,0,0" + kEnd +
+ "WebRTC-Video-BalancedDegradationSettings/"
+ "pixels:230400|921600,fps:20|30,kbps:300|500/"); // should not affect
+
+ UpscalingObserver test("VP8", /*streams_active=*/{true},
+ kDefaultVgaMinStartBps - 1,
+ /*automatic_resize=*/true, /*expect_upscale=*/true);
+ RunBaseTest(&test);
+}
+
+TEST_F(QualityScalingTest, AdaptsDownAndThenUpWithBalanced) {
+ // qp_low:127, qp_high:127 -> kLowQp
+ test::ScopedFieldTrials field_trials(
+ kPrefix + "127,127,0,0,0,0" + kEnd +
+ "WebRTC-Video-BalancedDegradationSettings/"
+ "pixels:230400|921600,fps:20|30,kbps:300|499/");
+
+ UpscalingObserver test("VP8", /*streams_active=*/{true},
+ kDefaultVgaMinStartBps - 1,
+ /*automatic_resize=*/true, /*expect_upscale=*/true);
+ test.SetDegradationPreference(DegradationPreference::BALANCED);
+ RunBaseTest(&test);
+}
+
+TEST_F(QualityScalingTest, AdaptsDownButNotUpWithBalancedIfBitrateNotEnough) {
+ // qp_low:127, qp_high:127 -> kLowQp
+ test::ScopedFieldTrials field_trials(
+ kPrefix + "127,127,0,0,0,0" + kEnd +
+ "WebRTC-Video-BalancedDegradationSettings/"
+ "pixels:230400|921600,fps:20|30,kbps:300|500/");
+
+ UpscalingObserver test("VP8", /*streams_active=*/{true},
+ kDefaultVgaMinStartBps - 1,
+ /*automatic_resize=*/true, /*expect_upscale=*/false);
+ test.SetDegradationPreference(DegradationPreference::BALANCED);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_Simulcast) {
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {true, true}, kLowStartBps,
- /*automatic_resize=*/false, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{true, true}, kLowStartBps,
+ /*automatic_resize=*/false,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, AdaptsDownForHighQp_HighestStreamActive_Vp8) {
// qp_low:1, qp_high:1 -> kHighQp
test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
- RunTest("VP8", {false, false, true}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP8", /*streams_active=*/{false, false, true},
+ kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest,
@@ -223,18 +321,32 @@ TEST_F(QualityScalingTest,
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {false, false, true},
- kSinglecastLimits720pVp8->min_start_bitrate_bps - 1,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP8", /*streams_active=*/{false, false, true},
+ kSinglecastLimits720pVp8->min_start_bitrate_bps - 1,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
+}
+
+TEST_F(QualityScalingTest, AdaptsDownButNotUpWithMinStartBitrateLimit) {
+ // qp_low:127, qp_high:127 -> kLowQp
+ test::ScopedFieldTrials field_trials(kPrefix + "127,127,0,0,0,0" + kEnd);
+
+ UpscalingObserver test("VP8", /*streams_active=*/{false, true},
+ kSinglecastLimits720pVp8->min_start_bitrate_bps - 1,
+ /*automatic_resize=*/true, /*expect_upscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfBitrateEnough_Vp8) {
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {false, false, true},
- kSinglecastLimits720pVp8->min_start_bitrate_bps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{false, false, true},
+ kSinglecastLimits720pVp8->min_start_bitrate_bps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest,
@@ -244,9 +356,11 @@ TEST_F(QualityScalingTest,
kPrefix + "1,127,0,0,0,0" + kEnd +
"WebRTC-DefaultBitrateLimitsKillSwitch/Enabled/");
- RunTest("VP8", {false, false, true},
- kSinglecastLimits720pVp8->min_start_bitrate_bps - 1,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{false, false, true},
+ kSinglecastLimits720pVp8->min_start_bitrate_bps - 1,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest,
@@ -254,16 +368,22 @@ TEST_F(QualityScalingTest,
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {true}, kSinglecastLimits720pVp8->min_start_bitrate_bps - 1,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{true},
+ kSinglecastLimits720pVp8->min_start_bitrate_bps - 1,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp8) {
// qp_low:1, qp_high:1 -> kHighQp
test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
- RunTest("VP8", {true, false, false}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{true, false, false},
+ kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest,
@@ -271,16 +391,21 @@ TEST_F(QualityScalingTest,
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {true, false, false}, kLowStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{true, false, false},
+ kLowStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfScalingOff_Vp8) {
// qp_low:1, qp_high:127 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd);
- RunTest("VP8", {true}, kLowStartBps,
- /*automatic_resize=*/false, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP8", /*streams_active=*/{true}, kLowStartBps,
+ /*automatic_resize=*/false,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp9) {
@@ -288,8 +413,10 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp9) {
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Enabled/");
- RunTest("VP9", {true}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP9", /*streams_active=*/{true}, kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp9) {
@@ -297,8 +424,10 @@ TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp9) {
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Disabled/");
- RunTest("VP9", {true}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP9", /*streams_active=*/{true}, kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp9) {
@@ -306,8 +435,10 @@ TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp9) {
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Enabled/");
- RunTest("VP9", {true}, kLowStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP9", /*streams_active=*/{true}, kLowStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp9) {
@@ -315,8 +446,11 @@ TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp9) {
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Enabled/");
- RunTest("VP9", {true, false, false}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP9", /*streams_active=*/{true, false, false},
+ kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest,
@@ -325,8 +459,11 @@ TEST_F(QualityScalingTest,
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Enabled/");
- RunTest("VP9", {true, false, false}, kLowStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP9", /*streams_active=*/{true, false, false},
+ kLowStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, AdaptsDownForHighQp_MiddleStreamActive_Vp9) {
@@ -334,8 +471,11 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_MiddleStreamActive_Vp9) {
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Enabled/");
- RunTest("VP9", {false, true, false}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP9", /*streams_active=*/{false, true, false},
+ kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest,
@@ -344,9 +484,11 @@ TEST_F(QualityScalingTest,
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Enabled/");
- RunTest("VP9", {false, true, false},
- kSinglecastLimits360pVp9->min_start_bitrate_bps - 1,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("VP9", /*streams_active=*/{false, true, false},
+ kSinglecastLimits360pVp9->min_start_bitrate_bps - 1,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfBitrateEnough_Vp9) {
@@ -354,9 +496,11 @@ TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfBitrateEnough_Vp9) {
test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd +
"WebRTC-VP9QualityScaler/Enabled/");
- RunTest("VP9", {false, true, false},
- kSinglecastLimits360pVp9->min_start_bitrate_bps,
- /*automatic_resize=*/true, /*expect_adaptation=*/false);
+ DownscalingObserver test("VP9", /*streams_active=*/{false, true, false},
+ kSinglecastLimits360pVp9->min_start_bitrate_bps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/false);
+ RunBaseTest(&test);
}
#if defined(WEBRTC_USE_H264)
@@ -364,16 +508,20 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_H264) {
// qp_low:1, qp_high:1 -> kHighQp
test::ScopedFieldTrials field_trials(kPrefix + "0,0,0,0,1,1" + kEnd);
- RunTest("H264", {true}, kHighStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("H264", /*streams_active=*/{true}, kHighStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_H264) {
// qp_low:1, qp_high:51 -> kNormalQp
test::ScopedFieldTrials field_trials(kPrefix + "0,0,0,0,1,51" + kEnd);
- RunTest("H264", {true}, kLowStartBps,
- /*automatic_resize=*/true, /*expect_adaptation=*/true);
+ DownscalingObserver test("H264", /*streams_active=*/{true}, kLowStartBps,
+ /*automatic_resize=*/true,
+ /*expect_downscale=*/true);
+ RunBaseTest(&test);
}
#endif // defined(WEBRTC_USE_H264)
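
In the rewritten observers above, adaptation is detected purely through rtc::VideoSinkWants: a max_pixel_count below the initial 1280x720 frame (921600 pixels) means the encoder asked for a downscale, and a later jump back to the int maximum means the resolution restriction was lifted again. A hedged standalone sketch of those two checks (IllustrativeSinkWants is a stand-in struct, not the WebRTC type):

// Illustrative only; mirrors the checks in DownscalingObserver and
// UpscalingObserver above with a stand-in struct instead of rtc::VideoSinkWants.
#include <cstdio>
#include <limits>

struct IllustrativeSinkWants {
  int max_pixel_count = std::numeric_limits<int>::max();
};

constexpr int kInitialWidth = 1280;
constexpr int kInitialHeight = 720;

bool DownscaleRequested(const IllustrativeSinkWants& wants) {
  // Fewer pixels than the initial 1280x720 frame means a scale-down request.
  return wants.max_pixel_count < kInitialWidth * kInitialHeight;
}

bool RestrictionsLifted(const IllustrativeSinkWants& wants_before,
                        const IllustrativeSinkWants& wants_now) {
  // The upscaling observer waits for the limit to grow back to "unlimited".
  return wants_now.max_pixel_count > wants_before.max_pixel_count &&
         wants_now.max_pixel_count == std::numeric_limits<int>::max();
}

int main() {
  IllustrativeSinkWants limited;
  limited.max_pixel_count = 640 * 360;  // 230400, the lower balanced setting.
  IllustrativeSinkWants unlimited;
  std::printf("down=%d up=%d\n", DownscaleRequested(limited) ? 1 : 0,
              RestrictionsLifted(limited, unlimited) ? 1 : 0);  // down=1 up=1
  return 0;
}
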
diff --git a/video/receive_statistics_proxy2.cc b/video/receive_statistics_proxy2.cc
index 3cce3c8ea4..af3cd221e7 100644
--- a/video/receive_statistics_proxy2.cc
+++ b/video/receive_statistics_proxy2.cc
@@ -946,26 +946,21 @@ void ReceiveStatisticsProxy::OnRenderedFrame(
void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
int64_t sync_offset_ms,
double estimated_freq_khz) {
- RTC_DCHECK_RUN_ON(&incoming_render_queue_);
- int64_t now_ms = clock_->TimeInMilliseconds();
- worker_thread_->PostTask(
- ToQueuedTask(task_safety_, [video_playout_ntp_ms, sync_offset_ms,
- estimated_freq_khz, now_ms, this]() {
- RTC_DCHECK_RUN_ON(&main_thread_);
- sync_offset_counter_.Add(std::abs(sync_offset_ms));
- stats_.sync_offset_ms = sync_offset_ms;
- last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
- last_estimated_playout_time_ms_ = now_ms;
-
- const double kMaxFreqKhz = 10000.0;
- int offset_khz = kMaxFreqKhz;
- // Should not be zero or negative. If so, report max.
- if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0)
- offset_khz =
- static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5);
-
- freq_offset_counter_.Add(offset_khz);
- }));
+ RTC_DCHECK_RUN_ON(&main_thread_);
+
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ sync_offset_counter_.Add(std::abs(sync_offset_ms));
+ stats_.sync_offset_ms = sync_offset_ms;
+ last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
+ last_estimated_playout_time_ms_ = now_ms;
+
+ const double kMaxFreqKhz = 10000.0;
+ int offset_khz = kMaxFreqKhz;
+ // Should not be zero or negative. If so, report max.
+ if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0)
+ offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5);
+
+ freq_offset_counter_.Add(offset_khz);
}
void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
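
The relocated OnSyncOffsetUpdated() above keeps the same frequency-offset bookkeeping: the estimated RTP clock rate in kHz is compared against the nominal 90 kHz video clock, the absolute difference is rounded to the nearest integer, and an unusable estimate (at or below 0, or at or above 10000 kHz) is reported as the 10000 maximum. A standalone restatement of just that arithmetic, with a few worked values:

// Standalone restatement of the offset_khz computation in OnSyncOffsetUpdated()
// above; kMaxFreqKhz and the 90 kHz nominal video clock come from that code.
#include <cmath>
#include <cstdio>

int FreqOffsetKhz(double estimated_freq_khz) {
  const double kMaxFreqKhz = 10000.0;
  int offset_khz = kMaxFreqKhz;  // Reported when the estimate is unusable.
  if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0)
    offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5);
  return offset_khz;
}

int main() {
  std::printf("%d\n", FreqOffsetKhz(90.3));     // 0     (|90.3 - 90| rounds to 0)
  std::printf("%d\n", FreqOffsetKhz(92.7));     // 3     (|92.7 - 90| rounds to 3)
  std::printf("%d\n", FreqOffsetKhz(-1.0));     // 10000 (invalid estimate)
  std::printf("%d\n", FreqOffsetKhz(20000.0));  // 10000 (invalid estimate)
  return 0;
}
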
diff --git a/video/report_block_stats.cc b/video/report_block_stats.cc
index e3e95f9aed..bf60364682 100644
--- a/video/report_block_stats.cc
+++ b/video/report_block_stats.cc
@@ -31,16 +31,13 @@ ReportBlockStats::ReportBlockStats()
ReportBlockStats::~ReportBlockStats() {}
-void ReportBlockStats::Store(uint32_t ssrc, const RtcpStatistics& rtcp_stats) {
+void ReportBlockStats::Store(uint32_t ssrc,
+ int packets_lost,
+ uint32_t extended_highest_sequence_number) {
Report report;
- report.packets_lost = rtcp_stats.packets_lost;
- report.extended_highest_sequence_number =
- rtcp_stats.extended_highest_sequence_number;
- StoreAndAddPacketIncrement(ssrc, report);
-}
+ report.packets_lost = packets_lost;
+ report.extended_highest_sequence_number = extended_highest_sequence_number;
-void ReportBlockStats::StoreAndAddPacketIncrement(uint32_t ssrc,
- const Report& report) {
// Get diff with previous report block.
const auto prev_report = prev_reports_.find(ssrc);
if (prev_report != prev_reports_.end()) {
diff --git a/video/report_block_stats.h b/video/report_block_stats.h
index de4a079032..1d1140295c 100644
--- a/video/report_block_stats.h
+++ b/video/report_block_stats.h
@@ -15,8 +15,6 @@
#include <map>
-#include "modules/rtp_rtcp/include/rtcp_statistics.h"
-
namespace webrtc {
// TODO(nisse): Usefulness of this class is somewhat unclear. The inputs are
@@ -32,7 +30,9 @@ class ReportBlockStats {
~ReportBlockStats();
// Updates stats and stores report block.
- void Store(uint32_t ssrc, const RtcpStatistics& rtcp_stats);
+ void Store(uint32_t ssrc,
+ int packets_lost,
+ uint32_t extended_highest_sequence_number);
// Returns the total fraction of lost packets (or -1 if less than two report
// blocks have been stored).
@@ -45,10 +45,6 @@ class ReportBlockStats {
int32_t packets_lost;
};
- // Updates the total number of packets/lost packets.
- // Stores the report.
- void StoreAndAddPacketIncrement(uint32_t ssrc, const Report& report);
-
// The total number of packets/lost packets.
uint32_t num_sequence_numbers_;
uint32_t num_lost_sequence_numbers_;
diff --git a/video/report_block_stats_unittest.cc b/video/report_block_stats_unittest.cc
index 0b0230941f..bd66e571a0 100644
--- a/video/report_block_stats_unittest.cc
+++ b/video/report_block_stats_unittest.cc
@@ -13,65 +13,51 @@
#include "test/gtest.h"
namespace webrtc {
+namespace {
-class ReportBlockStatsTest : public ::testing::Test {
- protected:
- ReportBlockStatsTest() {
- // kSsrc1: report 1-3.
- stats1_1_.packets_lost = 10;
- stats1_1_.extended_highest_sequence_number = 24000;
- stats1_2_.packets_lost = 15;
- stats1_2_.extended_highest_sequence_number = 24100;
- stats1_3_.packets_lost = 50;
- stats1_3_.extended_highest_sequence_number = 24200;
- // kSsrc2: report 1,2.
- stats2_1_.packets_lost = 111;
- stats2_1_.extended_highest_sequence_number = 8500;
- stats2_2_.packets_lost = 136;
- stats2_2_.extended_highest_sequence_number = 8800;
- }
+constexpr uint32_t kSsrc1 = 123;
+constexpr uint32_t kSsrc2 = 234;
- const uint32_t kSsrc1 = 123;
- const uint32_t kSsrc2 = 234;
- RtcpStatistics stats1_1_;
- RtcpStatistics stats1_2_;
- RtcpStatistics stats1_3_;
- RtcpStatistics stats2_1_;
- RtcpStatistics stats2_2_;
-};
-
-TEST_F(ReportBlockStatsTest, StoreAndGetFractionLost) {
+TEST(ReportBlockStatsTest, StoreAndGetFractionLost) {
ReportBlockStats stats;
EXPECT_EQ(-1, stats.FractionLostInPercent());
// First report.
- stats.Store(kSsrc1, stats1_1_);
+ stats.Store(kSsrc1, /*packets_lost=*/10,
+ /*extended_highest_sequence_number=*/24'000);
EXPECT_EQ(-1, stats.FractionLostInPercent());
// fl: 100 * (15-10) / (24100-24000) = 5%
- stats.Store(kSsrc1, stats1_2_);
+ stats.Store(kSsrc1, /*packets_lost=*/15,
+ /*extended_highest_sequence_number=*/24'100);
EXPECT_EQ(5, stats.FractionLostInPercent());
// fl: 100 * (50-10) / (24200-24000) = 20%
- stats.Store(kSsrc1, stats1_3_);
+ stats.Store(kSsrc1, /*packets_lost=*/50,
+ /*extended_highest_sequence_number=*/24'200);
EXPECT_EQ(20, stats.FractionLostInPercent());
}
-TEST_F(ReportBlockStatsTest, StoreAndGetFractionLost_TwoSsrcs) {
+TEST(ReportBlockStatsTest, StoreAndGetFractionLost_TwoSsrcs) {
ReportBlockStats stats;
EXPECT_EQ(-1, stats.FractionLostInPercent());
// First report.
- stats.Store(kSsrc1, stats1_1_);
+ stats.Store(kSsrc1, /*packets_lost=*/10,
+ /*extended_highest_sequence_number=*/24'000);
EXPECT_EQ(-1, stats.FractionLostInPercent());
// fl: 100 * (15-10) / (24100-24000) = 5%
- stats.Store(kSsrc1, stats1_2_);
+ stats.Store(kSsrc1, /*packets_lost=*/15,
+ /*extended_highest_sequence_number=*/24'100);
EXPECT_EQ(5, stats.FractionLostInPercent());
// First report, kSsrc2.
- stats.Store(kSsrc2, stats2_1_);
+ stats.Store(kSsrc2, /*packets_lost=*/111,
+ /*extended_highest_sequence_number=*/8'500);
EXPECT_EQ(5, stats.FractionLostInPercent());
// fl: 100 * ((15-10) + (136-111)) / ((24100-24000) + (8800-8500)) = 7%
- stats.Store(kSsrc2, stats2_2_);
+ stats.Store(kSsrc2, /*packets_lost=*/136,
+ /*extended_highest_sequence_number=*/8'800);
EXPECT_EQ(7, stats.FractionLostInPercent());
}
+} // namespace
} // namespace webrtc
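
The updated tests above spell out the formula behind FractionLostInPercent(): for one SSRC it is 100 * (delta cumulative packets lost) / (delta extended highest sequence number) between stored report blocks, and with several SSRCs the per-SSRC deltas are summed before dividing. A standalone sketch of that accumulation, reproducing the -1/5/7 values from the two-SSRC test (IllustrativeReportBlockStats is not the WebRTC class, just the same bookkeeping):

// Standalone sketch of the FractionLostInPercent() arithmetic exercised above.
#include <cstdint>
#include <cstdio>
#include <map>

class IllustrativeReportBlockStats {
 public:
  void Store(uint32_t ssrc, int packets_lost, uint32_t ext_highest_seq_num) {
    auto it = prev_reports_.find(ssrc);
    if (it != prev_reports_.end()) {
      // Accumulate the deltas against the previous report for this SSRC.
      num_sequence_numbers_ +=
          ext_highest_seq_num - it->second.ext_highest_seq_num;
      num_lost_ += packets_lost - it->second.packets_lost;
    }
    prev_reports_[ssrc] = {packets_lost, ext_highest_seq_num};
  }

  int FractionLostInPercent() const {
    if (num_sequence_numbers_ == 0)
      return -1;  // No SSRC has two reports yet.
    return 100 * num_lost_ / static_cast<int>(num_sequence_numbers_);
  }

 private:
  struct Report {
    int packets_lost;
    uint32_t ext_highest_seq_num;
  };
  std::map<uint32_t, Report> prev_reports_;
  uint32_t num_sequence_numbers_ = 0;
  int num_lost_ = 0;
};

int main() {
  IllustrativeReportBlockStats stats;
  stats.Store(123, /*packets_lost=*/10, /*ext_highest_seq_num=*/24000);
  std::printf("%d\n", stats.FractionLostInPercent());  // -1 (one report only)
  stats.Store(123, 15, 24100);
  std::printf("%d\n", stats.FractionLostInPercent());  // 5 = 100 * 5 / 100
  stats.Store(234, 111, 8500);
  std::printf("%d\n", stats.FractionLostInPercent());  // 5 (no delta for 234 yet)
  stats.Store(234, 136, 8800);
  std::printf("%d\n", stats.FractionLostInPercent());  // 7 = 100 * 30 / 400
  return 0;
}
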
diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc
index bdca0397fa..a0520cd350 100644
--- a/video/rtp_video_stream_receiver.cc
+++ b/video/rtp_video_stream_receiver.cc
@@ -272,9 +272,10 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver(
// directly with |rtp_rtcp_|.
rtcp_feedback_buffer_(this, nack_sender, this),
packet_buffer_(kPacketBufferStartSize, PacketBufferMaxSize()),
+ reference_finder_(std::make_unique<RtpFrameReferenceFinder>()),
has_received_frame_(false),
frames_decryptable_(false),
- absolute_capture_time_receiver_(clock) {
+ absolute_capture_time_interpolator_(clock) {
constexpr bool remb_candidate = true;
if (packet_router_)
packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate);
@@ -321,8 +322,6 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver(
process_thread_->RegisterModule(nack_module_.get(), RTC_FROM_HERE);
}
- reference_finder_ = std::make_unique<RtpFrameReferenceFinder>(this);
-
// Only construct the encrypted receiver if frame encryption is enabled.
if (config_.crypto_options.sframe.require_frame_encryption) {
buffered_frame_decryptor_ =
@@ -333,10 +332,10 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver(
}
if (frame_transformer) {
- frame_transformer_delegate_ = new rtc::RefCountedObject<
- RtpVideoStreamReceiverFrameTransformerDelegate>(
- this, std::move(frame_transformer), rtc::Thread::Current(),
- config_.rtp.remote_ssrc);
+ frame_transformer_delegate_ =
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ this, std::move(frame_transformer), rtc::Thread::Current(),
+ config_.rtp.remote_ssrc);
frame_transformer_delegate_->Init();
}
}
@@ -506,18 +505,9 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData(
const RtpPacketReceived& rtp_packet,
const RTPVideoHeader& video) {
RTC_DCHECK_RUN_ON(&worker_task_checker_);
- auto packet = std::make_unique<video_coding::PacketBuffer::Packet>(
- rtp_packet, video, clock_->TimeInMilliseconds());
-
- // Try to extrapolate absolute capture time if it is missing.
- packet->packet_info.set_absolute_capture_time(
- absolute_capture_time_receiver_.OnReceivePacket(
- AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(),
- packet->packet_info.csrcs()),
- packet->packet_info.rtp_timestamp(),
- // Assume frequency is the same one for all video frames.
- kVideoPayloadTypeFrequency,
- packet->packet_info.absolute_capture_time()));
+
+ auto packet =
+ std::make_unique<video_coding::PacketBuffer::Packet>(rtp_packet, video);
RTPVideoHeader& video_header = packet->video_header;
video_header.rotation = kVideoRotation_0;
@@ -648,6 +638,29 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData(
video_coding::PacketBuffer::InsertResult insert_result;
{
MutexLock lock(&packet_buffer_lock_);
+ int64_t unwrapped_rtp_seq_num =
+ rtp_seq_num_unwrapper_.Unwrap(rtp_packet.SequenceNumber());
+ auto& packet_info =
+ packet_infos_
+ .emplace(
+ unwrapped_rtp_seq_num,
+ RtpPacketInfo(
+ rtp_packet.Ssrc(), rtp_packet.Csrcs(),
+ rtp_packet.Timestamp(),
+ /*audio_level=*/absl::nullopt,
+ rtp_packet.GetExtension<AbsoluteCaptureTimeExtension>(),
+ /*receive_time_ms=*/clock_->TimeInMilliseconds()))
+ .first->second;
+
+ // Try to extrapolate absolute capture time if it is missing.
+ packet_info.set_absolute_capture_time(
+ absolute_capture_time_interpolator_.OnReceivePacket(
+ AbsoluteCaptureTimeInterpolator::GetSource(packet_info.ssrc(),
+ packet_info.csrcs()),
+ packet_info.rtp_timestamp(),
+ // Assume frequency is the same one for all video frames.
+ kVideoPayloadTypeFrequency, packet_info.absolute_capture_time()));
+
insert_result = packet_buffer_.InsertPacket(std::move(packet));
}
OnInsertedPacket(std::move(insert_result));
@@ -737,69 +750,83 @@ bool RtpVideoStreamReceiver::IsDecryptable() const {
void RtpVideoStreamReceiver::OnInsertedPacket(
video_coding::PacketBuffer::InsertResult result) {
- video_coding::PacketBuffer::Packet* first_packet = nullptr;
- int max_nack_count;
- int64_t min_recv_time;
- int64_t max_recv_time;
- std::vector<rtc::ArrayView<const uint8_t>> payloads;
- RtpPacketInfos::vector_type packet_infos;
-
- bool frame_boundary = true;
- for (auto& packet : result.packets) {
- // PacketBuffer promisses frame boundaries are correctly set on each
- // packet. Document that assumption with the DCHECKs.
- RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame());
- if (packet->is_first_packet_in_frame()) {
- first_packet = packet.get();
- max_nack_count = packet->times_nacked;
- min_recv_time = packet->packet_info.receive_time_ms();
- max_recv_time = packet->packet_info.receive_time_ms();
- payloads.clear();
- packet_infos.clear();
- } else {
- max_nack_count = std::max(max_nack_count, packet->times_nacked);
- min_recv_time =
- std::min(min_recv_time, packet->packet_info.receive_time_ms());
- max_recv_time =
- std::max(max_recv_time, packet->packet_info.receive_time_ms());
- }
- payloads.emplace_back(packet->video_payload);
- packet_infos.push_back(packet->packet_info);
-
- frame_boundary = packet->is_last_packet_in_frame();
- if (packet->is_last_packet_in_frame()) {
- auto depacketizer_it = payload_type_map_.find(first_packet->payload_type);
- RTC_CHECK(depacketizer_it != payload_type_map_.end());
-
- rtc::scoped_refptr<EncodedImageBuffer> bitstream =
- depacketizer_it->second->AssembleFrame(payloads);
- if (!bitstream) {
- // Failed to assemble a frame. Discard and continue.
- continue;
+ std::vector<std::unique_ptr<RtpFrameObject>> assembled_frames;
+ {
+ MutexLock lock(&packet_buffer_lock_);
+ video_coding::PacketBuffer::Packet* first_packet = nullptr;
+ int max_nack_count;
+ int64_t min_recv_time;
+ int64_t max_recv_time;
+ std::vector<rtc::ArrayView<const uint8_t>> payloads;
+ RtpPacketInfos::vector_type packet_infos;
+
+ bool frame_boundary = true;
+ for (auto& packet : result.packets) {
+ // PacketBuffer promises frame boundaries are correctly set on each
+ // packet. Document that assumption with the DCHECKs.
+ RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame());
+ int64_t unwrapped_rtp_seq_num =
+ rtp_seq_num_unwrapper_.Unwrap(packet->seq_num);
+ RTC_DCHECK(packet_infos_.count(unwrapped_rtp_seq_num) > 0);
+ RtpPacketInfo& packet_info = packet_infos_[unwrapped_rtp_seq_num];
+ if (packet->is_first_packet_in_frame()) {
+ first_packet = packet.get();
+ max_nack_count = packet->times_nacked;
+ min_recv_time = packet_info.receive_time().ms();
+ max_recv_time = packet_info.receive_time().ms();
+ payloads.clear();
+ packet_infos.clear();
+ } else {
+ max_nack_count = std::max(max_nack_count, packet->times_nacked);
+ min_recv_time =
+ std::min(min_recv_time, packet_info.receive_time().ms());
+ max_recv_time =
+ std::max(max_recv_time, packet_info.receive_time().ms());
+ }
+ payloads.emplace_back(packet->video_payload);
+ packet_infos.push_back(packet_info);
+
+ frame_boundary = packet->is_last_packet_in_frame();
+ if (packet->is_last_packet_in_frame()) {
+ auto depacketizer_it =
+ payload_type_map_.find(first_packet->payload_type);
+ RTC_CHECK(depacketizer_it != payload_type_map_.end());
+
+ rtc::scoped_refptr<EncodedImageBuffer> bitstream =
+ depacketizer_it->second->AssembleFrame(payloads);
+ if (!bitstream) {
+ // Failed to assemble a frame. Discard and continue.
+ continue;
+ }
+
+ const video_coding::PacketBuffer::Packet& last_packet = *packet;
+ assembled_frames.push_back(std::make_unique<RtpFrameObject>(
+ first_packet->seq_num, //
+ last_packet.seq_num, //
+ last_packet.marker_bit, //
+ max_nack_count, //
+ min_recv_time, //
+ max_recv_time, //
+ first_packet->timestamp, //
+ ntp_estimator_.Estimate(first_packet->timestamp), //
+ last_packet.video_header.video_timing, //
+ first_packet->payload_type, //
+ first_packet->codec(), //
+ last_packet.video_header.rotation, //
+ last_packet.video_header.content_type, //
+ first_packet->video_header, //
+ last_packet.video_header.color_space, //
+ RtpPacketInfos(std::move(packet_infos)), //
+ std::move(bitstream)));
}
+ }
+ RTC_DCHECK(frame_boundary);
- const video_coding::PacketBuffer::Packet& last_packet = *packet;
- OnAssembledFrame(std::make_unique<RtpFrameObject>(
- first_packet->seq_num, //
- last_packet.seq_num, //
- last_packet.marker_bit, //
- max_nack_count, //
- min_recv_time, //
- max_recv_time, //
- first_packet->timestamp, //
- ntp_estimator_.Estimate(first_packet->timestamp), //
- last_packet.video_header.video_timing, //
- first_packet->payload_type, //
- first_packet->codec(), //
- last_packet.video_header.rotation, //
- last_packet.video_header.content_type, //
- first_packet->video_header, //
- last_packet.video_header.color_space, //
- RtpPacketInfos(std::move(packet_infos)), //
- std::move(bitstream)));
+ if (result.buffer_cleared) {
+ packet_infos_.clear();
}
- }
- RTC_DCHECK(frame_boundary);
+ } // packet_buffer_lock_
+
if (result.buffer_cleared) {
{
MutexLock lock(&sync_info_lock_);
@@ -809,6 +836,10 @@ void RtpVideoStreamReceiver::OnInsertedPacket(
}
RequestKeyFrame();
}
+
+ for (auto& frame : assembled_frames) {
+ OnAssembledFrame(std::move(frame));
+ }
}
void RtpVideoStreamReceiver::OnAssembledFrame(
@@ -851,10 +882,9 @@ void RtpVideoStreamReceiver::OnAssembledFrame(
if (frame_is_newer) {
// When we reset the |reference_finder_| we don't want new picture ids
// to overlap with old picture ids. To ensure that doesn't happen we
- // start from the |last_completed_picture_id_| and add an offset in case
- // of reordering.
+ // start from the |last_completed_picture_id_| and add an offset in
+ // case of reordering.
reference_finder_ = std::make_unique<RtpFrameReferenceFinder>(
- this,
last_completed_picture_id_ + std::numeric_limits<uint16_t>::max());
current_codec_ = frame->codec_type();
} else {
@@ -876,26 +906,30 @@ void RtpVideoStreamReceiver::OnAssembledFrame(
} else if (frame_transformer_delegate_) {
frame_transformer_delegate_->TransformFrame(std::move(frame));
} else {
- reference_finder_->ManageFrame(std::move(frame));
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
}
}
-void RtpVideoStreamReceiver::OnCompleteFrame(
- std::unique_ptr<EncodedFrame> frame) {
+void RtpVideoStreamReceiver::OnCompleteFrames(
+ RtpFrameReferenceFinder::ReturnVector frames) {
{
MutexLock lock(&last_seq_num_mutex_);
- RtpFrameObject* rtp_frame = static_cast<RtpFrameObject*>(frame.get());
- last_seq_num_for_pic_id_[rtp_frame->Id()] = rtp_frame->last_seq_num();
+ for (const auto& frame : frames) {
+ RtpFrameObject* rtp_frame = static_cast<RtpFrameObject*>(frame.get());
+ last_seq_num_for_pic_id_[rtp_frame->Id()] = rtp_frame->last_seq_num();
+ }
+ }
+ for (auto& frame : frames) {
+ last_completed_picture_id_ =
+ std::max(last_completed_picture_id_, frame->Id());
+ complete_frame_callback_->OnCompleteFrame(std::move(frame));
}
- last_completed_picture_id_ =
- std::max(last_completed_picture_id_, frame->Id());
- complete_frame_callback_->OnCompleteFrame(std::move(frame));
}
void RtpVideoStreamReceiver::OnDecryptedFrame(
std::unique_ptr<RtpFrameObject> frame) {
MutexLock lock(&reference_finder_lock_);
- reference_finder_->ManageFrame(std::move(frame));
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
}
void RtpVideoStreamReceiver::OnDecryptionStatusChange(
@@ -919,7 +953,7 @@ void RtpVideoStreamReceiver::SetDepacketizerToDecoderFrameTransformer(
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
RTC_DCHECK_RUN_ON(&network_tc_);
frame_transformer_delegate_ =
- new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
this, std::move(frame_transformer), rtc::Thread::Current(),
config_.rtp.remote_ssrc);
frame_transformer_delegate_->Init();
@@ -971,7 +1005,7 @@ void RtpVideoStreamReceiver::RemoveSecondarySink(
void RtpVideoStreamReceiver::ManageFrame(
std::unique_ptr<RtpFrameObject> frame) {
MutexLock lock(&reference_finder_lock_);
- reference_finder_->ManageFrame(std::move(frame));
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
}
void RtpVideoStreamReceiver::ReceivePacket(const RtpPacketReceived& packet) {
@@ -1026,14 +1060,16 @@ void RtpVideoStreamReceiver::ParseAndHandleEncapsulatingHeader(
void RtpVideoStreamReceiver::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
{
MutexLock lock(&reference_finder_lock_);
- reference_finder_->PaddingReceived(seq_num);
+ OnCompleteFrames(reference_finder_->PaddingReceived(seq_num));
}
+
video_coding::PacketBuffer::InsertResult insert_result;
{
MutexLock lock(&packet_buffer_lock_);
insert_result = packet_buffer_.InsertPadding(seq_num);
}
OnInsertedPacket(std::move(insert_result));
+
if (nack_module_) {
nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false,
/* is_recovered = */ false);
@@ -1080,7 +1116,7 @@ bool RtpVideoStreamReceiver::DeliverRtcp(const uint8_t* rtcp_packet,
absl::optional<int64_t> remote_to_local_clock_offset_ms =
ntp_estimator_.EstimateRemoteToLocalClockOffsetMs();
if (remote_to_local_clock_offset_ms.has_value()) {
- absolute_capture_time_receiver_.SetRemoteToLocalClockOffset(
+ capture_clock_offset_updater_.SetRemoteToLocalClockOffset(
Int64MsToQ32x32(*remote_to_local_clock_offset_ms));
}
}
@@ -1118,6 +1154,9 @@ void RtpVideoStreamReceiver::FrameDecoded(int64_t picture_id) {
{
MutexLock lock(&packet_buffer_lock_);
packet_buffer_.ClearTo(seq_num);
+ int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(seq_num);
+ packet_infos_.erase(packet_infos_.begin(),
+ packet_infos_.upper_bound(unwrapped_rtp_seq_num));
}
MutexLock lock(&reference_finder_lock_);
reference_finder_->ClearTo(seq_num);
@@ -1211,7 +1250,7 @@ void RtpVideoStreamReceiver::UpdatePacketReceiveTimestamps(
<< " with payload type: " << static_cast<int>(packet.PayloadType())
<< ", timestamp: " << packet.Timestamp()
<< ", sequence number: " << packet.SequenceNumber()
- << ", arrival time: " << packet.arrival_time_ms();
+ << ", arrival time: " << ToString(packet.arrival_time());
int32_t time_offset;
if (packet.GetExtension<TransmissionOffset>(&time_offset)) {
ss << ", toffset: " << time_offset;
diff --git a/video/rtp_video_stream_receiver.h b/video/rtp_video_stream_receiver.h
index 090488c4a8..b3d62f34a4 100644
--- a/video/rtp_video_stream_receiver.h
+++ b/video/rtp_video_stream_receiver.h
@@ -33,7 +33,8 @@
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h"
+#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
@@ -68,11 +69,18 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
public RecoveredPacketReceiver,
public RtpPacketSinkInterface,
public KeyFrameRequestSender,
- public OnCompleteFrameCallback,
public OnDecryptedFrameCallback,
public OnDecryptionStatusChangeCallback,
public RtpVideoFrameReceiver {
public:
+ // A complete frame is a frame for which all packets have been received and
+ // all references are known.
+ class OnCompleteFrameCallback {
+ public:
+ virtual ~OnCompleteFrameCallback() {}
+ virtual void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) = 0;
+ };
+
// DEPRECATED due to dependency on ReceiveStatisticsProxy.
RtpVideoStreamReceiver(
Clock* clock,
@@ -172,8 +180,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
// Don't use, still experimental.
void RequestPacketRetransmit(const std::vector<uint16_t>& sequence_numbers);
- // Implements OnCompleteFrameCallback.
- void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override;
+ void OnCompleteFrames(RtpFrameReferenceFinder::ReturnVector frames);
// Implements OnDecryptedFrameCallback.
void OnDecryptedFrame(std::unique_ptr<RtpFrameObject> frame) override;
@@ -303,7 +310,8 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
ParseGenericDependenciesResult ParseGenericDependenciesExtension(
const RtpPacketReceived& rtp_packet,
RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_);
- void OnAssembledFrame(std::unique_ptr<RtpFrameObject> frame);
+ void OnAssembledFrame(std::unique_ptr<RtpFrameObject> frame)
+ RTC_LOCKS_EXCLUDED(packet_buffer_lock_);
void UpdatePacketReceiveTimestamps(const RtpPacketReceived& packet,
bool is_keyframe)
RTC_RUN_ON(worker_task_checker_);
@@ -400,13 +408,21 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
std::atomic<bool> frames_decryptable_;
absl::optional<ColorSpace> last_color_space_;
- AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_
+ AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ CaptureClockOffsetUpdater capture_clock_offset_updater_
RTC_GUARDED_BY(worker_task_checker_);
int64_t last_completed_picture_id_ = 0;
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate>
frame_transformer_delegate_;
+
+ SeqNumUnwrapper<uint16_t> rtp_seq_num_unwrapper_
+ RTC_GUARDED_BY(packet_buffer_lock_);
+ std::map<int64_t, RtpPacketInfo> packet_infos_
+ RTC_GUARDED_BY(packet_buffer_lock_);
};
} // namespace webrtc
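
The RTC_LOCKS_EXCLUDED(packet_buffer_lock_) annotation added to OnAssembledFrame() matches the restructured OnInsertedPacket() earlier in this patch: frames are assembled into a local vector while packet_buffer_lock_ is held, and the completion callbacks only fire after the lock has been released. A small standalone sketch of that collect-then-dispatch pattern (FrameCollector and the string "frames" are invented for illustration):

// Illustrative only: build results under a mutex, invoke callbacks after
// releasing it, so a callback can never re-enter code that needs the lock.
#include <functional>
#include <mutex>
#include <string>
#include <vector>

class FrameCollector {
 public:
  explicit FrameCollector(std::function<void(const std::string&)> on_frame)
      : on_frame_(std::move(on_frame)) {}

  void OnInsertedPackets(const std::vector<std::string>& packets) {
    std::vector<std::string> assembled;  // Local; filled while locked.
    {
      std::lock_guard<std::mutex> lock(buffer_lock_);
      for (const std::string& packet : packets)
        assembled.push_back("frame:" + packet);  // Stand-in for assembly work.
    }
    // The lock is released here, before any callback runs.
    for (const std::string& frame : assembled)
      on_frame_(frame);
  }

 private:
  std::mutex buffer_lock_;
  std::function<void(const std::string&)> on_frame_;
};

int main() {
  FrameCollector collector([](const std::string&) {
    // Free to call back into FrameCollector here without deadlocking.
  });
  collector.OnInsertedPackets({"a", "b"});
  return 0;
}
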
diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc
index c96dbed673..1dcdf72e60 100644
--- a/video/rtp_video_stream_receiver2.cc
+++ b/video/rtp_video_stream_receiver2.cc
@@ -49,7 +49,6 @@
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
#include "system_wrappers/include/ntp_time.h"
-#include "video/receive_statistics_proxy2.h"
namespace webrtc {
@@ -134,17 +133,18 @@ RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RtcpFeedbackBuffer(
RTC_DCHECK(key_frame_request_sender_);
RTC_DCHECK(nack_sender_);
RTC_DCHECK(loss_notification_sender_);
+ packet_sequence_checker_.Detach();
}
void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RequestKeyFrame() {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
request_key_frame_ = true;
}
void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendNack(
const std::vector<uint16_t>& sequence_numbers,
bool buffering_allowed) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
RTC_DCHECK(!sequence_numbers.empty());
nack_sequence_numbers_.insert(nack_sequence_numbers_.end(),
sequence_numbers.cbegin(),
@@ -161,7 +161,7 @@ void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendLossNotification(
uint16_t last_received_seq_num,
bool decodability_flag,
bool buffering_allowed) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
RTC_DCHECK(buffering_allowed);
RTC_DCHECK(!lntf_state_)
<< "SendLossNotification() called twice in a row with no call to "
@@ -171,7 +171,7 @@ void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendLossNotification(
}
void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
bool request_key_frame = false;
std::vector<uint16_t> nack_sequence_numbers;
@@ -251,9 +251,11 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2(
&rtcp_feedback_buffer_,
&rtcp_feedback_buffer_)),
packet_buffer_(kPacketBufferStartSize, PacketBufferMaxSize()),
+ reference_finder_(std::make_unique<RtpFrameReferenceFinder>()),
has_received_frame_(false),
frames_decryptable_(false),
- absolute_capture_time_receiver_(clock) {
+ absolute_capture_time_interpolator_(clock) {
+ packet_sequence_checker_.Detach();
constexpr bool remb_candidate = true;
if (packet_router_)
packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate);
@@ -295,8 +297,6 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2(
&rtcp_feedback_buffer_);
}
- reference_finder_ = std::make_unique<RtpFrameReferenceFinder>(this);
-
// Only construct the encrypted receiver if frame encryption is enabled.
if (config_.crypto_options.sframe.require_frame_encryption) {
buffered_frame_decryptor_ =
@@ -307,10 +307,10 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2(
}
if (frame_transformer) {
- frame_transformer_delegate_ = new rtc::RefCountedObject<
- RtpVideoStreamReceiverFrameTransformerDelegate>(
- this, std::move(frame_transformer), rtc::Thread::Current(),
- config_.rtp.remote_ssrc);
+ frame_transformer_delegate_ =
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ this, std::move(frame_transformer), rtc::Thread::Current(),
+ config_.rtp.remote_ssrc);
frame_transformer_delegate_->Init();
}
}
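
This is one instance of a mechanical conversion that recurs throughout the patch: `new rtc::RefCountedObject<T>(...)` becomes `rtc::make_ref_counted<T>(...)`. A minimal sketch of the pattern (MyThing is a hypothetical ref-counted type used only for illustration):

    #include "api/scoped_refptr.h"
    #include "rtc_base/ref_count.h"
    #include "rtc_base/ref_counted_object.h"

    // Hypothetical ref-counted type, only here to show the two spellings.
    class MyThing : public rtc::RefCountInterface {
     public:
      explicit MyThing(int v) : value_(v) {}
      int value() const { return value_; }

     private:
      const int value_;
    };

    void Example() {
      // Old spelling: the RefCountedObject wrapper is named explicitly.
      rtc::scoped_refptr<MyThing> a(new rtc::RefCountedObject<MyThing>(1));

      // New spelling: make_ref_counted picks the wrapper and returns a
      // scoped_refptr, so call sites no longer mention RefCountedObject.
      auto b = rtc::make_ref_counted<MyThing>(2);
    }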
@@ -330,7 +330,7 @@ void RtpVideoStreamReceiver2::AddReceiveCodec(
const VideoCodec& video_codec,
const std::map<std::string, std::string>& codec_params,
bool raw_payload) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) ||
field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) {
packet_buffer_.ForceSpsPpsIdrIsH264Keyframe();
@@ -343,7 +343,7 @@ void RtpVideoStreamReceiver2::AddReceiveCodec(
}
absl::optional<Syncable::Info> RtpVideoStreamReceiver2::GetSyncInfo() const {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
Syncable::Info info;
if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs,
&info.capture_time_ntp_frac,
@@ -363,6 +363,7 @@ absl::optional<Syncable::Info> RtpVideoStreamReceiver2::GetSyncInfo() const {
return info;
}
+// RTC_RUN_ON(packet_sequence_checker_)
RtpVideoStreamReceiver2::ParseGenericDependenciesResult
RtpVideoStreamReceiver2::ParseGenericDependenciesExtension(
const RtpPacketReceived& rtp_packet,
@@ -472,19 +473,32 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData(
rtc::CopyOnWriteBuffer codec_payload,
const RtpPacketReceived& rtp_packet,
const RTPVideoHeader& video) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
- auto packet = std::make_unique<video_coding::PacketBuffer::Packet>(
- rtp_packet, video, clock_->TimeInMilliseconds());
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+
+ auto packet =
+ std::make_unique<video_coding::PacketBuffer::Packet>(rtp_packet, video);
+
+ int64_t unwrapped_rtp_seq_num =
+ rtp_seq_num_unwrapper_.Unwrap(rtp_packet.SequenceNumber());
+ auto& packet_info =
+ packet_infos_
+ .emplace(
+ unwrapped_rtp_seq_num,
+ RtpPacketInfo(
+ rtp_packet.Ssrc(), rtp_packet.Csrcs(), rtp_packet.Timestamp(),
+ /*audio_level=*/absl::nullopt,
+ rtp_packet.GetExtension<AbsoluteCaptureTimeExtension>(),
+ /*receive_time_ms=*/clock_->CurrentTime()))
+ .first->second;
// Try to extrapolate absolute capture time if it is missing.
- packet->packet_info.set_absolute_capture_time(
- absolute_capture_time_receiver_.OnReceivePacket(
- AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(),
- packet->packet_info.csrcs()),
- packet->packet_info.rtp_timestamp(),
+ packet_info.set_absolute_capture_time(
+ absolute_capture_time_interpolator_.OnReceivePacket(
+ AbsoluteCaptureTimeInterpolator::GetSource(packet_info.ssrc(),
+ packet_info.csrcs()),
+ packet_info.rtp_timestamp(),
// Assume frequency is the same one for all video frames.
- kVideoPayloadTypeFrequency,
- packet->packet_info.absolute_capture_time()));
+ kVideoPayloadTypeFrequency, packet_info.absolute_capture_time()));
RTPVideoHeader& video_header = packet->video_header;
video_header.rotation = kVideoRotation_0;
@@ -617,6 +631,8 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData(
void RtpVideoStreamReceiver2::OnRecoveredPacket(const uint8_t* rtp_packet,
size_t rtp_packet_length) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+
RtpPacketReceived packet;
if (!packet.Parse(rtp_packet, rtp_packet_length))
return;
@@ -639,11 +655,10 @@ void RtpVideoStreamReceiver2::OnRecoveredPacket(const uint8_t* rtp_packet,
// This method handles both regular RTP packets and packets recovered
// via FlexFEC.
void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
- if (!receiving_) {
+ if (!receiving_)
return;
- }
ReceivePacket(packet);
@@ -700,6 +715,7 @@ bool RtpVideoStreamReceiver2::IsDecryptable() const {
return frames_decryptable_;
}
+// RTC_RUN_ON(packet_sequence_checker_)
void RtpVideoStreamReceiver2::OnInsertedPacket(
video_coding::PacketBuffer::InsertResult result) {
RTC_DCHECK_RUN_ON(&worker_task_checker_);
@@ -715,22 +731,24 @@ void RtpVideoStreamReceiver2::OnInsertedPacket(
// PacketBuffer promises frame boundaries are correctly set on each
// packet. Document that assumption with the DCHECKs.
RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame());
+ int64_t unwrapped_rtp_seq_num =
+ rtp_seq_num_unwrapper_.Unwrap(packet->seq_num);
+ RTC_DCHECK(packet_infos_.count(unwrapped_rtp_seq_num) > 0);
+ RtpPacketInfo& packet_info = packet_infos_[unwrapped_rtp_seq_num];
if (packet->is_first_packet_in_frame()) {
first_packet = packet.get();
max_nack_count = packet->times_nacked;
- min_recv_time = packet->packet_info.receive_time_ms();
- max_recv_time = packet->packet_info.receive_time_ms();
+ min_recv_time = packet_info.receive_time().ms();
+ max_recv_time = packet_info.receive_time().ms();
payloads.clear();
packet_infos.clear();
} else {
max_nack_count = std::max(max_nack_count, packet->times_nacked);
- min_recv_time =
- std::min(min_recv_time, packet->packet_info.receive_time_ms());
- max_recv_time =
- std::max(max_recv_time, packet->packet_info.receive_time_ms());
+ min_recv_time = std::min(min_recv_time, packet_info.receive_time().ms());
+ max_recv_time = std::max(max_recv_time, packet_info.receive_time().ms());
}
payloads.emplace_back(packet->video_payload);
- packet_infos.push_back(packet->packet_info);
+ packet_infos.push_back(packet_info);
frame_boundary = packet->is_last_packet_in_frame();
if (packet->is_last_packet_in_frame()) {
@@ -770,13 +788,14 @@ void RtpVideoStreamReceiver2::OnInsertedPacket(
last_received_rtp_system_time_.reset();
last_received_keyframe_rtp_system_time_.reset();
last_received_keyframe_rtp_timestamp_.reset();
+ packet_infos_.clear();
RequestKeyFrame();
}
}
+// RTC_RUN_ON(packet_sequence_checker_)
void RtpVideoStreamReceiver2::OnAssembledFrame(
std::unique_ptr<RtpFrameObject> frame) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
RTC_DCHECK(frame);
const absl::optional<RTPVideoHeader::GenericDescriptorInfo>& descriptor =
@@ -816,7 +835,6 @@ void RtpVideoStreamReceiver2::OnAssembledFrame(
// start from the |last_completed_picture_id_| and add an offset in case
// of reordering.
reference_finder_ = std::make_unique<RtpFrameReferenceFinder>(
- this,
last_completed_picture_id_ + std::numeric_limits<uint16_t>::max());
current_codec_ = frame->codec_type();
} else {
@@ -838,25 +856,27 @@ void RtpVideoStreamReceiver2::OnAssembledFrame(
} else if (frame_transformer_delegate_) {
frame_transformer_delegate_->TransformFrame(std::move(frame));
} else {
- reference_finder_->ManageFrame(std::move(frame));
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
}
}
-void RtpVideoStreamReceiver2::OnCompleteFrame(
- std::unique_ptr<EncodedFrame> frame) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
- RtpFrameObject* rtp_frame = static_cast<RtpFrameObject*>(frame.get());
- last_seq_num_for_pic_id_[rtp_frame->Id()] = rtp_frame->last_seq_num();
+// RTC_RUN_ON(packet_sequence_checker_)
+void RtpVideoStreamReceiver2::OnCompleteFrames(
+ RtpFrameReferenceFinder::ReturnVector frames) {
+ for (auto& frame : frames) {
+ RtpFrameObject* rtp_frame = static_cast<RtpFrameObject*>(frame.get());
+ last_seq_num_for_pic_id_[rtp_frame->Id()] = rtp_frame->last_seq_num();
- last_completed_picture_id_ =
- std::max(last_completed_picture_id_, frame->Id());
- complete_frame_callback_->OnCompleteFrame(std::move(frame));
+ last_completed_picture_id_ =
+ std::max(last_completed_picture_id_, frame->Id());
+ complete_frame_callback_->OnCompleteFrame(std::move(frame));
+ }
}
void RtpVideoStreamReceiver2::OnDecryptedFrame(
std::unique_ptr<RtpFrameObject> frame) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
- reference_finder_->ManageFrame(std::move(frame));
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
}
void RtpVideoStreamReceiver2::OnDecryptionStatusChange(
@@ -870,7 +890,9 @@ void RtpVideoStreamReceiver2::OnDecryptionStatusChange(
void RtpVideoStreamReceiver2::SetFrameDecryptor(
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ // TODO(bugs.webrtc.org/11993): Update callers or post the operation over to
+ // the network thread.
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
if (buffered_frame_decryptor_ == nullptr) {
buffered_frame_decryptor_ =
std::make_unique<BufferedFrameDecryptor>(this, this);
@@ -882,7 +904,7 @@ void RtpVideoStreamReceiver2::SetDepacketizerToDecoderFrameTransformer(
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
RTC_DCHECK_RUN_ON(&worker_task_checker_);
frame_transformer_delegate_ =
- new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
this, std::move(frame_transformer), rtc::Thread::Current(),
config_.rtp.remote_ssrc);
frame_transformer_delegate_->Init();
@@ -895,7 +917,7 @@ void RtpVideoStreamReceiver2::UpdateRtt(int64_t max_rtt_ms) {
}
absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedPacketMs() const {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
if (last_received_rtp_system_time_) {
return absl::optional<int64_t>(last_received_rtp_system_time_->ms());
}
@@ -904,7 +926,7 @@ absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedPacketMs() const {
absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs()
const {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
if (last_received_keyframe_rtp_system_time_) {
return absl::optional<int64_t>(
last_received_keyframe_rtp_system_time_->ms());
@@ -914,10 +936,11 @@ absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs()
void RtpVideoStreamReceiver2::ManageFrame(
std::unique_ptr<RtpFrameObject> frame) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
- reference_finder_->ManageFrame(std::move(frame));
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame)));
}
+// RTC_RUN_ON(packet_sequence_checker_)
void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) {
RTC_DCHECK_RUN_ON(&worker_task_checker_);
if (packet.payload_size() == 0) {
@@ -947,9 +970,9 @@ void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) {
parsed_payload->video_header);
}
+// RTC_RUN_ON(packet_sequence_checker_)
void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader(
const RtpPacketReceived& packet) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
if (packet.PayloadType() == config_.rtp.red_payload_type &&
packet.payload_size() > 0) {
if (packet.payload()[0] == config_.rtp.ulpfec_payload_type) {
@@ -968,10 +991,11 @@ void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader(
// In the case of a video stream without picture ids and no rtx the
// RtpFrameReferenceFinder will need to know about padding to
// correctly calculate frame references.
+// RTC_RUN_ON(packet_sequence_checker_)
void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
RTC_DCHECK_RUN_ON(&worker_task_checker_);
- reference_finder_->PaddingReceived(seq_num);
+ OnCompleteFrames(reference_finder_->PaddingReceived(seq_num));
OnInsertedPacket(packet_buffer_.InsertPadding(seq_num));
if (nack_module_) {
@@ -987,7 +1011,7 @@ void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet,
size_t rtcp_packet_length) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
if (!receiving_) {
return false;
@@ -1020,7 +1044,7 @@ bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet,
absl::optional<int64_t> remote_to_local_clock_offset_ms =
ntp_estimator_.EstimateRemoteToLocalClockOffsetMs();
if (remote_to_local_clock_offset_ms.has_value()) {
- absolute_capture_time_receiver_.SetRemoteToLocalClockOffset(
+ capture_clock_offset_updater_.SetRemoteToLocalClockOffset(
Int64MsToQ32x32(*remote_to_local_clock_offset_ms));
}
}
@@ -1029,7 +1053,7 @@ bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet,
}
void RtpVideoStreamReceiver2::FrameContinuous(int64_t picture_id) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
if (!nack_module_)
return;
@@ -1042,8 +1066,7 @@ void RtpVideoStreamReceiver2::FrameContinuous(int64_t picture_id) {
}
void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
- // Running on the decoder thread.
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
int seq_num = -1;
auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
if (seq_num_it != last_seq_num_for_pic_id_.end()) {
@@ -1053,6 +1076,9 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) {
}
if (seq_num != -1) {
+ int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(seq_num);
+ packet_infos_.erase(packet_infos_.begin(),
+ packet_infos_.upper_bound(unwrapped_rtp_seq_num));
packet_buffer_.ClearTo(seq_num);
reference_finder_->ClearTo(seq_num);
}
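
The new packet_infos_ map is keyed on unwrapped sequence numbers: entries are inserted in OnReceivedPayloadData, looked up in OnInsertedPacket, and pruned here once a frame has been decoded. A self-contained toy sketch of why the unwrapping matters (Unwrapper16 is a deliberately simplified stand-in for webrtc::SeqNumUnwrapper<uint16_t>, and a string stands in for RtpPacketInfo):

    #include <cstdint>
    #include <map>
    #include <string>

    // Simplified stand-in: maps the wrapping 16-bit RTP sequence number onto a
    // monotonically increasing 64-bit key so map ordering survives wrap-around.
    class Unwrapper16 {
     public:
      int64_t Unwrap(uint16_t seq) {
        if (last_ < 0) {
          last_ = seq;
        } else {
          int16_t delta =
              static_cast<int16_t>(seq - static_cast<uint16_t>(last_));
          last_ += delta;
        }
        return last_;
      }

     private:
      int64_t last_ = -1;
    };

    int main() {
      Unwrapper16 unwrapper;
      std::map<int64_t, std::string> packet_infos;  // Stand-in for RtpPacketInfo.

      // 1) On receive: store metadata under the unwrapped sequence number.
      packet_infos.emplace(unwrapper.Unwrap(65534), "packet A");
      packet_infos.emplace(unwrapper.Unwrap(65535), "packet B");
      packet_infos.emplace(unwrapper.Unwrap(0), "packet C");  // Wraps; key grows.

      // 2) Once the frame ending at seq 65535 is decoded, drop everything up to
      //    and including it, mirroring erase(begin, upper_bound(unwrapped)).
      const int64_t decoded_key = 65535;
      packet_infos.erase(packet_infos.begin(),
                         packet_infos.upper_bound(decoded_key));
      // Only "packet C" remains.
      return 0;
    }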
@@ -1065,12 +1091,12 @@ void RtpVideoStreamReceiver2::SignalNetworkState(NetworkState state) {
}
void RtpVideoStreamReceiver2::StartReceive() {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
receiving_ = true;
}
void RtpVideoStreamReceiver2::StopReceive() {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
receiving_ = false;
}
@@ -1101,6 +1127,7 @@ void RtpVideoStreamReceiver2::UpdateHistograms() {
}
}
+// RTC_RUN_ON(packet_sequence_checker_)
void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) {
RTC_DCHECK_RUN_ON(&worker_task_checker_);
@@ -1145,7 +1172,7 @@ void RtpVideoStreamReceiver2::UpdatePacketReceiveTimestamps(
<< " with payload type: " << static_cast<int>(packet.PayloadType())
<< ", timestamp: " << packet.Timestamp()
<< ", sequence number: " << packet.SequenceNumber()
- << ", arrival time: " << packet.arrival_time_ms();
+ << ", arrival time: " << ToString(packet.arrival_time());
int32_t time_offset;
if (packet.GetExtension<TransmissionOffset>(&time_offset)) {
ss << ", toffset: " << time_offset;
diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h
index 6649246cbc..0c7e826189 100644
--- a/video/rtp_video_stream_receiver2.h
+++ b/video/rtp_video_stream_receiver2.h
@@ -29,7 +29,8 @@
#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h"
+#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
@@ -64,11 +65,18 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
public RecoveredPacketReceiver,
public RtpPacketSinkInterface,
public KeyFrameRequestSender,
- public OnCompleteFrameCallback,
public OnDecryptedFrameCallback,
public OnDecryptionStatusChangeCallback,
public RtpVideoFrameReceiver {
public:
+ // A complete frame is a frame for which all of its packets have been
+ // received and all of its references are known.
+ class OnCompleteFrameCallback {
+ public:
+ virtual ~OnCompleteFrameCallback() {}
+ virtual void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) = 0;
+ };
+
RtpVideoStreamReceiver2(
TaskQueueBase* current_queue,
Clock* clock,
@@ -113,7 +121,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
// Returns number of different frames seen.
int GetUniqueFramesSeen() const {
- RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
return frame_counter_.GetUniqueSeen();
}
@@ -145,12 +153,12 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
// Decryption not SRTP.
bool IsDecryptable() const;
- // Don't use, still experimental.
+ // Request packet retransmits via NACK. Called via
+ // VideoReceiveStream2::SendNack, which gets called by
+ // RtpVideoStreamReceiver2::RtcpFeedbackBuffer's SendNack and
+ // SendBufferedRtcpFeedback methods (see `rtcp_feedback_buffer_` below).
void RequestPacketRetransmit(const std::vector<uint16_t>& sequence_numbers);
- // Implements OnCompleteFrameCallback.
- void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override;
-
// Implements OnDecryptedFrameCallback.
void OnDecryptedFrame(std::unique_ptr<RtpFrameObject> frame) override;
@@ -178,6 +186,9 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
// Implements RtpVideoFrameReceiver.
void ManageFrame(std::unique_ptr<RtpFrameObject> frame) override;
+ void OnCompleteFrames(RtpFrameReferenceFinder::ReturnVector frames)
+ RTC_RUN_ON(packet_sequence_checker_);
+
// Used for buffering RTCP feedback messages and sending them all together.
// Note:
// 1. Key frame requests and NACKs are mutually exclusive, with the
@@ -225,20 +236,20 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
bool decodability_flag;
};
- RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_task_checker_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_;
KeyFrameRequestSender* const key_frame_request_sender_;
NackSender* const nack_sender_;
LossNotificationSender* const loss_notification_sender_;
// Key-frame-request-related state.
- bool request_key_frame_ RTC_GUARDED_BY(worker_task_checker_);
+ bool request_key_frame_ RTC_GUARDED_BY(packet_sequence_checker_);
// NACK-related state.
std::vector<uint16_t> nack_sequence_numbers_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
absl::optional<LossNotificationState> lntf_state_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
};
enum ParseGenericDependenciesResult {
kDropPacket,
@@ -248,22 +259,29 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
// Entry point doing non-stats work for a received packet. Called
// for the same packet both before and after RED decapsulation.
- void ReceivePacket(const RtpPacketReceived& packet);
+ void ReceivePacket(const RtpPacketReceived& packet)
+ RTC_RUN_ON(packet_sequence_checker_);
+
// Parses and handles RED headers.
// This function assumes that it's being called from only one thread.
- void ParseAndHandleEncapsulatingHeader(const RtpPacketReceived& packet);
- void NotifyReceiverOfEmptyPacket(uint16_t seq_num);
+ void ParseAndHandleEncapsulatingHeader(const RtpPacketReceived& packet)
+ RTC_RUN_ON(packet_sequence_checker_);
+ void NotifyReceiverOfEmptyPacket(uint16_t seq_num)
+ RTC_RUN_ON(packet_sequence_checker_);
void UpdateHistograms();
bool IsRedEnabled() const;
- void InsertSpsPpsIntoTracker(uint8_t payload_type);
- void OnInsertedPacket(video_coding::PacketBuffer::InsertResult result);
+ void InsertSpsPpsIntoTracker(uint8_t payload_type)
+ RTC_RUN_ON(packet_sequence_checker_);
+ void OnInsertedPacket(video_coding::PacketBuffer::InsertResult result)
+ RTC_RUN_ON(packet_sequence_checker_);
ParseGenericDependenciesResult ParseGenericDependenciesExtension(
const RtpPacketReceived& rtp_packet,
- RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_);
- void OnAssembledFrame(std::unique_ptr<RtpFrameObject> frame);
+ RTPVideoHeader* video_header) RTC_RUN_ON(packet_sequence_checker_);
+ void OnAssembledFrame(std::unique_ptr<RtpFrameObject> frame)
+ RTC_RUN_ON(packet_sequence_checker_);
void UpdatePacketReceiveTimestamps(const RtpPacketReceived& packet,
bool is_keyframe)
- RTC_RUN_ON(worker_task_checker_);
+ RTC_RUN_ON(packet_sequence_checker_);
Clock* const clock_;
// Ownership of this object lies with VideoReceiveStream, which owns |this|.
@@ -282,8 +300,16 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
std::unique_ptr<UlpfecReceiver> ulpfec_receiver_;
RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_task_checker_;
- bool receiving_ RTC_GUARDED_BY(worker_task_checker_);
- int64_t last_packet_log_ms_ RTC_GUARDED_BY(worker_task_checker_);
+ // TODO(bugs.webrtc.org/11993): This checker conceptually represents
+ // operations that belong to the network thread. The Call class is currently
+ // moving towards handling network packets on the network thread and while
+ // that work is ongoing, this checker may in practice represent the worker
+ // thread, but still serves as a mechanism for grouping together concepts
+ // that belong to the network thread. Once the packets are fully delivered
+ // on the network thread, this comment will be deleted.
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_;
+ bool receiving_ RTC_GUARDED_BY(packet_sequence_checker_);
+ int64_t last_packet_log_ms_ RTC_GUARDED_BY(packet_sequence_checker_);
const std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
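
The worker_task_checker_ → packet_sequence_checker_ renames all rely on the same clang thread-safety-style annotations. A minimal sketch of that pattern, assuming the usual WebRTC headers (ExampleReceiver is hypothetical):

    #include "api/sequence_checker.h"
    #include "rtc_base/system/no_unique_address.h"
    #include "rtc_base/thread_annotations.h"

    // Sketch: the member is tagged RTC_GUARDED_BY(checker) and every method
    // touching it asserts RTC_DCHECK_RUN_ON(&checker), so access from the wrong
    // sequence is caught in debug builds and by the static analyzer.
    class ExampleReceiver {
     public:
      ExampleReceiver() {
        // Allow construction on one sequence and first use on another.
        packet_sequence_checker_.Detach();
      }

      void OnRtpPacket() {
        RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
        ++packets_received_;
      }

     private:
      RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker packet_sequence_checker_;
      int packets_received_ RTC_GUARDED_BY(packet_sequence_checker_) = 0;
    };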
@@ -295,68 +321,78 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender,
std::unique_ptr<LossNotificationController> loss_notification_controller_;
video_coding::PacketBuffer packet_buffer_
- RTC_GUARDED_BY(worker_task_checker_);
- UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
+ UniqueTimestampCounter frame_counter_
+ RTC_GUARDED_BY(packet_sequence_checker_);
SeqNumUnwrapper<uint16_t> frame_id_unwrapper_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
// Video structure provided in the dependency descriptor in a first packet
// of a key frame. It is required to parse dependency descriptor in the
// following delta packets.
std::unique_ptr<FrameDependencyStructure> video_structure_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
// Frame id of the last frame with the attached video structure.
// absl::nullopt when `video_structure_ == nullptr`;
absl::optional<int64_t> video_structure_frame_id_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
std::unique_ptr<RtpFrameReferenceFinder> reference_finder_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
absl::optional<VideoCodecType> current_codec_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
uint32_t last_assembled_frame_rtp_timestamp_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
std::map<int64_t, uint16_t> last_seq_num_for_pic_id_
- RTC_GUARDED_BY(worker_task_checker_);
- video_coding::H264SpsPpsTracker tracker_ RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
+ video_coding::H264SpsPpsTracker tracker_
+ RTC_GUARDED_BY(packet_sequence_checker_);
// Maps payload id to the depacketizer.
std::map<uint8_t, std::unique_ptr<VideoRtpDepacketizer>> payload_type_map_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
// TODO(johan): Remove pt_codec_params_ once
// https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved.
// Maps a payload type to a map of out-of-band supplied codec parameters.
std::map<uint8_t, std::map<std::string, std::string>> pt_codec_params_
- RTC_GUARDED_BY(worker_task_checker_);
- int16_t last_payload_type_ RTC_GUARDED_BY(worker_task_checker_) = -1;
+ RTC_GUARDED_BY(packet_sequence_checker_);
+ int16_t last_payload_type_ RTC_GUARDED_BY(packet_sequence_checker_) = -1;
- bool has_received_frame_ RTC_GUARDED_BY(worker_task_checker_);
+ bool has_received_frame_ RTC_GUARDED_BY(packet_sequence_checker_);
absl::optional<uint32_t> last_received_rtp_timestamp_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
absl::optional<uint32_t> last_received_keyframe_rtp_timestamp_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
absl::optional<Timestamp> last_received_rtp_system_time_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
absl::optional<Timestamp> last_received_keyframe_rtp_system_time_
- RTC_GUARDED_BY(worker_task_checker_);
+ RTC_GUARDED_BY(packet_sequence_checker_);
// Handles incoming encrypted frames and forwards them to the
// rtp_reference_finder if they are decryptable.
std::unique_ptr<BufferedFrameDecryptor> buffered_frame_decryptor_
- RTC_PT_GUARDED_BY(worker_task_checker_);
+ RTC_PT_GUARDED_BY(packet_sequence_checker_);
bool frames_decryptable_ RTC_GUARDED_BY(worker_task_checker_);
absl::optional<ColorSpace> last_color_space_;
- AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_
- RTC_GUARDED_BY(worker_task_checker_);
+ AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_
+ RTC_GUARDED_BY(packet_sequence_checker_);
+
+ CaptureClockOffsetUpdater capture_clock_offset_updater_
+ RTC_GUARDED_BY(packet_sequence_checker_);
int64_t last_completed_picture_id_ = 0;
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate>
frame_transformer_delegate_;
+
+ SeqNumUnwrapper<uint16_t> rtp_seq_num_unwrapper_
+ RTC_GUARDED_BY(packet_sequence_checker_);
+ std::map<int64_t, RtpPacketInfo> packet_infos_
+ RTC_GUARDED_BY(packet_sequence_checker_);
};
} // namespace webrtc
diff --git a/video/rtp_video_stream_receiver2_unittest.cc b/video/rtp_video_stream_receiver2_unittest.cc
index 41e9ed881d..d23b6047aa 100644
--- a/video/rtp_video_stream_receiver2_unittest.cc
+++ b/video/rtp_video_stream_receiver2_unittest.cc
@@ -13,6 +13,7 @@
#include <memory>
#include <utility>
+#include "api/task_queue/task_queue_base.h"
#include "api/video/video_codec_type.h"
#include "api/video/video_frame_type.h"
#include "common_video/h264/h264_common.h"
@@ -38,6 +39,7 @@
#include "test/gtest.h"
#include "test/mock_frame_transformer.h"
#include "test/time_controller/simulated_task_queue.h"
+#include "test/time_controller/simulated_time_controller.h"
using ::testing::_;
using ::testing::ElementsAre;
@@ -94,7 +96,8 @@ class MockKeyFrameRequestSender : public KeyFrameRequestSender {
MOCK_METHOD(void, RequestKeyFrame, (), (override));
};
-class MockOnCompleteFrameCallback : public OnCompleteFrameCallback {
+class MockOnCompleteFrameCallback
+ : public RtpVideoStreamReceiver2::OnCompleteFrameCallback {
public:
MOCK_METHOD(void, DoOnCompleteFrame, (EncodedFrame*), ());
MOCK_METHOD(void, DoOnCompleteFrameFailNullptr, (EncodedFrame*), ());
@@ -157,7 +160,12 @@ class RtpVideoStreamReceiver2Test : public ::testing::Test,
public:
RtpVideoStreamReceiver2Test() : RtpVideoStreamReceiver2Test("") {}
explicit RtpVideoStreamReceiver2Test(std::string field_trials)
- : override_field_trials_(field_trials),
+ : time_controller_(Timestamp::Millis(100)),
+ task_queue_(time_controller_.GetTaskQueueFactory()->CreateTaskQueue(
+ "RtpVideoStreamReceiver2Test",
+ TaskQueueFactory::Priority::NORMAL)),
+ task_queue_setter_(task_queue_.get()),
+ override_field_trials_(field_trials),
config_(CreateConfig()),
process_thread_(ProcessThread::Create("TestThread")) {
rtp_receive_statistics_ =
@@ -232,8 +240,9 @@ class RtpVideoStreamReceiver2Test : public ::testing::Test,
return config;
}
- TokenTaskQueue task_queue_;
- TokenTaskQueue::CurrentTaskQueueSetter task_queue_setter_{&task_queue_};
+ GlobalSimulatedTimeController time_controller_;
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
+ TokenTaskQueue::CurrentTaskQueueSetter task_queue_setter_;
const webrtc::test::ScopedFieldTrials override_field_trials_;
VideoReceiveStream::Config config_;
@@ -1120,7 +1129,7 @@ TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest,
TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+ rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
EXPECT_CALL(*mock_frame_transformer,
RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
auto receiver = std::make_unique<RtpVideoStreamReceiver2>(
diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
index f3306f008e..0d85cc08e2 100644
--- a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
+++ b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
@@ -61,10 +61,9 @@ class TestRtpVideoFrameReceiver : public RtpVideoFrameReceiver {
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
RegisterTransformedFrameCallbackSinkOnInit) {
TestRtpVideoFrameReceiver receiver;
- rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
- new rtc::RefCountedObject<MockFrameTransformer>());
- rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
- new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ auto frame_transformer(rtc::make_ref_counted<MockFrameTransformer>());
+ auto delegate(
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
&receiver, frame_transformer, rtc::Thread::Current(),
/*remote_ssrc*/ 1111));
EXPECT_CALL(*frame_transformer,
@@ -75,10 +74,9 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
UnregisterTransformedFrameSinkCallbackOnReset) {
TestRtpVideoFrameReceiver receiver;
- rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
- new rtc::RefCountedObject<MockFrameTransformer>());
- rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
- new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ auto frame_transformer(rtc::make_ref_counted<MockFrameTransformer>());
+ auto delegate(
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
&receiver, frame_transformer, rtc::Thread::Current(),
/*remote_ssrc*/ 1111));
EXPECT_CALL(*frame_transformer, UnregisterTransformedFrameSinkCallback(1111));
@@ -87,10 +85,10 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) {
TestRtpVideoFrameReceiver receiver;
- rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
- new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>());
- rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
- new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ auto frame_transformer(
+ rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>());
+ auto delegate(
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
&receiver, frame_transformer, rtc::Thread::Current(),
/*remote_ssrc*/ 1111));
auto frame = CreateRtpFrameObject();
@@ -101,10 +99,10 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) {
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
ManageFrameOnTransformedFrame) {
TestRtpVideoFrameReceiver receiver;
- rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer(
- new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>());
- rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ auto mock_frame_transformer(
+ rtc::make_ref_counted<NiceMock<MockFrameTransformer>>());
+ auto delegate =
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
&receiver, mock_frame_transformer, rtc::Thread::Current(),
/*remote_ssrc*/ 1111);
@@ -127,10 +125,10 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
TransformableFrameMetadataHasCorrectValue) {
TestRtpVideoFrameReceiver receiver;
- rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
- rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
- new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ auto mock_frame_transformer =
+ rtc::make_ref_counted<NiceMock<MockFrameTransformer>>();
+ auto delegate =
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
&receiver, mock_frame_transformer, rtc::Thread::Current(), 1111);
delegate->Init();
RTPVideoHeader video_header;
diff --git a/video/rtp_video_stream_receiver_unittest.cc b/video/rtp_video_stream_receiver_unittest.cc
index acdc2b790d..765e1e1716 100644
--- a/video/rtp_video_stream_receiver_unittest.cc
+++ b/video/rtp_video_stream_receiver_unittest.cc
@@ -93,7 +93,8 @@ class MockKeyFrameRequestSender : public KeyFrameRequestSender {
MOCK_METHOD(void, RequestKeyFrame, (), (override));
};
-class MockOnCompleteFrameCallback : public OnCompleteFrameCallback {
+class MockOnCompleteFrameCallback
+ : public RtpVideoStreamReceiver::OnCompleteFrameCallback {
public:
MOCK_METHOD(void, DoOnCompleteFrame, (EncodedFrame*), ());
MOCK_METHOD(void, DoOnCompleteFrameFailNullptr, (EncodedFrame*), ());
@@ -1165,8 +1166,8 @@ TEST_F(RtpVideoStreamReceiverDeathTest, RepeatedSecondarySinkDisallowed) {
#endif
TEST_F(RtpVideoStreamReceiverTest, TransformFrame) {
- rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
- new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+ auto mock_frame_transformer =
+ rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
EXPECT_CALL(*mock_frame_transformer,
RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
auto receiver = std::make_unique<RtpVideoStreamReceiver>(
diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc
index 686b756776..1b968ef8f7 100644
--- a/video/send_statistics_proxy.cc
+++ b/video/send_statistics_proxy.cc
@@ -670,6 +670,7 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
void SendStatisticsProxy::OnEncoderReconfigured(
const VideoEncoderConfig& config,
const std::vector<VideoStream>& streams) {
+ // Called on VideoStreamEncoder's encoder_queue_.
MutexLock lock(&mutex_);
if (content_type_ != config.content_type) {
@@ -1284,17 +1285,6 @@ void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
uma_container_->first_rtcp_stats_time_ms_ = clock_->TimeInMilliseconds();
}
-void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics,
- uint32_t ssrc) {
- MutexLock lock(&mutex_);
- VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
- if (!stats)
- return;
-
- stats->rtcp_stats = statistics;
- uma_container_->report_block_stats_.Store(ssrc, statistics);
-}
-
void SendStatisticsProxy::OnReportBlockDataUpdated(
ReportBlockData report_block_data) {
MutexLock lock(&mutex_);
@@ -1302,6 +1292,13 @@ void SendStatisticsProxy::OnReportBlockDataUpdated(
GetStatsEntry(report_block_data.report_block().source_ssrc);
if (!stats)
return;
+ const RTCPReportBlock& report_block = report_block_data.report_block();
+ uma_container_->report_block_stats_.Store(
+ /*ssrc=*/report_block.source_ssrc,
+ /*packets_lost=*/report_block.packets_lost,
+ /*extended_highest_sequence_number=*/
+ report_block.extended_highest_sequence_number);
+
stats->report_block_data = std::move(report_block_data);
}
diff --git a/video/send_statistics_proxy.h b/video/send_statistics_proxy.h
index 0de7df290e..bfb221f65c 100644
--- a/video/send_statistics_proxy.h
+++ b/video/send_statistics_proxy.h
@@ -37,7 +37,6 @@
namespace webrtc {
class SendStatisticsProxy : public VideoStreamEncoderObserver,
- public RtcpStatisticsCallback,
public ReportBlockDataObserver,
public RtcpPacketTypeCounterObserver,
public StreamDataCountersCallback,
@@ -106,9 +105,6 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
int GetSendFrameRate() const;
protected:
- // From RtcpStatisticsCallback.
- void StatisticsUpdated(const RtcpStatistics& statistics,
- uint32_t ssrc) override;
// From ReportBlockDataObserver.
void OnReportBlockDataUpdated(ReportBlockData report_block_data) override;
// From RtcpPacketTypeCounterObserver.
diff --git a/video/send_statistics_proxy_unittest.cc b/video/send_statistics_proxy_unittest.cc
index 71b84c9443..d4a7a49e39 100644
--- a/video/send_statistics_proxy_unittest.cc
+++ b/video/send_statistics_proxy_unittest.cc
@@ -159,11 +159,19 @@ class SendStatisticsProxyTest : public ::testing::Test {
b.rtp_stats.retransmitted.packets);
EXPECT_EQ(a.rtp_stats.fec.packets, b.rtp_stats.fec.packets);
- EXPECT_EQ(a.rtcp_stats.fraction_lost, b.rtcp_stats.fraction_lost);
- EXPECT_EQ(a.rtcp_stats.packets_lost, b.rtcp_stats.packets_lost);
- EXPECT_EQ(a.rtcp_stats.extended_highest_sequence_number,
- b.rtcp_stats.extended_highest_sequence_number);
- EXPECT_EQ(a.rtcp_stats.jitter, b.rtcp_stats.jitter);
+ EXPECT_EQ(a.report_block_data.has_value(),
+ b.report_block_data.has_value());
+ if (a.report_block_data.has_value()) {
+ const RTCPReportBlock& a_rtcp_stats =
+ a.report_block_data->report_block();
+ const RTCPReportBlock& b_rtcp_stats =
+ b.report_block_data->report_block();
+ EXPECT_EQ(a_rtcp_stats.fraction_lost, b_rtcp_stats.fraction_lost);
+ EXPECT_EQ(a_rtcp_stats.packets_lost, b_rtcp_stats.packets_lost);
+ EXPECT_EQ(a_rtcp_stats.extended_highest_sequence_number,
+ b_rtcp_stats.extended_highest_sequence_number);
+ EXPECT_EQ(a_rtcp_stats.jitter, b_rtcp_stats.jitter);
+ }
}
}
@@ -174,29 +182,37 @@ class SendStatisticsProxyTest : public ::testing::Test {
VideoSendStream::Stats expected_;
};
-TEST_F(SendStatisticsProxyTest, RtcpStatistics) {
- RtcpStatisticsCallback* callback = statistics_proxy_.get();
- for (const auto& ssrc : config_.rtp.ssrcs) {
- VideoSendStream::StreamStats& ssrc_stats = expected_.substreams[ssrc];
-
+TEST_F(SendStatisticsProxyTest, ReportBlockDataObserver) {
+ ReportBlockDataObserver* callback = statistics_proxy_.get();
+ for (uint32_t ssrc : config_.rtp.ssrcs) {
// Add statistics with some arbitrary, but unique, numbers.
- uint32_t offset = ssrc * sizeof(RtcpStatistics);
- ssrc_stats.rtcp_stats.packets_lost = offset;
- ssrc_stats.rtcp_stats.extended_highest_sequence_number = offset + 1;
- ssrc_stats.rtcp_stats.fraction_lost = offset + 2;
- ssrc_stats.rtcp_stats.jitter = offset + 3;
- callback->StatisticsUpdated(ssrc_stats.rtcp_stats, ssrc);
+ uint32_t offset = ssrc * 4;
+ RTCPReportBlock report_block;
+ report_block.source_ssrc = ssrc;
+ report_block.packets_lost = offset;
+ report_block.extended_highest_sequence_number = offset + 1;
+ report_block.fraction_lost = offset + 2;
+ report_block.jitter = offset + 3;
+ ReportBlockData data;
+ data.SetReportBlock(report_block, 0);
+ expected_.substreams[ssrc].report_block_data = data;
+
+ callback->OnReportBlockDataUpdated(data);
}
- for (const auto& ssrc : config_.rtp.rtx.ssrcs) {
- VideoSendStream::StreamStats& ssrc_stats = expected_.substreams[ssrc];
-
+ for (uint32_t ssrc : config_.rtp.rtx.ssrcs) {
// Add statistics with some arbitrary, but unique, numbers.
- uint32_t offset = ssrc * sizeof(RtcpStatistics);
- ssrc_stats.rtcp_stats.packets_lost = offset;
- ssrc_stats.rtcp_stats.extended_highest_sequence_number = offset + 1;
- ssrc_stats.rtcp_stats.fraction_lost = offset + 2;
- ssrc_stats.rtcp_stats.jitter = offset + 3;
- callback->StatisticsUpdated(ssrc_stats.rtcp_stats, ssrc);
+ uint32_t offset = ssrc * 4;
+ RTCPReportBlock report_block;
+ report_block.source_ssrc = ssrc;
+ report_block.packets_lost = offset;
+ report_block.extended_highest_sequence_number = offset + 1;
+ report_block.fraction_lost = offset + 2;
+ report_block.jitter = offset + 3;
+ ReportBlockData data;
+ data.SetReportBlock(report_block, 0);
+ expected_.substreams[ssrc].report_block_data = data;
+
+ callback->OnReportBlockDataUpdated(data);
}
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
ExpectEqual(expected_, stats);
@@ -2171,10 +2187,13 @@ TEST_F(SendStatisticsProxyTest, NoSubstreams) {
std::max(*absl::c_max_element(config_.rtp.ssrcs),
*absl::c_max_element(config_.rtp.rtx.ssrcs)) +
1;
- // From RtcpStatisticsCallback.
- RtcpStatistics rtcp_stats;
- RtcpStatisticsCallback* rtcp_callback = statistics_proxy_.get();
- rtcp_callback->StatisticsUpdated(rtcp_stats, excluded_ssrc);
+ // From ReportBlockDataObserver.
+ ReportBlockDataObserver* rtcp_callback = statistics_proxy_.get();
+ RTCPReportBlock report_block;
+ report_block.source_ssrc = excluded_ssrc;
+ ReportBlockData data;
+ data.SetReportBlock(report_block, 0);
+ rtcp_callback->OnReportBlockDataUpdated(data);
// From BitrateStatisticsObserver.
uint32_t total = 0;
@@ -2221,9 +2240,12 @@ TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) {
// Update the first SSRC with bogus RTCP stats to make sure that encoded
// resolution still times out (no global timeout for all stats).
- RtcpStatistics rtcp_statistics;
- RtcpStatisticsCallback* rtcp_stats = statistics_proxy_.get();
- rtcp_stats->StatisticsUpdated(rtcp_statistics, config_.rtp.ssrcs[0]);
+ ReportBlockDataObserver* rtcp_callback = statistics_proxy_.get();
+ RTCPReportBlock report_block;
+ report_block.source_ssrc = config_.rtp.ssrcs[0];
+ ReportBlockData data;
+ data.SetReportBlock(report_block, 0);
+ rtcp_callback->OnReportBlockDataUpdated(data);
// Report stats for second SSRC to make sure it's not outdated along with the
// first SSRC.
diff --git a/video/video_analyzer.cc b/video/video_analyzer.cc
index 6698dadf42..81dcf055b8 100644
--- a/video/video_analyzer.cc
+++ b/video/video_analyzer.cc
@@ -137,10 +137,12 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport,
}
for (uint32_t i = 0; i < num_cores; ++i) {
- rtc::PlatformThread* thread =
- new rtc::PlatformThread(&FrameComparisonThread, this, "Analyzer");
- thread->Start();
- comparison_thread_pool_.push_back(thread);
+ comparison_thread_pool_.push_back(rtc::PlatformThread::SpawnJoinable(
+ [this] {
+ while (CompareFrames()) {
+ }
+ },
+ "Analyzer"));
}
if (!rtp_dump_name.empty()) {
@@ -155,10 +157,8 @@ VideoAnalyzer::~VideoAnalyzer() {
MutexLock lock(&comparison_lock_);
quit_ = true;
}
- for (rtc::PlatformThread* thread : comparison_thread_pool_) {
- thread->Stop();
- delete thread;
- }
+ // Joins all threads.
+ comparison_thread_pool_.clear();
}
void VideoAnalyzer::SetReceiver(PacketReceiver* receiver) {
@@ -533,12 +533,6 @@ void VideoAnalyzer::PollStats() {
memory_usage_.AddSample(rtc::GetProcessResidentSizeBytes());
}
-void VideoAnalyzer::FrameComparisonThread(void* obj) {
- VideoAnalyzer* analyzer = static_cast<VideoAnalyzer*>(obj);
- while (analyzer->CompareFrames()) {
- }
-}
-
bool VideoAnalyzer::CompareFrames() {
if (AllFramesRecorded())
return false;
diff --git a/video/video_analyzer.h b/video/video_analyzer.h
index 18bacc16fc..68861d1b5f 100644
--- a/video/video_analyzer.h
+++ b/video/video_analyzer.h
@@ -302,7 +302,7 @@ class VideoAnalyzer : public PacketReceiver,
const double avg_ssim_threshold_;
bool is_quick_test_enabled_;
- std::vector<rtc::PlatformThread*> comparison_thread_pool_;
+ std::vector<rtc::PlatformThread> comparison_thread_pool_;
rtc::Event comparison_available_event_;
std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(comparison_lock_);
bool quit_ RTC_GUARDED_BY(comparison_lock_);
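
The analyzer's comparison threads now use the value-type rtc::PlatformThread API: SpawnJoinable starts the thread and the returned handle joins it on destruction, so clearing the vector replaces the old Stop()/delete loop. A small sketch of the pattern (the pool size, lambda body, and names are illustrative only):

    #include <vector>

    #include "rtc_base/platform_thread.h"

    void RunPool() {
      std::vector<rtc::PlatformThread> pool;
      for (int i = 0; i < 4; ++i) {
        // SpawnJoinable starts the thread immediately; the returned handle
        // joins the thread when it is destroyed.
        pool.push_back(rtc::PlatformThread::SpawnJoinable(
            [] { /* do work until told to quit */ }, "PoolWorker"));
      }
      pool.clear();  // Joins all workers, mirroring ~VideoAnalyzer().
    }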
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc
index a58aa1f33f..b77a4759a2 100644
--- a/video/video_quality_test.cc
+++ b/video/video_quality_test.cc
@@ -626,7 +626,7 @@ void VideoQualityTest::FillScalabilitySettings(
encoder_config.spatial_layers = params->ss[video_idx].spatial_layers;
encoder_config.simulcast_layers = std::vector<VideoStream>(num_streams);
encoder_config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
params->video[video_idx].codec, kDefaultMaxQp,
params->screenshare[video_idx].enabled, true);
params->ss[video_idx].streams =
@@ -800,7 +800,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
params_.ss[video_idx].streams;
}
video_encoder_configs_[video_idx].video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
params_.video[video_idx].codec,
params_.ss[video_idx].streams[0].max_qp,
params_.screenshare[video_idx].enabled, true);
@@ -829,7 +829,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
vp8_settings.numberOfTemporalLayers = static_cast<unsigned char>(
params_.video[video_idx].num_temporal_layers);
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
} else if (params_.video[video_idx].codec == "VP9") {
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
@@ -846,7 +846,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
vp9_settings.flexibleMode = true;
}
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
}
} else if (params_.ss[video_idx].num_spatial_layers > 1) {
@@ -860,8 +860,8 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred;
vp9_settings.automaticResizeOn = false;
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9_settings);
RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(),
1);
// Min bitrate will be enforced by spatial layer config instead.
@@ -871,7 +871,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.automaticResizeOn = true;
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
} else if (params_.video[video_idx].codec == "VP9") {
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
@@ -879,7 +879,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
vp9_settings.automaticResizeOn =
params_.ss[video_idx].num_spatial_layers == 1;
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
} else if (params_.video[video_idx].codec == "H264") {
// Quality scaling is always on for H.264.
@@ -898,18 +898,18 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.automaticResizeOn = false;
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
} else if (params_.video[video_idx].codec == "VP9") {
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
vp9_settings.automaticResizeOn = false;
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
} else if (params_.video[video_idx].codec == "H264") {
VideoCodecH264 h264_settings = VideoEncoder::GetDefaultH264Settings();
video_encoder_configs_[video_idx].encoder_specific_settings =
- new rtc::RefCountedObject<
+ rtc::make_ref_counted<
VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
}
}
@@ -925,13 +925,13 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
}
CreateMatchingFecConfig(recv_transport, *GetVideoSendConfig());
- GetFlexFecConfig()->transport_cc = params_.call.send_side_bwe;
+ GetFlexFecConfig()->rtp.transport_cc = params_.call.send_side_bwe;
if (params_.call.send_side_bwe) {
- GetFlexFecConfig()->rtp_header_extensions.push_back(
+ GetFlexFecConfig()->rtp.extensions.push_back(
RtpExtension(RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberExtensionId));
} else {
- GetFlexFecConfig()->rtp_header_extensions.push_back(
+ GetFlexFecConfig()->rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
}
}
@@ -986,7 +986,7 @@ void VideoQualityTest::SetupThumbnails(Transport* send_transport,
thumbnail_encoder_config.max_bitrate_bps = 50000;
std::vector<VideoStream> streams{params_.ss[0].streams[0]};
thumbnail_encoder_config.video_stream_factory =
- new rtc::RefCountedObject<VideoStreamFactory>(streams);
+ rtc::make_ref_counted<VideoStreamFactory>(streams);
thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers;
thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy());
diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc
index 8f51a8a0e3..da8eb7de60 100644
--- a/video/video_receive_stream.cc
+++ b/video/video_receive_stream.cc
@@ -332,8 +332,7 @@ void VideoReceiveStream::Start() {
for (const Decoder& decoder : config_.decoders) {
std::unique_ptr<VideoDecoder> video_decoder =
- config_.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format,
- config_.stream_id);
+ config_.decoder_factory->CreateVideoDecoder(decoder.video_format);
// If we still have no valid decoder, we have to create a "Null" decoder
// that ignores all calls. The reason we can get into this state is that the
// old decoder factory interface doesn't have a way to query supported
@@ -507,6 +506,10 @@ void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) {
int64_t video_playout_ntp_ms;
int64_t sync_offset_ms;
double estimated_freq_khz;
+
+ // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for
+ // |video_frame.packet_infos|. But VideoFrame is const qualified here.
+
// TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the
// function itself, another in GetChannel() and a third in
// GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function
@@ -759,7 +762,6 @@ VideoReceiveStream::RecordingState VideoReceiveStream::SetAndGetRecordingState(
RTC_DCHECK_RUN_ON(&decode_queue_);
// Save old state.
old_state.callback = std::move(encoded_frame_buffer_function_);
- old_state.keyframe_needed = keyframe_generation_requested_;
old_state.last_keyframe_request_ms = last_keyframe_request_ms_;
// Set new state.
@@ -768,7 +770,7 @@ VideoReceiveStream::RecordingState VideoReceiveStream::SetAndGetRecordingState(
RequestKeyFrame(clock_->TimeInMilliseconds());
keyframe_generation_requested_ = true;
} else {
- keyframe_generation_requested_ = state.keyframe_needed;
+ keyframe_generation_requested_ = false;
last_keyframe_request_ms_ = state.last_keyframe_request_ms.value_or(0);
}
event.Set();
diff --git a/video/video_receive_stream.h b/video/video_receive_stream.h
index f3b51892ff..c778d74558 100644
--- a/video/video_receive_stream.h
+++ b/video/video_receive_stream.h
@@ -45,12 +45,13 @@ class VCMTiming;
namespace internal {
-class VideoReceiveStream : public webrtc::DEPRECATED_VideoReceiveStream,
- public rtc::VideoSinkInterface<VideoFrame>,
- public NackSender,
- public OnCompleteFrameCallback,
- public Syncable,
- public CallStatsObserver {
+class VideoReceiveStream
+ : public webrtc::DEPRECATED_VideoReceiveStream,
+ public rtc::VideoSinkInterface<VideoFrame>,
+ public NackSender,
+ public RtpVideoStreamReceiver::OnCompleteFrameCallback,
+ public Syncable,
+ public CallStatsObserver {
public:
// The default number of milliseconds to pass before re-requesting a key frame
// to be sent.
@@ -86,6 +87,8 @@ class VideoReceiveStream : public webrtc::DEPRECATED_VideoReceiveStream,
void Start() override;
void Stop() override;
+ const RtpConfig& rtp_config() const override { return config_.rtp; }
+
webrtc::VideoReceiveStream::Stats GetStats() const override;
void AddSecondarySink(RtpPacketSinkInterface* sink) override;
@@ -111,7 +114,7 @@ class VideoReceiveStream : public webrtc::DEPRECATED_VideoReceiveStream,
void SendNack(const std::vector<uint16_t>& sequence_numbers,
bool buffering_allowed) override;
- // Implements OnCompleteFrameCallback.
+ // Implements RtpVideoStreamReceiver::OnCompleteFrameCallback.
void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override;
// Implements CallStatsObserver::OnRttUpdate
diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc
index 2c7461ad0d..c929f07616 100644
--- a/video/video_receive_stream2.cc
+++ b/video/video_receive_stream2.cc
@@ -211,30 +211,28 @@ int DetermineMaxWaitForFrame(const VideoReceiveStream::Config& config,
: kMaxWaitForFrameMs;
}
-VideoReceiveStream2::VideoReceiveStream2(
- TaskQueueFactory* task_queue_factory,
- TaskQueueBase* current_queue,
- RtpStreamReceiverControllerInterface* receiver_controller,
- int num_cpu_cores,
- PacketRouter* packet_router,
- VideoReceiveStream::Config config,
- ProcessThread* process_thread,
- CallStats* call_stats,
- Clock* clock,
- VCMTiming* timing)
+VideoReceiveStream2::VideoReceiveStream2(TaskQueueFactory* task_queue_factory,
+ Call* call,
+ int num_cpu_cores,
+ PacketRouter* packet_router,
+ VideoReceiveStream::Config config,
+ ProcessThread* process_thread,
+ CallStats* call_stats,
+ Clock* clock,
+ VCMTiming* timing)
: task_queue_factory_(task_queue_factory),
transport_adapter_(config.rtcp_send_transport),
config_(std::move(config)),
num_cpu_cores_(num_cpu_cores),
- worker_thread_(current_queue),
+ call_(call),
clock_(clock),
call_stats_(call_stats),
source_tracker_(clock_),
- stats_proxy_(&config_, clock_, worker_thread_),
+ stats_proxy_(&config_, clock_, call->worker_thread()),
rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
timing_(timing),
video_receiver_(clock_, timing_.get()),
- rtp_video_stream_receiver_(worker_thread_,
+ rtp_video_stream_receiver_(call->worker_thread(),
clock_,
&transport_adapter_,
call_stats->AsRtcpRttStats(),
@@ -249,7 +247,7 @@ VideoReceiveStream2::VideoReceiveStream2(
this, // OnCompleteFrameCallback
config_.frame_decryptor,
config_.frame_transformer),
- rtp_stream_sync_(current_queue, this),
+ rtp_stream_sync_(call->worker_thread(), this),
max_wait_for_keyframe_ms_(DetermineMaxWaitForFrame(config, true)),
max_wait_for_frame_ms_(DetermineMaxWaitForFrame(config, false)),
low_latency_renderer_enabled_("enabled", true),
@@ -261,10 +259,11 @@ VideoReceiveStream2::VideoReceiveStream2(
TaskQueueFactory::Priority::HIGH)) {
RTC_LOG(LS_INFO) << "VideoReceiveStream2: " << config_.ToString();
- RTC_DCHECK(worker_thread_);
+ RTC_DCHECK(call_->worker_thread());
RTC_DCHECK(config_.renderer);
RTC_DCHECK(call_stats_);
module_process_sequence_checker_.Detach();
+ packet_sequence_checker_.Detach();
RTC_DCHECK(!config_.decoders.empty());
RTC_CHECK(config_.decoder_factory);
@@ -282,15 +281,10 @@ VideoReceiveStream2::VideoReceiveStream2(
frame_buffer_.reset(
new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_));
- // Register with RtpStreamReceiverController.
- media_receiver_ = receiver_controller->CreateReceiver(
- config_.rtp.remote_ssrc, &rtp_video_stream_receiver_);
if (config_.rtp.rtx_ssrc) {
rtx_receive_stream_ = std::make_unique<RtxReceiveStream>(
&rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types,
config_.rtp.remote_ssrc, rtp_receive_statistics_.get());
- rtx_receiver_ = receiver_controller->CreateReceiver(
- config_.rtp.rtx_ssrc, rtx_receive_stream_.get());
} else {
rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc,
true);
@@ -309,20 +303,45 @@ VideoReceiveStream2::VideoReceiveStream2(
VideoReceiveStream2::~VideoReceiveStream2() {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
RTC_LOG(LS_INFO) << "~VideoReceiveStream2: " << config_.ToString();
+ RTC_DCHECK(!media_receiver_);
+ RTC_DCHECK(!rtx_receiver_);
Stop();
}
+void VideoReceiveStream2::RegisterWithTransport(
+ RtpStreamReceiverControllerInterface* receiver_controller) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ RTC_DCHECK(!media_receiver_);
+ RTC_DCHECK(!rtx_receiver_);
+
+ // Register with RtpStreamReceiverController.
+ media_receiver_ = receiver_controller->CreateReceiver(
+ config_.rtp.remote_ssrc, &rtp_video_stream_receiver_);
+ if (config_.rtp.rtx_ssrc) {
+ RTC_DCHECK(rtx_receive_stream_);
+ rtx_receiver_ = receiver_controller->CreateReceiver(
+ config_.rtp.rtx_ssrc, rtx_receive_stream_.get());
+ }
+}
+
+void VideoReceiveStream2::UnregisterFromTransport() {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ media_receiver_.reset();
+ rtx_receiver_.reset();
+}
+
void VideoReceiveStream2::SignalNetworkState(NetworkState state) {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
rtp_video_stream_receiver_.SignalNetworkState(state);
}
bool VideoReceiveStream2::DeliverRtcp(const uint8_t* packet, size_t length) {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
return rtp_video_stream_receiver_.DeliverRtcp(packet, length);
}
void VideoReceiveStream2::SetSync(Syncable* audio_syncable) {
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
rtp_stream_sync_.ConfigureSync(audio_syncable);
}
@@ -365,9 +384,13 @@ void VideoReceiveStream2::Start() {
const bool raw_payload =
config_.rtp.raw_payload_types.count(decoder.payload_type) > 0;
- rtp_video_stream_receiver_.AddReceiveCodec(decoder.payload_type, codec,
- decoder.video_format.parameters,
- raw_payload);
+ {
+ // TODO(bugs.webrtc.org/11993): Make this call on the network thread.
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ rtp_video_stream_receiver_.AddReceiveCodec(
+ decoder.payload_type, codec, decoder.video_format.parameters,
+ raw_payload);
+ }
RTC_CHECK_EQ(VCM_OK, video_receiver_.RegisterReceiveCodec(
decoder.payload_type, &codec, num_cpu_cores_));
}
@@ -389,12 +412,23 @@ void VideoReceiveStream2::Start() {
StartNextDecode();
});
decoder_running_ = true;
- rtp_video_stream_receiver_.StartReceive();
+
+ {
+ // TODO(bugs.webrtc.org/11993): Make this call on the network thread.
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ rtp_video_stream_receiver_.StartReceive();
+ }
}
void VideoReceiveStream2::Stop() {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- rtp_video_stream_receiver_.StopReceive();
+ {
+ // TODO(bugs.webrtc.org/11993): Make this call on the network thread.
+ // Also call `GetUniqueFramesSeen()` at the same time (since it's a counter
+ // that's updated on the network thread).
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ rtp_video_stream_receiver_.StopReceive();
+ }
stats_proxy_.OnUniqueFramesCounted(
rtp_video_stream_receiver_.GetUniqueFramesSeen());
@@ -432,6 +466,8 @@ void VideoReceiveStream2::Stop() {
void VideoReceiveStream2::CreateAndRegisterExternalDecoder(
const Decoder& decoder) {
+ TRACE_EVENT0("webrtc",
+ "VideoReceiveStream2::CreateAndRegisterExternalDecoder");
std::unique_ptr<VideoDecoder> video_decoder =
config_.decoder_factory->CreateVideoDecoder(decoder.video_format);
// If we still have no valid decoder, we have to create a "Null" decoder
@@ -526,7 +562,10 @@ int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const {
void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) {
VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime());
- worker_thread_->PostTask(
+ // TODO(bugs.webrtc.org/10739): We should set the local capture clock offset
+ // for |video_frame.packet_infos|, but VideoFrame is const-qualified here.
+
+ call_->worker_thread()->PostTask(
ToQueuedTask(task_safety_, [frame_meta, this]() {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
int64_t video_playout_ntp_ms;
@@ -614,8 +653,13 @@ void VideoReceiveStream2::OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) {
}
int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame));
- if (last_continuous_pid != -1)
- rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
+ if (last_continuous_pid != -1) {
+ {
+ // TODO(bugs.webrtc.org/11993): Call on the network thread.
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
+ }
+ }
}
void VideoReceiveStream2::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
@@ -631,7 +675,7 @@ uint32_t VideoReceiveStream2::id() const {
}
absl::optional<Syncable::Info> VideoReceiveStream2::GetInfo() const {
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
absl::optional<Syncable::Info> info =
rtp_video_stream_receiver_.GetSyncInfo();
@@ -682,9 +726,10 @@ void VideoReceiveStream2::StartNextDecode() {
HandleEncodedFrame(std::move(frame));
} else {
int64_t now_ms = clock_->TimeInMilliseconds();
- worker_thread_->PostTask(ToQueuedTask(
+ // TODO(bugs.webrtc.org/11993): PostTask to the network thread.
+ call_->worker_thread()->PostTask(ToQueuedTask(
task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() {
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
HandleFrameBufferTimeout(now_ms, wait_ms);
}));
}
@@ -743,19 +788,22 @@ void VideoReceiveStream2::HandleEncodedFrame(
force_request_key_frame = true;
}
- worker_thread_->PostTask(ToQueuedTask(
- task_safety_,
- [this, now_ms, received_frame_is_keyframe, force_request_key_frame,
- decoded_frame_picture_id, keyframe_request_is_due]() {
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ {
+ // TODO(bugs.webrtc.org/11993): Make this PostTask to the network thread.
+ call_->worker_thread()->PostTask(ToQueuedTask(
+ task_safety_,
+ [this, now_ms, received_frame_is_keyframe, force_request_key_frame,
+ decoded_frame_picture_id, keyframe_request_is_due]() {
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
- if (decoded_frame_picture_id != -1)
- rtp_video_stream_receiver_.FrameDecoded(decoded_frame_picture_id);
+ if (decoded_frame_picture_id != -1)
+ rtp_video_stream_receiver_.FrameDecoded(decoded_frame_picture_id);
- HandleKeyFrameGeneration(received_frame_is_keyframe, now_ms,
- force_request_key_frame,
- keyframe_request_is_due);
- }));
+ HandleKeyFrameGeneration(received_frame_is_keyframe, now_ms,
+ force_request_key_frame,
+ keyframe_request_is_due);
+ }));
+ }
}
int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame(
@@ -818,13 +866,12 @@ int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame(
return decode_result;
}
+// RTC_RUN_ON(packet_sequence_checker_)
void VideoReceiveStream2::HandleKeyFrameGeneration(
bool received_frame_is_keyframe,
int64_t now_ms,
bool always_request_key_frame,
bool keyframe_request_is_due) {
- // Running on worker_sequence_checker_.
-
bool request_key_frame = always_request_key_frame;
// Repeat sending keyframe requests if we've requested a keyframe.
@@ -848,9 +895,9 @@ void VideoReceiveStream2::HandleKeyFrameGeneration(
}
}
+// RTC_RUN_ON(packet_sequence_checker_)
void VideoReceiveStream2::HandleFrameBufferTimeout(int64_t now_ms,
int64_t wait_ms) {
- // Running on |worker_sequence_checker_|.
absl::optional<int64_t> last_packet_ms =
rtp_video_stream_receiver_.LastReceivedPacketMs();
@@ -870,8 +917,8 @@ void VideoReceiveStream2::HandleFrameBufferTimeout(int64_t now_ms,
}
}
+// RTC_RUN_ON(packet_sequence_checker_)
bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const {
- // Running on worker_sequence_checker_.
absl::optional<int64_t> last_keyframe_packet_ms =
rtp_video_stream_receiver_.LastReceivedKeyframePacketMs();
@@ -943,13 +990,13 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state,
event.Set();
});
- old_state.keyframe_needed = keyframe_generation_requested_;
-
if (generate_key_frame) {
rtp_video_stream_receiver_.RequestKeyFrame();
- keyframe_generation_requested_ = true;
- } else {
- keyframe_generation_requested_ = state.keyframe_needed;
+ {
+ // TODO(bugs.webrtc.org/11993): Post this to the network thread.
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
+ keyframe_generation_requested_ = true;
+ }
}
event.Wait(rtc::Event::kForever);
@@ -957,7 +1004,7 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state,
}
void VideoReceiveStream2::GenerateKeyFrame() {
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
RequestKeyFrame(clock_->TimeInMilliseconds());
keyframe_generation_requested_ = true;
}
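Taken together with the destructor checks added above (`RTC_DCHECK(!media_receiver_)` / `RTC_DCHECK(!rtx_receiver_)`), the new RegisterWithTransport()/UnregisterFromTransport() pair implies a strict lifetime order that the stream's owner must follow. A minimal sketch of that order, assuming a hypothetical owner with access to the receiver controller (names and elided arguments are illustrative, not part of the patch):

    // Hypothetical owner code; mirrors the contract documented in
    // video_receive_stream2.h below.
    auto stream = std::make_unique<VideoReceiveStream2>(/* ctor args */);
    // On the packet/network sequence:
    stream->RegisterWithTransport(&receiver_controller);
    stream->Start();
    // ... stream in use ...
    stream->Stop();
    // Still on the packet/network sequence, before destruction:
    stream->UnregisterFromTransport();
    // Destruction then happens on the worker thread.
    stream.reset();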
diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h
index c22ce1c027..8fd995084e 100644
--- a/video/video_receive_stream2.h
+++ b/video/video_receive_stream2.h
@@ -18,6 +18,7 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/units/timestamp.h"
#include "api/video/recordable_encoded_frame.h"
+#include "call/call.h"
#include "call/rtp_packet_sink_interface.h"
#include "call/syncable.h"
#include "call/video_receive_stream.h"
@@ -75,12 +76,13 @@ struct VideoFrameMetaData {
const Timestamp decode_timestamp;
};
-class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
- public rtc::VideoSinkInterface<VideoFrame>,
- public NackSender,
- public OnCompleteFrameCallback,
- public Syncable,
- public CallStatsObserver {
+class VideoReceiveStream2
+ : public webrtc::VideoReceiveStream,
+ public rtc::VideoSinkInterface<VideoFrame>,
+ public NackSender,
+ public RtpVideoStreamReceiver2::OnCompleteFrameCallback,
+ public Syncable,
+ public CallStatsObserver {
public:
// The default number of milliseconds to pass before re-requesting a key frame
// to be sent.
@@ -90,8 +92,7 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
static constexpr size_t kBufferedEncodedFramesMaxSize = 60;
VideoReceiveStream2(TaskQueueFactory* task_queue_factory,
- TaskQueueBase* current_queue,
- RtpStreamReceiverControllerInterface* receiver_controller,
+ Call* call,
int num_cpu_cores,
PacketRouter* packet_router,
VideoReceiveStream::Config config,
@@ -99,8 +100,22 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
CallStats* call_stats,
Clock* clock,
VCMTiming* timing);
+ // Destruction happens on the worker thread. Prior to destruction, the
+ // caller must ensure that any registration with the transport has been
+ // cleared. See `RegisterWithTransport` for details.
+ // TODO(tommi): As a further improvement, performing the full destruction on
+ // the network thread could be made the default.
~VideoReceiveStream2() override;
+ // Called on `packet_sequence_checker_` to register/unregister with the
+ // network transport.
+ void RegisterWithTransport(
+ RtpStreamReceiverControllerInterface* receiver_controller);
+ // If registration has previously been done (via `RegisterWithTransport`),
+ // then `UnregisterFromTransport` must be called prior to destruction, on the
+ // network thread.
+ void UnregisterFromTransport();
+
const Config& config() const { return config_; }
void SignalNetworkState(NetworkState state);
@@ -112,6 +127,8 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
void Start() override;
void Stop() override;
+ const RtpConfig& rtp_config() const override { return config_.rtp; }
+
webrtc::VideoReceiveStream::Stats GetStats() const override;
// SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called
@@ -134,7 +151,7 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
void SendNack(const std::vector<uint16_t>& sequence_numbers,
bool buffering_allowed) override;
- // Implements OnCompleteFrameCallback.
+ // Implements RtpVideoStreamReceiver2::OnCompleteFrameCallback.
void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override;
// Implements CallStatsObserver::OnRttUpdate
@@ -164,18 +181,18 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
void HandleEncodedFrame(std::unique_ptr<EncodedFrame> frame)
RTC_RUN_ON(decode_queue_);
void HandleFrameBufferTimeout(int64_t now_ms, int64_t wait_ms)
- RTC_RUN_ON(worker_sequence_checker_);
+ RTC_RUN_ON(packet_sequence_checker_);
void UpdatePlayoutDelays() const
RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_sequence_checker_);
void RequestKeyFrame(int64_t timestamp_ms)
- RTC_RUN_ON(worker_sequence_checker_);
+ RTC_RUN_ON(packet_sequence_checker_);
void HandleKeyFrameGeneration(bool received_frame_is_keyframe,
int64_t now_ms,
bool always_request_key_frame,
bool keyframe_request_is_due)
- RTC_RUN_ON(worker_sequence_checker_);
+ RTC_RUN_ON(packet_sequence_checker_);
bool IsReceivingKeyFrame(int64_t timestamp_ms) const
- RTC_RUN_ON(worker_sequence_checker_);
+ RTC_RUN_ON(packet_sequence_checker_);
int DecodeAndMaybeDispatchEncodedFrame(std::unique_ptr<EncodedFrame> frame)
RTC_RUN_ON(decode_queue_);
@@ -183,13 +200,21 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_;
RTC_NO_UNIQUE_ADDRESS SequenceChecker module_process_sequence_checker_;
+ // TODO(bugs.webrtc.org/11993): This checker conceptually represents
+ // operations that belong to the network thread. The Call class is currently
+ // moving towards handling network packets on the network thread, and while
+ // that work is ongoing, this checker may in practice represent the worker
+ // thread. It still serves as a mechanism for grouping together concepts
+ // that belong to the network thread. Once packets are fully delivered on
+ // the network thread, this comment will be deleted.
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_;
TaskQueueFactory* const task_queue_factory_;
TransportAdapter transport_adapter_;
const VideoReceiveStream::Config config_;
const int num_cpu_cores_;
- TaskQueueBase* const worker_thread_;
+ Call* const call_;
Clock* const clock_;
CallStats* const call_stats_;
@@ -217,9 +242,12 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
// Members for the new jitter buffer experiment.
std::unique_ptr<video_coding::FrameBuffer> frame_buffer_;
- std::unique_ptr<RtpStreamReceiverInterface> media_receiver_;
- std::unique_ptr<RtxReceiveStream> rtx_receive_stream_;
- std::unique_ptr<RtpStreamReceiverInterface> rtx_receiver_;
+ std::unique_ptr<RtpStreamReceiverInterface> media_receiver_
+ RTC_GUARDED_BY(packet_sequence_checker_);
+ std::unique_ptr<RtxReceiveStream> rtx_receive_stream_
+ RTC_GUARDED_BY(packet_sequence_checker_);
+ std::unique_ptr<RtpStreamReceiverInterface> rtx_receiver_
+ RTC_GUARDED_BY(packet_sequence_checker_);
// Whenever we are in an undecodable state (stream has just started or due to
// a decoding error) we require a keyframe to restart the stream.
@@ -258,7 +286,7 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
std::function<void(const RecordableEncodedFrame&)>
encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_);
// Set to true while we're requesting keyframes but not yet received one.
- bool keyframe_generation_requested_ RTC_GUARDED_BY(worker_sequence_checker_) =
+ bool keyframe_generation_requested_ RTC_GUARDED_BY(packet_sequence_checker_) =
false;
// Lock to avoid unnecessary per-frame idle wakeups in the code.
webrtc::Mutex pending_resolution_mutex_;
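The RTC_GUARDED_BY(packet_sequence_checker_) annotations above pair with the scoped RTC_DCHECK_RUN_ON blocks added in Start()/Stop(): guarded members may only be touched inside a scope that asserts the checker. A stand-alone sketch of the pattern in isolation (class, member names, and header paths here are assumptions for illustration, not taken from the patch):

    #include "api/sequence_checker.h"         // SequenceChecker, RTC_DCHECK_RUN_ON
    #include "rtc_base/thread_annotations.h"  // RTC_GUARDED_BY

    class Receiver {
     public:
      void Touch() {
        // The annotation analysis requires an active assertion on the checker
        // before `counter_` may be read or written, which is why the patch
        // wraps individual calls in brace-scoped RTC_DCHECK_RUN_ON blocks.
        RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
        ++counter_;
      }

     private:
      webrtc::SequenceChecker packet_sequence_checker_;
      int counter_ RTC_GUARDED_BY(packet_sequence_checker_) = 0;
    };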
diff --git a/video/video_receive_stream2_unittest.cc b/video/video_receive_stream2_unittest.cc
index 7a23112119..a37a5defb2 100644
--- a/video/video_receive_stream2_unittest.cc
+++ b/video/video_receive_stream2_unittest.cc
@@ -23,6 +23,7 @@
#include "call/rtp_stream_receiver_controller.h"
#include "common_video/test/utilities.h"
#include "media/base/fake_video_renderer.h"
+#include "media/engine/fake_webrtc_call.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/utility/include/process_thread.h"
@@ -110,16 +111,19 @@ class VideoReceiveStream2Test : public ::testing::Test {
VideoReceiveStream2Test()
: process_thread_(ProcessThread::Create("TestThread")),
task_queue_factory_(CreateDefaultTaskQueueFactory()),
- config_(&mock_transport_),
- call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()),
- h264_decoder_factory_(&mock_h264_video_decoder_) {}
+ h264_decoder_factory_(&mock_h264_video_decoder_),
+ config_(&mock_transport_, &h264_decoder_factory_),
+ call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()) {}
+ ~VideoReceiveStream2Test() override {
+ if (video_receive_stream_)
+ video_receive_stream_->UnregisterFromTransport();
+ }
- void SetUp() {
+ void SetUp() override {
constexpr int kDefaultNumCpuCores = 2;
config_.rtp.remote_ssrc = 1111;
config_.rtp.local_ssrc = 2222;
config_.renderer = &fake_renderer_;
- config_.decoder_factory = &h264_decoder_factory_;
VideoReceiveStream::Decoder h264_decoder;
h264_decoder.payload_type = 99;
h264_decoder.video_format = SdpVideoFormat("H264");
@@ -133,21 +137,23 @@ class VideoReceiveStream2Test : public ::testing::Test {
video_receive_stream_ =
std::make_unique<webrtc::internal::VideoReceiveStream2>(
- task_queue_factory_.get(), loop_.task_queue(),
- &rtp_stream_receiver_controller_, kDefaultNumCpuCores,
+ task_queue_factory_.get(), &fake_call_, kDefaultNumCpuCores,
&packet_router_, config_.Copy(), process_thread_.get(),
&call_stats_, clock_, timing_);
+ video_receive_stream_->RegisterWithTransport(
+ &rtp_stream_receiver_controller_);
}
protected:
test::RunLoop loop_;
std::unique_ptr<ProcessThread> process_thread_;
const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ test::VideoDecoderProxyFactory h264_decoder_factory_;
VideoReceiveStream::Config config_;
internal::CallStats call_stats_;
MockVideoDecoder mock_h264_video_decoder_;
- test::VideoDecoderProxyFactory h264_decoder_factory_;
cricket::FakeVideoRenderer fake_renderer_;
+ cricket::FakeCall fake_call_;
MockTransport mock_transport_;
PacketRouter packet_router_;
RtpStreamReceiverController rtp_stream_receiver_controller_;
@@ -286,14 +292,17 @@ class VideoReceiveStream2TestWithFakeDecoder : public ::testing::Test {
[]() { return std::make_unique<test::FakeDecoder>(); }),
process_thread_(ProcessThread::Create("TestThread")),
task_queue_factory_(CreateDefaultTaskQueueFactory()),
- config_(&mock_transport_),
+ config_(&mock_transport_, &fake_decoder_factory_),
call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()) {}
+ ~VideoReceiveStream2TestWithFakeDecoder() override {
+ if (video_receive_stream_)
+ video_receive_stream_->UnregisterFromTransport();
+ }
- void SetUp() {
+ void SetUp() override {
config_.rtp.remote_ssrc = 1111;
config_.rtp.local_ssrc = 2222;
config_.renderer = &fake_renderer_;
- config_.decoder_factory = &fake_decoder_factory_;
VideoReceiveStream::Decoder fake_decoder;
fake_decoder.payload_type = 99;
fake_decoder.video_format = SdpVideoFormat("VP8");
@@ -304,12 +313,17 @@ class VideoReceiveStream2TestWithFakeDecoder : public ::testing::Test {
void ReCreateReceiveStream(VideoReceiveStream::RecordingState state) {
constexpr int kDefaultNumCpuCores = 2;
- video_receive_stream_ = nullptr;
+ if (video_receive_stream_) {
+ video_receive_stream_->UnregisterFromTransport();
+ video_receive_stream_ = nullptr;
+ }
timing_ = new VCMTiming(clock_);
video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream2(
- task_queue_factory_.get(), loop_.task_queue(),
- &rtp_stream_receiver_controller_, kDefaultNumCpuCores, &packet_router_,
- config_.Copy(), process_thread_.get(), &call_stats_, clock_, timing_));
+ task_queue_factory_.get(), &fake_call_, kDefaultNumCpuCores,
+ &packet_router_, config_.Copy(), process_thread_.get(), &call_stats_,
+ clock_, timing_));
+ video_receive_stream_->RegisterWithTransport(
+ &rtp_stream_receiver_controller_);
video_receive_stream_->SetAndGetRecordingState(std::move(state), false);
}
@@ -324,6 +338,7 @@ class VideoReceiveStream2TestWithFakeDecoder : public ::testing::Test {
MockTransport mock_transport_;
PacketRouter packet_router_;
RtpStreamReceiverController rtp_stream_receiver_controller_;
+ cricket::FakeCall fake_call_;
std::unique_ptr<webrtc::internal::VideoReceiveStream2> video_receive_stream_;
Clock* clock_;
VCMTiming* timing_;
@@ -388,16 +403,16 @@ TEST_F(VideoReceiveStream2TestWithFakeDecoder, RenderedFrameUpdatesGetSources) {
info.set_csrcs({kCsrc});
info.set_rtp_timestamp(kRtpTimestamp);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 5000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(5000));
infos.push_back(info);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 3000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(3000));
infos.push_back(info);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 2000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(2000));
infos.push_back(info);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 4000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(1000));
infos.push_back(info);
packet_infos = RtpPacketInfos(std::move(infos));
@@ -544,12 +559,11 @@ class VideoReceiveStream2TestWithSimulatedClock
Transport* transport,
VideoDecoderFactory* decoder_factory,
rtc::VideoSinkInterface<webrtc::VideoFrame>* renderer) {
- VideoReceiveStream::Config config(transport);
+ VideoReceiveStream::Config config(transport, decoder_factory);
config.rtp.remote_ssrc = 1111;
config.rtp.local_ssrc = 2222;
config.rtp.nack.rtp_history_ms = GetParam(); // rtx-time.
config.renderer = renderer;
- config.decoder_factory = decoder_factory;
VideoReceiveStream::Decoder fake_decoder;
fake_decoder.payload_type = 99;
fake_decoder.video_format = SdpVideoFormat("VP8");
@@ -568,8 +582,7 @@ class VideoReceiveStream2TestWithSimulatedClock
&fake_renderer_)),
call_stats_(time_controller_.GetClock(), loop_.task_queue()),
video_receive_stream_(time_controller_.GetTaskQueueFactory(),
- loop_.task_queue(),
- &rtp_stream_receiver_controller_,
+ &fake_call_,
/*num_cores=*/2,
&packet_router_,
config_.Copy(),
@@ -577,9 +590,15 @@ class VideoReceiveStream2TestWithSimulatedClock
&call_stats_,
time_controller_.GetClock(),
new VCMTiming(time_controller_.GetClock())) {
+ video_receive_stream_.RegisterWithTransport(
+ &rtp_stream_receiver_controller_);
video_receive_stream_.Start();
}
+ ~VideoReceiveStream2TestWithSimulatedClock() override {
+ video_receive_stream_.UnregisterFromTransport();
+ }
+
void OnFrameDecoded() { event_->Set(); }
void PassEncodedFrameAndWait(std::unique_ptr<EncodedFrame> frame) {
@@ -597,6 +616,7 @@ class VideoReceiveStream2TestWithSimulatedClock
std::unique_ptr<ProcessThread> process_thread_;
MockTransport mock_transport_;
FakeRenderer fake_renderer_;
+ cricket::FakeCall fake_call_;
VideoReceiveStream::Config config_;
internal::CallStats call_stats_;
PacketRouter packet_router_;
@@ -711,17 +731,20 @@ class VideoReceiveStream2TestWithLazyDecoderCreation : public ::testing::Test {
VideoReceiveStream2TestWithLazyDecoderCreation()
: process_thread_(ProcessThread::Create("TestThread")),
task_queue_factory_(CreateDefaultTaskQueueFactory()),
- config_(&mock_transport_),
+ config_(&mock_transport_, &mock_h264_decoder_factory_),
call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()) {}
- void SetUp() {
+ ~VideoReceiveStream2TestWithLazyDecoderCreation() override {
+ video_receive_stream_->UnregisterFromTransport();
+ }
+
+ void SetUp() override {
webrtc::test::ScopedFieldTrials field_trials(
"WebRTC-PreStreamDecoders/max:0/");
constexpr int kDefaultNumCpuCores = 2;
config_.rtp.remote_ssrc = 1111;
config_.rtp.local_ssrc = 2222;
config_.renderer = &fake_renderer_;
- config_.decoder_factory = &mock_h264_decoder_factory_;
VideoReceiveStream::Decoder h264_decoder;
h264_decoder.payload_type = 99;
h264_decoder.video_format = SdpVideoFormat("H264");
@@ -735,21 +758,23 @@ class VideoReceiveStream2TestWithLazyDecoderCreation : public ::testing::Test {
video_receive_stream_ =
std::make_unique<webrtc::internal::VideoReceiveStream2>(
- task_queue_factory_.get(), loop_.task_queue(),
- &rtp_stream_receiver_controller_, kDefaultNumCpuCores,
+ task_queue_factory_.get(), &fake_call_, kDefaultNumCpuCores,
&packet_router_, config_.Copy(), process_thread_.get(),
&call_stats_, clock_, timing_);
+ video_receive_stream_->RegisterWithTransport(
+ &rtp_stream_receiver_controller_);
}
protected:
test::RunLoop loop_;
std::unique_ptr<ProcessThread> process_thread_;
const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ MockVideoDecoderFactory mock_h264_decoder_factory_;
VideoReceiveStream::Config config_;
internal::CallStats call_stats_;
MockVideoDecoder mock_h264_video_decoder_;
- MockVideoDecoderFactory mock_h264_decoder_factory_;
cricket::FakeVideoRenderer fake_renderer_;
+ cricket::FakeCall fake_call_;
MockTransport mock_transport_;
PacketRouter packet_router_;
RtpStreamReceiverController rtp_stream_receiver_controller_;
diff --git a/video/video_receive_stream_unittest.cc b/video/video_receive_stream_unittest.cc
index e69820e36c..cb14f7dc06 100644
--- a/video/video_receive_stream_unittest.cc
+++ b/video/video_receive_stream_unittest.cc
@@ -96,16 +96,15 @@ class VideoReceiveStreamTest : public ::testing::Test {
VideoReceiveStreamTest()
: process_thread_(ProcessThread::Create("TestThread")),
task_queue_factory_(CreateDefaultTaskQueueFactory()),
- config_(&mock_transport_),
- call_stats_(Clock::GetRealTimeClock(), process_thread_.get()),
- h264_decoder_factory_(&mock_h264_video_decoder_) {}
+ h264_decoder_factory_(&mock_h264_video_decoder_),
+ config_(&mock_transport_, &h264_decoder_factory_),
+ call_stats_(Clock::GetRealTimeClock(), process_thread_.get()) {}
void SetUp() {
constexpr int kDefaultNumCpuCores = 2;
config_.rtp.remote_ssrc = 1111;
config_.rtp.local_ssrc = 2222;
config_.renderer = &fake_renderer_;
- config_.decoder_factory = &h264_decoder_factory_;
VideoReceiveStream::Decoder h264_decoder;
h264_decoder.payload_type = 99;
h264_decoder.video_format = SdpVideoFormat("H264");
@@ -126,10 +125,10 @@ class VideoReceiveStreamTest : public ::testing::Test {
protected:
std::unique_ptr<ProcessThread> process_thread_;
const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ test::VideoDecoderProxyFactory h264_decoder_factory_;
VideoReceiveStream::Config config_;
CallStats call_stats_;
MockVideoDecoder mock_h264_video_decoder_;
- test::VideoDecoderProxyFactory h264_decoder_factory_;
cricket::FakeVideoRenderer fake_renderer_;
MockTransport mock_transport_;
PacketRouter packet_router_;
@@ -235,14 +234,13 @@ class VideoReceiveStreamTestWithFakeDecoder : public ::testing::Test {
[]() { return std::make_unique<test::FakeDecoder>(); }),
process_thread_(ProcessThread::Create("TestThread")),
task_queue_factory_(CreateDefaultTaskQueueFactory()),
- config_(&mock_transport_),
+ config_(&mock_transport_, &fake_decoder_factory_),
call_stats_(Clock::GetRealTimeClock(), process_thread_.get()) {}
void SetUp() {
config_.rtp.remote_ssrc = 1111;
config_.rtp.local_ssrc = 2222;
config_.renderer = &fake_renderer_;
- config_.decoder_factory = &fake_decoder_factory_;
VideoReceiveStream::Decoder fake_decoder;
fake_decoder.payload_type = 99;
fake_decoder.video_format = SdpVideoFormat("VP8");
@@ -336,16 +334,16 @@ TEST_F(VideoReceiveStreamTestWithFakeDecoder, RenderedFrameUpdatesGetSources) {
info.set_csrcs({kCsrc});
info.set_rtp_timestamp(kRtpTimestamp);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 5000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(5000));
infos.push_back(info);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 3000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(3000));
infos.push_back(info);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 2000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(2000));
infos.push_back(info);
- info.set_receive_time_ms(clock_->TimeInMilliseconds() - 4000);
+ info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(4000));
infos.push_back(info);
packet_infos = RtpPacketInfos(std::move(infos));
diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc
index 91c246c66e..591a8d58d8 100644
--- a/video/video_send_stream.cc
+++ b/video/video_send_stream.cc
@@ -23,7 +23,6 @@
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
#include "video/adaptation/overuse_frame_detector.h"
-#include "video/video_send_stream_impl.h"
#include "video/video_stream_encoder.h"
namespace webrtc {
@@ -65,7 +64,10 @@ VideoStreamEncoder::BitrateAllocationCallbackType
GetBitrateAllocationCallbackType(const VideoSendStream::Config& config) {
if (webrtc::RtpExtension::FindHeaderExtensionByUri(
config.rtp.extensions,
- webrtc::RtpExtension::kVideoLayersAllocationUri)) {
+ webrtc::RtpExtension::kVideoLayersAllocationUri,
+ config.crypto_options.srtp.enable_encrypted_rtp_header_extensions
+ ? RtpExtension::Filter::kPreferEncryptedExtension
+ : RtpExtension::Filter::kDiscardEncryptedExtension)) {
return VideoStreamEncoder::BitrateAllocationCallbackType::
kVideoLayersAllocation;
}
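With this change the extension lookup has to state explicitly whether encrypted header extensions are acceptable matches, and the choice follows the stream's SRTP crypto options. As a small illustrative refactor (not part of the patch), the selection could be factored into a helper in the same anonymous namespace:

    // Hypothetical helper, for illustration only: pick the extension filter
    // based on whether encrypted RTP header extensions are enabled.
    RtpExtension::Filter HeaderExtensionFilter(
        const VideoSendStream::Config& config) {
      return config.crypto_options.srtp.enable_encrypted_rtp_header_extensions
                 ? RtpExtension::Filter::kPreferEncryptedExtension
                 : RtpExtension::Filter::kDiscardEncryptedExtension;
    }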
@@ -77,6 +79,32 @@ GetBitrateAllocationCallbackType(const VideoSendStream::Config& config) {
kVideoBitrateAllocationWhenScreenSharing;
}
+RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig(
+ const VideoSendStream::Config* config) {
+ RtpSenderFrameEncryptionConfig frame_encryption_config;
+ frame_encryption_config.frame_encryptor = config->frame_encryptor;
+ frame_encryption_config.crypto_options = config->crypto_options;
+ return frame_encryption_config;
+}
+
+RtpSenderObservers CreateObservers(RtcpRttStats* call_stats,
+ EncoderRtcpFeedback* encoder_feedback,
+ SendStatisticsProxy* stats_proxy,
+ SendDelayStats* send_delay_stats) {
+ RtpSenderObservers observers;
+ observers.rtcp_rtt_stats = call_stats;
+ observers.intra_frame_callback = encoder_feedback;
+ observers.rtcp_loss_notification_observer = encoder_feedback;
+ observers.report_block_data_observer = stats_proxy;
+ observers.rtp_stats = stats_proxy;
+ observers.bitrate_observer = stats_proxy;
+ observers.frame_count_observer = stats_proxy;
+ observers.rtcp_type_observer = stats_proxy;
+ observers.send_delay_observer = stats_proxy;
+ observers.send_packet_observer = send_delay_stats;
+ return observers;
+}
+
} // namespace
namespace internal {
@@ -96,46 +124,65 @@ VideoSendStream::VideoSendStream(
const std::map<uint32_t, RtpState>& suspended_ssrcs,
const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
std::unique_ptr<FecController> fec_controller)
- : worker_queue_(transport->GetWorkerQueue()),
+ : rtp_transport_queue_(transport->GetWorkerQueue()),
+ transport_(transport),
stats_proxy_(clock, config, encoder_config.content_type),
config_(std::move(config)),
- content_type_(encoder_config.content_type) {
+ content_type_(encoder_config.content_type),
+ video_stream_encoder_(std::make_unique<VideoStreamEncoder>(
+ clock,
+ num_cpu_cores,
+ &stats_proxy_,
+ config_.encoder_settings,
+ std::make_unique<OveruseFrameDetector>(&stats_proxy_),
+ task_queue_factory,
+ GetBitrateAllocationCallbackType(config_))),
+ encoder_feedback_(
+ clock,
+ config_.rtp.ssrcs,
+ video_stream_encoder_.get(),
+ [this](uint32_t ssrc, const std::vector<uint16_t>& seq_nums) {
+ return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums);
+ }),
+ rtp_video_sender_(
+ transport->CreateRtpVideoSender(suspended_ssrcs,
+ suspended_payload_states,
+ config_.rtp,
+ config_.rtcp_report_interval_ms,
+ config_.send_transport,
+ CreateObservers(call_stats,
+ &encoder_feedback_,
+ &stats_proxy_,
+ send_delay_stats),
+ event_log,
+ std::move(fec_controller),
+ CreateFrameEncryptionConfig(&config_),
+ config_.frame_transformer)),
+ send_stream_(clock,
+ &stats_proxy_,
+ rtp_transport_queue_,
+ transport,
+ bitrate_allocator,
+ video_stream_encoder_.get(),
+ &config_,
+ encoder_config.max_bitrate_bps,
+ encoder_config.bitrate_priority,
+ encoder_config.content_type,
+ rtp_video_sender_) {
RTC_DCHECK(config_.encoder_settings.encoder_factory);
RTC_DCHECK(config_.encoder_settings.bitrate_allocator_factory);
- video_stream_encoder_ = std::make_unique<VideoStreamEncoder>(
- clock, num_cpu_cores, &stats_proxy_, config_.encoder_settings,
- std::make_unique<OveruseFrameDetector>(&stats_proxy_), task_queue_factory,
- GetBitrateAllocationCallbackType(config_));
-
- // TODO(srte): Initialization should not be done posted on a task queue.
- // Note that the posted task must not outlive this scope since the closure
- // references local variables.
- worker_queue_->PostTask(ToQueuedTask(
- [this, clock, call_stats, transport, bitrate_allocator, send_delay_stats,
- event_log, &suspended_ssrcs, &encoder_config, &suspended_payload_states,
- &fec_controller]() {
- send_stream_.reset(new VideoSendStreamImpl(
- clock, &stats_proxy_, worker_queue_, call_stats, transport,
- bitrate_allocator, send_delay_stats, video_stream_encoder_.get(),
- event_log, &config_, encoder_config.max_bitrate_bps,
- encoder_config.bitrate_priority, suspended_ssrcs,
- suspended_payload_states, encoder_config.content_type,
- std::move(fec_controller)));
- },
- [this]() { thread_sync_event_.Set(); }));
-
- // Wait for ConstructionTask to complete so that |send_stream_| can be used.
- // |module_process_thread| must be registered and deregistered on the thread
- // it was created on.
- thread_sync_event_.Wait(rtc::Event::kForever);
- send_stream_->RegisterProcessThread(module_process_thread);
+ video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_);
+
+ rtp_video_sender_->RegisterProcessThread(module_process_thread);
ReconfigureVideoEncoder(std::move(encoder_config));
}
VideoSendStream::~VideoSendStream() {
RTC_DCHECK_RUN_ON(&thread_checker_);
- RTC_DCHECK(!send_stream_);
+ RTC_DCHECK(!running_);
+ rtp_video_sender_->DeRegisterProcessThread();
+ transport_->DestroyRtpVideoSender(rtp_video_sender_);
}
void VideoSendStream::UpdateActiveSimulcastLayers(
@@ -158,35 +205,43 @@ void VideoSendStream::UpdateActiveSimulcastLayers(
RTC_LOG(LS_INFO) << "UpdateActiveSimulcastLayers: "
<< active_layers_string.str();
- VideoSendStreamImpl* send_stream = send_stream_.get();
- worker_queue_->PostTask([this, send_stream, active_layers] {
- send_stream->UpdateActiveSimulcastLayers(active_layers);
- thread_sync_event_.Set();
- });
-
- thread_sync_event_.Wait(rtc::Event::kForever);
+ rtp_transport_queue_->PostTask(
+ ToQueuedTask(transport_queue_safety_, [this, active_layers] {
+ send_stream_.UpdateActiveSimulcastLayers(active_layers);
+ }));
}
void VideoSendStream::Start() {
RTC_DCHECK_RUN_ON(&thread_checker_);
- RTC_LOG(LS_INFO) << "VideoSendStream::Start";
- VideoSendStreamImpl* send_stream = send_stream_.get();
- worker_queue_->PostTask([this, send_stream] {
- send_stream->Start();
+ RTC_DLOG(LS_INFO) << "VideoSendStream::Start";
+ if (running_)
+ return;
+
+ running_ = true;
+
+ rtp_transport_queue_->PostTask(ToQueuedTask([this] {
+ transport_queue_safety_->SetAlive();
+ send_stream_.Start();
thread_sync_event_.Set();
- });
+ }));
// It is expected that after VideoSendStream::Start has been called, incoming
// frames are not dropped in VideoStreamEncoder. To ensure this, Start has to
// be synchronized.
+ // TODO(tommi): ^^^ Validate if this still holds.
thread_sync_event_.Wait(rtc::Event::kForever);
}
void VideoSendStream::Stop() {
RTC_DCHECK_RUN_ON(&thread_checker_);
- RTC_LOG(LS_INFO) << "VideoSendStream::Stop";
- VideoSendStreamImpl* send_stream = send_stream_.get();
- worker_queue_->PostTask([send_stream] { send_stream->Stop(); });
+ if (!running_)
+ return;
+ RTC_DLOG(LS_INFO) << "VideoSendStream::Stop";
+ running_ = false;
+ rtp_transport_queue_->PostTask(ToQueuedTask(transport_queue_safety_, [this] {
+ transport_queue_safety_->SetNotAlive();
+ send_stream_.Stop();
+ }));
}
void VideoSendStream::AddAdaptationResource(
@@ -209,10 +264,8 @@ void VideoSendStream::SetSource(
}
void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
- // TODO(perkj): Some test cases in VideoSendStreamTest call
- // ReconfigureVideoEncoder from the network thread.
- // RTC_DCHECK_RUN_ON(&thread_checker_);
- RTC_DCHECK(content_type_ == config.content_type);
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK_EQ(content_type_, config.content_type);
video_stream_encoder_->ConfigureEncoder(
std::move(config),
config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp));
@@ -226,7 +279,7 @@ VideoSendStream::Stats VideoSendStream::GetStats() {
}
absl::optional<float> VideoSendStream::GetPacingFactorOverride() const {
- return send_stream_->configured_pacing_factor_;
+ return send_stream_.configured_pacing_factor();
}
void VideoSendStream::StopPermanentlyAndGetRtpStates(
@@ -234,12 +287,16 @@ void VideoSendStream::StopPermanentlyAndGetRtpStates(
VideoSendStream::RtpPayloadStateMap* payload_state_map) {
RTC_DCHECK_RUN_ON(&thread_checker_);
video_stream_encoder_->Stop();
- send_stream_->DeRegisterProcessThread();
- worker_queue_->PostTask([this, rtp_state_map, payload_state_map]() {
- send_stream_->Stop();
- *rtp_state_map = send_stream_->GetRtpStates();
- *payload_state_map = send_stream_->GetRtpPayloadStates();
- send_stream_.reset();
+
+ running_ = false;
+ // Always run these cleanup steps regardless of whether running_ was set
+ // or not. This will unregister callbacks before destruction.
+ // See `VideoSendStreamImpl::StopVideoSendStream` for more.
+ rtp_transport_queue_->PostTask([this, rtp_state_map, payload_state_map]() {
+ transport_queue_safety_->SetNotAlive();
+ send_stream_.Stop();
+ *rtp_state_map = send_stream_.GetRtpStates();
+ *payload_state_map = send_stream_.GetRtpPayloadStates();
thread_sync_event_.Set();
});
thread_sync_event_.Wait(rtc::Event::kForever);
@@ -247,7 +304,7 @@ void VideoSendStream::StopPermanentlyAndGetRtpStates(
void VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
// Called on a network thread.
- send_stream_->DeliverRtcp(packet, length);
+ send_stream_.DeliverRtcp(packet, length);
}
} // namespace internal
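The reworked Start()/Stop() path above relies on PendingTaskSafetyFlag so that work queued on the RTP transport queue is dropped once the stream has been stopped. A rough, self-contained sketch of that pattern, using only helpers already visible in this patch (the class and member names are assumptions for illustration, not WebRTC API):

    #include "rtc_base/task_queue.h"
    #include "rtc_base/task_utils/pending_task_safety_flag.h"
    #include "rtc_base/task_utils/to_queued_task.h"

    class QueueUser {
     public:
      explicit QueueUser(rtc::TaskQueue* queue) : queue_(queue) {}

      void Start() {
        queue_->PostTask(webrtc::ToQueuedTask([this] {
          safety_->SetAlive();  // guarded tasks posted after this may run
          // ... start work on the queue ...
        }));
      }

      void Stop() {
        // Guarding with `safety_` means this task is skipped if the flag has
        // already been marked not-alive (e.g. by a racing teardown).
        queue_->PostTask(webrtc::ToQueuedTask(safety_, [this] {
          safety_->SetNotAlive();  // later guarded tasks become no-ops
          // ... stop work on the queue ...
        }));
      }

     private:
      rtc::TaskQueue* const queue_;
      rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety_ =
          webrtc::PendingTaskSafetyFlag::CreateDetached();
    };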
diff --git a/video/video_send_stream.h b/video/video_send_stream.h
index e36f279ca6..7e89c46abd 100644
--- a/video/video_send_stream.h
+++ b/video/video_send_stream.h
@@ -22,9 +22,13 @@
#include "call/video_receive_stream.h"
#include "call/video_send_stream.h"
#include "rtc_base/event.h"
+#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "video/encoder_rtcp_feedback.h"
#include "video/send_delay_stats.h"
#include "video/send_statistics_proxy.h"
+#include "video/video_send_stream_impl.h"
namespace webrtc {
namespace test {
@@ -45,8 +49,7 @@ class VideoSendStreamImpl;
// VideoSendStream implements webrtc::VideoSendStream.
// Internally, it delegates all public methods to VideoSendStreamImpl and / or
-// VideoStreamEncoder. VideoSendStreamInternal is created and deleted on
-// |worker_queue|.
+// VideoStreamEncoder.
class VideoSendStream : public webrtc::VideoSendStream {
public:
using RtpStateMap = std::map<uint32_t, RtpState>;
@@ -97,15 +100,21 @@ class VideoSendStream : public webrtc::VideoSendStream {
absl::optional<float> GetPacingFactorOverride() const;
- SequenceChecker thread_checker_;
- rtc::TaskQueue* const worker_queue_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_;
+ rtc::TaskQueue* const rtp_transport_queue_;
+ RtpTransportControllerSendInterface* const transport_;
rtc::Event thread_sync_event_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> transport_queue_safety_ =
+ PendingTaskSafetyFlag::CreateDetached();
SendStatisticsProxy stats_proxy_;
const VideoSendStream::Config config_;
const VideoEncoderConfig::ContentType content_type_;
- std::unique_ptr<VideoSendStreamImpl> send_stream_;
std::unique_ptr<VideoStreamEncoderInterface> video_stream_encoder_;
+ EncoderRtcpFeedback encoder_feedback_;
+ RtpVideoSenderInterface* const rtp_video_sender_ = nullptr;
+ VideoSendStreamImpl send_stream_;
+ bool running_ RTC_GUARDED_BY(thread_checker_) = false;
};
} // namespace internal
diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc
index b4adc135ec..3fc6b676dc 100644
--- a/video/video_send_stream_impl.cc
+++ b/video/video_send_stream_impl.cc
@@ -33,6 +33,7 @@
#include "rtc_base/experiments/rate_control_settings.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
@@ -130,33 +131,6 @@ int CalculateMaxPadBitrateBps(const std::vector<VideoStream>& streams,
return pad_up_to_bitrate_bps;
}
-RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig(
- const VideoSendStream::Config* config) {
- RtpSenderFrameEncryptionConfig frame_encryption_config;
- frame_encryption_config.frame_encryptor = config->frame_encryptor;
- frame_encryption_config.crypto_options = config->crypto_options;
- return frame_encryption_config;
-}
-
-RtpSenderObservers CreateObservers(RtcpRttStats* call_stats,
- EncoderRtcpFeedback* encoder_feedback,
- SendStatisticsProxy* stats_proxy,
- SendDelayStats* send_delay_stats) {
- RtpSenderObservers observers;
- observers.rtcp_rtt_stats = call_stats;
- observers.intra_frame_callback = encoder_feedback;
- observers.rtcp_loss_notification_observer = encoder_feedback;
- observers.rtcp_stats = stats_proxy;
- observers.report_block_data_observer = stats_proxy;
- observers.rtp_stats = stats_proxy;
- observers.bitrate_observer = stats_proxy;
- observers.frame_count_observer = stats_proxy;
- observers.rtcp_type_observer = stats_proxy;
- observers.send_delay_observer = stats_proxy;
- observers.send_packet_observer = send_delay_stats;
- return observers;
-}
-
absl::optional<AlrExperimentSettings> GetAlrSettings(
VideoEncoderConfig::ContentType content_type) {
if (content_type == VideoEncoderConfig::ContentType::kScreen) {
@@ -178,6 +152,44 @@ bool SameStreamsEnabled(const VideoBitrateAllocation& lhs,
}
return true;
}
+
+// Returns an optional that has value iff TransportSeqNumExtensionConfigured
+// is `true` for the given video send stream config.
+absl::optional<float> GetConfiguredPacingFactor(
+ const VideoSendStream::Config& config,
+ VideoEncoderConfig::ContentType content_type,
+ const PacingConfig& default_pacing_config) {
+ if (!TransportSeqNumExtensionConfigured(config))
+ return absl::nullopt;
+
+ absl::optional<AlrExperimentSettings> alr_settings =
+ GetAlrSettings(content_type);
+ if (alr_settings)
+ return alr_settings->pacing_factor;
+
+ RateControlSettings rate_control_settings =
+ RateControlSettings::ParseFromFieldTrials();
+ return rate_control_settings.GetPacingFactor().value_or(
+ default_pacing_config.pacing_factor);
+}
+
+uint32_t GetInitialEncoderMaxBitrate(int initial_encoder_max_bitrate) {
+ if (initial_encoder_max_bitrate > 0)
+ return rtc::dchecked_cast<uint32_t>(initial_encoder_max_bitrate);
+
+ // TODO(srte): Make sure max bitrate is not set to negative values. We don't
+ // have any way to handle unset values in downstream code, such as the
+ // bitrate allocator. Previously -1 was implicitly cast to UINT32_MAX, a
+ // behaviour that is not safe. Converting to 10 Mbps should be safe for
+ // reasonable use cases as it allows adding the max of multiple streams
+ // without wrapping around.
+ const int kFallbackMaxBitrateBps = 10000000;
+ RTC_DLOG(LS_ERROR) << "ERROR: Initial encoder max bitrate = "
+ << initial_encoder_max_bitrate << " which is <= 0!";
+ RTC_DLOG(LS_INFO) << "Using default encoder max bitrate = 10 Mbps";
+ return kFallbackMaxBitrateBps;
+}
+
} // namespace
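The extracted helper simply passes valid values through and maps non-positive configured values onto the 10 Mbps fallback; for example (illustrative only, not part of the patch):

    GetInitialEncoderMaxBitrate(800000);  // returns 800000
    GetInitialEncoderMaxBitrate(-1);      // logs an error, returns 10000000 (10 Mbps)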
PacingConfig::PacingConfig()
@@ -193,162 +205,109 @@ PacingConfig::~PacingConfig() = default;
VideoSendStreamImpl::VideoSendStreamImpl(
Clock* clock,
SendStatisticsProxy* stats_proxy,
- rtc::TaskQueue* worker_queue,
- RtcpRttStats* call_stats,
+ rtc::TaskQueue* rtp_transport_queue,
RtpTransportControllerSendInterface* transport,
BitrateAllocatorInterface* bitrate_allocator,
- SendDelayStats* send_delay_stats,
VideoStreamEncoderInterface* video_stream_encoder,
- RtcEventLog* event_log,
const VideoSendStream::Config* config,
int initial_encoder_max_bitrate,
double initial_encoder_bitrate_priority,
- std::map<uint32_t, RtpState> suspended_ssrcs,
- std::map<uint32_t, RtpPayloadState> suspended_payload_states,
VideoEncoderConfig::ContentType content_type,
- std::unique_ptr<FecController> fec_controller)
+ RtpVideoSenderInterface* rtp_video_sender)
: clock_(clock),
has_alr_probing_(config->periodic_alr_bandwidth_probing ||
GetAlrSettings(content_type)),
pacing_config_(PacingConfig()),
stats_proxy_(stats_proxy),
config_(config),
- worker_queue_(worker_queue),
+ rtp_transport_queue_(rtp_transport_queue),
timed_out_(false),
transport_(transport),
bitrate_allocator_(bitrate_allocator),
disable_padding_(true),
max_padding_bitrate_(0),
encoder_min_bitrate_bps_(0),
+ encoder_max_bitrate_bps_(
+ GetInitialEncoderMaxBitrate(initial_encoder_max_bitrate)),
encoder_target_rate_bps_(0),
encoder_bitrate_priority_(initial_encoder_bitrate_priority),
- has_packet_feedback_(false),
video_stream_encoder_(video_stream_encoder),
- encoder_feedback_(clock, config_->rtp.ssrcs, video_stream_encoder),
bandwidth_observer_(transport->GetBandwidthObserver()),
- rtp_video_sender_(
- transport_->CreateRtpVideoSender(suspended_ssrcs,
- suspended_payload_states,
- config_->rtp,
- config_->rtcp_report_interval_ms,
- config_->send_transport,
- CreateObservers(call_stats,
- &encoder_feedback_,
- stats_proxy_,
- send_delay_stats),
- event_log,
- std::move(fec_controller),
- CreateFrameEncryptionConfig(config_),
- config->frame_transformer)),
- weak_ptr_factory_(this) {
- video_stream_encoder->SetFecControllerOverride(rtp_video_sender_);
- RTC_DCHECK_RUN_ON(worker_queue_);
- RTC_LOG(LS_INFO) << "VideoSendStreamInternal: " << config_->ToString();
- weak_ptr_ = weak_ptr_factory_.GetWeakPtr();
-
- encoder_feedback_.SetRtpVideoSender(rtp_video_sender_);
-
+ rtp_video_sender_(rtp_video_sender),
+ configured_pacing_factor_(
+ GetConfiguredPacingFactor(*config_, content_type, pacing_config_)) {
+ RTC_DCHECK_GE(config_->rtp.payload_type, 0);
+ RTC_DCHECK_LE(config_->rtp.payload_type, 127);
RTC_DCHECK(!config_->rtp.ssrcs.empty());
RTC_DCHECK(transport_);
RTC_DCHECK_NE(initial_encoder_max_bitrate, 0);
-
- if (initial_encoder_max_bitrate > 0) {
- encoder_max_bitrate_bps_ =
- rtc::dchecked_cast<uint32_t>(initial_encoder_max_bitrate);
- } else {
- // TODO(srte): Make sure max bitrate is not set to negative values. We don't
- // have any way to handle unset values in downstream code, such as the
- // bitrate allocator. Previously -1 was implicitly casted to UINT32_MAX, a
- // behaviour that is not safe. Converting to 10 Mbps should be safe for
- // reasonable use cases as it allows adding the max of multiple streams
- // without wrappping around.
- const int kFallbackMaxBitrateBps = 10000000;
- RTC_DLOG(LS_ERROR) << "ERROR: Initial encoder max bitrate = "
- << initial_encoder_max_bitrate << " which is <= 0!";
- RTC_DLOG(LS_INFO) << "Using default encoder max bitrate = 10 Mbps";
- encoder_max_bitrate_bps_ = kFallbackMaxBitrateBps;
- }
+ RTC_LOG(LS_INFO) << "VideoSendStreamImpl: " << config_->ToString();
RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled());
+
+ // Only request rotation at the source when we positively know that the remote
+ // side doesn't support the rotation extension. This allows us to prepare the
+ // encoder in the expectation that rotation is supported - which is the common
+ // case.
+ bool rotation_applied = absl::c_none_of(
+ config_->rtp.extensions, [](const RtpExtension& extension) {
+ return extension.uri == RtpExtension::kVideoRotationUri;
+ });
+
+ video_stream_encoder_->SetSink(this, rotation_applied);
+
+ absl::optional<bool> enable_alr_bw_probing;
+
// If send-side BWE is enabled, check if we should apply updated probing and
// pacing settings.
- if (TransportSeqNumExtensionConfigured(*config_)) {
- has_packet_feedback_ = true;
-
+ if (configured_pacing_factor_) {
absl::optional<AlrExperimentSettings> alr_settings =
GetAlrSettings(content_type);
+ int queue_time_limit_ms;
if (alr_settings) {
- transport->EnablePeriodicAlrProbing(true);
- transport->SetPacingFactor(alr_settings->pacing_factor);
- configured_pacing_factor_ = alr_settings->pacing_factor;
- transport->SetQueueTimeLimit(alr_settings->max_paced_queue_time);
+ enable_alr_bw_probing = true;
+ queue_time_limit_ms = alr_settings->max_paced_queue_time;
} else {
RateControlSettings rate_control_settings =
RateControlSettings::ParseFromFieldTrials();
-
- transport->EnablePeriodicAlrProbing(
- rate_control_settings.UseAlrProbing());
- const double pacing_factor =
- rate_control_settings.GetPacingFactor().value_or(
- pacing_config_.pacing_factor);
- transport->SetPacingFactor(pacing_factor);
- configured_pacing_factor_ = pacing_factor;
- transport->SetQueueTimeLimit(pacing_config_.max_pacing_delay.Get().ms());
+ enable_alr_bw_probing = rate_control_settings.UseAlrProbing();
+ queue_time_limit_ms = pacing_config_.max_pacing_delay.Get().ms();
}
+
+ transport->SetQueueTimeLimit(queue_time_limit_ms);
}
if (config_->periodic_alr_bandwidth_probing) {
- transport->EnablePeriodicAlrProbing(true);
+ enable_alr_bw_probing = config_->periodic_alr_bandwidth_probing;
}
- RTC_DCHECK_GE(config_->rtp.payload_type, 0);
- RTC_DCHECK_LE(config_->rtp.payload_type, 127);
-
- video_stream_encoder_->SetStartBitrate(
- bitrate_allocator_->GetStartBitrate(this));
-}
-
-VideoSendStreamImpl::~VideoSendStreamImpl() {
- RTC_DCHECK_RUN_ON(worker_queue_);
- RTC_DCHECK(!rtp_video_sender_->IsActive())
- << "VideoSendStreamImpl::Stop not called";
- RTC_LOG(LS_INFO) << "~VideoSendStreamInternal: " << config_->ToString();
- transport_->DestroyRtpVideoSender(rtp_video_sender_);
-}
-
-void VideoSendStreamImpl::RegisterProcessThread(
- ProcessThread* module_process_thread) {
- // Called on libjingle's worker thread (not worker_queue_), as part of the
- // initialization steps. That's also the correct thread/queue for setting the
- // state for |video_stream_encoder_|.
-
- // Only request rotation at the source when we positively know that the remote
- // side doesn't support the rotation extension. This allows us to prepare the
- // encoder in the expectation that rotation is supported - which is the common
- // case.
- bool rotation_applied = absl::c_none_of(
- config_->rtp.extensions, [](const RtpExtension& extension) {
- return extension.uri == RtpExtension::kVideoRotationUri;
- });
+ if (enable_alr_bw_probing) {
+ transport->EnablePeriodicAlrProbing(*enable_alr_bw_probing);
+ }
- video_stream_encoder_->SetSink(this, rotation_applied);
+ rtp_transport_queue_->PostTask(ToQueuedTask(transport_queue_safety_, [this] {
+ if (configured_pacing_factor_)
+ transport_->SetPacingFactor(*configured_pacing_factor_);
- rtp_video_sender_->RegisterProcessThread(module_process_thread);
+ video_stream_encoder_->SetStartBitrate(
+ bitrate_allocator_->GetStartBitrate(this));
+ }));
}
-void VideoSendStreamImpl::DeRegisterProcessThread() {
- rtp_video_sender_->DeRegisterProcessThread();
+VideoSendStreamImpl::~VideoSendStreamImpl() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_->ToString();
}
void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) {
// Runs on a network thread.
- RTC_DCHECK(!worker_queue_->IsCurrent());
+ RTC_DCHECK(!rtp_transport_queue_->IsCurrent());
rtp_video_sender_->DeliverRtcp(packet, length);
}
void VideoSendStreamImpl::UpdateActiveSimulcastLayers(
const std::vector<bool> active_layers) {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
bool previously_active = rtp_video_sender_->IsActive();
rtp_video_sender_->SetActiveModules(active_layers);
if (!rtp_video_sender_->IsActive() && previously_active) {
@@ -361,17 +320,21 @@ void VideoSendStreamImpl::UpdateActiveSimulcastLayers(
}
void VideoSendStreamImpl::Start() {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
RTC_LOG(LS_INFO) << "VideoSendStream::Start";
if (rtp_video_sender_->IsActive())
return;
+
TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start");
rtp_video_sender_->SetActive(true);
StartupVideoSendStream();
}
void VideoSendStreamImpl::StartupVideoSendStream() {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
+
+ transport_queue_safety_->SetAlive();
+
bitrate_allocator_->AddObserver(this, GetAllocationConfig());
// Start monitoring encoder activity.
{
@@ -380,8 +343,8 @@ void VideoSendStreamImpl::StartupVideoSendStream() {
activity_ = false;
timed_out_ = false;
check_encoder_activity_task_ = RepeatingTaskHandle::DelayedStart(
- worker_queue_->Get(), kEncoderTimeOut, [this] {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ rtp_transport_queue_->Get(), kEncoderTimeOut, [this] {
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
if (!activity_) {
if (!timed_out_) {
SignalEncoderTimedOut();
@@ -401,25 +364,29 @@ void VideoSendStreamImpl::StartupVideoSendStream() {
}
void VideoSendStreamImpl::Stop() {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
RTC_LOG(LS_INFO) << "VideoSendStreamImpl::Stop";
if (!rtp_video_sender_->IsActive())
return;
+
+ RTC_DCHECK(transport_queue_safety_->alive());
TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop");
rtp_video_sender_->SetActive(false);
StopVideoSendStream();
}
+// RTC_RUN_ON(rtp_transport_queue_)
void VideoSendStreamImpl::StopVideoSendStream() {
bitrate_allocator_->RemoveObserver(this);
check_encoder_activity_task_.Stop();
video_stream_encoder_->OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(),
DataRate::Zero(), 0, 0, 0);
stats_proxy_->OnSetEncoderTargetRate(0);
+ transport_queue_safety_->SetNotAlive();
}
void VideoSendStreamImpl::SignalEncoderTimedOut() {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
// If the encoder has not produced anything the last kEncoderTimeOut and it
// is supposed to, deregister as BitrateAllocatorObserver. This can happen
// if a camera stops producing frames.
@@ -431,17 +398,14 @@ void VideoSendStreamImpl::SignalEncoderTimedOut() {
void VideoSendStreamImpl::OnBitrateAllocationUpdated(
const VideoBitrateAllocation& allocation) {
- if (!worker_queue_->IsCurrent()) {
- auto ptr = weak_ptr_;
- worker_queue_->PostTask([=] {
- if (!ptr.get())
- return;
- ptr->OnBitrateAllocationUpdated(allocation);
- });
+ if (!rtp_transport_queue_->IsCurrent()) {
+ rtp_transport_queue_->PostTask(ToQueuedTask(transport_queue_safety_, [=] {
+ OnBitrateAllocationUpdated(allocation);
+ }));
return;
}
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
int64_t now_ms = clock_->TimeInMilliseconds();
if (encoder_target_rate_bps_ != 0) {
@@ -486,7 +450,7 @@ void VideoSendStreamImpl::OnVideoLayersAllocationUpdated(
}
void VideoSendStreamImpl::SignalEncoderActive() {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
if (rtp_video_sender_->IsActive()) {
RTC_LOG(LS_INFO) << "SignalEncoderActive, Encoder is active.";
bitrate_allocator_->AddObserver(this, GetAllocationConfig());
@@ -508,21 +472,20 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged(
bool is_svc,
VideoEncoderConfig::ContentType content_type,
int min_transmit_bitrate_bps) {
- if (!worker_queue_->IsCurrent()) {
- rtc::WeakPtr<VideoSendStreamImpl> send_stream = weak_ptr_;
- worker_queue_->PostTask([send_stream, streams, is_svc, content_type,
- min_transmit_bitrate_bps]() mutable {
- if (send_stream) {
- send_stream->OnEncoderConfigurationChanged(
- std::move(streams), is_svc, content_type, min_transmit_bitrate_bps);
- }
- });
+ if (!rtp_transport_queue_->IsCurrent()) {
+ rtp_transport_queue_->PostTask(ToQueuedTask(
+ transport_queue_safety_,
+ [this, streams = std::move(streams), is_svc, content_type,
+ min_transmit_bitrate_bps]() mutable {
+ OnEncoderConfigurationChanged(std::move(streams), is_svc,
+ content_type, min_transmit_bitrate_bps);
+ }));
return;
}
RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size());
TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged");
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
const VideoCodecType codec_type =
PayloadStringToCodecType(config_->rtp.payload_name);
@@ -585,14 +548,15 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
auto enable_padding_task = [this]() {
if (disable_padding_) {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
disable_padding_ = false;
// To ensure that padding bitrate is propagated to the bitrate allocator.
SignalEncoderActive();
}
};
- if (!worker_queue_->IsCurrent()) {
- worker_queue_->PostTask(enable_padding_task);
+ if (!rtp_transport_queue_->IsCurrent()) {
+ rtp_transport_queue_->PostTask(
+ ToQueuedTask(transport_queue_safety_, std::move(enable_padding_task)));
} else {
enable_padding_task();
}
@@ -602,18 +566,16 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info);
// Check if there's a throttled VideoBitrateAllocation that we should try
// sending.
- rtc::WeakPtr<VideoSendStreamImpl> send_stream = weak_ptr_;
- auto update_task = [send_stream]() {
- if (send_stream) {
- RTC_DCHECK_RUN_ON(send_stream->worker_queue_);
- auto& context = send_stream->video_bitrate_allocation_context_;
- if (context && context->throttled_allocation) {
- send_stream->OnBitrateAllocationUpdated(*context->throttled_allocation);
- }
+ auto update_task = [this]() {
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
+ auto& context = video_bitrate_allocation_context_;
+ if (context && context->throttled_allocation) {
+ OnBitrateAllocationUpdated(*context->throttled_allocation);
}
};
- if (!worker_queue_->IsCurrent()) {
- worker_queue_->PostTask(update_task);
+ if (!rtp_transport_queue_->IsCurrent()) {
+ rtp_transport_queue_->PostTask(
+ ToQueuedTask(transport_queue_safety_, std::move(update_task)));
} else {
update_task();
}
@@ -636,7 +598,7 @@ std::map<uint32_t, RtpPayloadState> VideoSendStreamImpl::GetRtpPayloadStates()
}
uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) {
- RTC_DCHECK_RUN_ON(worker_queue_);
+ RTC_DCHECK_RUN_ON(rtp_transport_queue_);
RTC_DCHECK(rtp_video_sender_->IsActive())
<< "VideoSendStream::Start has not been called.";
diff --git a/video/video_send_stream_impl.h b/video/video_send_stream_impl.h
index 41a7859a77..babf1dcfe5 100644
--- a/video/video_send_stream_impl.h
+++ b/video/video_send_stream_impl.h
@@ -19,8 +19,6 @@
#include <vector>
#include "absl/types/optional.h"
-#include "api/fec_controller.h"
-#include "api/rtc_event_log/rtc_event_log.h"
#include "api/video/encoded_image.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_bitrate_allocator.h"
@@ -33,18 +31,14 @@
#include "call/rtp_video_sender_interface.h"
#include "modules/include/module_common_types.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/experiments/field_trial_parser.h"
-#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
-#include "rtc_base/weak_ptr.h"
-#include "video/encoder_rtcp_feedback.h"
-#include "video/send_delay_stats.h"
#include "video/send_statistics_proxy.h"
-#include "video/video_send_stream.h"
namespace webrtc {
namespace internal {
@@ -60,42 +54,28 @@ struct PacingConfig {
};
// VideoSendStreamImpl implements internal::VideoSendStream.
-// It is created and destroyed on |worker_queue|. The intent is to decrease the
-// need for locking and to ensure methods are called in sequence.
-// Public methods except |DeliverRtcp| must be called on |worker_queue|.
+// It is created and destroyed on `rtp_transport_queue`. The intent is to
+// decrease the need for locking and to ensure methods are called in sequence.
+// Public methods except `DeliverRtcp` must be called on `rtp_transport_queue`.
// DeliverRtcp is called on the libjingle worker thread or a network thread.
// An encoder may deliver frames through the EncodedImageCallback on an
// arbitrary thread.
class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
public VideoStreamEncoderInterface::EncoderSink {
public:
- VideoSendStreamImpl(
- Clock* clock,
- SendStatisticsProxy* stats_proxy,
- rtc::TaskQueue* worker_queue,
- RtcpRttStats* call_stats,
- RtpTransportControllerSendInterface* transport,
- BitrateAllocatorInterface* bitrate_allocator,
- SendDelayStats* send_delay_stats,
- VideoStreamEncoderInterface* video_stream_encoder,
- RtcEventLog* event_log,
- const VideoSendStream::Config* config,
- int initial_encoder_max_bitrate,
- double initial_encoder_bitrate_priority,
- std::map<uint32_t, RtpState> suspended_ssrcs,
- std::map<uint32_t, RtpPayloadState> suspended_payload_states,
- VideoEncoderConfig::ContentType content_type,
- std::unique_ptr<FecController> fec_controller);
+ VideoSendStreamImpl(Clock* clock,
+ SendStatisticsProxy* stats_proxy,
+ rtc::TaskQueue* rtp_transport_queue,
+ RtpTransportControllerSendInterface* transport,
+ BitrateAllocatorInterface* bitrate_allocator,
+ VideoStreamEncoderInterface* video_stream_encoder,
+ const VideoSendStream::Config* config,
+ int initial_encoder_max_bitrate,
+ double initial_encoder_bitrate_priority,
+ VideoEncoderConfig::ContentType content_type,
+ RtpVideoSenderInterface* rtp_video_sender);
~VideoSendStreamImpl() override;
- // RegisterProcessThread register |module_process_thread| with those objects
- // that use it. Registration has to happen on the thread were
- // |module_process_thread| was created (libjingle's worker thread).
- // TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue,
- // maybe |worker_queue|.
- void RegisterProcessThread(ProcessThread* module_process_thread);
- void DeRegisterProcessThread();
-
void DeliverRtcp(const uint8_t* packet, size_t length);
void UpdateActiveSimulcastLayers(const std::vector<bool> active_layers);
void Start();
@@ -106,7 +86,9 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
std::map<uint32_t, RtpPayloadState> GetRtpPayloadStates() const;
- absl::optional<float> configured_pacing_factor_;
+ const absl::optional<float>& configured_pacing_factor() const {
+ return configured_pacing_factor_;
+ }
private:
// Implements BitrateAllocatorObserver.
@@ -138,14 +120,16 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
void StartupVideoSendStream();
// Removes the bitrate observer, stops monitoring and notifies the video
// encoder of the bitrate update.
- void StopVideoSendStream() RTC_RUN_ON(worker_queue_);
+ void StopVideoSendStream() RTC_RUN_ON(rtp_transport_queue_);
void ConfigureProtection();
void ConfigureSsrcs();
void SignalEncoderTimedOut();
void SignalEncoderActive();
MediaStreamAllocationConfig GetAllocationConfig() const
- RTC_RUN_ON(worker_queue_);
+ RTC_RUN_ON(rtp_transport_queue_);
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_;
Clock* const clock_;
const bool has_alr_probing_;
const PacingConfig pacing_config_;
@@ -153,40 +137,31 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
SendStatisticsProxy* const stats_proxy_;
const VideoSendStream::Config* const config_;
- rtc::TaskQueue* const worker_queue_;
+ rtc::TaskQueue* const rtp_transport_queue_;
RepeatingTaskHandle check_encoder_activity_task_
- RTC_GUARDED_BY(worker_queue_);
+ RTC_GUARDED_BY(rtp_transport_queue_);
std::atomic_bool activity_;
- bool timed_out_ RTC_GUARDED_BY(worker_queue_);
+ bool timed_out_ RTC_GUARDED_BY(rtp_transport_queue_);
RtpTransportControllerSendInterface* const transport_;
BitrateAllocatorInterface* const bitrate_allocator_;
- Mutex ivf_writers_mutex_;
-
bool disable_padding_;
int max_padding_bitrate_;
int encoder_min_bitrate_bps_;
uint32_t encoder_max_bitrate_bps_;
uint32_t encoder_target_rate_bps_;
double encoder_bitrate_priority_;
- bool has_packet_feedback_;
VideoStreamEncoderInterface* const video_stream_encoder_;
- EncoderRtcpFeedback encoder_feedback_;
RtcpBandwidthObserver* const bandwidth_observer_;
RtpVideoSenderInterface* const rtp_video_sender_;
- // |weak_ptr_| to our self. This is used since we can not call
- // |weak_ptr_factory_.GetWeakPtr| from multiple sequences but it is ok to copy
- // an existing WeakPtr.
- rtc::WeakPtr<VideoSendStreamImpl> weak_ptr_;
- // |weak_ptr_factory_| must be declared last to make sure all WeakPtr's are
- // invalidated before any other members are destroyed.
- rtc::WeakPtrFactory<VideoSendStreamImpl> weak_ptr_factory_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> transport_queue_safety_ =
+ PendingTaskSafetyFlag::CreateDetached();
// Context for the most recent and last sent video bitrate allocation. Used to
// throttle sending of similar bitrate allocations.
@@ -196,7 +171,8 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
int64_t last_send_time_ms;
};
absl::optional<VbaSendContext> video_bitrate_allocation_context_
- RTC_GUARDED_BY(worker_queue_);
+ RTC_GUARDED_BY(rtp_transport_queue_);
+ const absl::optional<float> configured_pacing_factor_;
};
} // namespace internal
} // namespace webrtc
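As a companion note, a minimal sketch of how the RTC_GUARDED_BY / RTC_DCHECK_RUN_ON annotations used throughout this header tie member state to the queue; the Sender class is hypothetical, and the header providing RTC_DCHECK_RUN_ON may differ slightly between revisions.

#include <stdint.h>

#include "api/sequence_checker.h"         // RTC_DCHECK_RUN_ON (assumed path).
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_annotations.h"  // RTC_GUARDED_BY.

class Sender {
 public:
  explicit Sender(rtc::TaskQueue* rtp_transport_queue)
      : rtp_transport_queue_(rtp_transport_queue) {}

  void OnBitrateUpdated(uint32_t bps) {
    // DCHECKs at runtime and tells the static analyzer which sequence we are
    // on, so the guarded write below is accepted without a lock.
    RTC_DCHECK_RUN_ON(rtp_transport_queue_);
    encoder_target_rate_bps_ = bps;
  }

 private:
  rtc::TaskQueue* const rtp_transport_queue_;
  uint32_t encoder_target_rate_bps_ RTC_GUARDED_BY(rtp_transport_queue_) = 0;
};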
diff --git a/video/video_send_stream_impl_unittest.cc b/video/video_send_stream_impl_unittest.cc
index ee303b4eac..71cec7c981 100644
--- a/video/video_send_stream_impl_unittest.cc
+++ b/video/video_send_stream_impl_unittest.cc
@@ -31,6 +31,7 @@
#include "test/mock_transport.h"
#include "video/call_stats.h"
#include "video/test/mock_video_stream_encoder.h"
+#include "video/video_send_stream.h"
namespace webrtc {
@@ -145,17 +146,24 @@ class VideoSendStreamImplTest : public ::testing::Test {
int initial_encoder_max_bitrate,
double initial_encoder_bitrate_priority,
VideoEncoderConfig::ContentType content_type) {
+ RTC_DCHECK(!test_queue_.IsCurrent());
+
EXPECT_CALL(bitrate_allocator_, GetStartBitrate(_))
.WillOnce(Return(123000));
+
std::map<uint32_t, RtpState> suspended_ssrcs;
std::map<uint32_t, RtpPayloadState> suspended_payload_states;
- return std::make_unique<VideoSendStreamImpl>(
- &clock_, &stats_proxy_, &test_queue_, &call_stats_,
- &transport_controller_, &bitrate_allocator_, &send_delay_stats_,
- &video_stream_encoder_, &event_log_, &config_,
+ auto ret = std::make_unique<VideoSendStreamImpl>(
+ &clock_, &stats_proxy_, &test_queue_, &transport_controller_,
+ &bitrate_allocator_, &video_stream_encoder_, &config_,
initial_encoder_max_bitrate, initial_encoder_bitrate_priority,
- suspended_ssrcs, suspended_payload_states, content_type,
- std::make_unique<FecControllerDefault>(&clock_));
+ content_type, &rtp_video_sender_);
+
+ // The call to GetStartBitrate() executes asynchronously on the task queue.
+ test_queue_.WaitForPreviouslyPostedTasks();
+ testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_);
+
+ return ret;
}
protected:
@@ -179,22 +187,22 @@ class VideoSendStreamImplTest : public ::testing::Test {
};
TEST_F(VideoSendStreamImplTest, RegistersAsBitrateObserverOnStart) {
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+ const bool kSuspend = false;
+ config_.suspend_below_min_bitrate = kSuspend;
+ EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
+ .WillOnce(Invoke(
+ [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) {
+ EXPECT_EQ(config.min_bitrate_bps, 0u);
+ EXPECT_EQ(config.max_bitrate_bps, kDefaultInitialBitrateBps);
+ EXPECT_EQ(config.pad_up_bitrate_bps, 0u);
+ EXPECT_EQ(config.enforce_min_bitrate, !kSuspend);
+ EXPECT_EQ(config.bitrate_priority, kDefaultBitratePriority);
+ }));
test_queue_.SendTask(
- [this] {
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
- EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
- .WillOnce(Invoke([&](BitrateAllocatorObserver*,
- MediaStreamAllocationConfig config) {
- EXPECT_EQ(config.min_bitrate_bps, 0u);
- EXPECT_EQ(config.max_bitrate_bps, kDefaultInitialBitrateBps);
- EXPECT_EQ(config.pad_up_bitrate_bps, 0u);
- EXPECT_EQ(config.enforce_min_bitrate, !kSuspend);
- EXPECT_EQ(config.bitrate_priority, kDefaultBitratePriority);
- }));
+ [&] {
vss_impl->Start();
EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get()))
.Times(1);
@@ -204,15 +212,16 @@ TEST_F(VideoSendStreamImplTest, RegistersAsBitrateObserverOnStart) {
}
TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) {
+ const bool kSuspend = false;
+ config_.suspend_below_min_bitrate = kSuspend;
+ config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
+ 1);
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+
test_queue_.SendTask(
- [this] {
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- config_.rtp.extensions.emplace_back(
- RtpExtension::kTransportSequenceNumberUri, 1);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ [&] {
vss_impl->Start();
// QVGA + VGA configuration matching defaults in
@@ -269,16 +278,16 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) {
}
TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) {
+ const bool kSuspend = false;
+ config_.suspend_below_min_bitrate = kSuspend;
+ config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
+ 1);
+ config_.periodic_alr_bandwidth_probing = true;
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kScreen);
test_queue_.SendTask(
- [this] {
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- config_.rtp.extensions.emplace_back(
- RtpExtension::kTransportSequenceNumberUri, 1);
- config_.periodic_alr_bandwidth_probing = true;
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ [&] {
vss_impl->Start();
// Simulcast screenshare.
@@ -341,11 +350,12 @@ TEST_F(VideoSendStreamImplTest,
test::ScopedFieldTrials hysteresis_experiment(
"WebRTC-VideoRateControl/video_hysteresis:1.25/");
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+
test_queue_.SendTask(
- [this] {
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ [&] {
vss_impl->Start();
// 2-layer video simulcast.
@@ -401,17 +411,17 @@ TEST_F(VideoSendStreamImplTest,
TEST_F(VideoSendStreamImplTest, SetsScreensharePacingFactorWithFeedback) {
test::ScopedFieldTrials alr_experiment(GetAlrProbingExperimentString());
+ constexpr int kId = 1;
+ config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
+ kId);
+ EXPECT_CALL(transport_controller_,
+ SetPacingFactor(kAlrProbingExperimentPaceMultiplier))
+ .Times(1);
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kScreen);
test_queue_.SendTask(
- [this] {
- constexpr int kId = 1;
- config_.rtp.extensions.emplace_back(
- RtpExtension::kTransportSequenceNumberUri, kId);
- EXPECT_CALL(transport_controller_,
- SetPacingFactor(kAlrProbingExperimentPaceMultiplier))
- .Times(1);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ [&] {
vss_impl->Start();
vss_impl->Stop();
},
@@ -420,12 +430,12 @@ TEST_F(VideoSendStreamImplTest, SetsScreensharePacingFactorWithFeedback) {
TEST_F(VideoSendStreamImplTest, DoesNotSetPacingFactorWithoutFeedback) {
test::ScopedFieldTrials alr_experiment(GetAlrProbingExperimentString());
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kScreen);
test_queue_.SendTask(
- [this] {
+ [&] {
EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
vss_impl->Start();
vss_impl->Stop();
},
@@ -433,12 +443,12 @@ TEST_F(VideoSendStreamImplTest, DoesNotSetPacingFactorWithoutFeedback) {
}
TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) {
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kScreen);
test_queue_.SendTask(
- [this] {
+ [&] {
EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
VideoStreamEncoderInterface::EncoderSink* const sink =
static_cast<VideoStreamEncoderInterface::EncoderSink*>(
vss_impl.get());
@@ -483,11 +493,11 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) {
}
TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) {
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kScreen);
test_queue_.SendTask(
- [this] {
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ [&] {
vss_impl->Start();
// Unpause the encoder to allow allocations to be passed through.
const uint32_t kBitrateBps = 100000;
@@ -529,8 +539,8 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) {
.Times(1);
sink->OnBitrateAllocationUpdated(updated_alloc);
- // This is now a decrease compared to last forward allocation, forward
- // immediately.
+ // This is now a decrease compared to last forward allocation,
+ // forward immediately.
updated_alloc.SetBitrate(0, 0, base_layer_min_update_bitrate_bps - 1);
EXPECT_CALL(rtp_video_sender_,
OnBitrateAllocationUpdated(updated_alloc))
@@ -543,11 +553,11 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) {
}
TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) {
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kScreen);
test_queue_.SendTask(
- [this] {
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ [&] {
vss_impl->Start();
// Unpause the encoder to allow allocations to be passed through.
const uint32_t kBitrateBps = 100000;
@@ -572,8 +582,8 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) {
.Times(1);
sink->OnBitrateAllocationUpdated(alloc);
- // Move some bitrate from one layer to a new one, but keep sum the same.
- // Since layout has changed, immediately trigger forward.
+ // Move some bitrate from one layer to a new one, but keep sum the
+ // same. Since layout has changed, immediately trigger forward.
VideoBitrateAllocation updated_alloc = alloc;
updated_alloc.SetBitrate(2, 0, 10000);
updated_alloc.SetBitrate(1, 1, alloc.GetBitrate(1, 1) - 10000);
@@ -589,11 +599,11 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) {
}
TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) {
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kScreen);
test_queue_.SendTask(
- [this] {
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ [&] {
vss_impl->Start();
const uint32_t kBitrateBps = 100000;
// Unpause the encoder to allow allocations to be passed through.
@@ -639,7 +649,8 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) {
clock_.AdvanceTimeMicroseconds(kMaxVbaThrottleTimeMs * 1000);
{
- // Sending similar allocation again after timeout, should forward.
+ // Sending similar allocation again after timeout, should
+ // forward.
EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc))
.Times(1);
sink->OnBitrateAllocationUpdated(alloc);
@@ -661,8 +672,8 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) {
}
{
- // Advance time and send encoded image, this should wake up and send
- // cached bitrate allocation.
+ // Advance time and send encoded image, this should wake up and
+ // send cached bitrate allocation.
clock_.AdvanceTimeMicroseconds(kMaxVbaThrottleTimeMs * 1000);
EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc))
.Times(1);
@@ -671,8 +682,8 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) {
}
{
- // Advance time and send encoded image, there should be no cached
- // allocation to send.
+ // Advance time and send encoded image, there should be no
+ // cached allocation to send.
clock_.AdvanceTimeMicroseconds(kMaxVbaThrottleTimeMs * 1000);
EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc))
.Times(0);
@@ -686,15 +697,15 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) {
}
TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
+ const bool kSuspend = false;
+ config_.suspend_below_min_bitrate = kSuspend;
+ config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
+ 1);
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
test_queue_.SendTask(
- [this] {
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- config_.rtp.extensions.emplace_back(
- RtpExtension::kTransportSequenceNumberUri, 1);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ [&] {
vss_impl->Start();
VideoStream qvga_stream;
@@ -733,8 +744,8 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
static_cast<BitrateAllocatorObserver*>(vss_impl.get())
->OnBitrateUpdated(update);
- // Test allocation where the link allocation is larger than the target,
- // meaning we have some headroom on the link.
+ // Test allocation where the link allocation is larger than the
+ // target, meaning we have some headroom on the link.
const DataRate qvga_max_bitrate =
DataRate::BitsPerSec(qvga_stream.max_bitrate_bps);
const DataRate headroom = DataRate::BitsPerSec(50000);
@@ -750,8 +761,8 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
static_cast<BitrateAllocatorObserver*>(vss_impl.get())
->OnBitrateUpdated(update);
- // Add protection bitrate to the mix, this should be subtracted from the
- // headroom.
+ // Add protection bitrate to the mix, this should be subtracted
+ // from the headroom.
const uint32_t protection_bitrate_bps = 10000;
EXPECT_CALL(rtp_video_sender_, GetProtectionBitrateBps())
.WillOnce(Return(protection_bitrate_bps));
@@ -791,14 +802,11 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) {
TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) {
int padding_bitrate = 0;
- std::unique_ptr<VideoSendStreamImpl> vss_impl;
-
+ std::unique_ptr<VideoSendStreamImpl> vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
test_queue_.SendTask(
[&] {
- vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
-
// Capture padding bitrate for testing.
EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
.WillRepeatedly(Invoke([&](BitrateAllocatorObserver*,
@@ -871,7 +879,6 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) {
EXPECT_EQ(0, padding_bitrate);
testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_);
vss_impl->Stop();
- vss_impl.reset();
done.Set();
},
5000);
@@ -881,12 +888,11 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) {
}
TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) {
- std::unique_ptr<VideoSendStreamImpl> vss_impl;
+ std::unique_ptr<VideoSendStreamImpl> vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
test_queue_.SendTask(
[&] {
- vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
vss_impl->Start();
const uint32_t kBitrateBps = 100000;
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
@@ -909,7 +915,6 @@ TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) {
[&] {
testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_);
vss_impl->Stop();
- vss_impl.reset();
done.Set();
},
2000);
@@ -933,18 +938,18 @@ TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvc) {
}
for (const TestConfig& test_config : test_variants) {
+ const bool kSuspend = false;
+ config_.suspend_below_min_bitrate = kSuspend;
+ config_.rtp.extensions.emplace_back(
+ RtpExtension::kTransportSequenceNumberUri, 1);
+ config_.periodic_alr_bandwidth_probing = test_config.alr;
+ auto vss_impl = CreateVideoSendStreamImpl(
+ kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ test_config.screenshare
+ ? VideoEncoderConfig::ContentType::kScreen
+ : VideoEncoderConfig::ContentType::kRealtimeVideo);
test_queue_.SendTask(
- [this, test_config] {
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- config_.rtp.extensions.emplace_back(
- RtpExtension::kTransportSequenceNumberUri, 1);
- config_.periodic_alr_bandwidth_probing = test_config.alr;
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- test_config.screenshare
- ? VideoEncoderConfig::ContentType::kScreen
- : VideoEncoderConfig::ContentType::kRealtimeVideo);
+ [&] {
vss_impl->Start();
// Svc
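Schematically, the reworked tests above build the stream on the test thread (CreateVideoSendStreamImpl itself now waits for the queued GetStartBitrate() call) and only hop onto the queue to drive Start()/Stop(). A hedged, fixture-free equivalent of that hop, with the helper name invented here:

#include "rtc_base/event.h"
#include "rtc_base/task_queue.h"
#include "video/video_send_stream_impl.h"

// Hypothetical helper mirroring what the tests above do via test_queue_:
// run Start()/Stop() on the transport queue and block until both finish.
void StartAndStopOnQueue(rtc::TaskQueue* queue,
                         webrtc::internal::VideoSendStreamImpl* stream) {
  rtc::Event done;
  queue->PostTask([&] {
    stream->Start();
    stream->Stop();
    done.Set();
  });
  done.Wait(rtc::Event::kForever);
}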
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index 3ee97a9202..4e94d8fc77 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -948,10 +948,10 @@ void VideoSendStreamTest::TestNackRetransmission(
non_padding_sequence_numbers_.end() - kNackedPacketsAtOnceCount,
non_padding_sequence_numbers_.end());
- RtpRtcpInterface::Configuration config;
+ RTCPSender::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.outgoing_transport = transport_adapter_.get();
- config.rtcp_report_interval_ms = kRtcpIntervalMs;
+ config.rtcp_report_interval = TimeDelta::Millis(kRtcpIntervalMs);
config.local_media_ssrc = kReceiverLocalVideoSsrc;
RTCPSender rtcp_sender(config);
@@ -1164,11 +1164,11 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
kVideoSendSsrcs[0], rtp_packet.SequenceNumber(),
packets_lost_, // Cumulative lost.
loss_ratio); // Loss percent.
- RtpRtcpInterface::Configuration config;
+ RTCPSender::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.receive_statistics = &lossy_receive_stats;
config.outgoing_transport = transport_adapter_.get();
- config.rtcp_report_interval_ms = kRtcpIntervalMs;
+ config.rtcp_report_interval = TimeDelta::Millis(kRtcpIntervalMs);
config.local_media_ssrc = kVideoSendSsrcs[0];
RTCPSender rtcp_sender(config);
@@ -1474,7 +1474,9 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
if (!rtp_packet.Parse(packet, length))
return DROP_PACKET;
RTC_DCHECK(stream_);
- VideoSendStream::Stats stats = stream_->GetStats();
+ VideoSendStream::Stats stats;
+ SendTask(RTC_FROM_HERE, task_queue_,
+ [&]() { stats = stream_->GetStats(); });
if (!stats.substreams.empty()) {
EXPECT_EQ(1u, stats.substreams.size());
int total_bitrate_bps =
@@ -2422,14 +2424,16 @@ class VideoCodecConfigObserver : public test::SendTest,
public test::FakeEncoder {
public:
VideoCodecConfigObserver(VideoCodecType video_codec_type,
- const char* codec_name)
+ const char* codec_name,
+ TaskQueueBase* task_queue)
: SendTest(VideoSendStreamTest::kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
video_codec_type_(video_codec_type),
codec_name_(codec_name),
num_initializations_(0),
stream_(nullptr),
- encoder_factory_(this) {
+ encoder_factory_(this),
+ task_queue_(task_queue) {
InitCodecSpecifics();
}
@@ -2477,7 +2481,9 @@ class VideoCodecConfigObserver : public test::SendTest,
// Change encoder settings to actually trigger reconfiguration.
encoder_settings_.frameDroppingOn = !encoder_settings_.frameDroppingOn;
encoder_config_.encoder_specific_settings = GetEncoderSpecificSettings();
- stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
+ SendTask(RTC_FROM_HERE, task_queue_, [&]() {
+ stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
+ });
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(2u, num_initializations_)
@@ -2499,6 +2505,7 @@ class VideoCodecConfigObserver : public test::SendTest,
VideoSendStream* stream_;
test::VideoEncoderProxyFactory encoder_factory_;
VideoEncoderConfig encoder_config_;
+ TaskQueueBase* task_queue_;
};
template <>
@@ -2531,8 +2538,8 @@ void VideoCodecConfigObserver<VideoCodecH264>::VerifyCodecSpecifics(
template <>
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
VideoCodecConfigObserver<VideoCodecH264>::GetEncoderSpecificSettings() const {
- return new rtc::RefCountedObject<
- VideoEncoderConfig::H264EncoderSpecificSettings>(encoder_settings_);
+ return rtc::make_ref_counted<VideoEncoderConfig::H264EncoderSpecificSettings>(
+ encoder_settings_);
}
template <>
@@ -2565,8 +2572,8 @@ void VideoCodecConfigObserver<VideoCodecVP8>::VerifyCodecSpecifics(
template <>
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
VideoCodecConfigObserver<VideoCodecVP8>::GetEncoderSpecificSettings() const {
- return new rtc::RefCountedObject<
- VideoEncoderConfig::Vp8EncoderSpecificSettings>(encoder_settings_);
+ return rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+ encoder_settings_);
}
template <>
@@ -2599,17 +2606,19 @@ void VideoCodecConfigObserver<VideoCodecVP9>::VerifyCodecSpecifics(
template <>
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
VideoCodecConfigObserver<VideoCodecVP9>::GetEncoderSpecificSettings() const {
- return new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(encoder_settings_);
+ return rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ encoder_settings_);
}
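As a general note, the repeated `new rtc::RefCountedObject<T>(...)` to `rtc::make_ref_counted<T>(...)` rewrites in these test files are mechanical; both end up as an rtc::scoped_refptr<T>. A hedged one-liner using the VP9 settings case from above (the exact header providing make_ref_counted varies by revision):

VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings> settings =
    rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
        vp9_settings);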
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) {
- VideoCodecConfigObserver<VideoCodecVP8> test(kVideoCodecVP8, "VP8");
+ VideoCodecConfigObserver<VideoCodecVP8> test(kVideoCodecVP8, "VP8",
+ task_queue());
RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) {
- VideoCodecConfigObserver<VideoCodecVP9> test(kVideoCodecVP9, "VP9");
+ VideoCodecConfigObserver<VideoCodecVP9> test(kVideoCodecVP9, "VP9",
+ task_queue());
RunBaseTest(&test);
}
@@ -2621,7 +2630,8 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) {
#define MAYBE_EncoderSetupPropagatesH264Config EncoderSetupPropagatesH264Config
#endif
TEST_F(VideoSendStreamTest, MAYBE_EncoderSetupPropagatesH264Config) {
- VideoCodecConfigObserver<VideoCodecH264> test(kVideoCodecH264, "H264");
+ VideoCodecConfigObserver<VideoCodecH264> test(kVideoCodecH264, "H264",
+ task_queue());
RunBaseTest(&test);
}
@@ -2726,7 +2736,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
send_config->encoder_settings.encoder_factory = &encoder_factory_;
EXPECT_EQ(1u, encoder_config->number_of_streams);
encoder_config->video_stream_factory =
- new rtc::RefCountedObject<VideoStreamFactory>();
+ rtc::make_ref_counted<VideoStreamFactory>();
EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
encoder_config->simulcast_layers[0].num_temporal_layers = 2;
encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
@@ -2904,7 +2914,9 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
// Encoder rate is capped by EncoderConfig max_bitrate_bps.
WaitForSetRates(kMaxBitrateKbps);
encoder_config_.max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
- send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
+ SendTask(RTC_FROM_HERE, task_queue_, [&]() {
+ send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
+ });
ASSERT_TRUE(create_rate_allocator_event_.Wait(
VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(2, num_rate_allocator_creations_)
@@ -2914,7 +2926,9 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
EXPECT_EQ(1, num_encoder_initializations_);
encoder_config_.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
- send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
+ SendTask(RTC_FROM_HERE, task_queue_, [&]() {
+ send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
+ });
ASSERT_TRUE(create_rate_allocator_event_.Wait(
VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(3, num_rate_allocator_creations_)
@@ -2955,11 +2969,12 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
class ScreencastTargetBitrateTest : public test::SendTest,
public test::FakeEncoder {
public:
- ScreencastTargetBitrateTest()
+ explicit ScreencastTargetBitrateTest(TaskQueueBase* task_queue)
: SendTest(kDefaultTimeoutMs),
test::FakeEncoder(Clock::GetRealTimeClock()),
send_stream_(nullptr),
- encoder_factory_(this) {}
+ encoder_factory_(this),
+ task_queue_(task_queue) {}
private:
int32_t Encode(const VideoFrame& input_image,
@@ -3007,7 +3022,9 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
void PerformTest() override {
EXPECT_TRUE(Wait())
<< "Timed out while waiting for the encoder to send one frame.";
- VideoSendStream::Stats stats = send_stream_->GetStats();
+ VideoSendStream::Stats stats;
+ SendTask(RTC_FROM_HERE, task_queue_,
+ [&]() { stats = send_stream_->GetStats(); });
for (size_t i = 0; i < kNumStreams; ++i) {
ASSERT_TRUE(stats.substreams.find(kVideoSendSsrcs[i]) !=
@@ -3029,7 +3046,8 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
VideoSendStream* send_stream_;
test::VideoEncoderProxyFactory encoder_factory_;
- } test;
+ TaskQueueBase* const task_queue_;
+ } test(task_queue());
RunBaseTest(&test);
}
@@ -3064,8 +3082,9 @@ class Vp9HeaderObserver : public test::SendTest {
send_config->rtp.payload_name = "VP9";
send_config->rtp.payload_type = kVp9PayloadType;
ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
- encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_);
+ encoder_config->encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9_settings_);
EXPECT_EQ(1u, encoder_config->number_of_streams);
EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
encoder_config->simulcast_layers[0].num_temporal_layers =
@@ -3799,14 +3818,15 @@ class ContentSwitchTest : public test::SendTest {
};
static const uint32_t kMinPacketsToSend = 50;
- explicit ContentSwitchTest(T* stream_reset_fun)
+ explicit ContentSwitchTest(T* stream_reset_fun, TaskQueueBase* task_queue)
: SendTest(test::CallTest::kDefaultTimeoutMs),
call_(nullptr),
state_(StreamState::kBeforeSwitch),
send_stream_(nullptr),
send_stream_config_(nullptr),
packets_sent_(0),
- stream_resetter_(stream_reset_fun) {
+ stream_resetter_(stream_reset_fun),
+ task_queue_(task_queue) {
RTC_DCHECK(stream_resetter_);
}
@@ -3840,8 +3860,10 @@ class ContentSwitchTest : public test::SendTest {
float pacing_factor =
internal_send_peer.GetPacingFactorOverride().value_or(0.0f);
float expected_pacing_factor = 1.1; // Strict pacing factor.
- if (send_stream_->GetStats().content_type ==
- webrtc::VideoContentType::SCREENSHARE) {
+ VideoSendStream::Stats stats;
+ SendTask(RTC_FROM_HERE, task_queue_,
+ [&stats, stream = send_stream_]() { stats = stream->GetStats(); });
+ if (stats.content_type == webrtc::VideoContentType::SCREENSHARE) {
expected_pacing_factor = 1.0f; // Currently used pacing factor in ALR.
}
@@ -3909,6 +3931,7 @@ class ContentSwitchTest : public test::SendTest {
VideoEncoderConfig encoder_config_;
uint32_t packets_sent_ RTC_GUARDED_BY(mutex_);
T* stream_resetter_;
+ TaskQueueBase* task_queue_;
};
TEST_F(VideoSendStreamTest, SwitchesToScreenshareAndBack) {
@@ -3928,7 +3951,7 @@ TEST_F(VideoSendStreamTest, SwitchesToScreenshareAndBack) {
Start();
});
};
- ContentSwitchTest<decltype(reset_fun)> test(&reset_fun);
+ ContentSwitchTest<decltype(reset_fun)> test(&reset_fun, task_queue());
RunBaseTest(&test);
}
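For reference, the stats-snapshot pattern these hunks add, pulled into a hypothetical helper; it relies on the same free SendTask(location, queue, task) overload already used above (header paths are assumptions for this tree):

#include "api/task_queue/task_queue_base.h"
#include "call/video_send_stream.h"
#include "rtc_base/location.h"
#include "rtc_base/task_queue_for_test.h"  // Free SendTask() (assumed path).

namespace webrtc {

// Fetch VideoSendStream::Stats on the stream's task queue rather than the
// calling test thread, exactly as the tests above now do inline.
VideoSendStream::Stats SnapshotStats(VideoSendStream* stream,
                                     TaskQueueBase* task_queue) {
  VideoSendStream::Stats stats;
  SendTask(RTC_FROM_HERE, task_queue, [&] { stats = stream->GetStats(); });
  return stats;
}

}  // namespace webrtc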
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 191918a591..68aae2816f 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -660,6 +660,7 @@ VideoStreamEncoder::VideoStreamEncoder(
encoder_queue_(task_queue_factory->CreateTaskQueue(
"EncoderQueue",
TaskQueueFactory::Priority::NORMAL)) {
+ TRACE_EVENT0("webrtc", "VideoStreamEncoder::VideoStreamEncoder");
RTC_DCHECK(main_queue_);
RTC_DCHECK(encoder_stats_observer);
RTC_DCHECK_GE(number_of_cores, 1);
@@ -742,11 +743,16 @@ void VideoStreamEncoder::SetFecControllerOverride(
void VideoStreamEncoder::AddAdaptationResource(
rtc::scoped_refptr<Resource> resource) {
RTC_DCHECK_RUN_ON(main_queue_);
+ TRACE_EVENT0("webrtc", "VideoStreamEncoder::AddAdaptationResource");
// Map any externally added resources as kCpu for the sake of stats reporting.
// TODO(hbos): Make the manager map any unknown resources to kCpu and get rid
// of this MapResourceToReason() call.
+ TRACE_EVENT_ASYNC_BEGIN0(
+ "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this);
rtc::Event map_resource_event;
encoder_queue_.PostTask([this, resource, &map_resource_event] {
+ TRACE_EVENT_ASYNC_END0(
+ "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this);
RTC_DCHECK_RUN_ON(&encoder_queue_);
additional_resources_.push_back(resource);
stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu);
@@ -1610,6 +1616,12 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
if (encoder_failed_)
return;
+ // It's possible that EncodeVideoFrame can be called after we've completed
+ // a Stop() operation. Check if the encoder_ is set before continuing.
+ // See: bugs.webrtc.org/12857
+ if (!encoder_)
+ return;
+
TraceFrameDropEnd();
// Encoder metadata needs to be updated before encode complete callback.
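A minimal sketch of the matched async trace-event pair added above, which measures how long a posted task sits in the queue before it runs; the TracedWorker name and event label are placeholders.

#include "rtc_base/task_queue.h"
#include "rtc_base/trace_event.h"

class TracedWorker {
 public:
  explicit TracedWorker(rtc::TaskQueue* queue) : queue_(queue) {}

  void PostTracedWork() {
    // BEGIN fires on the posting thread, END when the task actually runs;
    // the pair shows up as queueing latency in the trace viewer.
    TRACE_EVENT_ASYNC_BEGIN0("webrtc", "TracedWorker::PostTracedWork(latency)",
                             this);
    queue_->PostTask([this] {
      TRACE_EVENT_ASYNC_END0("webrtc", "TracedWorker::PostTracedWork(latency)",
                             this);
      // ... actual work would go here ...
    });
  }

 private:
  rtc::TaskQueue* const queue_;
};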
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index 7ff3d4dca1..cbfd93e9e2 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -42,6 +42,7 @@
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
+#include "modules/video_coding/codecs/vp9/svc_config.h"
#include "modules/video_coding/utility/quality_scaler.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "rtc_base/event.h"
@@ -144,8 +145,8 @@ class FakeNativeBuffer : public webrtc::VideoFrameBuffer {
int crop_height,
int scaled_width,
int scaled_height) override {
- return new rtc::RefCountedObject<FakeNativeBuffer>(nullptr, scaled_width,
- scaled_height);
+ return rtc::make_ref_counted<FakeNativeBuffer>(nullptr, scaled_width,
+ scaled_height);
}
private:
@@ -517,7 +518,7 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
&cropped_height, &out_width, &out_height)) {
VideoFrame adapted_frame =
VideoFrame::Builder()
- .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+ .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
nullptr, out_width, out_height))
.set_ntp_time_ms(video_frame.ntp_time_ms())
.set_timestamp_ms(99)
@@ -545,6 +546,15 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
}
}
+ void OnOutputFormatRequest(int width, int height) {
+ absl::optional<std::pair<int, int>> target_aspect_ratio =
+ std::make_pair(width, height);
+ absl::optional<int> max_pixel_count = width * height;
+ absl::optional<int> max_fps;
+ adapter_.OnOutputFormatRequest(target_aspect_ratio, max_pixel_count,
+ max_fps);
+ }
+
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
MutexLock lock(&mutex_);
@@ -715,8 +725,8 @@ class VideoStreamEncoderTest : public ::testing::Test {
vp9_settings.numberOfSpatialLayers = num_spatial_layers;
vp9_settings.automaticResizeOn = num_spatial_layers <= 1;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<
- VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9_settings);
}
ConfigureEncoder(std::move(video_encoder_config), allocation_callback_type);
}
@@ -724,7 +734,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
VideoFrame CreateFrame(int64_t ntp_time_ms,
rtc::Event* destruction_event) const {
return VideoFrame::Builder()
- .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+ .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
destruction_event, codec_width_, codec_height_))
.set_ntp_time_ms(ntp_time_ms)
.set_timestamp_ms(99)
@@ -736,7 +746,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
rtc::Event* destruction_event,
int offset_x) const {
return VideoFrame::Builder()
- .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+ .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
destruction_event, codec_width_, codec_height_))
.set_ntp_time_ms(ntp_time_ms)
.set_timestamp_ms(99)
@@ -746,9 +756,10 @@ class VideoStreamEncoderTest : public ::testing::Test {
}
VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const {
+ auto buffer = rtc::make_ref_counted<TestBuffer>(nullptr, width, height);
+ I420Buffer::SetBlack(buffer.get());
return VideoFrame::Builder()
- .set_video_frame_buffer(
- new rtc::RefCountedObject<TestBuffer>(nullptr, width, height))
+ .set_video_frame_buffer(std::move(buffer))
.set_ntp_time_ms(ntp_time_ms)
.set_timestamp_ms(ntp_time_ms)
.set_rotation(kVideoRotation_0)
@@ -769,7 +780,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
int width,
int height) const {
return VideoFrame::Builder()
- .set_video_frame_buffer(new rtc::RefCountedObject<FakeNativeBuffer>(
+ .set_video_frame_buffer(rtc::make_ref_counted<FakeNativeBuffer>(
destruction_event, width, height))
.set_ntp_time_ms(ntp_time_ms)
.set_timestamp_ms(99)
@@ -782,7 +793,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
int width,
int height) const {
return VideoFrame::Builder()
- .set_video_frame_buffer(new rtc::RefCountedObject<FakeNV12NativeBuffer>(
+ .set_video_frame_buffer(rtc::make_ref_counted<FakeNV12NativeBuffer>(
destruction_event, width, height))
.set_ntp_time_ms(ntp_time_ms)
.set_timestamp_ms(99)
@@ -1305,13 +1316,15 @@ class VideoStreamEncoderTest : public ::testing::Test {
uint32_t timestamp = encoded_image.Timestamp();
if (last_timestamp_ != timestamp) {
num_received_layers_ = 1;
+ last_width_ = encoded_image._encodedWidth;
+ last_height_ = encoded_image._encodedHeight;
} else {
++num_received_layers_;
+ last_width_ = std::max(encoded_image._encodedWidth, last_width_);
+ last_height_ = std::max(encoded_image._encodedHeight, last_height_);
}
last_timestamp_ = timestamp;
last_capture_time_ms_ = encoded_image.capture_time_ms_;
- last_width_ = encoded_image._encodedWidth;
- last_height_ = encoded_image._encodedHeight;
last_rotation_ = encoded_image.rotation_;
if (num_received_layers_ == num_expected_layers_) {
encoded_frame_event_.Set();
@@ -1587,7 +1600,7 @@ TEST_F(VideoStreamEncoderTest,
NativeFrameWithoutI420SupportGetsCroppedIfNecessary) {
// Use the cropping factory.
video_encoder_config_.video_stream_factory =
- new rtc::RefCountedObject<CroppingVideoStreamFactory>();
+ rtc::make_ref_counted<CroppingVideoStreamFactory>();
video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config_),
kMaxPayloadLength);
video_stream_encoder_->WaitUntilTaskQueueIsIdle();
@@ -2071,7 +2084,7 @@ TEST_F(VideoStreamEncoderTest,
config.simulcast_layers[0].active = false;
config.simulcast_layers[1].active = true;
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2135,7 +2148,7 @@ TEST_F(VideoStreamEncoderTest,
config.simulcast_layers[0].active = false;
config.simulcast_layers[1].active = true;
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2208,7 +2221,7 @@ TEST_F(VideoStreamEncoderTest,
config.simulcast_layers[1].active = true;
config.simulcast_layers[2].active = false;
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2252,7 +2265,7 @@ TEST_F(VideoStreamEncoderTest,
config.simulcast_layers[1].active = false;
config.simulcast_layers[2].active = false;
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2288,7 +2301,7 @@ TEST_F(VideoStreamEncoderTest,
config.simulcast_layers[1].active = true;
config.simulcast_layers[1].max_bitrate_bps = kMaxBitrateBps;
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2376,7 +2389,7 @@ TEST_P(ResolutionAlignmentTest, SinkWantsAlignmentApplied) {
config.simulcast_layers[i].scale_resolution_down_by = scale_factors_[i];
}
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->ConfigureEncoder(std::move(config), kMaxPayloadLength);
@@ -3340,6 +3353,257 @@ TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) {
}
TEST_F(VideoStreamEncoderTest,
+ FpsCountReturnsToZeroForFewerAdaptationsUpThanDown) {
+ const int kWidth = 640;
+ const int kHeight = 360;
+ const int64_t kFrameIntervalMs = 150;
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
+
+ // Enable BALANCED preference, no initial limitation.
+ AdaptingFrameForwarder source(&time_controller_);
+ source.set_adaptation_enabled(true);
+ video_stream_encoder_->SetSource(&source,
+ webrtc::DegradationPreference::BALANCED);
+
+ int64_t timestamp_ms = kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(kWidth, kHeight);
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt down, expect reduced fps (640x360@15fps).
+ video_stream_encoder_->TriggerQualityLow();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMatchesResolutionMax(Lt(kDefaultFramerate)));
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Source requests 270p, expect reduced resolution (480x270@15fps).
+ source.OnOutputFormatRequest(480, 270);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(480, 270);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt down, expect reduced fps (480x270@10fps).
+ video_stream_encoder_->TriggerQualityLow();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Source requests QVGA, expect reduced resolution (320x180@10fps).
+ source.OnOutputFormatRequest(320, 180);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(320, 180);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt down, expect reduced fps (320x180@7fps).
+ video_stream_encoder_->TriggerQualityLow();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Source requests VGA, expect increased resolution (640x360@7fps).
+ source.OnOutputFormatRequest(640, 360);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt up, expect increased fps (640x360@(max-2)fps).
+ video_stream_encoder_->TriggerQualityHigh();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt up, expect increased fps (640x360@(max-1)fps).
+ video_stream_encoder_->TriggerQualityHigh();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt up, expect increased fps (640x360@maxfps).
+ video_stream_encoder_->TriggerQualityHigh();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ video_stream_encoder_->Stop();
+}
+
+TEST_F(VideoStreamEncoderTest,
+ FpsCountReturnsToZeroForFewerAdaptationsUpThanDownWithTwoResources) {
+ const int kWidth = 1280;
+ const int kHeight = 720;
+ const int64_t kFrameIntervalMs = 150;
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps),
+ DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
+
+ // Enable BALANCED preference, no initial limitation.
+ AdaptingFrameForwarder source(&time_controller_);
+ source.set_adaptation_enabled(true);
+ video_stream_encoder_->SetSource(&source,
+ webrtc::DegradationPreference::BALANCED);
+
+ int64_t timestamp_ms = kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(kWidth, kHeight);
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt down, expect scaled down resolution (960x540@maxfps).
+ video_stream_encoder_->TriggerQualityLow();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt down, expect scaled down resolution (640x360@maxfps).
+ video_stream_encoder_->TriggerQualityLow();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ sink_.WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Trigger adapt down, expect reduced fps (640x360@15fps).
+ video_stream_encoder_->TriggerQualityLow();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+
+ // Source requests QVGA, expect reduced resolution (320x180@15fps).
+ source.OnOutputFormatRequest(320, 180);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(320, 180);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+
+ // Trigger adapt down, expect reduced fps (320x180@7fps).
+ video_stream_encoder_->TriggerCpuOveruse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+
+ // Source requests HD, expect increased resolution (640x360@7fps).
+ source.OnOutputFormatRequest(1280, 720);
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+
+ // Trigger adapt up, expect increased fps (640x360@(max-1)fps).
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+
+ // Trigger adapt up, expect increased fps (640x360@maxfps).
+ video_stream_encoder_->TriggerQualityHigh();
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+
+ // Trigger adapt up, expect increased resolution (960x540@maxfps).
+ video_stream_encoder_->TriggerQualityHigh();
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
+ EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+
+ // Trigger adapt up, expect increased resolution (1280x720@maxfps).
+ video_stream_encoder_->TriggerQualityHigh();
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
+ EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
+ EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+
+ video_stream_encoder_->Stop();
+}
+
+TEST_F(VideoStreamEncoderTest,
NoChangeForInitialNormalUsage_MaintainFramerateMode) {
const int kWidth = 1280;
const int kHeight = 720;
@@ -4406,7 +4670,7 @@ TEST_F(VideoStreamEncoderTest,
video_encoder_config.content_type =
VideoEncoderConfig::ContentType::kRealtimeVideo;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
VideoEncoder::GetDefaultVp8Settings());
for (auto& layer : video_encoder_config.simulcast_layers) {
layer.num_temporal_layers = 2;
@@ -4451,7 +4715,7 @@ TEST_F(VideoStreamEncoderTest,
video_encoder_config.content_type =
VideoEncoderConfig::ContentType::kRealtimeVideo;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
VideoEncoder::GetDefaultVp8Settings());
for (auto& layer : video_encoder_config.simulcast_layers) {
layer.num_temporal_layers = 2;
@@ -4500,7 +4764,7 @@ TEST_F(VideoStreamEncoderTest,
vp9_settings.interLayerPred = InterLayerPredMode::kOn;
vp9_settings.automaticResizeOn = false;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
ConfigureEncoder(std::move(video_encoder_config),
VideoStreamEncoder::BitrateAllocationCallbackType::
@@ -4555,7 +4819,7 @@ TEST_F(VideoStreamEncoderTest,
vp9_settings.interLayerPred = InterLayerPredMode::kOn;
vp9_settings.automaticResizeOn = false;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
ConfigureEncoder(std::move(video_encoder_config),
VideoStreamEncoder::BitrateAllocationCallbackType::
@@ -4603,7 +4867,7 @@ TEST_F(VideoStreamEncoderTest,
vp9_settings.interLayerPred = InterLayerPredMode::kOnKeyPic;
vp9_settings.automaticResizeOn = false;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
ConfigureEncoder(std::move(video_encoder_config),
VideoStreamEncoder::BitrateAllocationCallbackType::
@@ -4651,7 +4915,7 @@ TEST_F(VideoStreamEncoderTest,
vp9_settings.interLayerPred = InterLayerPredMode::kOn;
vp9_settings.automaticResizeOn = false;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
// Simulcast layers are used for enabling/disabling streams.
video_encoder_config.simulcast_layers.resize(3);
@@ -4710,7 +4974,7 @@ TEST_F(VideoStreamEncoderTest,
vp9_settings.interLayerPred = InterLayerPredMode::kOn;
vp9_settings.automaticResizeOn = false;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
// Simulcast layers are used for enabling/disabling streams.
video_encoder_config.simulcast_layers.resize(3);
@@ -4762,7 +5026,7 @@ TEST_F(VideoStreamEncoderTest,
vp9_settings.interLayerPred = InterLayerPredMode::kOn;
vp9_settings.automaticResizeOn = false;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
// Simulcast layers are used for enabling/disabling streams.
video_encoder_config.simulcast_layers.resize(3);
@@ -5362,7 +5626,7 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenLayersChange) {
test::FillEncoderConfiguration(PayloadStringToCodecType("VP8"), 3,
&video_encoder_config);
video_encoder_config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
for (auto& layer : video_encoder_config.simulcast_layers) {
@@ -5429,7 +5693,7 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenSVCLayersChange) {
// Since only one layer is active - automatic resize should be enabled.
vp9_settings.automaticResizeOn = true;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
video_encoder_config.content_type =
@@ -5488,7 +5752,7 @@ TEST_F(VideoStreamEncoderTest,
// Since only one layer is active - automatic resize should be enabled.
vp9_settings.automaticResizeOn = true;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
video_encoder_config.content_type =
@@ -5546,7 +5810,7 @@ TEST_F(VideoStreamEncoderTest,
// Since only one layer is active - automatic resize should be enabled.
vp9_settings.automaticResizeOn = true;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
video_encoder_config.content_type =
@@ -5611,7 +5875,7 @@ TEST_F(VideoStreamEncoderTest, DefaultMaxAndMinBitratesNotUsedIfDisabled) {
// Since only one layer is active - automatic resize should be enabled.
vp9_settings.automaticResizeOn = true;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
video_encoder_config.content_type =
@@ -5687,7 +5951,7 @@ TEST_F(VideoStreamEncoderTest,
// Since only one layer is active - automatic resize should be enabled.
vp9_settings.automaticResizeOn = true;
video_encoder_config.encoder_specific_settings =
- new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
vp9_settings);
video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
video_encoder_config.content_type =
@@ -6348,7 +6612,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(7, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger adapt up, expect expect increased fps (320x180@10fps).
+ // Trigger adapt up, expect increased fps (320x180@10fps).
video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
@@ -6685,7 +6949,7 @@ TEST_F(VideoStreamEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) {
video_encoder_config.simulcast_layers[0].max_framerate = kFramerate;
video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
video_encoder_config.video_stream_factory =
- new rtc::RefCountedObject<CroppingVideoStreamFactory>();
+ rtc::make_ref_counted<CroppingVideoStreamFactory>();
video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
kMaxPayloadLength);
video_stream_encoder_->WaitUntilTaskQueueIsIdle();
@@ -7650,7 +7914,7 @@ TEST_F(VideoStreamEncoderTest, EncoderResetAccordingToParameterChange) {
config.simulcast_layers[i].active = true;
}
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
@@ -7778,7 +8042,7 @@ TEST_F(VideoStreamEncoderTest, EncoderResolutionsExposedInSimulcast) {
config.simulcast_layers[i].active = true;
}
config.video_stream_factory =
- new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
"VP8", /*max qp*/ 56, /*screencast*/ false,
/*screenshare enabled*/ false);
video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
@@ -7831,10 +8095,13 @@ TEST_F(VideoStreamEncoderTest, EncoderResolutionsExposedInSimulcast) {
}
TEST_F(VideoStreamEncoderTest, QpPresent_QpKept) {
- // Enable encoder source to force encoder reconfig.
- encoder_factory_.SetHasInternalSource(true);
ResetEncoder("VP8", 1, 1, 1, false);
+ // Force encoder reconfig.
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(1, codec_width_, codec_height_));
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
// Set QP on encoded frame and pass the frame to encode complete callback.
// Since QP is present QP parsing won't be triggered and the original value
// should be kept.
@@ -7851,10 +8118,13 @@ TEST_F(VideoStreamEncoderTest, QpPresent_QpKept) {
}
TEST_F(VideoStreamEncoderTest, QpAbsent_QpParsed) {
- // Enable encoder source to force encoder reconfig.
- encoder_factory_.SetHasInternalSource(true);
ResetEncoder("VP8", 1, 1, 1, false);
+ // Force encoder reconfig.
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(1, codec_width_, codec_height_));
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
// Pass an encoded frame without QP to encode complete callback. QP should be
// parsed and set.
EncodedImage encoded_image;
@@ -7873,10 +8143,13 @@ TEST_F(VideoStreamEncoderTest, QpAbsentParsingDisabled_QpAbsent) {
webrtc::test::ScopedFieldTrials field_trials(
"WebRTC-QpParsingKillSwitch/Enabled/");
- // Enable encoder source to force encoder reconfig.
- encoder_factory_.SetHasInternalSource(true);
ResetEncoder("VP8", 1, 1, 1, false);
+ // Force encoder reconfig.
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(1, codec_width_, codec_height_));
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
EncodedImage encoded_image;
encoded_image.qp_ = -1;
encoded_image.SetEncodedData(EncodedImageBuffer::Create(
@@ -8066,6 +8339,133 @@ TEST_P(VideoStreamEncoderWithRealEncoderTest, EncoderMapsNativeNV12) {
}
}
+TEST_P(VideoStreamEncoderWithRealEncoderTest, HandlesLayerToggling) {
+ if (codec_type_ == kVideoCodecMultiplex) {
+ // Multiplex codec here uses wrapped mock codecs; skip it in this test.
+ return;
+ }
+
+ const size_t kNumSpatialLayers = 3u;
+ const float kDownscaleFactors[] = {4.0, 2.0, 1.0};
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+ const rtc::VideoSinkWants::FrameSize kLayer0Size(
+ kFrameWidth / kDownscaleFactors[0], kFrameHeight / kDownscaleFactors[0]);
+ const rtc::VideoSinkWants::FrameSize kLayer1Size(
+ kFrameWidth / kDownscaleFactors[1], kFrameHeight / kDownscaleFactors[1]);
+ const rtc::VideoSinkWants::FrameSize kLayer2Size(
+ kFrameWidth / kDownscaleFactors[2], kFrameHeight / kDownscaleFactors[2]);
+
+ VideoEncoderConfig config;
+ if (codec_type_ == VideoCodecType::kVideoCodecVP9) {
+ test::FillEncoderConfiguration(codec_type_, 1, &config);
+ config.max_bitrate_bps = kSimulcastTargetBitrateBps;
+ VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
+ vp9_settings.numberOfSpatialLayers = kNumSpatialLayers;
+ vp9_settings.numberOfTemporalLayers = 3;
+ vp9_settings.automaticResizeOn = false;
+ config.encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9_settings);
+ config.spatial_layers = GetSvcConfig(kFrameWidth, kFrameHeight,
+ /*fps=*/30.0,
+ /*first_active_layer=*/0,
+ /*num_spatial_layers=*/3,
+ /*num_temporal_layers=*/3,
+ /*is_screenshare=*/false);
+ } else if (codec_type_ == VideoCodecType::kVideoCodecAV1) {
+ test::FillEncoderConfiguration(codec_type_, 1, &config);
+ config.max_bitrate_bps = kSimulcastTargetBitrateBps;
+ config.spatial_layers = GetSvcConfig(kFrameWidth, kFrameHeight,
+ /*fps=*/30.0,
+ /*first_active_layer=*/0,
+ /*num_spatial_layers=*/3,
+ /*num_temporal_layers=*/3,
+ /*is_screenshare=*/false);
+ config.simulcast_layers[0].scalability_mode = "L3T3_KEY";
+ } else {
+ // Simulcast for VP8/H264.
+ test::FillEncoderConfiguration(codec_type_, kNumSpatialLayers, &config);
+ for (size_t i = 0; i < kNumSpatialLayers; ++i) {
+ config.simulcast_layers[i].scale_resolution_down_by =
+ kDownscaleFactors[i];
+ config.simulcast_layers[i].active = true;
+ }
+ if (codec_type_ == VideoCodecType::kVideoCodecH264) {
+ // Turn off frame dropping to prevent flakiness.
+ VideoCodecH264 h264_settings = VideoEncoder::GetDefaultH264Settings();
+ h264_settings.frameDroppingOn = false;
+ config.encoder_specific_settings = rtc::make_ref_counted<
+ VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
+ }
+ }
+
+ auto set_layer_active = [&](int layer_idx, bool active) {
+ if (codec_type_ == VideoCodecType::kVideoCodecVP9 ||
+ codec_type_ == VideoCodecType::kVideoCodecAV1) {
+ config.spatial_layers[layer_idx].active = active;
+ } else {
+ config.simulcast_layers[layer_idx].active = active;
+ }
+ };
+
+ config.video_stream_factory =
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
+ CodecTypeToPayloadString(codec_type_), /*max qp*/ 56,
+ /*screencast*/ false,
+ /*screenshare enabled*/ false);
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::BitsPerSec(kSimulcastTargetBitrateBps),
+ DataRate::BitsPerSec(kSimulcastTargetBitrateBps),
+ DataRate::BitsPerSec(kSimulcastTargetBitrateBps), 0, 0, 0);
+
+ // Capture a frame with all layers active.
+ sink_.SetNumExpectedLayers(kNumSpatialLayers);
+ video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
+ int64_t timestamp_ms = kFrameIntervalMs;
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+
+ WaitForEncodedFrame(kLayer2Size.width, kLayer2Size.height);
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+ // Capture a frame with one of the layers inactive.
+ set_layer_active(2, false);
+ sink_.SetNumExpectedLayers(kNumSpatialLayers - 1);
+ video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
+ timestamp_ms += kFrameIntervalMs;
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ WaitForEncodedFrame(kLayer1Size.width, kLayer1Size.height);
+
+ // New target bitrates signaled based on lower resolution.
+ DataRate kTwoLayerBitrate = DataRate::KilobitsPerSec(833);
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ kTwoLayerBitrate, kTwoLayerBitrate, kTwoLayerBitrate, 0, 0, 0);
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+ // Re-enable the top layer.
+ set_layer_active(2, true);
+ sink_.SetNumExpectedLayers(kNumSpatialLayers);
+ video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+ // Bitrate target adjusted back up to enable HD layer...
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+ DataRate::KilobitsPerSec(1800), DataRate::KilobitsPerSec(1800),
+ DataRate::KilobitsPerSec(1800), 0, 0, 0);
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+ // ...then add a new frame.
+ timestamp_ms += kFrameIntervalMs;
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
+ WaitForEncodedFrame(kLayer2Size.width, kLayer2Size.height);
+ video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+ video_stream_encoder_->Stop();
+}
+
std::string TestParametersVideoCodecAndAllowI420ConversionToString(
testing::TestParamInfo<std::pair<VideoCodecType, bool>> info) {
VideoCodecType codec_type = std::get<0>(info.param);
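Note on the recurring change in the hunks above: explicit new rtc::RefCountedObject<T>(...) construction is replaced by rtc::make_ref_counted<T>(...), which hands back an rtc::scoped_refptr directly. The following is only a minimal sketch of that pattern; ExampleSettings and BuildSettings are hypothetical names and the header paths are assumed, not taken from this patch.

// Sketch of the ref-counted construction change applied throughout the test
// hunks above. ExampleSettings is a hypothetical stand-in for types such as
// the encoder-specific settings classes; header paths are assumed.
#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"           // rtc::RefCountInterface (assumed path)
#include "rtc_base/ref_counted_object.h"  // RefCountedObject, make_ref_counted (assumed path)

namespace {

class ExampleSettings : public rtc::RefCountInterface {
 public:
  explicit ExampleSettings(int max_qp) : max_qp_(max_qp) {}
  int max_qp() const { return max_qp_; }

 private:
  const int max_qp_;
};

void BuildSettings() {
  // Old pattern: the caller spells out the RefCountedObject wrapper.
  rtc::scoped_refptr<ExampleSettings> old_style(
      new rtc::RefCountedObject<ExampleSettings>(/*max_qp=*/56));

  // New pattern: make_ref_counted chooses the wrapper and returns a
  // scoped_refptr, matching the replacements in the hunks above.
  rtc::scoped_refptr<ExampleSettings> new_style =
      rtc::make_ref_counted<ExampleSettings>(/*max_qp=*/56);
}

}  // namespace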
diff --git a/webrtc.gni b/webrtc.gni
index ac6c325cbc..559078db34 100644
--- a/webrtc.gni
+++ b/webrtc.gni
@@ -207,9 +207,9 @@ declare_args() {
rtc_win_undef_unicode = false
# When set to true, a capturer implementation that uses the
- # Windows.Graphics.Capture APIs will be available for use. These APIs are
- # available in the Win 10 SDK v10.0.19041.
- rtc_enable_win_wgc = false
+ # Windows.Graphics.Capture APIs will be available for use. This introduces a
+ # dependency on the Win 10 SDK v10.0.17763.0.
+ rtc_enable_win_wgc = is_win
}
if (!build_with_mozilla) {
@@ -286,6 +286,9 @@ declare_args() {
}
declare_args() {
+ # Enable the dcsctp backend for DataChannels and related unittests
+ rtc_build_dcsctp = !build_with_mozilla && rtc_enable_sctp
+
# Enable the usrsctp backend for DataChannels and related unittests
rtc_build_usrsctp = !build_with_mozilla && rtc_enable_sctp
}
diff --git a/webrtc_lib_link_test.cc b/webrtc_lib_link_test.cc
index 37e1b14eae..055bd969ff 100644
--- a/webrtc_lib_link_test.cc
+++ b/webrtc_lib_link_test.cc
@@ -65,9 +65,10 @@ void TestCase1ModularFactory() {
auto peer_connection_factory =
webrtc::CreateModularPeerConnectionFactory(std::move(pcf_deps));
webrtc::PeerConnectionInterface::RTCConfiguration rtc_config;
- auto peer_connection = peer_connection_factory->CreatePeerConnection(
- rtc_config, nullptr, nullptr, nullptr);
- printf("peer_connection=%s\n", peer_connection == nullptr ? "nullptr" : "ok");
+ auto result = peer_connection_factory->CreatePeerConnectionOrError(
+ rtc_config, PeerConnectionDependencies(nullptr));
+ // Creation will fail because of null observer, but that's OK.
+ printf("peer_connection creation=%s\n", result.ok() ? "succeeded" : "failed");
}
void TestCase2RegularFactory() {
@@ -81,9 +82,10 @@ void TestCase2RegularFactory() {
std::move(media_deps.video_encoder_factory),
std::move(media_deps.video_decoder_factory), nullptr, nullptr);
webrtc::PeerConnectionInterface::RTCConfiguration rtc_config;
- auto peer_connection = peer_connection_factory->CreatePeerConnection(
- rtc_config, nullptr, nullptr, nullptr);
- printf("peer_connection=%s\n", peer_connection == nullptr ? "nullptr" : "ok");
+ auto result = peer_connection_factory->CreatePeerConnectionOrError(
+ rtc_config, PeerConnectionDependencies(nullptr));
+ // Creation will fail because of null observer, but that's OK.
+ printf("peer_connection creation=%s\n", result.ok() ? "succeeded" : "failed");
}
} // namespace webrtc
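For reference, a minimal sketch of the factory-call migration exercised above: CreatePeerConnection with a nullable return gives way to CreatePeerConnectionOrError, which returns an RTCErrorOr. The helper name CreatePc and its factory/observer parameters are illustrative only.

// Sketch only; mirrors the API change used in the link test above.
#include "api/peer_connection_interface.h"
#include "rtc_base/logging.h"

rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePc(
    webrtc::PeerConnectionFactoryInterface* factory,
    webrtc::PeerConnectionObserver* observer) {
  webrtc::PeerConnectionInterface::RTCConfiguration rtc_config;
  // Old API: factory->CreatePeerConnection(rtc_config, nullptr, nullptr,
  // observer) returned nullptr on failure.
  // New API: an RTCErrorOr carries either the connection or an RTCError.
  auto result = factory->CreatePeerConnectionOrError(
      rtc_config, webrtc::PeerConnectionDependencies(observer));
  if (!result.ok()) {
    RTC_LOG(LS_ERROR) << "PeerConnection creation failed: "
                      << result.error().message();
    return nullptr;
  }
  return result.MoveValue();
}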
diff --git a/windows_x86_64.cmake b/windows_x86_64.cmake
index 6969d28f77..36c95d4641 100644
--- a/windows_x86_64.cmake
+++ b/windows_x86_64.cmake
@@ -1,4 +1,4 @@
-# Generated on 04/21/21 for target: Windows
+# Generated on 06/23/21 for target: Windows
# This is an autogenerated file by calling:
# ./import-webrtc.py --target webrtc_api_video_codecs_builtin_video_decoder_factory --target webrtc_api_video_codecs_builtin_video_encoder_factory --target webrtc_api_libjingle_peerconnection_api --target webrtc_pc_peerconnection --target webrtc_api_create_peerconnection_factory --target webrtc_api_audio_codecs_builtin_audio_decoder_factory --target webrtc_api_audio_codecs_builtin_audio_encoder_factory --target webrtc_common_audio_common_audio_unittests --target webrtc_common_video_common_video_unittests --target webrtc_media_rtc_media_unittests --target webrtc_modules_audio_coding_audio_decoder_unittests --target webrtc_pc_peerconnection_unittests --target webrtc_pc_rtc_pc_unittests --root /usr/local/google/home/jansene/src/webrtc_g3/ --platform Windows BUILD .
@@ -6,11 +6,11 @@
# Re-running this script will require you to merge in the latest upstream-master for webrtc
# Expecting jsoncpp at 9059f5cad030ba11d37818847443a53918c327b1
-# Expecting libaom at 6c93db7ff63430d2e5dcdfc42e84e3a0514da608
-# Expecting libsrtp2 at 7990ca64c616b150a9cb4714601c4a3b0c84fe91
+# Expecting libaom at 12287adee94fc3b1f5349d3f4bd85cea4e57f62b
+# Expecting libsrtp2 at 5b7c744eb8310250ccc534f3f86a2015b3887a0a
# Expecting libvpx at 61edec1efbea1c02d71857e2aff9426d9cd2df4e
-# Expecting libyuv at 64994843e652443df2d5201c6ae3fb725097360f
-# Expecting usrsctp at 70d42ae95a1de83bd317c8cc9503f894671d1392
+# Expecting libyuv at 49ebc996aa8c4bdf89c1b5ea461eb677234c61cc
+# Expecting usrsctp at 22ba62ffe79c3881581ab430368bf3764d9533eb
@@ -294,12 +294,12 @@ target_include_directories(webrtc_api_call_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_
# api:callfactory_api
add_library(webrtc_api_callfactory_api INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_callfactory_api INTERFACE webrtc_call_rtp_interfaces webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_callfactory_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:callfactory_api.headers
add_library(webrtc_api_callfactory_api.headers INTERFACE)
-target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_callfactory_api.headers INTERFACE webrtc_call_rtp_interfaces.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_callfactory_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:create_frame_generator
@@ -328,7 +328,7 @@ add_library(webrtc_api_create_time_controller ${WEBRTC_ROOT}/api/test/create_tim
target_include_directories(webrtc_api_create_time_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_create_time_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_create_time_controller PRIVATE -GR-)
-target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_api_create_time_controller PUBLIC webrtc_api_callfactory_api webrtc_api_time_controller webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_test_time_controller_time_controller )
# api/crypto:frame_decryptor_interface
add_library(webrtc_api_crypto_frame_decryptor_interface INTERFACE)
@@ -429,15 +429,15 @@ add_library(webrtc_api_libjingle_logging_api.headers INTERFACE)
target_include_directories(webrtc_api_libjingle_logging_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:libjingle_peerconnection_api
-add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/proxy.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
+add_library(webrtc_api_libjingle_peerconnection_api ${WEBRTC_ROOT}/api/candidate.cc ${WEBRTC_ROOT}/api/data_channel_interface.cc ${WEBRTC_ROOT}/api/dtls_transport_interface.cc ${WEBRTC_ROOT}/api/jsep.cc ${WEBRTC_ROOT}/api/jsep_ice_candidate.cc ${WEBRTC_ROOT}/api/peer_connection_interface.cc ${WEBRTC_ROOT}/api/rtp_receiver_interface.cc ${WEBRTC_ROOT}/api/rtp_sender_interface.cc ${WEBRTC_ROOT}/api/rtp_transceiver_interface.cc ${WEBRTC_ROOT}/api/sctp_transport_interface.cc ${WEBRTC_ROOT}/api/stats_types.cc)
target_include_directories(webrtc_api_libjingle_peerconnection_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_libjingle_peerconnection_api PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_libjingle_peerconnection_api PRIVATE -GR-)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api PUBLIC absl::algorithm absl::algorithm_container absl::config absl::core_headers absl::algorithm_container absl::memory absl::type_traits absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_logging_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_packet_info webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_enums webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_call_rtp_interfaces webrtc_media_rtc_media_base webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_p2p_rtc_p2p.headers webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# api:libjingle_peerconnection_api.headers
add_library(webrtc_api_libjingle_peerconnection_api.headers INTERFACE)
-target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_libjingle_peerconnection_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_async_dns_resolver.headers webrtc_api_audio_options_api.headers webrtc_api_callfactory_api.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_libjingle_logging_api.headers webrtc_api_media_stream_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_packet_socket_factory.headers webrtc_api_priority.headers webrtc_api_rtc_error.headers webrtc_api_rtc_stats_api.headers webrtc_api_rtp_packet_info.headers webrtc_api_rtp_parameters.headers webrtc_api_rtp_transceiver_direction.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_enums.headers webrtc_api_transport_network_control.headers webrtc_api_transport_sctp_transport_factory_interface.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_units_data_rate.headers webrtc_api_units_timestamp.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_call_rtp_interfaces.headers webrtc_media_rtc_media_base.headers webrtc_media_rtc_media_config.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_libjingle_peerconnection_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:media_stream_interface
@@ -573,12 +573,12 @@ target_include_directories(webrtc_api_priority.headers INTERFACE ${WEBRTC_ROOT}
# api:refcountedbase
add_library(webrtc_api_refcountedbase INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_api_refcountedbase INTERFACE webrtc_rtc_base_macromagic webrtc_rtc_base_refcount )
target_include_directories(webrtc_api_refcountedbase INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:refcountedbase.headers
add_library(webrtc_api_refcountedbase.headers INTERFACE)
-target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_api_refcountedbase.headers INTERFACE webrtc_rtc_base_macromagic.headers webrtc_rtc_base_refcount.headers )
target_include_directories(webrtc_api_refcountedbase.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_error
@@ -621,12 +621,12 @@ target_link_libraries(webrtc_api_rtc_event_log_rtc_event_log_factory PUBLIC webr
# api:rtc_stats_api
add_library(webrtc_api_rtc_stats_api INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtc_stats_api INTERFACE webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
target_include_directories(webrtc_api_rtc_stats_api INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtc_stats_api.headers
add_library(webrtc_api_rtc_stats_api.headers INTERFACE)
-target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtc_stats_api.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtc_stats_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_headers
@@ -646,11 +646,11 @@ add_library(webrtc_api_rtp_packet_info ${WEBRTC_ROOT}/api/rtp_packet_info.cc)
target_include_directories(webrtc_api_rtp_packet_info PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_rtp_packet_info PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_rtp_packet_info PRIVATE -GR-)
-target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_rtp_packet_info PUBLIC absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_scoped_refptr webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api:rtp_packet_info.headers
add_library(webrtc_api_rtp_packet_info.headers INTERFACE)
-target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_rtp_packet_info.headers INTERFACE webrtc_api_array_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_scoped_refptr.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_rtp_packet_info.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api:rtp_parameters
@@ -767,6 +767,11 @@ add_library(webrtc_api_transport_datagram_transport_interface INTERFACE)
target_link_libraries(webrtc_api_transport_datagram_transport_interface INTERFACE absl::optional webrtc_api_array_view webrtc_api_rtc_error webrtc_rtc_base_rtc_base_approved )
target_include_directories(webrtc_api_transport_datagram_transport_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# api/transport:datagram_transport_interface.headers
+add_library(webrtc_api_transport_datagram_transport_interface.headers INTERFACE)
+target_link_libraries(webrtc_api_transport_datagram_transport_interface.headers INTERFACE webrtc_api_array_view.headers webrtc_api_rtc_error.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_api_transport_datagram_transport_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# api/transport:enums
add_library(webrtc_api_transport_enums INTERFACE)
target_include_directories(webrtc_api_transport_enums INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -957,15 +962,15 @@ target_compile_options(webrtc_api_video_codecs_rtc_software_fallback_wrappers PR
target_link_libraries(webrtc_api_video_codecs_rtc_software_fallback_wrappers PUBLIC absl::core_headers absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_media_rtc_media_base webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# api/video_codecs:video_codecs_api
-add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder_factory.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
+add_library(webrtc_api_video_codecs_video_codecs_api ${WEBRTC_ROOT}/api/video_codecs/h264_profile_level_id.cc ${WEBRTC_ROOT}/api/video_codecs/sdp_video_format.cc ${WEBRTC_ROOT}/api/video_codecs/spatial_layer.cc ${WEBRTC_ROOT}/api/video_codecs/video_codec.cc ${WEBRTC_ROOT}/api/video_codecs/video_decoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder.cc ${WEBRTC_ROOT}/api/video_codecs/video_encoder_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_frame_config.cc ${WEBRTC_ROOT}/api/video_codecs/vp8_temporal_layers.cc ${WEBRTC_ROOT}/api/video_codecs/vp9_profile.cc)
target_include_directories(webrtc_api_video_codecs_video_codecs_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_api_video_codecs_video_codecs_api PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_api_video_codecs_video_codecs_api PRIVATE -GR-)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api PUBLIC absl::algorithm_container absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_units_data_rate webrtc_api_video_encoded_image webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_rtc_export )
# api/video_codecs:video_codecs_api.headers
add_library(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE)
-target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
+target_link_libraries(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_scoped_refptr.headers webrtc_api_units_data_rate.headers webrtc_api_video_encoded_image.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_codec_constants.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_system_rtc_export.headers )
target_include_directories(webrtc_api_video_codecs_video_codecs_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# api/video_codecs:vp8_temporal_layers_factory
@@ -1144,7 +1149,7 @@ add_library(webrtc_audio_audio ${WEBRTC_ROOT}/audio/audio_level.cc ${WEBRTC_ROOT
target_include_directories(webrtc_audio_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_audio_audio PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_audio_audio PRIVATE -GR-)
-target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_audio_audio PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_api_call_api webrtc_api_frame_transformer_interface webrtc_api_function_view webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_aec3_factory webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_rtp_rtp_source webrtc_audio_utility_audio_frame_operations webrtc_call_audio_sender_interface webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_logging_rtc_event_audio webrtc_logging_rtc_stream_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_audio_coding webrtc_modules_audio_coding_audio_coding_module_typedefs webrtc_modules_audio_coding_audio_encoder_cng webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_modules_audio_coding_red webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_proxies webrtc_modules_audio_processing_rms_level webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# audio/utility:audio_frame_operations
add_library(webrtc_audio_utility_audio_frame_operations ${WEBRTC_ROOT}/audio/utility/audio_frame_operations.cc ${WEBRTC_ROOT}/audio/utility/channel_mixer.cc ${WEBRTC_ROOT}/audio/utility/channel_mixing_matrix.cc)
@@ -1201,18 +1206,18 @@ add_library(webrtc_call_call ${WEBRTC_ROOT}/call/call.cc ${WEBRTC_ROOT}/call/cal
target_include_directories(webrtc_call_call PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call PRIVATE -GR-)
-target_link_libraries(webrtc_call_call PUBLIC absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
+target_link_libraries(webrtc_call_call PUBLIC absl::bind_front absl::optional webrtc_api_array_view webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_transport_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_network_control webrtc_api_units_time_delta webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_fake_network webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_version webrtc_call_video_stream_api webrtc_call_adaptation_resource_adaptation webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_congestion_controller_congestion_controller webrtc_modules_pacing_pacing webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_video )
# call:call_interfaces
add_library(webrtc_call_call_interfaces ${WEBRTC_ROOT}/call/audio_receive_stream.cc ${WEBRTC_ROOT}/call/audio_send_stream.cc ${WEBRTC_ROOT}/call/audio_state.cc ${WEBRTC_ROOT}/call/call_config.cc ${WEBRTC_ROOT}/call/flexfec_receive_stream.cc ${WEBRTC_ROOT}/call/syncable.cc)
target_include_directories(webrtc_call_call_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_call_interfaces PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_call_interfaces PRIVATE -GR-)
-target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_call_audio_sender_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
+target_link_libraries(webrtc_call_call_interfaces PUBLIC absl::bind_front absl::optional webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_call_audio_sender_interface webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_call_video_stream_api webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_network_sent_packet )
# call:call_interfaces.headers
add_library(webrtc_call_call_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_call_audio_sender_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
+target_link_libraries(webrtc_call_call_interfaces.headers INTERFACE webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtc_error.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_audio_mixer_api.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_neteq_neteq_api.headers webrtc_api_task_queue_task_queue.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_call_audio_sender_interface.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_call_video_stream_api.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_device_audio_device.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_audio_format_to_string.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_network_sent_packet.headers )
target_include_directories(webrtc_call_call_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:fake_network
@@ -1227,16 +1232,26 @@ add_library(webrtc_call_mock_rtp_interfaces INTERFACE)
target_link_libraries(webrtc_call_mock_rtp_interfaces INTERFACE webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_bitrate_settings webrtc_call_rtp_interfaces webrtc_modules_pacing_pacing webrtc_rtc_base_rtc_base webrtc_rtc_base_rate_limiter webrtc_rtc_base_network_sent_packet webrtc_test_test_support )
target_include_directories(webrtc_call_mock_rtp_interfaces INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# call:receive_stream_interface
+add_library(webrtc_call_receive_stream_interface INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface INTERFACE webrtc_api_frame_transformer_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source )
+target_include_directories(webrtc_call_receive_stream_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# call:receive_stream_interface.headers
+add_library(webrtc_call_receive_stream_interface.headers INTERFACE)
+target_link_libraries(webrtc_call_receive_stream_interface.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_transport_rtp_rtp_source.headers )
+target_include_directories(webrtc_call_receive_stream_interface.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
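# Note on the pattern above: each generated target `X` is paired with an `X.headers`
# INTERFACE library that propagates only include directories and the `.headers`
# variants of its dependencies. A hedged sketch of a consumer (hypothetical target
# `my_tool`) that only needs the include paths, versus one taking the full target:
#   target_link_libraries(my_tool_headers_only PRIVATE webrtc_call_receive_stream_interface.headers)
#   target_link_libraries(my_tool PRIVATE webrtc_call_receive_stream_interface)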
# call:rtp_interfaces
add_library(webrtc_call_rtp_interfaces ${WEBRTC_ROOT}/call/rtp_config.cc)
target_include_directories(webrtc_call_rtp_interfaces PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_rtp_interfaces PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_rtp_interfaces PRIVATE -GR-)
-target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_rtp_interfaces PUBLIC absl::algorithm_container absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_transport_bitrate_settings webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_timestamp webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue )
# call:rtp_interfaces.headers
add_library(webrtc_call_rtp_interfaces.headers INTERFACE)
-target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_rtp_interfaces.headers INTERFACE webrtc_api_array_view.headers webrtc_api_fec_controller_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_network_state_predictor_api.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_crypto_options.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_transport_bitrate_settings.headers webrtc_api_transport_network_control.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_units_timestamp.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_modules_utility_utility.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers )
target_include_directories(webrtc_call_rtp_interfaces.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# call:rtp_receiver
@@ -1276,11 +1291,11 @@ add_library(webrtc_call_video_stream_api ${WEBRTC_ROOT}/call/video_receive_strea
target_include_directories(webrtc_call_video_stream_api PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_call_video_stream_api PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_call_video_stream_api PRIVATE -GR-)
-target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_call_video_stream_api PUBLIC absl::optional webrtc_api_frame_transformer_interface webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_transport_api webrtc_api_adaptation_resource_adaptation_api webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_receive_stream_interface webrtc_call_rtp_interfaces webrtc_common_video_common_video webrtc_common_video_frame_counts webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
# call:video_stream_api.headers
add_library(webrtc_call_video_stream_api.headers INTERFACE)
-target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_call_video_stream_api.headers INTERFACE webrtc_api_frame_transformer_interface.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_transport_api.headers webrtc_api_adaptation_resource_adaptation_api.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_video_recordable_encoded_frame.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_video_stream_encoder.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_receive_stream_interface.headers webrtc_call_rtp_interfaces.headers webrtc_common_video_common_video.headers webrtc_common_video_frame_counts.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_call_video_stream_api.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# common_audio
@@ -1439,11 +1454,11 @@ add_library(webrtc_logging_ice_log ${WEBRTC_ROOT}/logging/rtc_event_log/events/r
target_include_directories(webrtc_logging_ice_log PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_ice_log PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_ice_log PRIVATE -GR-)
-target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_ice_log PUBLIC absl::memory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_rtc_base_approved )
# logging:ice_log.headers
add_library(webrtc_logging_ice_log.headers INTERFACE)
-target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_link_libraries(webrtc_logging_ice_log.headers INTERFACE webrtc_api_libjingle_logging_api.headers webrtc_api_libjingle_peerconnection_api.headers webrtc_api_rtc_event_log_rtc_event_log.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_rtc_base_approved.headers )
target_include_directories(webrtc_logging_ice_log.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# logging:rtc_event_audio
@@ -1451,41 +1466,41 @@ add_library(webrtc_logging_rtc_event_audio ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_audio PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_audio PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_audio PRIVATE -GR-)
-target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_audio PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks )
# logging:rtc_event_bwe
add_library(webrtc_logging_rtc_event_bwe ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_probe_result_success.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_route_change.cc)
target_include_directories(webrtc_logging_rtc_event_bwe PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_bwe PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_bwe PRIVATE -GR-)
-target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate )
+target_link_libraries(webrtc_logging_rtc_event_bwe PUBLIC absl::memory absl::optional webrtc_api_network_state_predictor_api webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_data_rate webrtc_api_units_timestamp )
# logging:rtc_event_frame_events
add_library(webrtc_logging_rtc_event_frame_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_frame_decoded.cc)
target_include_directories(webrtc_logging_rtc_event_frame_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_frame_events PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_frame_events PRIVATE -GR-)
-target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_frame_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_rtc_base_timeutils )
# logging:rtc_event_generic_packet_events
add_library(webrtc_logging_rtc_event_generic_packet_events ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc)
target_include_directories(webrtc_logging_rtc_event_generic_packet_events PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_generic_packet_events PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_generic_packet_events PRIVATE -GR-)
-target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_logging_rtc_event_generic_packet_events PUBLIC absl::memory absl::optional webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_rtc_base_timeutils )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log2_proto
-add_library(webrtc_logging_rtc_event_log2_proto_bridge)
+add_library(webrtc_logging_rtc_event_log2_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log2_proto_bridge
+ TARGET webrtc_logging_rtc_event_log2_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log2.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log2_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log2_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log2_proto_lib ALIAS webrtc_logging_rtc_event_log2_proto)
+target_link_libraries(webrtc_logging_rtc_event_log2_proto PUBLIC libprotobuf)
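# The rename above drops the `_bridge` suffix from the generated proto target while
# keeping the `_lib` ALIAS, so consumers that linked the alias keep building and only
# references to the old `_bridge` name need updating (as done for
# webrtc_logging_rtc_event_log_impl_encoder below). A hedged sketch with a
# hypothetical consumer `my_log_tool`; both spellings resolve to the same library:
#   target_link_libraries(my_log_tool PRIVATE webrtc_logging_rtc_event_log2_proto)
#   target_link_libraries(my_log_tool PRIVATE webrtc_logging_rtc_event_log2_proto_lib)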
# logging:rtc_event_log_api
add_library(webrtc_logging_rtc_event_log_api INTERFACE)
@@ -1504,27 +1519,27 @@ add_library(webrtc_logging_rtc_event_log_impl_encoder ${WEBRTC_ROOT}/logging/rtc
target_include_directories(webrtc_logging_rtc_event_log_impl_encoder PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_log_impl_encoder PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_log_impl_encoder PRIVATE -GR-)
-target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto_bridge webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto_bridge webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_logging_rtc_event_log_impl_encoder PUBLIC absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_transport_network_control webrtc_logging_ice_log webrtc_logging_rtc_event_audio webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_frame_events webrtc_logging_rtc_event_generic_packet_events webrtc_logging_rtc_event_log2_proto webrtc_logging_rtc_event_log_api webrtc_logging_rtc_event_log_proto webrtc_logging_rtc_event_pacing webrtc_logging_rtc_event_rtp_rtcp webrtc_logging_rtc_event_video webrtc_logging_rtc_stream_config webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved )
# //third_party/webrtc/files/stable/webrtc/logging:rtc_event_log_proto
-add_library(webrtc_logging_rtc_event_log_proto_bridge)
+add_library(webrtc_logging_rtc_event_log_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_logging_rtc_event_log_proto_bridge
+ TARGET webrtc_logging_rtc_event_log_proto
PROTOS ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_event_log.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/logging/rtc_event_log
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-target_include_directories(webrtc_logging_rtc_event_log_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
-add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto_bridge)
-target_link_libraries(webrtc_logging_rtc_event_log_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_logging_rtc_event_log_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/logging/rtc_event_log)
+add_library(webrtc_logging_rtc_event_log_proto_lib ALIAS webrtc_logging_rtc_event_log_proto)
+target_link_libraries(webrtc_logging_rtc_event_log_proto PUBLIC libprotobuf)
# logging:rtc_event_pacing
add_library(webrtc_logging_rtc_event_pacing ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_alr_state.cc)
target_include_directories(webrtc_logging_rtc_event_pacing PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_pacing PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_pacing PRIVATE -GR-)
-target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log )
+target_link_libraries(webrtc_logging_rtc_event_pacing PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp )
# logging:rtc_event_rtp_rtcp
add_library(webrtc_logging_rtc_event_rtp_rtcp ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc ${WEBRTC_ROOT}/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc)
@@ -1538,7 +1553,7 @@ add_library(webrtc_logging_rtc_event_video ${WEBRTC_ROOT}/logging/rtc_event_log/
target_include_directories(webrtc_logging_rtc_event_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_logging_rtc_event_video PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_logging_rtc_event_video PRIVATE -GR-)
-target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
+target_link_libraries(webrtc_logging_rtc_event_video PUBLIC absl::memory webrtc_api_scoped_refptr webrtc_api_rtc_event_log_rtc_event_log webrtc_api_units_timestamp webrtc_logging_rtc_stream_config webrtc_rtc_base_checks )
# logging:rtc_stream_config
add_library(webrtc_logging_rtc_stream_config ${WEBRTC_ROOT}/logging/rtc_event_log/rtc_stream_config.cc)
@@ -1552,18 +1567,25 @@ add_library(webrtc_media_rtc_audio_video ${WEBRTC_ROOT}/media/engine/adm_helpers
target_include_directories(webrtc_media_rtc_audio_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_audio_video PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_audio_video PRIVATE -GR-)
-target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_media_rtc_audio_video PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_call_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_audio_audio_frame_processor webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_data_rate webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_rtc_software_fallback_wrappers webrtc_api_video_codecs_video_codecs_api webrtc_call_call webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_api webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_modules_audio_processing_aec_dump_aec_dump_impl webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_video_capture_video_capture_internal_impl webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_rtc_base_rtc_base webrtc_rtc_base_audio_format_to_string webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_normalize_simulcast_size_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+
+# media:rtc_data_dcsctp_transport
+add_library(webrtc_media_rtc_data_dcsctp_transport ${WEBRTC_ROOT}/media/sctp/dcsctp_transport.cc)
+target_include_directories(webrtc_media_rtc_data_dcsctp_transport PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_media_rtc_data_dcsctp_transport PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_media_rtc_data_dcsctp_transport PRIVATE -GR-)
+target_link_libraries(webrtc_media_rtc_data_dcsctp_transport PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_net_dcsctp_public_factory webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_task_queue_timeout webrtc_p2p_rtc_p2p webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers )
# media:rtc_data_sctp_transport_factory
add_library(webrtc_media_rtc_data_sctp_transport_factory ${WEBRTC_ROOT}/media/sctp/sctp_transport_factory.cc)
target_include_directories(webrtc_media_rtc_data_sctp_transport_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_definitions(webrtc_media_rtc_data_sctp_transport_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_DCSCTP WEBRTC_HAVE_SCTP WEBRTC_HAVE_USRSCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_data_sctp_transport_factory PRIVATE -GR-)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_system_unused )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_factory PUBLIC webrtc_api_transport_sctp_transport_factory_interface webrtc_media_rtc_data_dcsctp_transport webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_rtc_base_threading webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_system_unused webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
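# With the dcsctp transport added above, the factory target now defines both
# WEBRTC_HAVE_USRSCTP and WEBRTC_HAVE_DCSCTP and links both backends; judging by the
# new field_trial dependencies, the choice between them is presumably made inside
# sctp_transport_factory.cc. A hedged consumer sketch (hypothetical target `my_app`):
#   target_link_libraries(my_app PRIVATE webrtc_media_rtc_data_sctp_transport_factory)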
# media:rtc_data_sctp_transport_internal
add_library(webrtc_media_rtc_data_sctp_transport_internal INTERFACE)
-target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_media_rtc_data_sctp_transport_internal INTERFACE webrtc_api_transport_datagram_transport_interface webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
target_include_directories(webrtc_media_rtc_data_sctp_transport_internal INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_data_usrsctp_transport
@@ -1604,11 +1626,11 @@ add_library(webrtc_media_rtc_media_base ${WEBRTC_ROOT}/media/base/adapted_video_
target_include_directories(webrtc_media_rtc_media_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_base PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_media_rtc_media_base PRIVATE -GR-)
-target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_media_rtc_media_base PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_frame_transformer_interface webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_frame_processor webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_transport_datagram_transport_interface webrtc_api_transport_stun_types webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_config webrtc_modules_async_audio_processing_async_audio_processing webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_sanitizer webrtc_rtc_base_socket webrtc_rtc_base_stringutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# media:rtc_media_base.headers
add_library(webrtc_media_rtc_media_base.headers INTERFACE)
-target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_media_rtc_media_base.headers INTERFACE webrtc_api_array_view.headers webrtc_api_audio_options_api.headers webrtc_api_frame_transformer_interface.headers webrtc_api_media_stream_interface.headers webrtc_api_rtc_error.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_audio_audio_frame_processor.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_crypto_frame_decryptor_interface.headers webrtc_api_crypto_frame_encryptor_interface.headers webrtc_api_crypto_options.headers webrtc_api_transport_datagram_transport_interface.headers webrtc_api_transport_stun_types.headers webrtc_api_transport_webrtc_key_value_config.headers webrtc_api_transport_rtp_rtp_source.headers webrtc_api_video_video_bitrate_allocation.headers webrtc_api_video_video_bitrate_allocator_factory.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_rtp_headers.headers webrtc_api_video_codecs_video_codecs_api.headers webrtc_call_call_interfaces.headers webrtc_call_video_stream_api.headers webrtc_common_video_common_video.headers webrtc_media_rtc_media_config.headers webrtc_modules_async_audio_processing_async_audio_processing.headers webrtc_modules_audio_processing_audio_processing_statistics.headers webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_rtc_base.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_sanitizer.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_media_rtc_media_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# media:rtc_media_config
@@ -1638,7 +1660,7 @@ target_link_libraries(webrtc_media_rtc_media_tests_utils PUBLIC gmock gtest absl
target_include_directories(webrtc_media_rtc_media_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_media_rtc_media_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_media_rtc_media_unittests PRIVATE -GR-)
-target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_media_rtc_media_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional usrsctp webrtc_api_create_simulcast_test_fixture_api webrtc_api_libjingle_peerconnection_api webrtc_api_mock_video_bitrate_allocator webrtc_api_mock_video_bitrate_allocator_factory webrtc_api_mock_video_codec_factory webrtc_api_mock_video_encoder webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_simulcast_test_fixture_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_field_trial_based_config webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_data_usrsctp_transport webrtc_media_rtc_encoder_simulcast_proxy webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_media_rtc_sdp_video_format_utils webrtc_media_rtc_simulcast_encoder_adapter webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_mocks webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_simulcast_test_fixture_impl webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_codecs_av1_libaom_av1_decoder webrtc_p2p_p2p_test_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_test_audio_codec_mocks webrtc_test_fake_video_codecs webrtc_test_field_trial webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_video_test_common )
# media:rtc_sdp_video_format_utils
add_library(webrtc_media_rtc_sdp_video_format_utils ${WEBRTC_ROOT}/media/base/sdp_video_format_utils.cc)
@@ -1667,31 +1689,31 @@ target_link_libraries(webrtc_modules_async_audio_processing_async_audio_processi
target_include_directories(webrtc_modules_async_audio_processing_async_audio_processing.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_config_proto
-add_library(webrtc_modules_audio_coding_ana_config_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_config_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_config_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_config_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/config.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_config_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_coding_ana_config_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_config_proto_lib ALIAS webrtc_modules_audio_coding_ana_config_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_config_proto PUBLIC libprotobuf)
# //third_party/webrtc/files/stable/webrtc/modules/audio_coding:ana_debug_dump_proto
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_coding_ana_debug_dump_proto_bridge
+ TARGET webrtc_modules_audio_coding_ana_debug_dump_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/debug_dump.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
-add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto_bridge)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PUBLIC libprotobuf)
-target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto_bridge PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
+target_include_directories(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_coding/audio_network_adaptor)
+add_library(webrtc_modules_audio_coding_ana_debug_dump_proto_lib ALIAS webrtc_modules_audio_coding_ana_debug_dump_proto)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PUBLIC libprotobuf)
+target_link_libraries(webrtc_modules_audio_coding_ana_debug_dump_proto PRIVATE webrtc_modules_audio_coding_ana_config_proto_lib)
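# Same rename pattern for the audio_network_adaptor protos: the generated targets lose
# the `_bridge` suffix, the `_lib` ALIASes are kept for compatibility, and debug_dump
# links the config proto PRIVATE (a reasonable sign that debug_dump.proto imports
# config.proto). A hedged sketch against the renamed target (hypothetical target
# `my_ana_tool`):
#   target_link_libraries(my_ana_tool PRIVATE webrtc_modules_audio_coding_ana_config_proto)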
# modules/audio_coding
add_library(webrtc_modules_audio_coding_audio_coding ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_receiver.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_remixing.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/acm_resampler.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/audio_coding_module.cc ${WEBRTC_ROOT}/modules/audio_coding/acm2/call_statistics.cc)
@@ -1730,7 +1752,7 @@ add_library(webrtc_modules_audio_coding_audio_network_adaptor ${WEBRTC_ROOT}/mod
target_include_directories(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_audio_network_adaptor PRIVATE -GR-)
-target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional libprotobuf webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_coding_ana_debug_dump_proto_bridge webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_audio_network_adaptor PUBLIC absl::algorithm_container absl::optional webrtc_api_audio_codecs_audio_codecs_api webrtc_api_rtc_event_log_rtc_event_log webrtc_common_audio_common_audio webrtc_logging_rtc_event_audio webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_coding_ana_debug_dump_proto webrtc_modules_audio_coding_audio_network_adaptor_config webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial )
# modules/audio_coding:audio_network_adaptor_config
add_library(webrtc_modules_audio_coding_audio_network_adaptor_config ${WEBRTC_ROOT}/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc)
@@ -1881,7 +1903,7 @@ add_library(webrtc_modules_audio_coding_neteq_tools_minimal ${WEBRTC_ROOT}/modul
target_include_directories(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_CODEC_ISAC WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1)
target_compile_options(webrtc_modules_audio_coding_neteq_tools_minimal PRIVATE -GR-)
-target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_coding_neteq_tools_minimal PUBLIC absl::optional webrtc_api_array_view webrtc_api_neteq_simulator_api webrtc_api_rtp_headers webrtc_api_audio_audio_frame_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_neteq_custom_neteq_factory webrtc_api_neteq_default_neteq_controller_factory webrtc_api_neteq_neteq_api webrtc_modules_audio_coding_default_neteq_factory webrtc_modules_audio_coding_neteq webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/audio_coding:pcm16b
add_library(webrtc_modules_audio_coding_pcm16b ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc ${WEBRTC_ROOT}/modules/audio_coding/codecs/pcm16b/pcm16b_common.cc)
@@ -1901,7 +1923,7 @@ add_library(webrtc_modules_audio_coding_red ${WEBRTC_ROOT}/modules/audio_coding/
target_include_directories(webrtc_modules_audio_coding_red PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_red PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_coding_red PRIVATE -GR-)
-target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_link_libraries(webrtc_modules_audio_coding_red PUBLIC absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_units_time_delta webrtc_common_audio_common_audio webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_cng
add_library(webrtc_modules_audio_coding_webrtc_cng ${WEBRTC_ROOT}/modules/audio_coding/codecs/cng/webrtc_cng.cc)
@@ -1922,7 +1944,7 @@ add_library(webrtc_modules_audio_coding_webrtc_opus ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_coding_webrtc_opus PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_coding_webrtc_opus PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_CODEC_ISAC GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_CODEC_OPUS WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_OPUS_SUPPORT_120MS_PTIME=1)
target_compile_options(webrtc_modules_audio_coding_webrtc_opus PRIVATE -GR-)
-target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional libprotobuf webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_audio_coding_webrtc_opus PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_opus_audio_encoder_opus_config webrtc_common_audio_common_audio webrtc_modules_audio_coding_audio_coding_opus_common webrtc_modules_audio_coding_audio_network_adaptor webrtc_modules_audio_coding_webrtc_opus_wrapper webrtc_rtc_base_checks webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial )
# modules/audio_coding:webrtc_opus_wrapper
add_library(webrtc_modules_audio_coding_webrtc_opus_wrapper ${WEBRTC_ROOT}/modules/audio_coding/codecs/opus/opus_interface.cc)
@@ -2014,7 +2036,7 @@ add_library(webrtc_modules_audio_mixer_audio_mixer_impl ${WEBRTC_ROOT}/modules/a
target_include_directories(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_mixer_audio_mixer_impl PRIVATE -GR-)
-target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_mixer_audio_mixer_impl PUBLIC webrtc_api_array_view webrtc_api_rtp_packet_info webrtc_api_scoped_refptr webrtc_api_audio_audio_frame_api webrtc_api_audio_audio_mixer_api webrtc_audio_utility_audio_frame_operations webrtc_common_audio_common_audio webrtc_modules_audio_mixer_audio_frame_manipulator webrtc_modules_audio_processing_api webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc2_fixed_digital webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_conversions webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/audio_processing/aec3:adaptive_fir_filter
add_library(webrtc_modules_audio_processing_aec3_adaptive_fir_filter INTERFACE)
@@ -2133,11 +2155,11 @@ add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl ${WEBRTC_ROOT
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN GOOGLE_PROTOBUF_NO_RTTI GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PRIVATE -GR-)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC libprotobuf webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto_bridge webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl PUBLIC webrtc_api_audio_audio_frame_api webrtc_api_task_queue_task_queue webrtc_modules_audio_processing_aec_dump_interface webrtc_modules_audio_processing_audioproc_debug_proto webrtc_modules_audio_processing_aec_dump_aec_dump webrtc_rtc_base_checks webrtc_rtc_base_ignore_wundef webrtc_rtc_base_protobuf_utils webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_system_file_wrapper webrtc_system_wrappers_system_wrappers )
# modules/audio_processing/aec_dump:aec_dump_impl.headers
add_library(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE webrtc_api_audio_audio_frame_api.headers webrtc_api_task_queue_task_queue.headers webrtc_modules_audio_processing_aec_dump_interface.headers webrtc_modules_audio_processing_aec_dump_aec_dump.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ignore_wundef.headers webrtc_rtc_base_protobuf_utils.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_audio_processing_aec_dump_aec_dump_impl.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/audio_processing:aec_dump_interface
@@ -2395,13 +2417,49 @@ add_library(webrtc_modules_audio_processing_agc_agc ${WEBRTC_ROOT}/modules/audio
target_include_directories(webrtc_modules_audio_processing_agc_agc PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_audio_processing_agc_agc PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_audio_processing_agc_agc PRIVATE -GR-)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_common_audio_common_audio_c webrtc_modules_audio_processing_apm_logging webrtc_modules_audio_processing_audio_buffer webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor webrtc_modules_audio_processing_agc_clipping_predictor_evaluator webrtc_modules_audio_processing_agc_gain_control_interface webrtc_modules_audio_processing_agc_gain_map webrtc_modules_audio_processing_agc_level_estimation webrtc_modules_audio_processing_vad_vad webrtc_rtc_base_checks webrtc_rtc_base_gtest_prod webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_safe_minmax webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/audio_processing/agc:agc.headers
add_library(webrtc_modules_audio_processing_agc_agc.headers INTERFACE)
-target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
+target_link_libraries(webrtc_modules_audio_processing_agc_agc.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_common_audio_common_audio_c.headers webrtc_modules_audio_processing_apm_logging.headers webrtc_modules_audio_processing_audio_buffer.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor.headers webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers webrtc_modules_audio_processing_agc_gain_control_interface.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_modules_audio_processing_agc_level_estimation.headers webrtc_modules_audio_processing_vad_vad.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_gtest_prod.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_safe_minmax.headers webrtc_system_wrappers_field_trial.headers webrtc_system_wrappers_metrics.headers )
target_include_directories(webrtc_modules_audio_processing_agc_agc.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# modules/audio_processing/agc:clipping_predictor
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor PRIVATE -GR-)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor PUBLIC absl::optional webrtc_common_audio_common_audio webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_frame_view webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer webrtc_modules_audio_processing_agc_gain_map webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_safe_minmax )
+
+# modules/audio_processing/agc:clipping_predictor.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE webrtc_common_audio_common_audio.headers webrtc_modules_audio_processing_api.headers webrtc_modules_audio_processing_audio_frame_view.headers webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers webrtc_modules_audio_processing_agc_gain_map.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_safe_minmax.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_evaluator
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_evaluator.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PRIVATE -GR-)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging )
+
+# modules/audio_processing/agc:clipping_predictor_evaluator.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_evaluator.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer ${WEBRTC_ROOT}/modules/audio_processing/agc/clipping_predictor_level_buffer.cc)
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PRIVATE -GR-)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer PUBLIC absl::optional webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved )
+
+# modules/audio_processing/agc:clipping_predictor_level_buffer.headers
+add_library(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE)
+target_link_libraries(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE webrtc_rtc_base_checks.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_rtc_base_approved.headers )
+target_include_directories(webrtc_modules_audio_processing_agc_clipping_predictor_level_buffer.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# modules/audio_processing/agc:gain_control_interface
add_library(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE)
target_include_directories(webrtc_modules_audio_processing_agc_gain_control_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -2525,17 +2583,17 @@ target_link_libraries(webrtc_modules_audio_processing_audio_processing_statistic
target_include_directories(webrtc_modules_audio_processing_audio_processing_statistics.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# //third_party/webrtc/files/stable/webrtc/modules/audio_processing:audioproc_debug_proto
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto)
protobuf_generate_with_plugin(
- TARGET webrtc_modules_audio_processing_audioproc_debug_proto_bridge
+ TARGET webrtc_modules_audio_processing_audioproc_debug_proto
PROTOS ${WEBRTC_ROOT}/modules/audio_processing/debug.proto
HEADERFILEEXTENSION .pb.h
APPEND_PATH
PROTOPATH -I${WEBRTC_ROOT}/modules/audio_processing
PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
-add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto_bridge)
-target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto_bridge PUBLIC libprotobuf)
+target_include_directories(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/modules/audio_processing)
+add_library(webrtc_modules_audio_processing_audioproc_debug_proto_lib ALIAS webrtc_modules_audio_processing_audioproc_debug_proto)
+target_link_libraries(webrtc_modules_audio_processing_audioproc_debug_proto PUBLIC libprotobuf)
# modules/audio_processing:audioproc_test_utils
add_library(webrtc_modules_audio_processing_audioproc_test_utils ${WEBRTC_ROOT}/modules/audio_processing/test/audio_buffer_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/audio_processing_builder_for_testing.cc ${WEBRTC_ROOT}/modules/audio_processing/test/bitexactness_tools.cc ${WEBRTC_ROOT}/modules/audio_processing/test/performance_timer.cc ${WEBRTC_ROOT}/modules/audio_processing/test/simulator_buffers.cc ${WEBRTC_ROOT}/modules/audio_processing/test/test_utils.cc)
@@ -2702,11 +2760,11 @@ target_link_libraries(webrtc_modules_audio_processing_voice_detection.headers IN
target_include_directories(webrtc_modules_audio_processing_voice_detection.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/congestion_controller
-add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc)
+add_library(webrtc_modules_congestion_controller_congestion_controller ${WEBRTC_ROOT}/modules/congestion_controller/receive_side_congestion_controller.cc ${WEBRTC_ROOT}/modules/congestion_controller/remb_throttler.cc)
target_include_directories(webrtc_modules_congestion_controller_congestion_controller PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_congestion_controller_congestion_controller PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_congestion_controller_congestion_controller PRIVATE -GR-)
-target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
+target_link_libraries(webrtc_modules_congestion_controller_congestion_controller PUBLIC webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_rtc_base webrtc_rtc_base_synchronization_mutex )
# modules/congestion_controller/goog_cc:alr_detector
add_library(webrtc_modules_congestion_controller_goog_cc_alr_detector ${WEBRTC_ROOT}/modules/congestion_controller/goog_cc/alr_detector.cc)
@@ -2825,11 +2883,11 @@ target_compile_options(webrtc_modules_pacing_pacing PRIVATE -GR-)
target_link_libraries(webrtc_modules_pacing_pacing PUBLIC absl::memory absl::strings absl::optional webrtc_api_function_view webrtc_api_sequence_checker webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_logging_rtc_event_bwe webrtc_logging_rtc_event_pacing webrtc_modules_module_api webrtc_modules_pacing_interval_budget webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_metrics )
# modules/remote_bitrate_estimator
-add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
+add_library(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/aimd_rate_control.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/bwe_defines.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/inter_arrival.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_detector.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/overuse_estimator.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/packet_arrival_map.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc ${WEBRTC_ROOT}/modules/remote_bitrate_estimator/remote_estimator_proxy.cc)
target_include_directories(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PRIVATE -GR-)
-target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator PUBLIC absl::strings absl::optional webrtc_api_network_state_predictor_api webrtc_api_rtp_headers webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_congestion_controller_goog_cc_link_capacity_estimator webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_safe_minmax webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# modules/rtp_rtcp:mock_rtp_rtcp
add_library(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE)
@@ -2837,7 +2895,7 @@ target_link_libraries(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE absl::opti
target_include_directories(webrtc_modules_rtp_rtcp_mock_rtp_rtcp INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp
-add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
+add_library(webrtc_modules_rtp_rtcp_rtp_rtcp ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/absolute_capture_time_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/active_decode_targets_helper.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/capture_clock_offset_updater.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/dtmf_queue.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_bursty.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/fec_private_tables_random.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/flexfec_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/forward_error_correction_internal.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_loss_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/packet_sequencer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/receive_statistics_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_nack_stats.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_receiver.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtcp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_video_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_format_vp9.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_header_extension_size.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packet_history.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_packetizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_audio.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_egress.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_sequence_number_map.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/rtp_utility.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/source_tracker.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/time_util.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/tmmbr_help.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_generator.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc ${WEBRTC_ROOT}/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc)
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp PRIVATE -GR-)
@@ -2848,11 +2906,11 @@ add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format ${WEBRTC_ROOT}/modules/rtp_r
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_rtp_rtcp_rtp_rtcp_format PRIVATE -GR-)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format PUBLIC absl::algorithm_container absl::strings absl::optional absl::variant webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_audio_codecs_audio_codecs_api webrtc_api_transport_network_control webrtc_api_transport_rtp_dependency_descriptor webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_video_frame webrtc_api_video_video_layers_allocation webrtc_api_video_video_rtp_headers webrtc_common_video_common_video webrtc_modules_module_api_public webrtc_modules_video_coding_codec_globals_headers webrtc_rtc_base_checks webrtc_rtc_base_divide_round webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
# modules/rtp_rtcp:rtp_rtcp_format.headers
add_library(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE)
-target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_rtp_headers.headers webrtc_api_rtp_parameters.headers webrtc_api_scoped_refptr.headers webrtc_api_audio_codecs_audio_codecs_api.headers webrtc_api_transport_network_control.headers webrtc_api_transport_rtp_dependency_descriptor.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_api_video_video_frame.headers webrtc_api_video_video_layers_allocation.headers webrtc_api_video_video_rtp_headers.headers webrtc_common_video_common_video.headers webrtc_modules_module_api_public.headers webrtc_modules_video_coding_codec_globals_headers.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_divide_round.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_modules_rtp_rtcp_rtp_rtcp_format.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# modules/rtp_rtcp:rtp_video_header
@@ -3067,7 +3125,7 @@ add_library(webrtc_modules_video_coding_webrtc_vp9 ${WEBRTC_ROOT}/modules/video_
target_include_directories(webrtc_modules_video_coding_webrtc_vp9 PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_modules_video_coding_webrtc_vp9 PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_modules_video_coding_webrtc_vp9 PRIVATE -GR-)
-target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_modules_video_coding_webrtc_vp9 PUBLIC absl::algorithm_container absl::memory absl::strings webrtc-yuv libvpx webrtc_api_fec_controller_api webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_video_video_frame webrtc_api_video_video_frame_i010 webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_libvpx_interface webrtc_modules_video_coding_webrtc_vp9_helpers webrtc_modules_video_coding_svc_scalability_structures webrtc_modules_video_coding_svc_scalable_video_controller webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_experiments_encoder_info_settings webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_system_wrappers_field_trial )
# modules/video_coding:webrtc_vp9_helpers
add_library(webrtc_modules_video_coding_webrtc_vp9_helpers ${WEBRTC_ROOT}/modules/video_coding/codecs/vp9/svc_config.cc)
@@ -3094,6 +3152,215 @@ target_compile_definitions(webrtc_modules_video_processing_video_processing_sse2
target_compile_options(webrtc_modules_video_processing_video_processing_sse2 PRIVATE -GR-)
target_link_libraries(webrtc_modules_video_processing_video_processing_sse2 PUBLIC webrtc_modules_video_processing_denoiser_filter webrtc_rtc_base_rtc_base_approved webrtc_system_wrappers_system_wrappers )
+# net/dcsctp/common:internal_types
+add_library(webrtc_net_dcsctp_common_internal_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_internal_types INTERFACE webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_common_internal_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:math
+add_library(webrtc_net_dcsctp_common_math INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_math INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:pair_hash
+add_library(webrtc_net_dcsctp_common_pair_hash INTERFACE)
+target_include_directories(webrtc_net_dcsctp_common_pair_hash INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:sequence_numbers
+add_library(webrtc_net_dcsctp_common_sequence_numbers INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_sequence_numbers INTERFACE webrtc_net_dcsctp_common_internal_types )
+target_include_directories(webrtc_net_dcsctp_common_sequence_numbers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/common:str_join
+add_library(webrtc_net_dcsctp_common_str_join INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_common_str_join INTERFACE absl::strings webrtc_rtc_base_stringutils )
+target_include_directories(webrtc_net_dcsctp_common_str_join INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:bounded_io
+add_library(webrtc_net_dcsctp_packet_bounded_io INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_bounded_io INTERFACE webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_bounded_io INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:chunk
+add_library(webrtc_net_dcsctp_packet_chunk ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/abort_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/cookie_echo_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/data_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/error_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/forward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/idata_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/init_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/reconfig_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/sack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_chunk.cc ${WEBRTC_ROOT}/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:chunk_validators
+add_library(webrtc_net_dcsctp_packet_chunk_validators ${WEBRTC_ROOT}/net/dcsctp/packet/chunk_validators.cc)
+target_include_directories(webrtc_net_dcsctp_packet_chunk_validators PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_chunk_validators PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_chunk_validators PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_packet_chunk_validators PUBLIC webrtc_net_dcsctp_packet_chunk webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:crc32c
+add_library(webrtc_net_dcsctp_packet_crc32c ${WEBRTC_ROOT}/net/dcsctp/packet/crc32c.cc)
+target_include_directories(webrtc_net_dcsctp_packet_crc32c PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_crc32c PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_crc32c PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_packet_crc32c PUBLIC crc32c webrtc_api_array_view webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:data
+add_library(webrtc_net_dcsctp_packet_data INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_packet_data INTERFACE webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_packet_data INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/packet:error_cause
+add_library(webrtc_net_dcsctp_packet_error_cause ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/no_user_data_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/protocol_violation_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc ${WEBRTC_ROOT}/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc)
+target_include_directories(webrtc_net_dcsctp_packet_error_cause PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_error_cause PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_error_cause PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_packet_error_cause PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:parameter
+add_library(webrtc_net_dcsctp_packet_parameter ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/state_cookie_parameter.cc ${WEBRTC_ROOT}/net/dcsctp/packet/parameter/supported_extensions_parameter.cc)
+target_include_directories(webrtc_net_dcsctp_packet_parameter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_parameter PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_parameter PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_packet_parameter PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:sctp_packet
+add_library(webrtc_net_dcsctp_packet_sctp_packet ${WEBRTC_ROOT}/net/dcsctp/packet/sctp_packet.cc)
+target_include_directories(webrtc_net_dcsctp_packet_sctp_packet PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_sctp_packet PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_sctp_packet PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_packet_sctp_packet PUBLIC absl::memory absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_math webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_crc32c webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/packet:tlv_trait
+add_library(webrtc_net_dcsctp_packet_tlv_trait ${WEBRTC_ROOT}/net/dcsctp/packet/tlv_trait.cc)
+target_include_directories(webrtc_net_dcsctp_packet_tlv_trait PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_packet_tlv_trait PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_packet_tlv_trait PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_packet_tlv_trait PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/public:factory
+add_library(webrtc_net_dcsctp_public_factory ${WEBRTC_ROOT}/net/dcsctp/public/dcsctp_socket_factory.cc)
+target_include_directories(webrtc_net_dcsctp_public_factory PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_public_factory PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_public_factory PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_public_factory PUBLIC absl::strings webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_dcsctp_socket )
+
+# net/dcsctp/public:socket
+add_library(webrtc_net_dcsctp_public_socket INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_socket INTERFACE absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+target_include_directories(webrtc_net_dcsctp_public_socket INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:strong_alias
+add_library(webrtc_net_dcsctp_public_strong_alias INTERFACE)
+target_include_directories(webrtc_net_dcsctp_public_strong_alias INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/public:types
+add_library(webrtc_net_dcsctp_public_types INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_public_types INTERFACE webrtc_api_array_view webrtc_net_dcsctp_public_strong_alias )
+target_include_directories(webrtc_net_dcsctp_public_types INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:data_tracker
+add_library(webrtc_net_dcsctp_rx_data_tracker ${WEBRTC_ROOT}/net/dcsctp/rx/data_tracker.cc)
+target_include_directories(webrtc_net_dcsctp_rx_data_tracker PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_data_tracker PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_data_tracker PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_rx_data_tracker PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_queue
+add_library(webrtc_net_dcsctp_rx_reassembly_queue ${WEBRTC_ROOT}/net/dcsctp/rx/reassembly_queue.cc)
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_reassembly_queue PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_queue PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_net_dcsctp_rx_traditional_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/rx:reassembly_streams
+add_library(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE absl::strings webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_rx_reassembly_streams INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/rx:traditional_reassembly_streams
+add_library(webrtc_net_dcsctp_rx_traditional_reassembly_streams ${WEBRTC_ROOT}/net/dcsctp/rx/traditional_reassembly_streams.cc)
+target_include_directories(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_rx_traditional_reassembly_streams PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_rx_traditional_reassembly_streams PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_reassembly_streams webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:context
+add_library(webrtc_net_dcsctp_socket_context INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_socket_context INTERFACE absl::strings webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_socket_context INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# net/dcsctp/socket:dcsctp_socket
+add_library(webrtc_net_dcsctp_socket_dcsctp_socket ${WEBRTC_ROOT}/net/dcsctp/socket/dcsctp_socket.cc ${WEBRTC_ROOT}/net/dcsctp/socket/state_cookie.cc)
+target_include_directories(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_dcsctp_socket PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_socket_dcsctp_socket PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_chunk_validators webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_packet_error_cause webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_socket_transmission_control_block webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_rr_send_queue webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:heartbeat_handler
+add_library(webrtc_net_dcsctp_socket_heartbeat_handler ${WEBRTC_ROOT}/net/dcsctp/socket/heartbeat_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_heartbeat_handler PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_socket_heartbeat_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_packet_bounded_io webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:stream_reset_handler
+add_library(webrtc_net_dcsctp_socket_stream_reset_handler ${WEBRTC_ROOT}/net/dcsctp/socket/stream_reset_handler.cc)
+target_include_directories(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_stream_reset_handler PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_socket_stream_reset_handler PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_parameter webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_packet_tlv_trait webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/socket:transmission_control_block
+add_library(webrtc_net_dcsctp_socket_transmission_control_block ${WEBRTC_ROOT}/net/dcsctp/socket/transmission_control_block.cc)
+target_include_directories(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_socket_transmission_control_block PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_socket_transmission_control_block PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_sctp_packet webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_rx_data_tracker webrtc_net_dcsctp_rx_reassembly_queue webrtc_net_dcsctp_socket_context webrtc_net_dcsctp_socket_heartbeat_handler webrtc_net_dcsctp_socket_stream_reset_handler webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_error_counter webrtc_net_dcsctp_tx_retransmission_queue webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/timer:task_queue_timeout
+add_library(webrtc_net_dcsctp_timer_task_queue_timeout ${WEBRTC_ROOT}/net/dcsctp/timer/task_queue_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_task_queue_timeout PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_timer_task_queue_timeout PUBLIC webrtc_api_array_view webrtc_api_task_queue_task_queue webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
+
+# net/dcsctp/timer
+add_library(webrtc_net_dcsctp_timer_timer ${WEBRTC_ROOT}/net/dcsctp/timer/timer.cc)
+target_include_directories(webrtc_net_dcsctp_timer_timer PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_timer_timer PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_timer_timer PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_timer_timer PUBLIC absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_strong_alias webrtc_net_dcsctp_public_types webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_error_counter
+add_library(webrtc_net_dcsctp_tx_retransmission_error_counter ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_error_counter.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_error_counter PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_error_counter PUBLIC absl::strings webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_queue
+add_library(webrtc_net_dcsctp_tx_retransmission_queue ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_queue PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_math webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_common_sequence_numbers webrtc_net_dcsctp_common_str_join webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types webrtc_net_dcsctp_timer_timer webrtc_net_dcsctp_tx_retransmission_timeout webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:retransmission_timeout
+add_library(webrtc_net_dcsctp_tx_retransmission_timeout ${WEBRTC_ROOT}/net/dcsctp/tx/retransmission_timeout.cc)
+target_include_directories(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_retransmission_timeout PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_tx_retransmission_timeout PUBLIC webrtc_net_dcsctp_public_types webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:rr_send_queue
+add_library(webrtc_net_dcsctp_tx_rr_send_queue ${WEBRTC_ROOT}/net/dcsctp/tx/rr_send_queue.cc)
+target_include_directories(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN DLOG_ALWAYS_ON WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_net_dcsctp_tx_rr_send_queue PRIVATE -GR-)
+target_link_libraries(webrtc_net_dcsctp_tx_rr_send_queue PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_pair_hash webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_socket webrtc_net_dcsctp_public_types webrtc_net_dcsctp_tx_send_queue webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved )
+
+# net/dcsctp/tx:send_queue
+add_library(webrtc_net_dcsctp_tx_send_queue INTERFACE)
+target_link_libraries(webrtc_net_dcsctp_tx_send_queue INTERFACE absl::optional webrtc_api_array_view webrtc_net_dcsctp_common_internal_types webrtc_net_dcsctp_packet_chunk webrtc_net_dcsctp_packet_data webrtc_net_dcsctp_public_types )
+target_include_directories(webrtc_net_dcsctp_tx_send_queue INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# p2p:fake_ice_transport
add_library(webrtc_p2p_fake_ice_transport INTERFACE)
target_link_libraries(webrtc_p2p_fake_ice_transport INTERFACE absl::algorithm_container absl::optional webrtc_api_libjingle_peerconnection_api webrtc_p2p_rtc_p2p webrtc_rtc_base_rtc_base webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
@@ -3135,7 +3402,7 @@ add_library(webrtc_pc_audio_rtp_receiver ${WEBRTC_ROOT}/pc/audio_rtp_receiver.cc
target_include_directories(webrtc_pc_audio_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_audio_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_audio_rtp_receiver PRIVATE -GR-)
-target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_audio_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_media_rtc_media_base webrtc_pc_audio_track webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task )
# pc:audio_track
add_library(webrtc_pc_audio_track ${WEBRTC_ROOT}/pc/audio_track.cc)
@@ -3149,38 +3416,28 @@ add_library(webrtc_pc_connection_context ${WEBRTC_ROOT}/pc/connection_context.cc
target_include_directories(webrtc_pc_connection_context PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_connection_context PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_connection_context PRIVATE -GR-)
-target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
+target_link_libraries(webrtc_pc_connection_context PUBLIC webrtc_api_callfactory_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_neteq_neteq_api webrtc_api_transport_field_trial_based_config webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_media_rtc_data_sctp_transport_factory webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_to_queued_task )
# pc:dtmf_sender
add_library(webrtc_pc_dtmf_sender ${WEBRTC_ROOT}/pc/dtmf_sender.cc)
target_include_directories(webrtc_pc_dtmf_sender PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_dtmf_sender PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_dtmf_sender PRIVATE -GR-)
-target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_dtmf_sender PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_scoped_refptr webrtc_pc_proxy webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:integration_test_helpers
add_library(webrtc_pc_integration_test_helpers ${WEBRTC_ROOT}/pc/test/integration_test_helpers.cc)
target_include_directories(webrtc_pc_integration_test_helpers PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_integration_test_helpers PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_integration_test_helpers PRIVATE -GR-)
-target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_integration_test_helpers PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
# pc:jitter_buffer_delay
add_library(webrtc_pc_jitter_buffer_delay ${WEBRTC_ROOT}/pc/jitter_buffer_delay.cc)
target_include_directories(webrtc_pc_jitter_buffer_delay PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_jitter_buffer_delay PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_jitter_buffer_delay PRIVATE -GR-)
-target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_sequence_checker webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_safe_minmax webrtc_rtc_base_threading )
-
-# pc:jitter_buffer_delay_interface
-add_library(webrtc_pc_jitter_buffer_delay_interface INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_interface INTERFACE absl::algorithm_container absl::strings absl::optional webrtc_media_rtc_media_base webrtc_rtc_base_refcount )
-target_include_directories(webrtc_pc_jitter_buffer_delay_interface INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
-
-# pc:jitter_buffer_delay_proxy
-add_library(webrtc_pc_jitter_buffer_delay_proxy INTERFACE)
-target_link_libraries(webrtc_pc_jitter_buffer_delay_proxy INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay_interface )
-target_include_directories(webrtc_pc_jitter_buffer_delay_proxy INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_link_libraries(webrtc_pc_jitter_buffer_delay PUBLIC absl::optional webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_safe_conversions webrtc_rtc_base_safe_minmax webrtc_rtc_base_system_no_unique_address )
# pc:libjingle_peerconnection
add_library(webrtc_pc_libjingle_peerconnection INTERFACE)
@@ -3205,7 +3462,7 @@ add_library(webrtc_pc_pc_test_utils ${WEBRTC_ROOT}/pc/test/fake_audio_capture_mo
target_include_directories(webrtc_pc_pc_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_pc_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_pc_test_utils PRIVATE -GR-)
-target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
+target_link_libraries(webrtc_pc_pc_test_utils PUBLIC absl::optional webrtc_api_audio_options_api webrtc_api_create_frame_generator webrtc_api_create_peerconnection_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtc_error webrtc_api_rtc_stats_api webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_media_rtc_media webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_peerconnection webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_test_test_support webrtc_test_video_test_common )
# pc:peer_connection_message_handler
add_library(webrtc_pc_peer_connection_message_handler ${WEBRTC_ROOT}/pc/peer_connection_message_handler.cc)
@@ -3219,14 +3476,21 @@ add_library(webrtc_pc_peerconnection ${WEBRTC_ROOT}/pc/data_channel_controller.c
target_include_directories(webrtc_pc_peerconnection PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_peerconnection PRIVATE -GR-)
-target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_peerconnection PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_callfactory_api webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_ice_transport_factory webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_network_state_predictor_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_adaptation_resource_adaptation_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_neteq_neteq_api webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_bitrate_settings webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_field_trial_based_config webrtc_api_transport_network_control webrtc_api_transport_sctp_transport_factory_interface webrtc_api_transport_webrtc_key_value_config webrtc_api_units_data_rate webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_sender webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_connection_context webrtc_pc_dtmf_sender webrtc_pc_jitter_buffer_delay webrtc_pc_media_protocol_names webrtc_pc_media_stream webrtc_pc_peer_connection_message_handler webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_rtp_transmission_manager webrtc_pc_sdp_state_provider webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_pc_stats_collector_interface webrtc_pc_transceiver_list webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_track webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_operations_chain webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_stats_stats webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:peerconnection_unittests
android_add_executable(TARGET webrtc_pc_peerconnection_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/data_channel_integrationtest.cc ${WEBRTC_ROOT}/pc/data_channel_unittest.cc ${WEBRTC_ROOT}/pc/dtmf_sender_unittest.cc ${WEBRTC_ROOT}/pc/ice_server_parsing_unittest.cc ${WEBRTC_ROOT}/pc/jitter_buffer_delay_unittest.cc ${WEBRTC_ROOT}/pc/jsep_session_description_unittest.cc ${WEBRTC_ROOT}/pc/local_audio_source_unittest.cc ${WEBRTC_ROOT}/pc/media_stream_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_adaptation_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_bundle_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_crypto_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_data_channel_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_end_to_end_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_factory_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_header_extension_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_histogram_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_ice_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_integrationtest.cc ${WEBRTC_ROOT}/pc/peer_connection_interface_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_jsep_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_media_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_rtp_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_signaling_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_simulcast_unittest.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.cc ${WEBRTC_ROOT}/pc/peer_connection_wrapper.h ${WEBRTC_ROOT}/pc/proxy_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/rtc_stats_integrationtest.cc ${WEBRTC_ROOT}/pc/rtc_stats_traversal_unittest.cc ${WEBRTC_ROOT}/pc/rtp_media_utils_unittest.cc ${WEBRTC_ROOT}/pc/rtp_parameters_conversion_unittest.cc ${WEBRTC_ROOT}/pc/rtp_sender_receiver_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transceiver_unittest.cc ${WEBRTC_ROOT}/pc/sctp_utils_unittest.cc ${WEBRTC_ROOT}/pc/sdp_serializer_unittest.cc ${WEBRTC_ROOT}/pc/stats_collector_unittest.cc ${WEBRTC_ROOT}/pc/test/fake_audio_capture_module_unittest.cc ${WEBRTC_ROOT}/pc/test/test_sdp_strings.h ${WEBRTC_ROOT}/pc/track_media_info_map_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_track_source_unittest.cc ${WEBRTC_ROOT}/pc/video_track_unittest.cc ${WEBRTC_ROOT}/pc/webrtc_sdp_unittest.cc)
target_include_directories(webrtc_pc_peerconnection_unittests PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_peerconnection_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_peerconnection_unittests PRIVATE -GR-)
-target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+target_link_libraries(webrtc_pc_peerconnection_unittests PUBLIC absl::algorithm_container absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_callfactory_api webrtc_api_create_peerconnection_factory webrtc_api_fake_frame_decryptor webrtc_api_fake_frame_encryptor webrtc_api_function_view webrtc_api_libjingle_logging_api webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_mock_rtp webrtc_api_packet_socket_factory webrtc_api_rtc_error webrtc_api_rtc_event_log_output_file webrtc_api_rtc_stats_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_audio_audio_mixer_api webrtc_api_audio_codecs_audio_codecs_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_audio_codecs_opus_audio_decoder_factory webrtc_api_audio_codecs_opus_audio_encoder_factory webrtc_api_audio_codecs_L16_audio_decoder_L16 webrtc_api_audio_codecs_L16_audio_encoder_L16 webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_frame_encryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_task_queue webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_transport_webrtc_key_value_config webrtc_api_transport_rtp_rtp_source webrtc_api_units_time_delta webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_adaptation_resource_adaptation_test_utilities webrtc_logging_fake_rtc_event_log webrtc_media_rtc_audio_video webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_media_engine_defaults webrtc_media_rtc_media_tests_utils webrtc_modules_audio_device_audio_device_api webrtc_modules_audio_processing_audio_processing webrtc_modules_audio_processing_api webrtc_modules_audio_processing_audio_processing_statistics webrtc_modules_audio_processing_audioproc_test_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_utility_utility webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_server_utils webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_audio_rtp_receiver webrtc_pc_audio_track webrtc_pc_dtmf_sender webrtc_pc_integration_test_helpers webrtc_pc_jitter_buffer_delay webrtc_pc_libjingle_peerconnection webrtc_pc_media_stream webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_proxy webrtc_pc_remote_audio_source webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_rtp_transceiver webrtc_pc_session_description webrtc_pc_usage_pattern webrtc_pc_video_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_pc_video_track_source webrtc_pc_scenario_tests_pc_scenario_tests webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_ip_address webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_json webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_conversions webrtc_rtc_base_socket_address webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_test_audio_codec_mocks webrtc_test_field_trial webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_main webrtc_test_test_support webrtc_test_pc_sctp_fake_sctp_transport )
+
+# pc:proxy
+add_library(webrtc_pc_proxy ${WEBRTC_ROOT}/pc/proxy.cc)
+target_include_directories(webrtc_pc_proxy PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_proxy PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_proxy PRIVATE -GR-)
+target_link_libraries(webrtc_pc_proxy PUBLIC webrtc_api_scoped_refptr webrtc_api_task_queue_task_queue webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_rtc_export )
# pc:remote_audio_source
add_library(webrtc_pc_remote_audio_source ${WEBRTC_ROOT}/pc/remote_audio_source.cc)
@@ -3241,32 +3505,32 @@ target_link_libraries(webrtc_pc_rtc_pc INTERFACE libsrtp webrtc_media_rtc_audio_
target_include_directories(webrtc_pc_rtc_pc INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# pc:rtc_pc_base
-add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/session_description.cc ${WEBRTC_ROOT}/pc/simulcast_description.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc)
+add_library(webrtc_pc_rtc_pc_base ${WEBRTC_ROOT}/pc/channel.cc ${WEBRTC_ROOT}/pc/channel_manager.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport.cc ${WEBRTC_ROOT}/pc/dtls_transport.cc ${WEBRTC_ROOT}/pc/external_hmac.cc ${WEBRTC_ROOT}/pc/ice_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport.cc ${WEBRTC_ROOT}/pc/jsep_transport_collection.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller.cc ${WEBRTC_ROOT}/pc/media_session.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter.cc ${WEBRTC_ROOT}/pc/rtp_media_utils.cc ${WEBRTC_ROOT}/pc/rtp_transport.cc ${WEBRTC_ROOT}/pc/sctp_data_channel_transport.cc ${WEBRTC_ROOT}/pc/sctp_transport.cc ${WEBRTC_ROOT}/pc/sctp_utils.cc ${WEBRTC_ROOT}/pc/srtp_filter.cc ${WEBRTC_ROOT}/pc/srtp_session.cc ${WEBRTC_ROOT}/pc/srtp_transport.cc ${WEBRTC_ROOT}/pc/transport_stats.cc ${WEBRTC_ROOT}/pc/video_track_source_proxy.cc)
target_include_directories(webrtc_pc_rtc_pc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_base PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtc_pc_base PRIVATE -GR- -Ithird_party/libsrtp/include)
-target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
+target_link_libraries(webrtc_pc_rtc_pc_base PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional libsrtp webrtc_api_array_view webrtc_api_async_dns_resolver webrtc_api_audio_options_api webrtc_api_call_api webrtc_api_function_view webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_packet_socket_factory webrtc_api_priority webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_transport_datagram_transport_interface webrtc_api_transport_enums webrtc_api_transport_sctp_transport_factory_interface webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_common_video_common_video webrtc_logging_ice_log webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_config webrtc_media_rtc_sdp_video_format_utils webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_proxy webrtc_pc_session_description webrtc_pc_simulcast_description webrtc_rtc_base_rtc_base webrtc_rtc_base_callback_list webrtc_rtc_base_checks webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics )
# pc:rtc_pc_unittests
android_add_executable(TARGET webrtc_pc_rtc_pc_unittests NODISTRIBUTE SRC ${WEBRTC_ROOT}/pc/channel_manager_unittest.cc ${WEBRTC_ROOT}/pc/channel_unittest.cc ${WEBRTC_ROOT}/pc/dtls_srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/dtls_transport_unittest.cc ${WEBRTC_ROOT}/pc/ice_transport_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_controller_unittest.cc ${WEBRTC_ROOT}/pc/jsep_transport_unittest.cc ${WEBRTC_ROOT}/pc/media_session_unittest.cc ${WEBRTC_ROOT}/pc/rtcp_mux_filter_unittest.cc ${WEBRTC_ROOT}/pc/rtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/sctp_transport_unittest.cc ${WEBRTC_ROOT}/pc/session_description_unittest.cc ${WEBRTC_ROOT}/pc/srtp_filter_unittest.cc ${WEBRTC_ROOT}/pc/srtp_session_unittest.cc ${WEBRTC_ROOT}/pc/srtp_transport_unittest.cc ${WEBRTC_ROOT}/pc/test/rtp_transport_test_util.h ${WEBRTC_ROOT}/pc/test/srtp_test_util.h ${WEBRTC_ROOT}/pc/used_ids_unittest.cc ${WEBRTC_ROOT}/pc/video_rtp_receiver_unittest.cc)
target_include_directories(webrtc_pc_rtc_pc_unittests PRIVATE ${WEBRTC_ROOT}/pc/../third_party/libsrtp/srtp ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtc_pc_unittests PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_pc_rtc_pc_unittests PRIVATE -GR-)
-target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support strmiids::strmiids)
+target_link_libraries(webrtc_pc_rtc_pc_unittests PUBLIC absl::algorithm_container absl::memory absl::strings webrtc_api_array_view webrtc_api_audio_options_api webrtc_api_ice_transport_factory webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_headers webrtc_api_rtp_parameters webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_test_mock_recordable_encoded_frame webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_media_rtc_data_sctp_transport_internal webrtc_media_rtc_media_base webrtc_media_rtc_media_tests_utils webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_fake_ice_transport webrtc_p2p_fake_port_allocator webrtc_p2p_p2p_test_utils webrtc_p2p_rtc_p2p webrtc_pc_libjingle_peerconnection webrtc_pc_pc_test_utils webrtc_pc_peerconnection webrtc_pc_rtc_pc webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_pc_video_rtp_receiver webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_gunit_helpers webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_metrics webrtc_test_field_trial webrtc_test_test_main webrtc_test_test_support strmiids::strmiids)
# pc:rtp_parameters_conversion
add_library(webrtc_pc_rtp_parameters_conversion ${WEBRTC_ROOT}/pc/rtp_parameters_conversion.cc)
target_include_directories(webrtc_pc_rtp_parameters_conversion PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_parameters_conversion PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_parameters_conversion PRIVATE -GR-)
-target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
+target_link_libraries(webrtc_pc_rtp_parameters_conversion PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_checks )
# pc:rtp_receiver
add_library(webrtc_pc_rtp_receiver ${WEBRTC_ROOT}/pc/rtp_receiver.cc)
target_include_directories(webrtc_pc_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_receiver PRIVATE -GR-)
-target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_crypto_frame_decryptor_interface webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_video_track_source webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
# pc:rtp_sender
add_library(webrtc_pc_rtp_sender ${WEBRTC_ROOT}/pc/rtp_sender.cc)
@@ -3280,7 +3544,7 @@ add_library(webrtc_pc_rtp_transceiver ${WEBRTC_ROOT}/pc/rtp_transceiver.cc)
target_include_directories(webrtc_pc_rtp_transceiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_rtp_transceiver PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_rtp_transceiver PRIVATE -GR-)
-target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_pc_rtp_transceiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_array_view webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_error webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_media_rtc_media_base webrtc_pc_proxy webrtc_pc_rtc_pc_base webrtc_pc_rtp_parameters_conversion webrtc_pc_rtp_receiver webrtc_pc_rtp_sender webrtc_pc_session_description webrtc_rtc_base_checks webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_refcount webrtc_rtc_base_threading webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# pc:rtp_transmission_manager
add_library(webrtc_pc_rtp_transmission_manager ${WEBRTC_ROOT}/pc/rtp_transmission_manager.cc)
@@ -3301,6 +3565,20 @@ add_library(webrtc_pc_sdp_state_provider INTERFACE)
target_link_libraries(webrtc_pc_sdp_state_provider INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_pc_rtc_pc_base )
target_include_directories(webrtc_pc_sdp_state_provider INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# pc:session_description
+add_library(webrtc_pc_session_description ${WEBRTC_ROOT}/pc/session_description.cc)
+target_include_directories(webrtc_pc_session_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_session_description PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_session_description PRIVATE -GR-)
+target_link_libraries(webrtc_pc_session_description PUBLIC absl::algorithm_container absl::memory webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_rtp_transceiver_direction webrtc_media_rtc_media_base webrtc_p2p_rtc_p2p webrtc_pc_media_protocol_names webrtc_pc_simulcast_description webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
+# pc:simulcast_description
+add_library(webrtc_pc_simulcast_description ${WEBRTC_ROOT}/pc/simulcast_description.cc)
+target_include_directories(webrtc_pc_simulcast_description PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+target_compile_definitions(webrtc_pc_simulcast_description PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
+target_compile_options(webrtc_pc_simulcast_description PRIVATE -GR-)
+target_link_libraries(webrtc_pc_simulcast_description PUBLIC webrtc_rtc_base_checks webrtc_rtc_base_socket_address webrtc_rtc_base_system_rtc_export )
+
# pc:stats_collector_interface
add_library(webrtc_pc_stats_collector_interface INTERFACE)
target_link_libraries(webrtc_pc_stats_collector_interface INTERFACE webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface )
@@ -3325,7 +3603,7 @@ add_library(webrtc_pc_video_rtp_receiver ${WEBRTC_ROOT}/pc/video_rtp_receiver.cc
target_include_directories(webrtc_pc_video_rtp_receiver PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_pc_video_rtp_receiver PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_pc_video_rtp_receiver PRIVATE -GR-)
-target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_jitter_buffer_delay_interface webrtc_pc_jitter_buffer_delay_proxy webrtc_pc_media_stream webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading )
+target_link_libraries(webrtc_pc_video_rtp_receiver PUBLIC absl::algorithm_container absl::strings absl::optional webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_media_stream_interface webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_crypto_frame_decryptor_interface webrtc_api_transport_rtp_rtp_source webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_frame webrtc_media_rtc_media_base webrtc_pc_jitter_buffer_delay webrtc_pc_media_stream webrtc_pc_rtc_pc_base webrtc_pc_rtp_receiver webrtc_pc_video_rtp_track_source webrtc_pc_video_track webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_threading webrtc_rtc_base_system_no_unique_address )
# pc:video_rtp_track_source
add_library(webrtc_pc_video_rtp_track_source ${WEBRTC_ROOT}/pc/video_rtp_track_source.cc)
@@ -3695,7 +3973,7 @@ add_library(webrtc_rtc_base_platform_thread ${WEBRTC_ROOT}/rtc_base/platform_thr
target_include_directories(webrtc_rtc_base_platform_thread PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_platform_thread PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_platform_thread PRIVATE -GR-)
-target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::strings webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
+target_link_libraries(webrtc_rtc_base_platform_thread PUBLIC absl::memory absl::strings absl::optional webrtc_api_sequence_checker webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_event webrtc_rtc_base_timeutils )
# rtc_base:platform_thread.headers
add_library(webrtc_rtc_base_platform_thread.headers INTERFACE)
@@ -3714,6 +3992,15 @@ add_library(webrtc_rtc_base_platform_thread_types.headers INTERFACE)
target_link_libraries(webrtc_rtc_base_platform_thread_types.headers INTERFACE webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_platform_thread_types.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+# rtc_base:protobuf_utils
+add_library(webrtc_rtc_base_protobuf_utils INTERFACE)
+target_link_libraries(webrtc_rtc_base_protobuf_utils INTERFACE libprotobuf )
+target_include_directories(webrtc_rtc_base_protobuf_utils INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
+# rtc_base:protobuf_utils.headers
+add_library(webrtc_rtc_base_protobuf_utils.headers INTERFACE)
+target_include_directories(webrtc_rtc_base_protobuf_utils.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
+
# rtc_base:rate_limiter
add_library(webrtc_rtc_base_rate_limiter ${WEBRTC_ROOT}/rtc_base/rate_limiter.cc)
target_include_directories(webrtc_rtc_base_rate_limiter PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
@@ -3723,12 +4010,12 @@ target_link_libraries(webrtc_rtc_base_rate_limiter PUBLIC absl::optional webrtc_
# rtc_base:refcount
add_library(webrtc_rtc_base_refcount INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_rtc_base_macromagic )
+target_link_libraries(webrtc_rtc_base_refcount INTERFACE webrtc_api_scoped_refptr webrtc_rtc_base_macromagic )
target_include_directories(webrtc_rtc_base_refcount INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:refcount.headers
add_library(webrtc_rtc_base_refcount.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_rtc_base_macromagic.headers )
+target_link_libraries(webrtc_rtc_base_refcount.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_rtc_base_macromagic.headers )
target_include_directories(webrtc_rtc_base_refcount.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base
@@ -3736,11 +4023,11 @@ add_library(webrtc_rtc_base_rtc_base ${WEBRTC_ROOT}/rtc_base/win32_socket_server
target_include_directories(webrtc_rtc_base_rtc_base PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_rtc_base PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_rtc_base PRIVATE -GR-)
-target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC webrtc_rtc_base_win32 absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
+target_link_libraries(webrtc_rtc_base_rtc_base PUBLIC webrtc_rtc_base_win32 absl::algorithm_container absl::flat_hash_map absl::memory absl::strings absl::optional jsoncpp ssl webrtc_api_array_view webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_numerics_numerics webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_async_socket webrtc_rtc_base_checks webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket webrtc_rtc_base_socket_address webrtc_rtc_base_socket_factory webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_network_sent_packet webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_file_wrapper webrtc_rtc_base_system_inline webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_base64_base64 webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_field_trial )
# rtc_base:rtc_base.headers
add_library(webrtc_rtc_base_rtc_base.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_rtc_base_win32.headers webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
+target_link_libraries(webrtc_rtc_base_rtc_base.headers INTERFACE webrtc_rtc_base_win32.headers webrtc_api_array_view.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_numerics_numerics.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_async_socket.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_numerics.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_factory.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_stringutils.headers webrtc_rtc_base_threading.headers webrtc_rtc_base_network_sent_packet.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_file_wrapper.headers webrtc_rtc_base_system_inline.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_repeating_task.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_base64_base64.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers webrtc_system_wrappers_field_trial.headers )
target_include_directories(webrtc_rtc_base_rtc_base.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base:rtc_base_approved
@@ -3824,7 +4111,7 @@ add_library(webrtc_rtc_base_rtc_task_queue_win ${WEBRTC_ROOT}/rtc_base/task_queu
target_include_directories(webrtc_rtc_base_rtc_task_queue_win PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_rtc_task_queue_win PRIVATE RTC_ENABLE_VP9 WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_WIN)
target_compile_options(webrtc_rtc_base_rtc_task_queue_win PRIVATE -GR-)
-target_link_libraries(webrtc_rtc_base_rtc_task_queue_win PUBLIC absl::strings webrtc_api_task_queue_task_queue webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread webrtc_rtc_base_rtc_event webrtc_rtc_base_safe_conversions webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex )
+target_link_libraries(webrtc_rtc_base_rtc_task_queue_win PUBLIC absl::strings absl::optional webrtc_api_task_queue_task_queue webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_platform_thread webrtc_rtc_base_rtc_event webrtc_rtc_base_safe_conversions webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex )
# rtc_base:safe_compare
add_library(webrtc_rtc_base_safe_compare INTERFACE)
@@ -4060,11 +4347,11 @@ add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag ${WEBRTC_ROOT}/r
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_pending_task_safety_flag PRIVATE -GR-)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_refcount webrtc_rtc_base_system_no_unique_address )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag PUBLIC webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_rtc_base_checks webrtc_rtc_base_system_no_unique_address )
# rtc_base/task_utils:pending_task_safety_flag.headers
add_library(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_refcount.headers webrtc_rtc_base_system_no_unique_address.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_system_no_unique_address.headers )
target_include_directories(webrtc_rtc_base_task_utils_pending_task_safety_flag.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:repeating_task
@@ -4072,11 +4359,11 @@ add_library(webrtc_rtc_base_task_utils_repeating_task ${WEBRTC_ROOT}/rtc_base/ta
target_include_directories(webrtc_rtc_base_task_utils_repeating_task PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_task_utils_repeating_task PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_task_utils_repeating_task PRIVATE -GR-)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task PUBLIC absl::memory webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_rtc_base_logging webrtc_rtc_base_timeutils webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers )
# rtc_base/task_utils:repeating_task.headers
add_library(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
+target_link_libraries(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_api_units_time_delta.headers webrtc_api_units_timestamp.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_system_wrappers_system_wrappers.headers )
target_include_directories(webrtc_rtc_base_task_utils_repeating_task.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/task_utils:to_queued_task
@@ -4116,11 +4403,11 @@ add_library(webrtc_rtc_base_threading ${WEBRTC_ROOT}/rtc_base/async_resolver.cc
target_include_directories(webrtc_rtc_base_threading PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_rtc_base_threading PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_rtc_base_threading PRIVATE -GR-)
-target_link_libraries(webrtc_rtc_base_threading PUBLIC webrtc_rtc_base_win32 absl::algorithm_container webrtc_api_function_view webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
+target_link_libraries(webrtc_rtc_base_threading PUBLIC webrtc_rtc_base_win32 absl::algorithm_container webrtc_api_function_view webrtc_api_refcountedbase webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_task_queue_task_queue webrtc_rtc_base_async_resolver_interface webrtc_rtc_base_atomicops webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_ip_address webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_network_constants webrtc_rtc_base_null_socket_server webrtc_rtc_base_platform_thread_types webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_event webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_timeutils webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_rtc_export webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot )
# rtc_base:threading.headers
add_library(webrtc_rtc_base_threading.headers INTERFACE)
-target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_rtc_base_win32.headers webrtc_api_function_view.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
+target_link_libraries(webrtc_rtc_base_threading.headers INTERFACE webrtc_rtc_base_win32.headers webrtc_api_function_view.headers webrtc_api_refcountedbase.headers webrtc_api_scoped_refptr.headers webrtc_api_sequence_checker.headers webrtc_api_task_queue_task_queue.headers webrtc_rtc_base_async_resolver_interface.headers webrtc_rtc_base_atomicops.headers webrtc_rtc_base_checks.headers webrtc_rtc_base_criticalsection.headers webrtc_rtc_base_ip_address.headers webrtc_rtc_base_logging.headers webrtc_rtc_base_macromagic.headers webrtc_rtc_base_network_constants.headers webrtc_rtc_base_null_socket_server.headers webrtc_rtc_base_platform_thread_types.headers webrtc_rtc_base_rtc_base_approved.headers webrtc_rtc_base_rtc_event.headers webrtc_rtc_base_rtc_task_queue.headers webrtc_rtc_base_socket_address.headers webrtc_rtc_base_socket_server.headers webrtc_rtc_base_timeutils.headers webrtc_rtc_base_synchronization_mutex.headers webrtc_rtc_base_system_no_unique_address.headers webrtc_rtc_base_system_rtc_export.headers webrtc_rtc_base_task_utils_pending_task_safety_flag.headers webrtc_rtc_base_task_utils_to_queued_task.headers webrtc_rtc_base_third_party_sigslot_sigslot.headers )
target_include_directories(webrtc_rtc_base_threading.headers INTERFACE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
# rtc_base/time:timestamp_extrapolator
@@ -4320,7 +4607,7 @@ add_library(webrtc_test_network_emulated_network ${WEBRTC_ROOT}/test/network/cro
target_include_directories(webrtc_test_network_emulated_network PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_network_emulated_network PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_test_network_emulated_network PRIVATE -GR-)
-target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_third_party_sigslot_sigslot webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_network_emulated_network PUBLIC absl::algorithm_container absl::memory absl::optional webrtc_api_array_view webrtc_api_network_emulation_manager_api webrtc_api_packet_socket_factory webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_simulated_network_api webrtc_api_time_controller webrtc_api_numerics_numerics webrtc_api_test_network_emulation_network_emulation webrtc_api_transport_stun_types webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_call_simulated_network webrtc_p2p_p2p_server_utils webrtc_rtc_base_rtc_base webrtc_rtc_base_async_socket webrtc_rtc_base_ip_address webrtc_rtc_base_network_constants webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_socket_server webrtc_rtc_base_stringutils webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_system_wrappers_system_wrappers webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller )
# test/pc/sctp:fake_sctp_transport
add_library(webrtc_test_pc_sctp_fake_sctp_transport INTERFACE)
@@ -4332,7 +4619,7 @@ add_library(webrtc_test_peer_scenario_peer_scenario ${WEBRTC_ROOT}/test/peer_sce
target_include_directories(webrtc_test_peer_scenario_peer_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_peer_scenario_peer_scenario PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_test_peer_scenario_peer_scenario PRIVATE -GR-)
-target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
+target_link_libraries(webrtc_test_peer_scenario_peer_scenario PUBLIC absl::flags absl::memory webrtc_api_create_time_controller webrtc_api_libjingle_peerconnection_api webrtc_api_network_emulation_manager_api webrtc_api_rtc_stats_api webrtc_api_time_controller webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_task_queue_default_task_queue_factory webrtc_api_transport_field_trial_based_config webrtc_api_video_codecs_builtin_video_decoder_factory webrtc_api_video_codecs_builtin_video_encoder_factory webrtc_media_rtc_audio_video webrtc_media_rtc_media_base webrtc_modules_audio_device_audio_device_impl webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_p2p_rtc_p2p webrtc_pc_pc_test_utils webrtc_pc_rtc_pc_base webrtc_pc_session_description webrtc_rtc_base_rtc_base webrtc_rtc_base_null_socket_server webrtc_rtc_base_stringutils webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_scenario webrtc_test_time_controller_time_controller )
# test:perf_test
add_library(webrtc_test_perf_test ${WEBRTC_ROOT}/test/testsupport/perf_result_reporter.cc ${WEBRTC_ROOT}/test/testsupport/perf_test.cc ${WEBRTC_ROOT}/test/testsupport/perf_test_histogram_writer.cc)
@@ -4358,7 +4645,7 @@ add_library(webrtc_test_rtp_test_utils ${WEBRTC_ROOT}/test/rtcp_packet_parser.cc
target_include_directories(webrtc_test_rtp_test_utils PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_rtp_test_utils PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_test_rtp_test_utils PRIVATE -GR-)
-target_link_libraries(webrtc_test_rtp_test_utils PUBLIC webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
+target_link_libraries(webrtc_test_rtp_test_utils PUBLIC absl::optional webrtc_api_array_view webrtc_api_rtp_parameters webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_rtc_base_checks webrtc_rtc_base_criticalsection webrtc_rtc_base_logging webrtc_rtc_base_macromagic webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_arch )
# test/scenario:column_printer
add_library(webrtc_test_scenario_column_printer ${WEBRTC_ROOT}/test/scenario/column_printer.cc)
@@ -4372,7 +4659,7 @@ add_library(webrtc_test_scenario_scenario ${WEBRTC_ROOT}/test/scenario/audio_str
target_include_directories(webrtc_test_scenario_scenario PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_test_scenario_scenario PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN HAVE_WEBRTC_VIDEO WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1 WEBRTC_UNIT_TEST)
target_compile_options(webrtc_test_scenario_scenario PRIVATE -GR-)
-target_link_libraries(webrtc_test_scenario_scenario PUBLIC absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto_bridge webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
+target_link_libraries(webrtc_test_scenario_scenario PUBLIC absl::flags absl::flags_parse absl::memory absl::strings absl::optional webrtc_api_create_frame_generator webrtc_api_fec_controller_api webrtc_api_frame_generator_api webrtc_api_libjingle_peerconnection_api webrtc_api_rtc_event_log_output_file webrtc_api_rtp_parameters webrtc_api_sequence_checker webrtc_api_time_controller webrtc_api_transport_api webrtc_api_audio_codecs_builtin_audio_decoder_factory webrtc_api_audio_codecs_builtin_audio_encoder_factory webrtc_api_rtc_event_log_rtc_event_log webrtc_api_rtc_event_log_rtc_event_log_factory webrtc_api_test_video_function_video_factory webrtc_api_transport_network_control webrtc_api_units_data_rate webrtc_api_units_data_size webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_builtin_video_bitrate_allocator_factory webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_codecs_video_codecs_api webrtc_audio_audio webrtc_call_call webrtc_call_call_interfaces webrtc_call_rtp_sender webrtc_call_simulated_network webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_audio_video webrtc_media_rtc_internal_video_codecs webrtc_media_rtc_media_base webrtc_modules_audio_coding_ana_config_proto webrtc_modules_audio_device_audio_device webrtc_modules_audio_device_audio_device_impl webrtc_modules_audio_device_mock_audio_device webrtc_modules_audio_mixer_audio_mixer_impl webrtc_modules_audio_processing_audio_processing webrtc_modules_congestion_controller_goog_cc_test_goog_cc_printer webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_mock_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_coding_webrtc_h264 webrtc_modules_video_coding_webrtc_multiplex webrtc_modules_video_coding_webrtc_vp8 webrtc_modules_video_coding_webrtc_vp9 webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_base_tests_utils webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_stats_counters webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_safe_minmax webrtc_rtc_base_socket_address webrtc_rtc_base_task_queue_for_test webrtc_rtc_base_threading webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_task_utils_repeating_task webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_test_fake_video_codecs webrtc_test_fileutils webrtc_test_rtp_test_utils webrtc_test_test_common webrtc_test_test_support webrtc_test_video_test_common webrtc_test_logging_log_writer webrtc_test_network_emulated_network webrtc_test_scenario_column_printer webrtc_test_time_controller_time_controller webrtc_video_video )
# test:test_common
add_library(webrtc_test_test_common ${WEBRTC_ROOT}/test/call_test.cc ${WEBRTC_ROOT}/test/drifting_clock.cc ${WEBRTC_ROOT}/test/layer_filtering_transport.cc ${WEBRTC_ROOT}/test/run_loop.cc)
@@ -4433,7 +4720,7 @@ add_library(webrtc_video_video ${WEBRTC_ROOT}/video/buffered_frame_decryptor.cc
target_include_directories(webrtc_video_video PRIVATE ${WEBRTC_ROOT} ${CMAKE_CURRENT_BINARY_DIR})
target_compile_definitions(webrtc_video_video PRIVATE RTC_ENABLE_VP9 WEBRTC_WIN WEBRTC_ABSL_MUTEX WEBRTC_ENABLE_AVX2 WEBRTC_ENABLE_PROTOBUF=1 WEBRTC_HAVE_SCTP WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1)
target_compile_options(webrtc_video_video PRIVATE -GR-)
-target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
+target_link_libraries(webrtc_video_video PUBLIC absl::algorithm_container absl::core_headers absl::memory absl::strings absl::optional webrtc_api_array_view webrtc_api_fec_controller_api webrtc_api_frame_transformer_interface webrtc_api_libjingle_peerconnection_api webrtc_api_rtp_parameters webrtc_api_scoped_refptr webrtc_api_sequence_checker webrtc_api_transport_api webrtc_api_crypto_frame_decryptor_interface webrtc_api_crypto_options webrtc_api_rtc_event_log_rtc_event_log webrtc_api_task_queue_task_queue webrtc_api_units_time_delta webrtc_api_units_timestamp webrtc_api_video_encoded_image webrtc_api_video_recordable_encoded_frame webrtc_api_video_video_bitrate_allocation webrtc_api_video_video_bitrate_allocator webrtc_api_video_video_codec_constants webrtc_api_video_video_frame webrtc_api_video_video_rtp_headers webrtc_api_video_video_stream_encoder webrtc_api_video_codecs_video_codecs_api webrtc_call_bitrate_allocator webrtc_call_call_interfaces webrtc_call_rtp_interfaces webrtc_call_rtp_receiver webrtc_call_rtp_sender webrtc_call_video_stream_api webrtc_common_video_common_video webrtc_media_rtc_media_base webrtc_modules_module_api webrtc_modules_module_api_public webrtc_modules_pacing_pacing webrtc_modules_remote_bitrate_estimator_remote_bitrate_estimator webrtc_modules_rtp_rtcp_rtp_rtcp webrtc_modules_rtp_rtcp_rtp_rtcp_format webrtc_modules_rtp_rtcp_rtp_video_header webrtc_modules_utility_utility webrtc_modules_video_coding_video_coding webrtc_modules_video_coding_codec_globals_headers webrtc_modules_video_coding_nack_module webrtc_modules_video_coding_video_codec_interface webrtc_modules_video_coding_video_coding_utility webrtc_modules_video_processing_video_processing webrtc_rtc_base_rtc_base webrtc_rtc_base_checks webrtc_rtc_base_rate_limiter webrtc_rtc_base_rtc_base_approved webrtc_rtc_base_rtc_numerics webrtc_rtc_base_rtc_task_queue webrtc_rtc_base_stringutils webrtc_rtc_base_threading webrtc_rtc_base_weak_ptr webrtc_rtc_base_experiments_alr_experiment webrtc_rtc_base_experiments_field_trial_parser webrtc_rtc_base_experiments_keyframe_interval_settings_experiment webrtc_rtc_base_experiments_min_video_bitrate_experiment webrtc_rtc_base_experiments_quality_scaling_experiment webrtc_rtc_base_experiments_rate_control_settings webrtc_rtc_base_synchronization_mutex webrtc_rtc_base_system_no_unique_address webrtc_rtc_base_system_thread_registry webrtc_rtc_base_task_utils_pending_task_safety_flag webrtc_rtc_base_task_utils_repeating_task webrtc_rtc_base_task_utils_to_queued_task webrtc_rtc_base_time_timestamp_extrapolator webrtc_system_wrappers_system_wrappers webrtc_system_wrappers_field_trial webrtc_system_wrappers_metrics webrtc_video_frame_dumping_decoder webrtc_video_video_stream_encoder_impl webrtc_video_adaptation_video_adaptation )
# video:video_stream_encoder_impl
add_library(webrtc_video_video_stream_encoder_impl ${WEBRTC_ROOT}/video/alignment_adjuster.cc ${WEBRTC_ROOT}/video/encoder_bitrate_adjuster.cc ${WEBRTC_ROOT}/video/encoder_overshoot_detector.cc ${WEBRTC_ROOT}/video/frame_encode_metadata_writer.cc ${WEBRTC_ROOT}/video/video_source_sink_controller.cc ${WEBRTC_ROOT}/video/video_stream_encoder.cc)